大数据-Kafka-数据录制到本地文件
数据录制
即将Kafka内数据录制一部分到文件中供查看
POM文件
<dependencies>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.8.0</version>
</dependency>
</dependencies>
Java代码
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
/**
 * Records messages from a Kafka topic into a local text file, one message
 * value per line, so a sample of the stream can be inspected offline.
 *
 * <p>Runs until the process is killed; the file is flushed after every poll
 * so partial output is visible while the consumer is still running.
 */
public class KafkaConsumerToFile {
    private static final String TOPIC_NAME = "my-topic"; // topic to subscribe to
    private static final String FILE_PATH = "kafka.txt"; // output file path

    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // Kafka cluster address/port
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group"); // consumer group id
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); // start from the earliest offset when no committed offset exists
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); // key deserializer
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer"); // value deserializer

        // try-with-resources closes BOTH the consumer and the writer on any exit
        // path; the original leaked the FileWriter (only the consumer was closed).
        // Explicit UTF-8 avoids depending on the platform default charset.
        try (Consumer<String, String> consumer = new KafkaConsumer<>(props);
             Writer writer = Files.newBufferedWriter(Paths.get(FILE_PATH), StandardCharsets.UTF_8)) {
            consumer.subscribe(Collections.singletonList(TOPIC_NAME)); // subscribe to the topic
            while (true) {
                // poll(Duration) replaces poll(long), which is deprecated since kafka-clients 2.0
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    writer.write(record.value() + "\n"); // one message value per line
                }
                writer.flush(); // make this batch visible on disk before the next poll
            }
        } catch (IOException e) {
            // NOTE(review): demo-style handling; a real service would log and exit non-zero
            e.printStackTrace();
        }
    }
}
THE END