package cn.atguigu.comsumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Properties;

/**
 * @author JiDingXia
 * @date 2025-02-26 02:59:46
 */
public class CustomConsumer {
	/**
	 * Minimal Kafka consumer example: subscribes to the {@code first} topic and
	 * prints every polled record (value, topic, partition, offset) to stdout.
	 * Runs until the process is killed.
	 *
	 * @param args unused
	 */
	public static void main(String[] args) {
		Properties properties = new Properties();
		// In production, list several brokers so bootstrapping survives a single broker outage.
		properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "hadoop102:9092,hadoop103:9092,hadoop104:9092");
		// Key/value deserializers are mandatory consumer settings.
		properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
		properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
		// Consumer group id is mandatory when using subscribe().
		properties.put(ConsumerConfig.GROUP_ID_CONFIG, "test");
		// try-with-resources: KafkaConsumer implements Closeable, so the consumer
		// releases its network resources and leaves the group cleanly even if
		// poll() or record handling throws.
		try (KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties)) {
			// Subscribe to the single topic "first" (an immutable one-element list
			// is sufficient; subscribe() only reads the collection).
			kafkaConsumer.subscribe(Collections.singletonList("first"));
			// Poll loop: block up to 1s per poll, then print each record received.
			while (true) {
				ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1L));
				for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
					System.out.println("消息:" + consumerRecord.value() + ", 主题:" + consumerRecord.topic()
							+ ", 分区:" + consumerRecord.partition() + ", 偏移量:" + consumerRecord.offset());
				}
			}
		}
	}
}
