package clients.consumer;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * Kafka consumer demo #1: subscribes to the "fish" topic and prints every record it polls.
 *
 * @author kerry dong
 * @date 2022/4/23
 */
public class KafkaConsumer1Debug {

	public static void main(String[] args) {
		Properties properties = new Properties();
		// Broker address. Use the CONSUMER constant here — the original used
		// ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, which only worked because the
		// two constants happen to share the same string value.
		properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
		// Key deserializer
		properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
		// Value deserializer
		properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
		// Consumer group id
		properties.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group1");
		// Offsets are committed manually via commitAsync() below; disable the
		// default auto-commit so the two commit strategies don't run in parallel.
		properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);

		// try-with-resources: close() leaves the consumer group cleanly if
		// poll() ever throws (e.g. WakeupException, deserialization error).
		try (KafkaConsumer<String, String> kafkaConsumer = new KafkaConsumer<>(properties)) {
			// Subscribe to the topic(s) to consume
			List<String> topics = new ArrayList<>();
			topics.add("fish");
			kafkaConsumer.subscribe(topics);

			// Poll loop: fetch up to 1 s worth of records per iteration
			while (true) {
				ConsumerRecords<String, String> consumerRecords = kafkaConsumer.poll(Duration.ofSeconds(1));
				for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
					// Simulated business logic — begin
					System.out.println(consumerRecord);
					// Simulated business logic — end
				}
				// Commit the offsets of the records just processed
				kafkaConsumer.commitAsync();
			}
		}
	}
}
