package com.niit.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

/**
 * Demo Kafka consumer: connects to a broker, joins consumer group "T1",
 * subscribes to topic "BD11", and prints every record's topic/offset/key/value
 * forever, sleeping one second per record.
 */
public class KafkaConsumerTest4 {

    public static void main(String[] args) throws InterruptedException {
        // 1. Build the Kafka consumer configuration.
        Properties props = new Properties();
        // 1.1 Bootstrap broker address used to discover the cluster.
        props.put("bootstrap.servers", "node1:9092");
        // 1.2 Consumer group id: consumers sharing the same group id form one
        // group and cooperatively split the topic's partitions among themselves.
        props.put("group.id", "T1");
        // Commit offsets automatically in the background.
        props.put("enable.auto.commit", "true");
        // Interval (ms) between automatic offset commits.
        props.put("auto.commit.interval.ms", "1000");
        // Uncomment to read from the earliest offset when the group has no
        // committed position yet (i.e. consume historical data).
        //props.put("auto.offset.reset","earliest");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        // 2. Create the consumer in try-with-resources so its sockets/buffers
        // are released even if poll() or subscribe() throws.
        // (Fixes a resource leak: the original never closed the consumer.)
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {

            // 3. Subscribe to the topic to consume.
            consumer.subscribe(Arrays.asList("BD11"));

            // 4. Poll in an endless loop; each poll returns a batch of records.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                // 4.1 Print each record's topic, offset, key and value.
                for (ConsumerRecord<String, String> record : records) {
                    String topic = record.topic();
                    long offset = record.offset();
                    String key = record.key();
                    String value = record.value();
                    System.out.println("主题："+topic + ",偏移量："+offset +",键："+key +",值："+value);

                    // Demo pacing: throttle to roughly one record per second.
                    Thread.sleep(1000);
                }
            }
        }
    }
}
