package com.shujia.kafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

/**
 * Demo: a simple Kafka consumer that subscribes to topic "t3" and prints
 * every record's key, value, timestamp and topic in an endless poll loop.
 */
public class Demo02KafkaConsumer {
    public static void main(String[] args) {

        Properties prop = new Properties();

        // NOTE(review): the original listed "node2:9092" twice and never mentioned
        // node1 — assumed node1 was intended; verify the actual broker hostnames.
        prop.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "master:9092,node1:9092,node2:9092");
        // Use ConsumerConfig constants for every key (the original mixed raw
        // string literals with constants) — typo-proof and consistent.
        prop.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        prop.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringDeserializer");
        /*
         * auto.offset.reset semantics:
         * earliest - like --from-beginning: if the partition has a committed
         *            offset, resume from it; otherwise consume from the start.
         * latest   - (default) if the partition has a committed offset, resume
         *            from it; otherwise consume only newly produced records.
         * none     - resume only when every partition of the topic has a
         *            committed offset; if any partition lacks one, throw.
         */
        prop.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // Consumer group id.
        prop.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "idea-consumer03");

        // try-with-resources guarantees the consumer is closed (and leaves the
        // group cleanly) if the loop ever exits, e.g. via an exception.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(prop)) {
            List<String> topics = new ArrayList<>();
            topics.add("t3");
            // Subscribe to a batch of topics (a regex Pattern is also supported).
            consumer.subscribe(topics);
            while (true) {
                // poll(Duration) replaces the deprecated poll(long); a non-zero
                // timeout avoids a CPU-burning busy loop while no data arrives.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.key() + "," + record.value() + ","
                            + record.timestamp() + "," + record.topic());
                }
            }
        }
    }
}
