package com.doit.day03;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.record.TimestampType;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Arrays;
import java.util.Properties;

public class ConsumerInterceptorTest {

    /**
     * Demo entry point that verifies the configured consumer interceptor
     * ({@code com.doit.day03.MyConsumerInterceptor}) is invoked: subscribes to
     * the {@code event-log} topic and prints every record value received.
     *
     * <p>Runs forever; stop with Ctrl-C.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Properties props = new Properties();

        // --- Required settings ---
        props.setProperty(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,
                "linux01:9092,linux02:9092,linux03:9092");
        // Key and value deserializers
        props.setProperty(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.setProperty(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // Consumer group id
        props.setProperty(ConsumerConfig.GROUP_ID_CONFIG, "test");

        // Use ConsumerConfig constants rather than raw string keys — typo-safe
        // and consistent with the settings above.
        props.setProperty(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, "com.doit.day03.MyConsumerInterceptor");
        props.setProperty(ConsumerConfig.ALLOW_AUTO_CREATE_TOPICS_CONFIG, "true");
        props.setProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        // NOTE: with auto-commit enabled, data may be lost if the process dies
        // after an offset commit but before the fetched records are processed.
        props.setProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");

        // try-with-resources: KafkaConsumer is AutoCloseable, so its network
        // resources are released if the poll loop exits via an exception.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // Subscribe to the topic (subscription variant #1: by topic name).
            consumer.subscribe(Arrays.asList("event-log"));

            while (true) {
                // Block (effectively indefinitely) until the broker returns records.
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(Integer.MAX_VALUE));
                // TODO: check whether the interceptor took effect on these records.
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println(record.value());
                }
            }
        }
    }
}
