package com.snopy.mq.kafka;

import com.snopy.mq.kafka.intercept.ConsumerByInterceptor;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

/**
 * @author snopy
 * @version 1.0.0
 * @classname InterceptConsumer
 * @description
 * @email 77912204@qq.com
 * @date 2022/06/09 17:43
 */
@Slf4j
public class InterceptConsumer {
    /**
     * Demo consumer that attaches {@link ConsumerByInterceptor} and logs each record's value.
     * Runs until the process is killed.
     *
     * @param args unused
     */
    public static void main(String[] args) {
        // 1. Consumer configuration
        Properties props = new Properties();
        // 1.1 Broker address
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.164.101:9092");

        // 1.2 Key and value deserializers
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // 1.3 Register the consumer interceptor
        props.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, ConsumerByInterceptor.class.getName());
        // 1.4 Consumer group id
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "start-test");
        props.put(ConsumerConfig.RETRY_BACKOFF_MS_CONFIG, 50000);

        // 2. Build the consumer; try-with-resources guarantees close() runs on exit,
        //    committing offsets and leaving the group cleanly.
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // 3. Subscribe to the topic
            consumer.subscribe(Collections.singleton("test"));
            // 4. Poll INSIDE the loop. The original code polled once before the loop,
            //    which replayed the same (possibly empty) batch forever and never
            //    fetched new messages. poll() also blocks up to the timeout, making
            //    the explicit sleep between iterations unnecessary.
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
                records.forEach(record -> log.info("--------value:{}--------", record.value()));
            }
        }
    }
}
