package com.example.springbootkafka.consumer;

import ch.qos.logback.classic.Level;
import ch.qos.logback.classic.LoggerContext;
import com.example.springbootkafka.bean.User;
import com.example.springbootkafka.ext.UserDeserializable;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.serialization.IntegerDeserializer;
import org.apache.kafka.common.serialization.IntegerSerializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;

import java.time.Duration;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;

/**
 * @ClassName DeSerializableConsumer
 * @Description Demonstrates consuming Kafka records deserialized with a custom value deserializer.
 * @author ying.xiao
 * @Date 2021/7/13
 * @Version 1.0.0
 */
@Component
public class DeSerializableConsumer {

    /** SLF4J logger for this class. */
    private static final Logger logger = LoggerFactory.getLogger(DeSerializableConsumer.class);

    static {
        // Raise all configured logback loggers to INFO so the Kafka client's
        // verbose DEBUG output does not drown the demo's own log lines.
        LoggerContext loggerContext = (LoggerContext) LoggerFactory.getILoggerFactory();
        List<ch.qos.logback.classic.Logger> loggerList = loggerContext.getLoggerList();
        // Lambda parameter renamed from "logger" to avoid shadowing the static logger field.
        loggerList.forEach(lb -> lb.setLevel(Level.INFO));
    }

    /**
     * Entry point: subscribes to {@code serial_topic} and logs each record forever.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        Map<String, Object> config = new HashMap<>();
        // Bootstrap server address; multiple entries are comma-separated.
        config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "8.142.4.185:9092");
        // Consumer group id.
        config.put(ConsumerConfig.GROUP_ID_CONFIG, "serial_consumer_01");
        // Deserializers must mirror the producer's serializers.
        config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class);
        config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, UserDeserializable.class);
        config.put(ConsumerConfig.CLIENT_ID_CONFIG, "con1");
        config.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        config.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        // FIX: the value type must be User — UserDeserializable produces User, not String.
        // Declaring KafkaConsumer<Integer, String> compiled (erasure) but threw
        // ClassCastException at runtime when record.value() was read.
        try (KafkaConsumer<Integer, User> kafkaConsumer = new KafkaConsumer<>(config)) {
            kafkaConsumer.subscribe(Collections.singleton("serial_topic"));
            while (true) {
                ConsumerRecords<Integer, User> records = kafkaConsumer.poll(Duration.ofSeconds(3));
                records.forEach(record ->
                        logger.info("topic:{} key:{} value:{}", record.topic(), record.key(), record.value()));
            }
        } catch (Exception e) {
            // FIX: pass the throwable so the full stack trace is logged,
            // instead of collapsing it to e.toString().
            logger.error("Kafka consume loop failed", e);
        }
    }

}
