package cn.zwx.canal.tools.config;

import cn.zwx.canal.tools.utils.KafKaRegisterConsumerFactory;
import cn.zwx.canal.tools.utils.KafKaRegisterProducerFactory;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.support.serializer.JsonDeserializer;

import java.util.Map;

/**
 * Kafka infrastructure configuration: builds the producer factory, the
 * {@link KafkaTemplate} used for publishing, the consumer factory, and the
 * listener-container factory used by {@code @KafkaListener} methods.
 *
 * <p>All base settings come from Spring Boot's auto-configured
 * {@link KafkaProperties} (bound from {@code spring.kafka.*}); this class only
 * layers the project-specific factory wrappers on top.
 *
 * @author zhangwenxue
 * @version 1.0
 * @since 2021/1/12
 */
@Configuration
public class KafKaConfig {

    /** Auto-configured by Spring Boot from the {@code spring.kafka.*} properties. */
    @Autowired
    KafkaProperties kafkaProperties;

    /**
     * Producer factory seeded from {@code spring.kafka.producer.*}, with the
     * bootstrap servers and key/value serializers set explicitly.
     *
     * @return the project-specific producer factory wrapper
     */
    @Bean
    public KafKaRegisterProducerFactory kafKaRegisterProducerFactory(){
        Map<String,Object> producerProperties = kafkaProperties.buildProducerProperties();
        producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,kafkaProperties.getBootstrapServers());
        producerProperties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, kafkaProperties.getProducer().getKeySerializer());
        producerProperties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, kafkaProperties.getProducer().getValueSerializer());
        return new KafKaRegisterProducerFactory(producerProperties);
    }

    /**
     * Template for publishing messages through the registered producer factory.
     *
     * @param kafKaRegisterProducerFactory the producer factory bean defined above
     * @return a {@link KafkaTemplate} backed by that factory
     */
    @Bean("registerTemplate")
    public KafkaTemplate registerTemplate(@Autowired KafKaRegisterProducerFactory kafKaRegisterProducerFactory){
        KafkaTemplate template =  new KafkaTemplate<>(kafKaRegisterProducerFactory);
        return template;
    }

    /**
     * Consumer factory seeded from {@code spring.kafka.consumer.*}, with the
     * bootstrap servers, deserializers, group id and offset/commit behaviour
     * set explicitly.
     *
     * @return the project-specific consumer factory wrapper
     */
    @Bean
    public KafKaRegisterConsumerFactory kafKaRegisterConsumerFactory(){
        // FIX: was buildProducerProperties() — that seeded the consumer with
        // producer settings, silently dropping every spring.kafka.consumer.*
        // key not re-put below and leaking producer-only keys into the consumer.
        Map<String,Object>  consumerProperties = kafkaProperties.buildConsumerProperties();
        consumerProperties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG,kafkaProperties.getBootstrapServers());
        consumerProperties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, kafkaProperties.getConsumer().getKeyDeserializer());

        consumerProperties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, kafkaProperties.getConsumer().getValueDeserializer());
        consumerProperties.put(ConsumerConfig.GROUP_ID_CONFIG,kafkaProperties.getConsumer().getGroupId());
        // NOTE(review): the listener factory below uses AckMode.MANUAL, which
        // requires enable.auto.commit=false; this just copies whatever is
        // configured — confirm spring.kafka.consumer.enable-auto-commit=false.
        consumerProperties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,kafkaProperties.getConsumer().getEnableAutoCommit());
        consumerProperties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG,kafkaProperties.getConsumer().getAutoCommitInterval());
        /*
        auto.offset.reset semantics:
        earliest: if a committed offset exists for a partition, resume from it; otherwise consume from the beginning
        latest:   if a committed offset exists for a partition, resume from it; otherwise consume only newly produced records
        none:     resume from committed offsets only; if ANY partition lacks a committed offset, throw an exception
        */
        consumerProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,kafkaProperties.getConsumer().getAutoOffsetReset());
        return new KafKaRegisterConsumerFactory(consumerProperties);
    }

    /**
     * Listener-container factory for the registered consumer factory.
     * Acknowledgement mode is MANUAL, so listener methods must commit offsets
     * themselves via the injected {@code Acknowledgment}.
     *
     * @param kafKaRegisterConsumerFactory the consumer factory bean defined above
     * @return the container factory used by {@code @KafkaListener} endpoints
     */
    @Bean
    public KafkaListenerContainerFactory registerKafkaListenerContainerFactory(@Autowired KafKaRegisterConsumerFactory kafKaRegisterConsumerFactory){
        ConcurrentKafkaListenerContainerFactory conFactory = new ConcurrentKafkaListenerContainerFactory<>();
        conFactory.setConsumerFactory(kafKaRegisterConsumerFactory);
        // Offsets are committed manually by the listener after processing.
        conFactory.getContainerProperties().setAckMode(ContainerProperties.AckMode.MANUAL);
        return conFactory;
    }

}
