package com.ytktt.study.week12.task4.kafka;

import com.ytktt.study.week12.task4.Order;
import com.ytktt.study.week12.task4.api.ConsumerService;
import com.ytktt.study.week12.task4.api.ProducerService;
import com.ytktt.study.week12.task4.kafka.constant.KafkaConstant;
import com.ytktt.study.week12.task4.kafka.consumer.DefaultKafkaConsumerErrorHandler;
import com.ytktt.study.week12.task4.kafka.consumer.KafkaConsumerServiceImpl;
import com.ytktt.study.week12.task4.kafka.consumer.KafkaSeqConsumerInterceptor;
import com.ytktt.study.week12.task4.kafka.producer.KafkaProducerServiceImpl;
import com.ytktt.study.week12.task4.kafka.producer.KafkaSeqProducerInterceptor;
import com.ytktt.study.week12.task4.properties.MQProperties;
import com.ytktt.study.week12.task4.redis.RedisOperator;
import com.ytktt.study.week12.task4.util.CurDateStringUtil;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.StringRedisTemplate;
import org.springframework.kafka.config.TopicBuilder;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConsumerAwareListenerErrorHandler;

import java.util.Map;

/**
 * Kafka configuration: declares the topic plus the producer/consumer services,
 * factories and error handler used when the configured MQ type is Kafka.
 *
 * <p>Each bean is guarded by a {@code @ConditionalOnProperty} switch under
 * {@link MQProperties#PREFIX} so producer and consumer sides can be enabled
 * independently.
 *
 * @author ytk
 */
@Configuration
public class KafkaConfiguration {

    /**
     * Declares the Kafka topic with the configured name, partition count and
     * replica count. Created when {@code type=kafka} (also the default when the
     * property is absent, via {@code matchIfMissing = true}).
     *
     * @param mqProperties MQ settings holding the Kafka topic configuration
     * @return the topic definition registered with the Kafka admin client
     */
    @Bean
    @ConditionalOnProperty(prefix = MQProperties.PREFIX, name = "type", havingValue = "kafka", matchIfMissing = true)
    public NewTopic kafkaTopic(MQProperties mqProperties) {
        MQProperties.FinancialKafkaProperties kafkaProperties = mqProperties.getKafka();
        return TopicBuilder.name(kafkaProperties.getTopic()).partitions(kafkaProperties.getPartition())
                .replicas(kafkaProperties.getReplicas()).build();
    }

    /**
     * Producer-side service facade over the {@link KafkaTemplate}.
     * Only created when {@code kafka.producer.enable=true}.
     *
     * @param template     template used to publish {@link Order} messages
     * @param mqProperties MQ settings (topic name etc.) passed to the implementation
     * @return the Kafka-backed {@link ProducerService}
     */
    @Bean
    @ConditionalOnProperty(prefix = MQProperties.PREFIX, name = "kafka.producer.enable", havingValue = "true")
    public ProducerService kafkaProducerService(KafkaTemplate<String, Order> template, MQProperties mqProperties) {
        return new KafkaProducerServiceImpl(template, mqProperties);
    }

    /**
     * Consumer-side service facade. Only created when {@code kafka.consumer.enable=true}.
     *
     * @param redisOperator Redis helper used by the consumer implementation
     * @return the Kafka-backed {@link ConsumerService}
     */
    @Bean
    @ConditionalOnProperty(prefix = MQProperties.PREFIX, name = "kafka.consumer.enable", havingValue = "true")
    public ConsumerService kafkaConsumerService(RedisOperator redisOperator) {
        return new KafkaConsumerServiceImpl(redisOperator);
    }

    /**
     * Replaces Spring Boot's default producer factory so the sequencing
     * interceptor can be registered and handed its collaborators.
     *
     * <p>NOTE(review): the Kafka client instantiates interceptor classes
     * reflectively, so the live {@code CurDateStringUtil} and
     * {@code RedisTemplate} beans are smuggled to the interceptor through the
     * config map — presumably read back in the interceptor's
     * {@code configure(Map)} callback; verify against
     * {@link KafkaSeqProducerInterceptor}. The Kafka client will log
     * "unused config" style warnings for these non-standard keys.
     *
     * @param properties        Spring Boot Kafka properties used as the base config
     * @param curDateStringUtil date-string helper exposed to the interceptor
     * @param redisTemplate     Redis template exposed to the interceptor
     * @return a {@link DefaultKafkaProducerFactory} with the interceptor and
     *         (if configured) a transaction id prefix applied
     */
    @Bean
    @ConditionalOnProperty(prefix = MQProperties.PREFIX, name = "kafka.producer.enable", havingValue = "true")
    public ProducerFactory<?, ?> kafkaProducerFactory(KafkaProperties properties, CurDateStringUtil curDateStringUtil,
                                                      RedisTemplate<String, Object> redisTemplate) {
        Map<String, Object> producerProperties = properties.buildProducerProperties();
        producerProperties.put(ProducerConfig.INTERCEPTOR_CLASSES_CONFIG, KafkaSeqProducerInterceptor.class.getName());
        producerProperties.put(KafkaConstant.CUR_DATE_STRING_UTIL_CONFIG, curDateStringUtil);
        producerProperties.put(KafkaConstant.REDIS_TEMPLATE_CONFIG, redisTemplate);
        DefaultKafkaProducerFactory<?, ?> factory = new DefaultKafkaProducerFactory<>(producerProperties);
        // Mirror Spring Boot's auto-configuration: honour an explicitly
        // configured transaction id prefix, otherwise leave transactions off.
        String transactionIdPrefix = properties.getProducer().getTransactionIdPrefix();
        if (transactionIdPrefix != null) {
            factory.setTransactionIdPrefix(transactionIdPrefix);
        }
        return factory;
    }

    /**
     * Replaces Spring Boot's default consumer factory so the sequencing
     * interceptor can be registered and handed the Redis templates it needs
     * (same config-map hand-off trick as the producer factory — see
     * {@link KafkaSeqConsumerInterceptor}).
     *
     * @param properties          Spring Boot Kafka properties used as the base config
     * @param redisTemplate       object Redis template exposed to the interceptor
     * @param stringRedisTemplate string Redis template exposed to the interceptor
     * @return a {@link DefaultKafkaConsumerFactory} with the interceptor applied
     */
    @Bean
    @ConditionalOnProperty(prefix = MQProperties.PREFIX, name = "kafka.consumer.enable", havingValue = "true")
    public ConsumerFactory<?, ?> kafkaConsumerFactory(KafkaProperties properties,
                                                      RedisTemplate<String, Object> redisTemplate,
                                                      StringRedisTemplate stringRedisTemplate) {
        Map<String, Object> consumerProperties = properties.buildConsumerProperties();
        consumerProperties.put(ConsumerConfig.INTERCEPTOR_CLASSES_CONFIG, KafkaSeqConsumerInterceptor.class.getName());
        consumerProperties.put(KafkaConstant.REDIS_TEMPLATE_CONFIG, redisTemplate);
        consumerProperties.put(KafkaConstant.REDIS_STRING_TEMPLATE_CONFIG, stringRedisTemplate);
        return new DefaultKafkaConsumerFactory<>(consumerProperties);
    }

    /**
     * Error handler referenced by name from {@code @KafkaListener} declarations.
     * Only created when {@code kafka.consumer.enable=true}.
     *
     * @return the listener error handler implementation
     */
    @Bean(name = "defaultKafkaConsumerErrorHandler")
    @ConditionalOnProperty(prefix = MQProperties.PREFIX, name = "kafka.consumer.enable", havingValue = "true")
    public ConsumerAwareListenerErrorHandler defaultKafkaConsumerErrorHandler() {
        return new DefaultKafkaConsumerErrorHandler();
    }
}
