package com.young.springboot.kafka;

import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serializer;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.context.EnvironmentAware;
import org.springframework.core.env.Environment;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.AfterRollbackProcessor;
import org.springframework.kafka.listener.ContainerProperties;
import org.springframework.kafka.listener.ContainerStoppingBatchErrorHandler;
import org.springframework.kafka.listener.ContainerStoppingErrorHandler;
import org.springframework.kafka.listener.adapter.RecordFilterStrategy;
import org.springframework.kafka.transaction.KafkaTransactionManager;
import org.springframework.retry.RetryPolicy;
import org.springframework.retry.backoff.ExponentialBackOffPolicy;
import org.springframework.retry.policy.SimpleRetryPolicy;
import org.springframework.retry.support.RetryTemplate;
import org.springframework.util.StringUtils;

import java.util.List;

/**
 * Programmatic Kafka client configuration: builds consumer/producer factories,
 * a {@link KafkaTemplate}, a {@link KafkaTransactionManager} and a listener
 * container factory from a {@link KafkaClientProperties} instance.
 *
 * <p>Configure (de)serializers and an optional global record filter via the
 * setter-style methods before invoking the factory methods. When transactions
 * are enabled and no transaction-id prefix is configured, one is derived in
 * {@link #afterPropertiesSet()} from the project/application name and local IP.
 *
 * @param <K> record key type
 * @param <V> record value type
 * @author yangyahui01
 * @date 12/5/24 2:43 PM
 */
public class KafkaClientConfiguration<K, V> implements InitializingBean, EnvironmentAware {

    private final KafkaClientProperties properties;

    private Environment environment;

    private Deserializer<K> keyDeserializer;

    private Deserializer<V> valueDeserializer;

    private Serializer<K> keySerializer;

    private Serializer<V> valueSerializer;

    // Global record filter applied by the listener container factory.
    // For a per-listener filter, set it on the individual listener instead.
    private RecordFilterStrategy<K, V> recordFilterStrategy;

    public KafkaClientConfiguration(KafkaClientProperties clientProperties) {
        this.properties = clientProperties;
    }

    /** Sets the deserializer used for record keys on the consumer side. */
    public void keyDeserializer(Deserializer<K> keyDeserializer) {
        this.keyDeserializer = keyDeserializer;
    }

    /** Sets the deserializer used for record values on the consumer side. */
    public void valueDeserializer(Deserializer<V> valueDeserializer) {
        this.valueDeserializer = valueDeserializer;
    }

    /** Sets the serializer used for record keys on the producer side. */
    public void keySerializer(Serializer<K> keySerializer) {
        this.keySerializer = keySerializer;
    }

    /** Sets the serializer used for record values on the producer side. */
    public void valueSerializer(Serializer<V> valueSerializer) {
        this.valueSerializer = valueSerializer;
    }

    /** Sets the global record filter strategy applied to all listeners built by this configuration. */
    public void recordFilterStrategy(RecordFilterStrategy<K, V> filterStrategy) {
        this.recordFilterStrategy = filterStrategy;
    }

    /**
     * Validates mutually exclusive settings and, when transactions are enabled
     * without an explicit transaction-id prefix, derives one of the form
     * {@code "{project}-{localIp}-"}.
     *
     * @throws IllegalStateException if auto-commit and transactions are both enabled
     */
    @Override
    public void afterPropertiesSet() throws Exception {
        boolean autoCommit = this.properties.getConsumer().isEnableAutoCommit();
        boolean transactional = this.properties.isEnableTransaction();
        // Kafka transactions require manual offset management, so the two flags conflict.
        if (autoCommit && transactional) {
            throw new IllegalStateException(
                    "'enable.auto.commit' and 'enable-transaction' cannot be 'true' at the same time!");
        }
        if (!transactional) {
            return;
        }
        // Transactions enabled: honor an explicitly configured prefix, if any.
        String transactionIdPrefix = properties.getProducer().getTransactionIdPrefix();
        if (transactionIdPrefix != null) {
            return;
        }
        // Otherwise derive a prefix from the project (or Spring application) name
        // plus the local IP, so each deployment gets a distinct transactional id.
        StringBuilder sb = new StringBuilder();
        if (environment != null) {
            String project = environment.getProperty("project");
            // Equivalent to the deprecated StringUtils.isEmpty(String): null or "".
            if (project == null || project.isEmpty()) {
                project = environment.getProperty("spring.application.name", "");
            }
            sb.append(project).append("-");
        }
        sb.append(Utils.localIp()).append("-");
        this.properties.getProducer().setTransactionIdPrefix(sb.toString());
    }

    @Override
    public void setEnvironment(Environment environment) {
        this.environment = environment;
    }

    /**
     * Builds a {@link KafkaTemplate} backed by a new producer factory, with the
     * default topic taken from the configured template properties.
     *
     * @return a configured Kafka template
     */
    public KafkaTemplate<K, V> kafkaTemplate() {
        KafkaTemplate<K, V> kafkaTemplate = new KafkaTemplate<>(kafkaProducerFactory());
        kafkaTemplate.setDefaultTopic(this.properties.getTemplate().getDefaultTopic());
        return kafkaTemplate;
    }

    /**
     * @return a consumer factory using the configured consumer properties and
     *         the key/value deserializers set on this configuration
     */
    public ConsumerFactory<K, V> kafkaConsumerFactory() {
        return new DefaultKafkaConsumerFactory<K, V>(
                this.properties.buildConsumerProperties(), keyDeserializer, valueDeserializer);
    }

    /**
     * Builds a producer factory from the configured producer properties and the
     * key/value serializers set on this configuration. When transactions are
     * enabled, the transaction-id prefix (possibly derived in
     * {@link #afterPropertiesSet()}) is applied to the factory.
     *
     * @return a configured producer factory
     */
    public ProducerFactory<K, V> kafkaProducerFactory() {
        DefaultKafkaProducerFactory<K, V> factory = new DefaultKafkaProducerFactory<>(
                this.properties.buildProducerProperties(), keySerializer, valueSerializer);

        // Transactions enabled: propagate the transaction-id prefix to the factory.
        if (this.properties.isEnableTransaction()) {
            String transactionIdPrefix = this.properties.getProducer().getTransactionIdPrefix();
            factory.setTransactionIdPrefix(transactionIdPrefix);
        }
        return factory;
    }

    /**
     * @param producerFactory the transactional producer factory to manage
     * @return a Kafka transaction manager bound to the given producer factory
     */
    public KafkaTransactionManager<K, V> kafkaTransactionManager(
            ProducerFactory<K, V> producerFactory) {
        return new KafkaTransactionManager<>(producerFactory);
    }

    /**
     * Creates the factory that builds and manages Kafka message listener
     * containers, applying ack mode, concurrency, batch mode, retry, the global
     * record filter and — when transactions are enabled — a transaction manager
     * plus container-stopping error handling.
     *
     * @return the configured listener container factory
     */
    public ConcurrentKafkaListenerContainerFactory<K, V> kafkaListenerContainerFactory() {
        KafkaClientProperties.Listener listener = this.properties.getListener();
        ConcurrentKafkaListenerContainerFactory<K, V> factory =
                new ScheduledConcurrentKafkaListenerContainerFactory<>(listener.isEnableAutoRestart());
        factory.setConsumerFactory(kafkaConsumerFactory());

        ContainerProperties containerProperties = factory.getContainerProperties();

        // Only override container defaults for properties the user actually set.
        if (listener.getAckMode() != null) {
            containerProperties.setAckMode(listener.getAckMode());
        }
        if (listener.getAckCount() != null) {
            containerProperties.setAckCount(listener.getAckCount());
        }
        if (listener.getAckTime() != null) {
            containerProperties.setAckTime(listener.getAckTime().toMillis());
        }
        if (listener.getPollTimeout() != null) {
            containerProperties.setPollTimeout(listener.getPollTimeout().toMillis());
        }
        if (listener.getConcurrency() != null) {
            factory.setConcurrency(listener.getConcurrency());
        }
        containerProperties.setShutdownTimeout(6000);
        boolean batchListener = listener.isBatchListener();
        factory.setBatchListener(batchListener);
        factory.setAckDiscarded(true);
        factory.setAutoStartup(true);

        // Transactions enabled: wire in a transaction manager and fail-fast handling.
        // NOTE(review): this creates a producer factory independent of the one used by
        // kafkaTemplate(); confirm whether a single shared factory is intended.
        if (this.properties.isEnableTransaction()) {
            containerProperties.setTransactionManager(kafkaTransactionManager(kafkaProducerFactory()));
            // On rollback, abort immediately rather than re-seek and retry.
            factory.setAfterRollbackProcessor(new AbortedAfterRollbackProcessor<>());
            // On listener error, stop the container outright.
            if (batchListener) {
                factory.setBatchErrorHandler(new ContainerStoppingBatchErrorHandler());
            } else {
                factory.setErrorHandler(new ContainerStoppingErrorHandler());
            }
        }
        factory.setRetryTemplate(retryTemplate());
        factory.setRecordFilterStrategy(recordFilterStrategy);

        return factory;
    }

    /**
     * Builds the retry template used by record (non-batch) listeners:
     * exponential backoff starting at 500 ms, capped at 12 s, up to 10 attempts.
     *
     * @return the retry template, or {@code null} in batch-listener mode
     *         (batch mode does not support retry; a null template disables it)
     */
    public RetryTemplate retryTemplate() {
        if (this.properties.getListener().isBatchListener()) {
            return null;
        }

        RetryTemplate retryTemplate = new RetryTemplate();
        ExponentialBackOffPolicy backOffPolicy = new ExponentialBackOffPolicy();
        backOffPolicy.setMaxInterval(12000);
        backOffPolicy.setInitialInterval(500);
        retryTemplate.setBackOffPolicy(backOffPolicy);

        RetryPolicy retryPolicy = new SimpleRetryPolicy(10); // at most 10 attempts
        retryTemplate.setRetryPolicy(retryPolicy);
        return retryTemplate;
    }


    /**
     * Rollback processor that aborts the listener container by rethrowing:
     * a rolled-back transaction is treated as unrecoverable and requires an
     * application restart.
     */
    public static class AbortedAfterRollbackProcessor<K, V> implements AfterRollbackProcessor<K, V> {

        @Override
        public void process(List<ConsumerRecord<K, V>> records, Consumer<K, V> consumer, Exception exception, boolean recoverable) {
            throw new RuntimeException("Transaction rollback,cant be recovered,ListenerContainer will be aborted,you should restart application!", exception);
        }
    }

}
