package com.gome.boot.config.mq;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.*;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;

import java.util.HashMap;
import java.util.Map;

/**
 * KafkaMQ基础配置.
 *
 * @author baoxiufeng
 */
/**
 * Base Kafka MQ configuration.
 *
 * <p>Provides factory methods for building {@link KafkaTemplate} producer templates and
 * {@link KafkaListenerContainerFactory} consumer container factories, with connection and
 * tuning values resolved from the Spring {@link Environment}. Subclasses may register
 * additional content-type to (de)serializer mappings via
 * {@link #registerSerializer(Class, Class)} and {@link #registerDeserializer(Class, Class)}.
 *
 * @author baoxiufeng
 */
public abstract class BaseKafkaConfig {
    // Property keys looked up in the Spring Environment.
    private static final String KEY_KAFKA_BOOTSTRAP_SERVERS = "kafka.bootstrap.servers";
    private static final String KEY_KAFKA_LISTENER_CONCURRENCY = "kafka.listener.concurrency";
    private static final String KEY_KAFKA_PRODUCER_BATCH_SIZE = "kafka.producer.batch_size";
    private static final String KEY_KAFKA_PRODUCER_BUFFER_MEMORY = "kafka.producer.buffer.memory";
    private static final String KEY_KAFKA_CONSUMER_DEFAULT_GROUP = "kafka.consumer.default_group";
    private static final String KEY_KAFKA_CONSUMER_ENABLE_AUTO_COMMIT = "kafka.consumer.enable.auto.commit";
    private static final String KEY_KAFKA_CONSUMER_AUTO_COMMIT_INTERVAL = "kafka.consumer.auto.commit.interval.ms";
    private static final String KEY_KAFKA_CONSUMER_SESSION_TIMEOUT = "kafka.consumer.session.timeout.ms";
    private static final String KEY_KAFKA_CONSUMER_AUTO_OFFSET_RESET = "kafka.consumer.auto.offset.reset";

    // Registries for user-supplied (de)serializer mappings. NOTE: these are consulted
    // only AFTER the built-in type branches in findSerializerType/findDeserializerType,
    // so a registration for String/Integer/Long/Double/Bytes cannot override the defaults.
    private static final Map<Class, Class> CACHE_SERIALIZER_CLASS_MAP = new HashMap<Class, Class>(0);
    private static final Map<Class, Class> CACHE_DESERIALIZER_CLASS_MAP = new HashMap<Class, Class>(0);

    @Autowired
    protected Environment env;

    /**
     * Creates a Kafka producer template using the bootstrap servers configured under
     * {@code kafka.bootstrap.servers} (required).
     *
     * @param keyClazz content type of the message key
     * @param valueClazz content type of the message value
     * @param <K> key type
     * @param <V> value type
     * @return a new {@link KafkaTemplate}
     */
    protected <K, V> KafkaTemplate<K, V> createKafkaTemplate(Class<K> keyClazz, Class<V> valueClazz) {
        return createKafkaTemplate(env.getRequiredProperty(KEY_KAFKA_BOOTSTRAP_SERVERS), keyClazz, valueClazz);
    }

    /**
     * Creates a Kafka producer template for an explicit cluster address.
     *
     * @param bootstrapServers Kafka cluster address list
     * @param keyClazz content type of the message key
     * @param valueClazz content type of the message value
     * @param <K> key type
     * @param <V> value type
     * @return a new {@link KafkaTemplate}
     */
    protected <K, V> KafkaTemplate<K, V> createKafkaTemplate(String bootstrapServers, Class<K> keyClazz, Class<V> valueClazz) {
        ProducerFactory<K, V> factory = producerFactory(bootstrapServers, keyClazz, valueClazz);
        return new KafkaTemplate<K, V>(factory);
    }

    /**
     * Creates a concurrent Kafka listener container factory using the bootstrap servers
     * configured under {@code kafka.bootstrap.servers} (required).
     *
     * @param keyClazz content type of the message key
     * @param valueClazz content type of the message value
     * @param <K> key type
     * @param <V> value type
     * @return a new listener container factory
     */
    protected <K, V> KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<K, V>> createKafkaListenerContainerFactory(Class<K> keyClazz, Class<V> valueClazz) {
        return createKafkaListenerContainerFactory(env.getRequiredProperty(KEY_KAFKA_BOOTSTRAP_SERVERS), keyClazz, valueClazz);
    }

    /**
     * Creates a concurrent Kafka listener container factory for an explicit cluster address.
     * Concurrency defaults to 4 (override via {@code kafka.listener.concurrency});
     * the poll timeout is fixed at 4000 ms.
     *
     * @param bootstrapServers Kafka cluster address list
     * @param keyClazz content type of the message key
     * @param valueClazz content type of the message value
     * @param <K> key type
     * @param <V> value type
     * @return a new listener container factory
     */
    protected <K, V> KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<K, V>> createKafkaListenerContainerFactory(String bootstrapServers, Class<K> keyClazz, Class<V> valueClazz) {
        ConcurrentKafkaListenerContainerFactory<K, V> factory = new ConcurrentKafkaListenerContainerFactory<K, V>();
        factory.setConsumerFactory(buildConsumerFactory(bootstrapServers, keyClazz, valueClazz));
        factory.setConcurrency(env.getProperty(KEY_KAFKA_LISTENER_CONCURRENCY, Integer.class, 4));
        factory.getContainerProperties().setPollTimeout(4000);
        return factory;
    }

    private <K, V> ProducerFactory<K, V> producerFactory(String bootstrapServers, Class<K> keyClazz, Class<V> valueClazz) {
        return new DefaultKafkaProducerFactory<K, V>(producerConfigs(bootstrapServers, keyClazz, valueClazz));
    }

    private <K, V> ConsumerFactory<K, V> buildConsumerFactory(String bootstrapServers, Class<K> keyClazz, Class<V> valueClazz) {
        return new DefaultKafkaConsumerFactory<K, V>(consumerConfigs(bootstrapServers, keyClazz, valueClazz));
    }

    /**
     * Builds the producer configuration map. Falls back to the
     * {@code kafka.bootstrap.servers} property when no address is given.
     */
    private Map<String, Object> producerConfigs(String bootstrapServers, Class keyClazz, Class valueClazz) {
        if (StringUtils.isEmpty(bootstrapServers)) {
            bootstrapServers = env.getRequiredProperty(KEY_KAFKA_BOOTSTRAP_SERVERS);
        }
        Map<String, Object> properties = new HashMap<String, Object>(6);
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        properties.put(ProducerConfig.BATCH_SIZE_CONFIG, env.getProperty(KEY_KAFKA_PRODUCER_BATCH_SIZE, Integer.class, 65536));
        properties.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, env.getProperty(KEY_KAFKA_PRODUCER_BUFFER_MEMORY, Integer.class, 524288));
        // May resolve to null for unregistered types; Kafka then rejects the config at
        // factory creation time.
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, findSerializerType(keyClazz));
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, findSerializerType(valueClazz));
        return properties;
    }

    /**
     * Builds the consumer configuration map. Falls back to the
     * {@code kafka.bootstrap.servers} property when no address is given; the consumer
     * group is taken from the required {@code kafka.consumer.default_group} property.
     */
    private Map<String, Object> consumerConfigs(String bootstrapServers, Class keyClazz, Class valueClazz) {
        if (StringUtils.isEmpty(bootstrapServers)) {
            bootstrapServers = env.getRequiredProperty(KEY_KAFKA_BOOTSTRAP_SERVERS);
        }
        Map<String, Object> properties = new HashMap<String, Object>(8);
        properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, env.getProperty(KEY_KAFKA_CONSUMER_ENABLE_AUTO_COMMIT, Boolean.class, false));
        properties.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, env.getProperty(KEY_KAFKA_CONSUMER_AUTO_COMMIT_INTERVAL, "100"));
        properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, env.getProperty(KEY_KAFKA_CONSUMER_SESSION_TIMEOUT, "15000"));
        properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, findDeserializerType(keyClazz));
        properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, findDeserializerType(valueClazz));
        properties.put(ConsumerConfig.GROUP_ID_CONFIG, env.getRequiredProperty(KEY_KAFKA_CONSUMER_DEFAULT_GROUP));
        properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, env.getProperty(KEY_KAFKA_CONSUMER_AUTO_OFFSET_RESET, "earliest"));
        return properties;
    }

    /**
     * Resolves the Kafka serializer class for a content type: built-in mappings first
     * (String/Integer/Long/Double/Bytes), then the custom registration cache.
     *
     * @return the serializer class, or {@code null} when the type is unknown
     */
    private <T> T findSerializerType(Class clazz) {
        if (clazz.isAssignableFrom(String.class)) {
            return (T) StringSerializer.class;
        } else if (clazz.isAssignableFrom(Integer.class)) {
            return (T) IntegerSerializer.class;
        } else if (clazz.isAssignableFrom(Long.class)) {
            // Bug fix: previously tested isAssignableFrom(LongSerializer.class), so a
            // Long content type never matched this branch and resolved to null.
            return (T) LongSerializer.class;
        } else if (clazz.isAssignableFrom(Double.class)) {
            return (T) DoubleSerializer.class;
        } else if (clazz.isAssignableFrom(Bytes.class)) {
            return (T) BytesSerializer.class;
        } else if (CACHE_SERIALIZER_CLASS_MAP.containsKey(clazz)) {
            return (T) CACHE_SERIALIZER_CLASS_MAP.get(clazz);
        }
        return null;
    }

    /**
     * Resolves the Kafka deserializer class for a content type: built-in mappings first
     * (String/Integer/Long/Double/Bytes), then the custom registration cache.
     *
     * @return the deserializer class, or {@code null} when the type is unknown
     */
    private <T> T findDeserializerType(Class clazz) {
        if (clazz.isAssignableFrom(String.class)) {
            return (T) StringDeserializer.class;
        } else if (clazz.isAssignableFrom(Integer.class)) {
            return (T) IntegerDeserializer.class;
        } else if (clazz.isAssignableFrom(Long.class)) {
            return (T) LongDeserializer.class;
        } else if (clazz.isAssignableFrom(Double.class)) {
            return (T) DoubleDeserializer.class;
        } else if (clazz.isAssignableFrom(Bytes.class)) {
            return (T) BytesDeserializer.class;
        } else if (CACHE_DESERIALIZER_CLASS_MAP.containsKey(clazz)) {
            return (T) CACHE_DESERIALIZER_CLASS_MAP.get(clazz);
        }
        return null;
    }

    /**
     * Registers a serializer for a custom content type.
     * <br/>Intended for scenarios that need a custom serialization format. Note that
     * built-in mappings (String/Integer/Long/Double/Bytes) take precedence and cannot
     * be overridden here.
     *
     * @param clazz content type to be serialized
     * @param serializerClazz serializer class to use for that type
     */
    protected static void registerSerializer(Class clazz, Class serializerClazz) {
        CACHE_SERIALIZER_CLASS_MAP.put(clazz, serializerClazz);
    }

    /**
     * Registers a deserializer for a custom content type.
     * <br/>Intended for scenarios that need a custom deserialization format. Note that
     * built-in mappings (String/Integer/Long/Double/Bytes) take precedence and cannot
     * be overridden here.
     *
     * @param clazz content type to be deserialized
     * @param deserializerClazz deserializer class to use for that type
     */
    protected static void registerDeserializer(Class clazz, Class deserializerClazz) {
        CACHE_DESERIALIZER_CLASS_MAP.put(clazz, deserializerClazz);
    }
}
