package com.oct.ywsc.core.config;

import com.alibaba.druid.pool.DruidDataSource;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.*;

import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.kafka.listener.ContainerProperties;

import javax.sql.DataSource;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;


@Import({KafkaConsumerConfig.class, KafkaProducerConfig.class})
@Configuration
public class ApplicationBeanConfig {

    @Autowired
    private KafkaConsumerConfig kafkaConsumerConfig;
    @Autowired
    private KafkaProducerConfig kafkaProducerConfig;

    /**
     * Registers a Druid {@link DataSource} bound to the {@code spring.datasource.*}
     * properties. Only created when {@code spring.datasource.type} explicitly selects
     * {@code com.alibaba.druid.pool.DruidDataSource}.
     *
     * @return a property-bound Druid data source
     */
    @Bean
    @ConfigurationProperties(prefix = "spring.datasource")
    @ConditionalOnProperty(name = "spring.datasource.type", havingValue = "com.alibaba.druid.pool.DruidDataSource")
    public DataSource druid(){
        return new DruidDataSource();
    }

    /**
     * Builds the Kafka consumer configuration map (bean name {@code customerProps})
     * from {@link KafkaConsumerConfig}. Keys and values are deserialized as plain
     * strings via {@link StringDeserializer}.
     *
     * @return mutable map of {@link ConsumerConfig} properties, including optional
     *         SASL/SSL settings when security is enabled
     */
    @Bean("customerProps")
    public Map<String, Object> customerProps() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaConsumerConfig.getBootstrapServers());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, this.kafkaConsumerConfig.getGroupId());
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, this.kafkaConsumerConfig.getAutoCommitInterval());
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, this.kafkaConsumerConfig.getAutoOffsetReset());
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, this.kafkaConsumerConfig.getEnableAutoCommit());

        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, this.kafkaConsumerConfig.getMaxPollRecords());
        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, this.kafkaConsumerConfig.getMaxPollIntervalMs());
        props.put(ConsumerConfig.HEARTBEAT_INTERVAL_MS_CONFIG, this.kafkaConsumerConfig.getHeartbeatInterval());
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        // Kafka authentication (SASL) and transport encryption (SSL).
        // BUG FIX: this branch was previously gated on kafkaProducerConfig.getEnableSecurity()
        // even though every property inside comes from the CONSUMER config; the consumer's
        // own flag must decide whether its security settings apply.
        if (this.kafkaConsumerConfig.getEnableSecurity()) {
            props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, this.kafkaConsumerConfig.getSecurityProtocol());
            props.put(SaslConfigs.SASL_MECHANISM, this.kafkaConsumerConfig.getSaslMechanism());
            props.put(SaslConfigs.SASL_JAAS_CONFIG, this.kafkaConsumerConfig.getSaslJaasConfig());

            // Truststore location is environment-specific (a filesystem path); adjust per deployment.
            props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, this.kafkaConsumerConfig.getSslTruststoreLocation());
            props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, this.kafkaConsumerConfig.getSslTruststorePassword());
            props.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, this.kafkaConsumerConfig.getSslEndpointIdentificationAlgorithm());
        }

        return props;
    }

    /**
     * Builds a {@link KafkaTemplate} backed by a {@link DefaultKafkaProducerFactory}
     * configured from {@link KafkaProducerConfig}.
     *
     * <p>The underlying producer is thread-safe; one instance per process is normally
     * sufficient.
     *
     * @return a ready-to-use string/string Kafka template
     * @throws IllegalStateException if a configured serializer class is not on the classpath
     */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate(){

        Map<String, Object> props = new HashMap<>();
        // Broker endpoint(s); obtain the correct access point from the console/ops.
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaProducerConfig.getBootstrapServers());

        // Fail fast at startup if the configured serializer classes cannot be loaded,
        // instead of failing later on the first send().
        try {
            Class.forName(this.kafkaProducerConfig.getKeySerializer());
            Class.forName(this.kafkaProducerConfig.getValueSerializer());
        } catch (ClassNotFoundException e) {
            throw new IllegalStateException("Configured Kafka serializer class not found on classpath", e);
        }
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, this.kafkaProducerConfig.getKeySerializer());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, this.kafkaProducerConfig.getValueSerializer());

        // Maximum time send()/partitionsFor() may block (e.g. while fetching metadata).
        props.put(ProducerConfig.MAX_BLOCK_MS_CONFIG, 30 * 1000);
        props.put(ProducerConfig.RETRIES_CONFIG, this.kafkaProducerConfig.getRetries());
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, this.kafkaProducerConfig.getBatchSize());
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, this.kafkaProducerConfig.getBufferMemory());

        // Kafka authentication (SASL) and transport encryption (SSL) for the producer.
        if (this.kafkaProducerConfig.getEnableSecurity()) {
            props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, this.kafkaProducerConfig.getSecurityProtocol());
            props.put(SaslConfigs.SASL_MECHANISM, this.kafkaProducerConfig.getSaslMechanism());
            props.put(SaslConfigs.SASL_JAAS_CONFIG, this.kafkaProducerConfig.getSaslJaasConfig());

            // Truststore location is environment-specific (a filesystem path); adjust per deployment.
            props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, this.kafkaProducerConfig.getSslTruststoreLocation());
            props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, this.kafkaProducerConfig.getSslTruststorePassword());
            props.put(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, this.kafkaProducerConfig.getSslEndpointIdentificationAlgorithm());
        }

        ProducerFactory<String, String> producerFactory = new DefaultKafkaProducerFactory<>(props);

        return new KafkaTemplate<>(producerFactory);
    }

}
