package start.spring.basic.dynamic;

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.bind.BindResult;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.boot.context.properties.source.ConfigurationPropertySource;
import org.springframework.boot.context.properties.source.ConfigurationPropertySources;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.EnvironmentAware;
import org.springframework.core.env.Environment;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Repository;

import lombok.extern.slf4j.Slf4j;
import start.spring.basic.exception.common.CommonException;
import start.spring.basic.util.common.StringUtil;

/**
 * ****************************************************************************
 * Dynamic Kafka producer initialization.
 *
 * <p>On context (re)initialization, reads the comma-separated
 * {@code spring.kafka.customnames} property, loads each named Kafka cluster's
 * connection settings from the {@code GG_PZ_KAFKA} table, and caches one
 * {@link KafkaTemplate} per {@code KAFKA_ID} for later lookup.
 *
 * @author xuyongyun
 * @date 2023-02-14
 ******************************************************************************
 */
@Slf4j
@RefreshScope
@Repository
public class DynamicKafkaRegister implements EnvironmentAware {

    /** Separator for the comma-delimited {@code spring.kafka.customnames} property. */
    private static final String SPLIT_VALUE = ",";

    /** DAO used to load Kafka connection settings from the SYSCONFIG datasource. */
    @Autowired
    private DynamicJdbc dao;

    /**
     * Cache of initialized templates keyed by KAFKA_ID.
     *
     * <p>ConcurrentHashMap: this map is (re)populated in {@link #setEnvironment},
     * which {@code @RefreshScope} may invoke again after startup, while other
     * beans read it concurrently via {@link #getKafkaTemplate(String)}.
     */
    private static final Map<String, KafkaTemplate<String, String>> kafkaTemplateMap = new ConcurrentHashMap<>();

    /**
     * Entry point: triggered by Spring injecting the {@link Environment}.
     *
     * @param environment the application environment holding {@code spring.kafka.*} properties
     */
    @Override
    public void setEnvironment(Environment environment) {
        log.info("DynamicKafkaRegister开始初始化Kafka配置！");
        initCustomKafka(environment);
    }

    /**
     * Initializes all configured Kafka templates.
     *
     * <p>Binds {@code spring.kafka} into a {@link Properties} object, then for each
     * name listed in {@code customnames} queries GG_PZ_KAFKA (prefix match on
     * KAFKA_ID) and builds/caches a template per matching row.
     *
     * @param env the Spring environment to bind properties from
     */
    private void initCustomKafka(Environment env) {

        // Bind spring.kafka.* into a Properties object; fall back to an empty
        // Properties when the section is absent so startup does not fail with
        // NoSuchElementException — the "not configured" branch below handles it.
        Iterable<ConfigurationPropertySource> sources = ConfigurationPropertySources.get(env);
        Binder binder = new Binder(sources);
        Properties properties = binder.bind("spring.kafka", Properties.class).orElse(new Properties());

        // customnames lists the Kafka cluster groups to initialize.
        String kafkaNames = properties.getProperty("customnames");
        if (!StringUtil.isEmpty(kafkaNames)) {
            String[] kafkaNameArray = kafkaNames.split(SPLIT_VALUE);
            for (String kafkaName : kafkaNameArray) {
                // Load the connection settings from the database; rows are matched
                // by KAFKA_ID prefix so one name can map to several clusters.
                String sql = "SELECT KAFKA_ID, BOOTSTRAP_SERVERS, PRODUCER_ACKS, PRODUCER_RETRIES, PRODUCER_BATCH_SIZE, PRODUCER_BUFFER_MEMORY, ZSDZ FROM GG_PZ_KAFKA WHERE KAFKA_ID LIKE ? ORDER BY KAFKA_ID ASC";
                String[] params = {kafkaName + "%"};
                List<Map<String, Object>> kafkaConfList = dao.query(DsConstant.SYSCONFIG, sql, params);
                if (kafkaConfList != null && !kafkaConfList.isEmpty()) {
                    for (Map<String, Object> kafkaConf : kafkaConfList) {
                        KafkaTemplate<String, String> kafkaTemplate = buildKafkaTemplate(kafkaConf);
                        kafkaTemplateMap.put(String.valueOf(kafkaConf.get("KAFKA_ID")), kafkaTemplate);
                        log.info("Kafka:{}初始化完毕！", kafkaConf.get("KAFKA_ID"));
                    }
                } else {
                    log.info("Kafka:配置未在数据库获取到！");
                }
            }
        } else {
            log.info("Kafka:属性文件未配置需要初始化的Kafka！");
        }
    }

    /**
     * Builds a {@link KafkaTemplate} from one GG_PZ_KAFKA row.
     *
     * @param kafkaConf one database row (column name to value)
     * @return a template backed by a new {@link DefaultKafkaProducerFactory}
     */
    private KafkaTemplate<String, String> buildKafkaTemplate(Map<String, Object> kafkaConf) {
        HashMap<String, Object> configs = new HashMap<>();

        // SASL setup: ZSDZ holds the JAAS config file path (it is fed to
        // java.security.auth.login.config). Null-guard first — String.valueOf(null)
        // yields the literal "null", which previously slipped past the isEmpty
        // check and enabled SASL with a bogus path.
        Object jaasPath = kafkaConf.get("ZSDZ");
        if (jaasPath != null && !StringUtil.isEmpty(jaasPath.toString())) {
            System.setProperty("java.security.auth.login.config", jaasPath.toString());
            configs.put("security.protocol", "SASL_PLAINTEXT");
            configs.put("sasl.mechanism", "PLAIN");
        }

        configs.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, String.valueOf(kafkaConf.get("BOOTSTRAP_SERVERS")));
        // Pass DB-sourced values as strings so Kafka's ConfigDef parses them itself:
        // JDBC drivers often return numerics as BigDecimal, which ConfigDef rejects
        // for INT-typed configs such as retries and batch.size.
        configs.put(ProducerConfig.ACKS_CONFIG, String.valueOf(kafkaConf.get("PRODUCER_ACKS")));
        configs.put(ProducerConfig.RETRIES_CONFIG, String.valueOf(kafkaConf.get("PRODUCER_RETRIES")));
        configs.put(ProducerConfig.BATCH_SIZE_CONFIG, String.valueOf(kafkaConf.get("PRODUCER_BATCH_SIZE")));
        // buffer.memory is a LONG config (value stored in MB in the DB); use long
        // arithmetic — the old int multiply overflowed for values >= 2048 MB.
        configs.put(ProducerConfig.BUFFER_MEMORY_CONFIG,
                Long.parseLong(kafkaConf.get("PRODUCER_BUFFER_MEMORY").toString()) * 1024L * 1024L);
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        DefaultKafkaProducerFactory<String, String> kafkaProducerFactory = new DefaultKafkaProducerFactory<>(configs);
        return new KafkaTemplate<>(kafkaProducerFactory);
    }

    /**
     * Returns whether a template was initialized for the given id.
     * (Name keeps the historical "Exsit" spelling for caller compatibility.)
     *
     * @param kafkaId the KAFKA_ID to look up
     * @return {@code true} if a template exists in the cache
     */
    public boolean kafkaExsit(String kafkaId) {
        return kafkaTemplateMap.containsKey(kafkaId);
    }

    /**
     * Looks up the cached template for a KAFKA_ID.
     *
     * @param kafkaId the KAFKA_ID to look up; must be non-empty
     * @return the cached template
     * @throws CommonException if {@code kafkaId} is empty or no template was initialized for it
     */
    public KafkaTemplate<String, String> getKafkaTemplate(String kafkaId) {
        if (StringUtil.isEmpty(kafkaId)) {
            throw new CommonException("请指定 kafkaId！");
        } else {
            KafkaTemplate<String, String> kafkaTemplate = kafkaTemplateMap.get(kafkaId);
            if (kafkaTemplate == null) {
                log.info("Kafka:{}的配置未获取到！", kafkaId);
                throw new CommonException("Kafka:" + kafkaId + "的配置未获取到！");
            }
            return kafkaTemplate;
        }
    }
}
