package start.spring.basic.dynamic;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import javax.annotation.PreDestroy;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.bind.BindResult;
import org.springframework.boot.context.properties.bind.Binder;
import org.springframework.boot.context.properties.source.ConfigurationPropertySource;
import org.springframework.boot.context.properties.source.ConfigurationPropertySources;
import org.springframework.context.EnvironmentAware;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Component;

import lombok.extern.slf4j.Slf4j;
import start.spring.basic.service.IKfkConsumerHandleService;
import start.spring.basic.service.KafkaConsumerContextHolder;
import start.spring.basic.util.common.StringUtil;

/**
 * 
 * ****************************************************************************
 * Dynamically initializes Kafka consumers at application startup.
 *
 * <p>The property {@code spring.kafka.dyconsumer} lists the consumers to start
 * as semicolon-separated entries of the form {@code kafkaId-topic-groupId}.
 * The detailed connection settings for each entry are loaded from the database
 * tables {@code gg_pz_kfkconsumer} / {@code gg_pz_kafka}; each consumer runs
 * on its own {@link KafkaConsumerRunner} thread and is stopped on context
 * shutdown via {@link #close()}.
 *
 * @author(作者)：xuyongyun	
 * @date(创建日期)：2021年11月16日
 ******************************************************************************
 */
@Slf4j
@Component
public class DynamicKafkaConsumer implements EnvironmentAware {
	
	@Autowired
	DynamicJdbc dao;
	
	@Autowired
	KafkaConsumerContextHolder holder;
	
	// Consumer threads started by this component, stopped in close().
	// Instance field (was static): the bean is a Spring singleton, and static
	// mutable state would leak across context refreshes.
	private final List<KafkaConsumerRunner> consumerList = new ArrayList<KafkaConsumerRunner>();
	
	@Override
	public void setEnvironment(Environment environment) {
		log.info("DynamicKafkaConsumer开始初始化Kafka消费者！");
		initKafkaConsumer(environment);
	}
	
	/**
	 * Reads {@code spring.kafka.dyconsumer} from the environment and starts one
	 * consumer per semicolon-separated entry.
	 *
	 * @param env the Spring environment supplied by {@link #setEnvironment}
	 */
	private void initKafkaConsumer(Environment env) {
		Iterable<ConfigurationPropertySource> sources = ConfigurationPropertySources.get(env);
		Binder binder = new Binder(sources);
		BindResult<Properties> dbBindResult = binder.bind("spring.kafka", Properties.class);
		// orElse(...) instead of get(): get() throws NoSuchElementException
		// when no spring.kafka.* properties are configured at all.
		Properties properties = dbBindResult.orElse(new Properties());
		
		String dyKafkaConf = properties.getProperty("dyconsumer");
		if (StringUtil.isEmpty(dyKafkaConf)) {
			log.info("未设置需要初始化的kafka消费者！");
		} else {
			// Initialize one consumer per configured entry.
			for (String dyConsumer : dyKafkaConf.split(";")) {
				handlerConsumer(dyConsumer);
				log.info("{}初始化完毕！", dyConsumer);
			}
		}
	}

	/**
	 * Starts one Kafka consumer for an entry of the form
	 * {@code kafkaId-topic-groupId}: loads its configuration row from the
	 * database, builds the consumer properties and launches a
	 * {@link KafkaConsumerRunner} thread for it.
	 *
	 * @param kafkaTopic configuration key, {@code "kafkaId-topic-groupId"}
	 */
	private void handlerConsumer(String kafkaTopic) {
		
		if (StringUtil.isEmpty(kafkaTopic)) {
			return;
		}
		String[] kafkaTopics = kafkaTopic.split("-");
		// Guard against malformed entries: indexing [0..2] below would
		// otherwise throw ArrayIndexOutOfBoundsException.
		if (kafkaTopics.length < 3) {
			log.warn("kafka消费者配置格式错误，应为kafkaId-topic-groupId：{}", kafkaTopic);
			return;
		}
		String sql = "select a.kafka_id,a.topic,a.group_id,a.consumer_enable_auto_commit,"
				+ "a.consumer_auto_offset_reset,a.consumer_service_name,a.consumer_max_poll_records,"
				+ "b.bootstrap_servers,b.zsdz from gg_pz_kfkconsumer a "
				+ "left join gg_pz_kafka b on a.kafka_id = b.kafka_id "
				+ "where a.kafka_id = ? and a.topic = ? and group_id = ? ";
		String[] params = {kafkaTopics[0], kafkaTopics[1], kafkaTopics[2]};
		Map<String, Object> consumerConfig = dao.getOne(DsConstant.SYSCONFIG, sql, params);
		if (consumerConfig == null || consumerConfig.isEmpty()) {
			log.warn("未找到kafka消费者配置：{}", kafkaTopic);
			return;
		}
		// Auto-commit defaults to true when the column is absent or blank.
		boolean autoCommit = true;
		Object enableAutoCommit = consumerConfig.get("consumer_enable_auto_commit");
		if (enableAutoCommit != null && !StringUtil.isEmpty(enableAutoCommit.toString())) {
			autoCommit = Boolean.parseBoolean(enableAutoCommit.toString());
		}
		
		Properties props = buildConsumerProps(consumerConfig, autoCommit);
		
		// Create the consumer instance and hand it to a runner thread together
		// with the business handler registered under consumer_service_name.
		KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
		IKfkConsumerHandleService handlerService =
				holder.getHandlerService(String.valueOf(consumerConfig.get("consumer_service_name")));
		KafkaConsumerRunner consumerRunner = new KafkaConsumerRunner(
				consumer, String.valueOf(consumerConfig.get("topic")), handlerService, autoCommit);
		consumerRunner.start();
		consumerList.add(consumerRunner);
	}

	/**
	 * Builds the {@link KafkaConsumer} properties from one configuration row
	 * of {@code gg_pz_kfkconsumer}/{@code gg_pz_kafka}.
	 *
	 * @param consumerConfig row loaded by {@code handlerConsumer}
	 * @param autoCommit     resolved enable.auto.commit value
	 * @return the fully populated consumer properties
	 */
	private Properties buildConsumerProps(Map<String, Object> consumerConfig, boolean autoCommit) {
		Properties props = new Properties();
		// Kafka broker address
		props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, String.valueOf(consumerConfig.get("bootstrap_servers")));
		// Consumer group is mandatory
		props.put("group.id", String.valueOf(consumerConfig.get("group_id")));
		// Key/value deserializers
		props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
		// SASL credential (JAAS) file. Null-check the raw value first:
		// String.valueOf(null) yields the literal "null", so the original
		// check enabled SASL with a bogus config path whenever the column was
		// absent. NOTE(review): key "ZSDZ" is upper-case while every other key
		// here is lower-case — confirm against dao.getOne's key casing.
		Object zsdz = consumerConfig.get("ZSDZ");
		if (zsdz != null && !StringUtil.isEmpty(zsdz.toString())) {
			System.setProperty("java.security.auth.login.config", zsdz.toString());
			props.put("security.protocol", "SASL_PLAINTEXT");
			props.put("sasl.mechanism", "PLAIN");
		}
		props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, String.valueOf(consumerConfig.get("consumer_auto_offset_reset")));
		props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit);
		
		// max.poll.records only applied when configured with a positive value.
		Object maxPollRaw = consumerConfig.get("consumer_max_poll_records");
		if (maxPollRaw != null && !StringUtil.isEmpty(maxPollRaw.toString())) {
			int maxPollRecords = Integer.parseInt(maxPollRaw.toString());
			if (maxPollRecords > 0) {
				props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, String.valueOf(maxPollRecords));
			}
		}
		return props;
	}

	/**
	 * Stops every started consumer thread when the Spring context shuts down.
	 */
	@PreDestroy
	public void close() {
		for (KafkaConsumerRunner runner : consumerList) {
			try {
				runner.shutdown();
			} catch (Exception e) {
				// Keep shutting down the remaining consumers even if one
				// fails, but do not swallow the failure silently (the
				// original catch block was empty).
				log.warn("关闭kafka消费者失败！", e);
			}
		}
	}
}
