package com.huaming.kafka.kafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.BytesDeserializer;
import org.apache.kafka.common.utils.Bytes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.AbstractMessageListenerContainer;
import org.springframework.kafka.listener.BatchAcknowledgingMessageListener;
import org.springframework.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.kafka.listener.MessageListener;
import org.springframework.kafka.listener.config.ContainerProperties;
import org.springframework.kafka.support.Acknowledgment;

import javax.annotation.PostConstruct;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;

//import org.apache.kafka.common.utils.Bytes;

/**
 * Created by yu on 2017/5/15.
 */
@Configuration
public class KafkaTemplateDefinition {

    @Autowired
    private KafkaConfig kafkaConfig;


//    @Value("${si.kafka.consumerTopic.process-threads}")
//    private int processThreads;

    private final static Logger logger = LoggerFactory.getLogger(KafkaTemplateDefinition.class);

//    @Autowired
//    private DispatchMsgHandler dispatchMsgHandler;


    private ExecutorService executorChainPool;
    private ThreadPoolExecutor messageProcessor;

    public KafkaTemplateDefinition() {
    }

    @PostConstruct
    private void init() {
//        executorChainPool = Executors.newFixedThreadPool(2);
        messageProcessor = (ThreadPoolExecutor) Executors.newFixedThreadPool(2);
    }

//    private ExecuteChain initExcutorChain() {
//        ExecuteChain excutorChain = new ExecuteChain();
//        excutorChain.
//                addHandler(receiveMsgHandler).
//                addHandler(dispatchMsgHandler);
//        System.out.println("********** excutorChain:" + excutorChain.toString());
//        return excutorChain;
//    }

    /**
     * 配置监听多个topic数据，接收到数据统一格式，然后配置Handler，进行消息处理
     *
     * @return
     */
    private ContainerProperties getContainerProps_1() {
        ContainerProperties containerProps = new ContainerProperties(kafkaConfig.getVehicleStatusTopic());

        final CountDownLatch latch = new CountDownLatch(2);
        containerProps.setMessageListener((MessageListener<byte[], Bytes>) message -> {
            MessageKafka messageKafka = new MessageKafka();
            messageKafka.setTopic(message.topic());
            messageKafka.setContent(new String(message.value().get(), Charset.forName("UTF-8")));
            messageKafka.setExtendInfo(new HashMap<String, String>() {{
                put("partition", String.valueOf(message.partition()));
                put("offset", String.valueOf(message.offset()));
            }});
//            executorChainPool.submit(() -> messageHandler.processMessage(messageKafka));
            latch.countDown();
        });



        return containerProps;
    }
    private ContainerProperties getContainerProps() {
        ContainerProperties containerProps = new ContainerProperties(kafkaConfig.getVehicleStatusTopic());

//        final CountDownLatch latch = new CountDownLatch(2);
//        containerProps.setMessageListener((MessageListener<byte[], Bytes>) message -> {
//            MessageKafka messageKafka = new MessageKafka();
//            messageKafka.setTopic(message.topic());
//            messageKafka.setContent(new String(message.value().get(), Charset.forName("UTF-8")));
//            messageKafka.setExtendInfo(new HashMap<String, String>() {{
//                put("partition", String.valueOf(message.partition()));
//                put("offset", String.valueOf(message.offset()));
//            }});
//            executorChainPool.submit(() -> messageHandler.processMessage(messageKafka));
//            latch.countDown();
//        });


        containerProps.setAckMode(AbstractMessageListenerContainer.AckMode.MANUAL_IMMEDIATE);
        containerProps.setMessageListener(new BatchAcknowledgingMessageListener<byte[], Bytes>() {
            @Override
            public void onMessage(List<ConsumerRecord<byte[], Bytes>> consumerRecords, Acknowledgment acknowledgment) {
//                logger.info("Enter onMesage handler, start time = " + System.currentTimeMillis());
//                logger.info("Before receiving:" + consumerRecords.toString());
//                logger.info("this time have info number:" + consumerRecords.size());
//                final CountDownLatch latch = new CountDownLatch(consumerRecords.size() < config.getMaxPollRecords() ? consumerRecords.size():config.getMaxPollRecords());
//               logger.error("active:{}", messageProcessor.getActiveCount());
//                logger.error("largest:{}",messageProcessor.getLargestPoolSize());
//                logger.error("core:{}",messageProcessor.getCorePoolSize());
//                logger.error("maxpoll:{}",messageProcessor.getMaximumPoolSize());
//                logger.error("poolsize:{}",messageProcessor.getPoolSize());
//                logger.error("recordsSize:{}",consumerRecords.size());
//                logger.error("queuesize:{}",messageProcessor.getQueue().size());
                while (messageProcessor.getQueue().size() > 5000) {
                    try {
                        logger.error("===sleep");
                        Thread.sleep(2);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
                long _start = System.currentTimeMillis();
                for (ConsumerRecord<byte[], Bytes> record : consumerRecords) {
                    MessageKafka messageKafka = new MessageKafka();
                    messageKafka.setTopic(record.topic());
                    messageKafka.setContent(new String(record.value().get(), Charset.forName("UTF-8")));

                    messageKafka.setExtendInfo(new HashMap<String, String>() {{
                        put("partition", String.valueOf(record.partition()));
                        put("offset", String.valueOf(record.offset()));
                    }});
//                    messageProcessor.submit(() -> messageHandler.processMessage(messageKafka));


//                    MessageDTO messageDTO = new MessageDTO();
//                    messageDTO.setTopic(record.topic());
//                    messageDTO.setKey(String.valueOf(record.key()));
//                    messageDTO.setContent(record.value());
//                    messageDTO.setExtendInfo(new HashMap<String, String>() {{
//                        put("partition", String.valueOf(record.partition()));
//                        put("offset", String.valueOf(record.offset()));
//                    }});
//                    messageProcessor.execute(() -> messageHandler.process(messageDTO));
                }
//                logger.error("submitTime:{}",System.currentTimeMillis()-_start);
//                logger.info("Enter onMesage handlre, end time = " + System.currentTimeMillis());
//                logger.info("===========start commit offset=============");
                acknowledgment.acknowledge();//提交offset
//                logger.info("===========have done commit offset=============");
            }
        });


        return containerProps;
    }


    private Map<String, Object> getConsumerConfigs() {
        Map<String, Object> props = new HashMap<String, Object>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConfig.getConsumerBootstrapServers());
        props.put(ConsumerConfig.GROUP_ID_CONFIG, kafkaConfig.getConsumerGroupId());
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, kafkaConfig.getConsumerAutoCommitIntervalMs());
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, kafkaConfig.getConsumerSessionTimeoutMs());
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, kafkaConfig.isConsumerEnableAutoCommit());
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, kafkaConfig.getMaxPollRecords());
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
        props.put(ConsumerConfig.REQUEST_TIMEOUT_MS_CONFIG, 50000);
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 4);
        return props;
    }


    @Bean
    public KafkaMessageListenerContainer<byte[], byte[]> kafkaMessageListenerContainer() {
        DefaultKafkaConsumerFactory<byte[], byte[]> cf = new DefaultKafkaConsumerFactory<>(getConsumerConfigs());
        KafkaMessageListenerContainer<byte[], byte[]> container = new KafkaMessageListenerContainer<>(cf, getContainerProps());
        container.setBeanName("kafkaMessageListenerContainer");
        container.setAutoStartup(false);  // consumer不自动启动，在链路OK的时候在启动
        return container;
    }

}
