package com.lsh.oms.kafka.service.kafka;

import com.alibaba.fastjson.JSON;
import com.lsh.oms.core.model.bills.MessageBean;
import com.lsh.oms.core.service.bills.HandlerKafkaMessage;
import com.lsh.oms.core.utils.IdUtils;
import com.lsh.oms.core.utils.customLog.CustomLog;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.errors.WakeupException;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;

import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@Deprecated
/**
 * Legacy Kafka consumer bootstrap: on Spring initialization it builds a
 * consumer configuration from injected properties, then starts
 * {@code threadNum} polling workers per configured topic, each dispatching
 * records to {@link HandlerKafkaMessage}.
 *
 * <p>Each worker owns its OWN {@code KafkaConsumer} instance: the Kafka
 * client consumer is not thread-safe, and the previous revision shared a
 * single instance across {@code threadNum} threads, which triggers
 * {@code ConcurrentModificationException} inside the client.
 *
 * @deprecated retained for reference; superseded elsewhere — see the
 *             replacement consumer wiring in this module.
 */
@Deprecated
public class KafkaConsumer implements InitializingBean {

    @Value("${broker_ip}")
    public String broker_ip;
    @Value("${group_id}")
    private String group_id;
    @Value("${enable_auto_commit}")
    private String enable_auto_commit;
    @Value("${auto_commit_interval_ms}")
    private String auto_commit_interval_ms;
    @Value("${session_timeout_ms}")
    private String session_timeout_ms;
    @Value("${connections_max_idle_ms}")
    private String connections_max_idle_ms;
    @Value("${metadata_broker_list}")
    private String metadata_broker_list;
    @Value("${key_deserializer}")
    private String key_deserializer;
    @Value("${value_deserializer}")
    private String value_deserializer;
    @Value("${heartbeat_interval_ms}")
    private String heartbeat_interval_ms;
    // Comma-separated topic list, split by SpEL into individual names.
    @Value("#{'${topic_name}'.split(',')}")
    private List<String> topicNames;

    @Autowired
    private HandlerKafkaMessage handlerKafkaMessage;

    public static final int THREADPOOL_SIZE = 10;
    // Fixed-size worker pool shared by all topics.
    // NOTE(review): if topics * threadNum exceeds THREADPOOL_SIZE, the extra
    // workers never run because each task loops forever — confirm sizing.
    private static final ExecutorService exec = Executors.newFixedThreadPool(THREADPOOL_SIZE);

    private Logger logger;

    @Value("${logpath}")
    private String logPath;

    // Number of polling workers started per topic.
    @Value("${kafka_threadnum}")
    private int threadNum;

    /**
     * Creates the logger, builds the consumer configuration once, and starts
     * the polling workers for every configured topic.
     */
    public void init() {
        this.logger = CustomLog.getInstance().createLogger(logPath, "kafka", "--->", false);
        Properties props = buildConsumerProps();
        for (String name : topicNames) {
            logger.info("topicName is " + name + " handlerMessage");
            handlerMessage(name, props);
        }
    }

    /** Assembles the Kafka consumer configuration from the injected properties. */
    private Properties buildConsumerProps() {
        Properties props = new Properties();
        props.put("bootstrap.servers", this.broker_ip);
        props.put("group.id", this.group_id);
        props.put("enable.auto.commit", this.enable_auto_commit);
        logger.info("kafka config: key: enable.auto.commit; value: " + enable_auto_commit);
        props.put("auto.commit.interval.ms", this.auto_commit_interval_ms);
        props.put("heartbeat.interval.ms", this.heartbeat_interval_ms);
        props.put("session.timeout.ms", this.session_timeout_ms);
        props.put("connections.max.idle.ms", this.connections_max_idle_ms);
        props.put("metadata.broker.list", this.metadata_broker_list);
        props.put("key.deserializer", this.key_deserializer);
        props.put("value.deserializer", this.value_deserializer);
        return props;
    }

    /**
     * Starts {@code threadNum} workers for one topic. Each worker gets a
     * dedicated consumer instance because the Kafka client consumer must not
     * be shared between threads.
     */
    private void handlerMessage(String topicName, Properties props) {
        for (int i = 0; i < threadNum; i++) {
            org.apache.kafka.clients.consumer.KafkaConsumer<String, String> consumer =
                    new org.apache.kafka.clients.consumer.KafkaConsumer<>(props);
            consumer.subscribe(Arrays.asList(topicName));
            exec.execute(new Task(consumer, handlerKafkaMessage));
        }
    }

    /** Spring lifecycle hook: kicks off {@link #init()} after property injection. */
    @Override
    public void afterPropertiesSet() throws Exception {
        this.init();
        logger.info("KafkaConsumer kafka start ");
    }

    /**
     * Endless polling loop for one consumer: polls, deserializes each record
     * into a {@link MessageBean}, hands it to the handler, then commits.
     */
    @SuppressWarnings("all")
    public class Task implements Runnable {
        private org.apache.kafka.clients.consumer.KafkaConsumer<String, String> consumer;

        private HandlerKafkaMessage handlerKafkaMessage;

        public Task(org.apache.kafka.clients.consumer.KafkaConsumer<String, String> consumer,
                    HandlerKafkaMessage handlerKafkaMessage) {
            this.consumer = consumer;
            this.handlerKafkaMessage = handlerKafkaMessage;
        }

        @Override
        public void run() {

            logger.info("kafka consumer running>>>>>");
            int timeouts = 0;
            String prefix = "ThreadName: [" + Thread.currentThread().getName() + "] :";
            while (true) {
                try {
                    // poll() belongs inside the try: previously it sat outside,
                    // so a WakeupException (or any client error) escaped run()
                    // and silently killed the worker thread.
                    ConsumerRecords<String, String> records = this.consumer.poll(5000);

                    if (records == null || records.count() == 0) {
                        timeouts++;

                        CustomLog.getInstance().customLog(logger, prefix + " timeouts is " + timeouts);
                    } else {
                        timeouts = 0;
                        CustomLog.getInstance().customLog(logger, prefix + " records size is " + records.count());
                    }

                    // Correlation id for this poll cycle, shared by its records.
                    long pollKey = IdUtils.get();
                    for (ConsumerRecord<String, String> record : records) {
                        // Per-record prefix: the previous revision appended to
                        // the loop-outer prefix, so log prefixes grew without
                        // bound across records and poll cycles.
                        String recordPrefix = prefix + "pollKey is " + pollKey + " recordKey is " + record.key();

                        CustomLog.getInstance().customLog(logger, recordPrefix + ":kafka_message>>>>topic_name:" + record.topic() + ",offset =" + record.offset() + " ,value is " + record.value());

                        MessageBean messageBean = JSON.parseObject(record.value(), MessageBean.class);
                        try {
                            handlerKafkaMessage.handlerKafkaMessage(messageBean);
                        } catch (Exception e) {
                            // One bad message must not abort the batch; log it
                            // with full stack trace instead of printStackTrace().
                            logger.error(recordPrefix + " handlerKafkaMessage failed", e);
                            CustomLog.getInstance().customLog(logger, recordPrefix + " : " + e.getMessage());
                        }

                        CustomLog.getInstance().customLog(logger, recordPrefix + " : kafka_message>>>>topic_name:" + record.topic() + ",offset =" + record.offset() + " , 处理业务逻辑结束");
                    }

                    if (!records.isEmpty()) {
                        CustomLog.getInstance().customLog(logger, prefix + "pollKey is " + pollKey + ", kafka 提交");
                        consumer.commitSync();
                    }

                } catch (WakeupException e) {
                    // NOTE(review): wakeup normally signals shutdown; original
                    // behavior (keep polling) is preserved — confirm intent.
                    logger.error("kafka consumer exception", e);
                } catch (Exception e) {
                    logger.error("kafka consumer exception", e);
                    // Preserves the original choice of committing even after a
                    // failed cycle so a poison batch is skipped rather than
                    // re-polled forever — TODO confirm at-most-once is intended.
                    consumer.commitSync();
                } finally {
                    try {
                        // Fixed 10s pause between poll cycles (original pacing).
                        Thread.sleep(10000);
                    } catch (InterruptedException ignored) {
                        // Restore the flag so shutdown interrupts are not lost.
                        Thread.currentThread().interrupt();
                    }
                }
            }
        }
    }

}