package com.study.sbsummary.config.kafka;

import com.alibaba.fastjson.JSON;
import com.study.sbsummary.config.kafka.consumer.ActionConsumerThread;
import com.study.sbsummary.config.kafka.consumer.IConsumerThread;
import com.study.sbsummary.config.kafka.consumer.IRecordsHandler;
import com.study.sbsummary.model.KafkaMessageBO;
import com.study.sbsummary.utils.ThreadUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;

import javax.annotation.PreDestroy;
import java.util.*;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

@Component
@Slf4j
public class KafkaService {

    public final static String EmptyKafkaMessageKey = null;

    @Autowired
    private KafkaConsumerConfig kafkaConsumerConfig;

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Autowired
    private Topics topics;

    private static final Map<String, Executor> CONSUMER_EXECUTORS = new HashMap<>();

    private static final Map<String, List<IConsumerThread>> consumerThreads = new HashMap<>();

    @Autowired
    private Map<String, IRecordsHandler> messageHandlers;

    private void sendTest() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                Topics.Topic topic = topics.getTopic("action");
                for (int i = 0; i < 1000; i++) {
                    KafkaMessageBO kafkaMessageBO = new KafkaMessageBO();
                    kafkaMessageBO.setTopic(topic.getName());
                    //appid=localside&opt=publishdoc&userid=1228954563&docid=T_00m2OR3I
                    Map<String, String> map = new HashMap<>();
                    map.put("appid", "localside");
                    map.put("opt", "publishdoc");
                    map.put("userid", "1228954563");
                    map.put("docid", "T_00m2OR3I");
                    map.put("ext", new Date().toString());
                    kafkaMessageBO.setData(map);
                    send(kafkaMessageBO);
                    ThreadUtil.sleep(3 * 1000);
                }
            }
        }).start();
    }

    public void startConsumerThreads() {
        if (messageHandlers == null) {
            log.warn("empty consumer handlers");
            return;
        }

//        sendTest();

        for (Map.Entry<String, IRecordsHandler> entry : messageHandlers.entrySet()) {
            messageHandlers.put(entry.getValue().name(), entry.getValue());
        }
        new Thread(() -> {
            log.info("startConsumerThreads");
            for (Map.Entry<String, Topics.Topic> entry : topics.getTopics().entrySet()) {
                Topics.Topic topic = entry.getValue();
                if (topic.getConsumerCnt() < 1) {
                    continue;
                }
                List<IConsumerThread> threads = new ArrayList<>();
                consumerThreads.put(entry.getKey(), threads);
                Executor executor = Executors.newFixedThreadPool(topic.getConsumerCnt());
                CONSUMER_EXECUTORS.put(entry.getKey(), executor);
                Properties additionalProperties = kafkaConsumerConfig.properties();
                additionalProperties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, topic.isAutoCommit());
                additionalProperties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, topic.getOffsetReset());
                additionalProperties.put(ConsumerConfig.GROUP_ID_CONFIG, topic.getGroup());
                for (int i = 0; i < topic.getConsumerCnt(); i++) {
                    Consumer consumer = kafkaConsumerConfig.consumerFactory(additionalProperties).createConsumer();
                    consumer.subscribe(Arrays.asList(topic.getName()));
                    IConsumerThread consumerThread = new ActionConsumerThread(consumer, topic,
                            messageHandlers.get(entry.getKey()), i, additionalProperties);
                    threads.add(consumerThread);
                    executor.execute(consumerThread);
                }
            }
        }).start();
    }

    @PreDestroy
    public void preDestroy() {
        if (consumerThreads != null) {
            for (Map.Entry<String, List<IConsumerThread>> entry : consumerThreads.entrySet()) {
                for (IConsumerThread consumerThread : entry.getValue()) {
                    consumerThread.close();
                }
            }
        }
    }

    public boolean send(KafkaMessageBO kafkaMessageBO) {
        String msgStr = kafkaMessageBO.getData() instanceof String ?
                (String) kafkaMessageBO.getData() :
                JSON.toJSONString(kafkaMessageBO.getData());
        try {
            if (kafkaMessageBO.getPartition() < 0) {
                kafkaTemplate.send(kafkaMessageBO.getTopic(), kafkaMessageBO.getKey(), msgStr).get();
            } else {
                kafkaTemplate.send(kafkaMessageBO.getTopic(), kafkaMessageBO.getPartition(), kafkaMessageBO.getKey(), msgStr).get();
            }
            return true;
        } catch (Exception e) {
            log.error("Failed to send message {} with key {} to topic {} due " +
                            "to {}", msgStr, kafkaMessageBO.getKey(), kafkaMessageBO.getTopic()
                    , e.getMessage());
            return false;
        }
    }

    public boolean send(String topic, Object message) {
        return send(topic, EmptyKafkaMessageKey, message);
    }

    public boolean send(String topic, String key, Object message) {
        KafkaMessageBO kafkaMessageBO = new KafkaMessageBO();
        kafkaMessageBO.setKey(key);
        kafkaMessageBO.setTopic(topic);
        kafkaMessageBO.setData(message);
        return send(kafkaMessageBO);
    }
}
