package com.smsc.headend.task.engine.config;

import com.smsc.headend.common.thread.UDISThreadFactory;
import com.smsc.headend.module.task.consts.TaskKafkaTopic;
import com.smsc.headend.task.engine.handler.concurrent.TaskRejectHandler;
import lombok.extern.slf4j.Slf4j;
import org.apache.curator.framework.CuratorFramework;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.DescribeTopicsResult;
import org.apache.kafka.clients.admin.NewTopic;
import org.assertj.core.util.Lists;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.AutowireCapableBeanFactory;
import org.springframework.boot.ApplicationRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;

@Configuration
@Slf4j
public class TaskEngineConfig {

    @Autowired
    AdminClient kafkaAdminClient;

    @Autowired
    KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry;

//    @Value("${collect.save.thread.coreSize}")
//    Integer saveCorePoolSize;
//    @Value("${collect.save.thread.maxPoolSize}")
//    Integer maxPoolSize;
//    @Value("${collect.save.thread.aliveTime}")
//    Integer taskAliveTime;
//
//    @Value("${collect.persist.thread.coreSize: 4}")
//    Integer collDataCorePoolSize;
//    @Value("${collect.persist.thread.maxPoolSize: 4}")
//    Integer collDataMaxPoolSize;

    @Value("${task.execute.thread.coreSize}")
    Integer taskExecuteCorePoolSize;
    @Value("${task.execute.thread.maxPoolSize}")
    Integer taskExecuteMaxPoolSize;
    @Value("${task.execute.thread.aliveTime}")
    Integer taskExecuteAliveTime;
    @Value("${spring.datasource.dbnames}")
    String[] dnNames;
    @Autowired
    AutowireCapableBeanFactory autowireCapableBeanFactory;

    @Bean("taskResponseThreadPool")
    public ThreadPoolExecutor taskResponseThreadPool() {
        TaskRejectHandler taskRejectHandler = new TaskRejectHandler();
        autowireCapableBeanFactory.autowireBean(taskRejectHandler);
        return new ThreadPoolExecutor(taskExecuteCorePoolSize, taskExecuteMaxPoolSize, taskExecuteAliveTime, TimeUnit.SECONDS, new LinkedBlockingQueue<>(1000), new UDISThreadFactory("req"), taskRejectHandler);
    }

    @Bean("taskRequestThreadPool")
    public ThreadPoolExecutor taskRequestThreadPool() {
        TaskRejectHandler taskRejectHandler = new TaskRejectHandler();
        autowireCapableBeanFactory.autowireBean(taskRejectHandler);
        return new ThreadPoolExecutor(taskExecuteCorePoolSize, taskExecuteMaxPoolSize, taskExecuteAliveTime, TimeUnit.SECONDS, new LinkedBlockingQueue<>(1000), new UDISThreadFactory("resp"), taskRejectHandler);
    }

    @Bean("saveDbExecutor")
    public ThreadPoolExecutor saveDbExecutor(@Value("${collect.persist.thread.coreSize:4}") Integer coreSize) {
        return new ThreadPoolExecutor(dnNames.length * coreSize, dnNames.length * coreSize, 120, TimeUnit.SECONDS, new LinkedBlockingQueue<>(10), new UDISThreadFactory("persist"), new RejectedExecutionHandler() {
            @Override
            public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) {
                r.run();
            }
        });

    }

    @Bean("schedulePool")
    public ScheduledExecutorService schedulePoolExecute() {
        return Executors.newSingleThreadScheduledExecutor(new UDISThreadFactory("schedule"));
    }

    @Bean("taskStopWatcher")
    public ScheduledExecutorService taskStopWatcher() {
        return Executors.newSingleThreadScheduledExecutor(new UDISThreadFactory("taskStopWatcher"));
    }

    @Bean("conNumber")
    public AtomicInteger connectorNumber() {
        return new AtomicInteger();
    }


    @Bean
    public ApplicationRunner runnerCreateTopic() {
        return args ->
        {
            DescribeTopicsResult result = kafkaAdminClient.describeTopics(Lists.newArrayList(
                    TaskKafkaTopic.TASK_SEND_TO_UTE,
                    TaskKafkaTopic.TASK_RESULT_BACK,
                    TaskKafkaTopic.DEVICE_READ_TRANSFER,
                    TaskKafkaTopic.DEVICE_EVENT_TRANSFER,
                    TaskKafkaTopic.DEVICE_STATUS_CHANGE_TRANSFER,
                    TaskKafkaTopic.ANONYMOUS_DEVICE_STATUS_TRANSFER,
                    TaskKafkaTopic.METER_READ_SAVE_DDS,
                    TaskKafkaTopic.CON_DEVICE_NOTIFICATION,
                    TaskKafkaTopic.TASK_RETRY_TO_UTE,
                    TaskKafkaTopic.FTP_COLLECTION_DATA_TO_UTE,
                    TaskKafkaTopic.TASK_DEVICE_STATUS_UPDATE,
                    TaskKafkaTopic.SUB_TASK_DEVICE_STATUS_UPDATE,
                    TaskKafkaTopic.LP_DATA_INTEGRITY_SAVE,
                    TaskKafkaTopic.COLLECT_PROGRESS_PLAN_UPDATE,
                    TaskKafkaTopic.COLLECT_DCU_PROGRESS_PLAN_UPDATE,
                    TaskKafkaTopic.RECOLLECT_TASK_GEN,
                    TaskKafkaTopic.RECOLLECT_ABSENT_CHECK,
                    TaskKafkaTopic.CLIENT_DEVICE_IP_CHANGED,
                    TaskKafkaTopic.TASK_CREATE,
                    TaskKafkaTopic.SUB_TASK_CREATE,
                    TaskKafkaTopic.REQUEST_MESSAGE_TASK_STATUS,
                    TaskKafkaTopic.DDS_GENERATE,
                    TaskKafkaTopic.RECOLLECT_TASK_UPDATE,
                    TaskKafkaTopic.LP_DATA_INTEGRITY_CALCULATE,
                    TaskKafkaTopic.LP_DATA_TO_UTE_DELETE,
                    TaskKafkaTopic.EXPORT_LP_DATA,
                    TaskKafkaTopic.TASK_DELAY15_TO_UTE,
                    TaskKafkaTopic.TASK_DELAY60_TO_UTE,
                    TaskKafkaTopic.TASK_DELAY240_TO_UTE,
                    TaskKafkaTopic.DAILY_ENERGY_DATA,
                    TaskKafkaTopic.VIRTUAL_PROFILE_READ_TRANSFER,
                    TaskKafkaTopic.PAYMENT_MODE_SWITCH_TO_POST
            ));

            List<NewTopic> newTopicList = new ArrayList();
            result.values().forEach((s, topicDescriptionKafkaFuture) -> {
                try {
                    topicDescriptionKafkaFuture.get();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                } catch (ExecutionException e) {
                    log.info("{} not exists ,create by UTE", s);
                    NewTopic topic = new NewTopic(s, 3, (short) 1);
                    newTopicList.add(topic);
                }
            });
            kafkaAdminClient.createTopics(newTopicList);
            //wait for kafka setting config
            if (newTopicList.size() > 0) {
                Thread.sleep(10000);
            }
            kafkaListenerEndpointRegistry.getAllListenerContainers().forEach(messageListenerContainer -> {
                if (!messageListenerContainer.isRunning()) {
                    log.info("start {}", messageListenerContainer);
                    messageListenerContainer.start();
                } else {
                    log.info("resume {}", messageListenerContainer);
                    messageListenerContainer.resume();
                }
            });
        };
    }
}
