package com.smsc.headend.task.engine.message;

import cn.hutool.core.convert.Convert;
import cn.hutool.core.date.SystemClock;
import cn.hutool.json.JSONUtil;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.google.protobuf.InvalidProtocolBufferException;
import com.smsc.headend.common.annotation.TaskMessageCount;
import com.smsc.headend.common.utils.ProtoBufferUtil;
import com.smsc.headend.common.utils.RedisKeys;
import com.smsc.headend.common.utils.RedisUtils;
import com.smsc.headend.module.data.dto.MeterProgressDTO;
import com.smsc.headend.module.data.dto.TermProgressDTO;
import com.smsc.headend.module.data.entity.RdMeterReadsLp;
import com.smsc.headend.module.meterscheme.entity.CollMeterTaskPlan;
import com.smsc.headend.module.scheme.entity.ConfigAutoRecollectTask;
import com.smsc.headend.module.task.consts.TaskKafkaTopic;
import com.smsc.headend.module.task.dto.*;
import com.smsc.headend.module.task.entity.DeviceTask;
import com.smsc.headend.module.task.entity.DeviceTaskSub;
import com.smsc.headend.module.task.proto.ConnectorDeviceNotificationProto;
import com.smsc.headend.module.task.proto.ConnectorTaskResultProto;
import com.smsc.headend.module.task.proto.RetryTaskDtoProto;
import com.smsc.headend.module.task.proto.TaskProto;
import com.smsc.headend.task.engine.mapper.RdMeterReadsLpMapper;
import com.smsc.headend.task.engine.po.RecollectInfo;
import com.smsc.headend.task.engine.po.TaskEndStatusPo;
import com.smsc.headend.task.engine.service.CollMeterTaskPlanService;
import com.smsc.headend.task.engine.service.CollTermTaskPlanService;
import com.smsc.headend.task.engine.service.DeviceNotificationService;
import com.smsc.headend.task.engine.service.collection.RecollectTaskService;
import com.smsc.headend.task.engine.service.task.DeviceTaskService;
import com.smsc.headend.task.engine.service.task.DeviceTaskSubService;
import com.smsc.headend.task.engine.service.task.TaskService;
import io.micrometer.core.instrument.Counter;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.checkerframework.checker.units.qual.A;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.RejectedExecutionException;
import java.util.concurrent.ThreadPoolExecutor;

/**
 * Kafka entry points of the task engine.
 *
 * <p>Receives tasks bound for the connector, task / sub-task creation and status
 * updates, collection-progress updates, task results, auto-recollect jobs and
 * device notifications, and dispatches them to the corresponding services.
 *
 * <p>All listeners use manual acknowledgment: the offset is committed only after
 * the whole polled batch has been handed off to a thread pool or persisted.
 * Listeners are registered with {@code autoStartup = "false"} — presumably they
 * are started programmatically elsewhere (TODO confirm against the container
 * lifecycle management code).
 */
@Slf4j
@Component
public class TaskMessageService {

    public static final String BATCH_FACTORY = "batchFactory";
    public static final String BATCH_TASK_FACTORY = "batchTaskFactory";
    public static final String FALSE = "false";
    /** Retained for source compatibility; internally {@link StandardCharsets#UTF_8} is used. */
    public static final String UTF_8 = "utf-8";
    public static final String RECOLLECT_FACTORY = "recollectFactory";

    @Autowired
    TaskService taskService;

    @Autowired
    CollMeterTaskPlanService collMeterTaskPlanService;

    @Autowired
    CollTermTaskPlanService collTermTaskPlanService;

    @Autowired
    DeviceNotificationService deviceNotificationService;

    @Autowired
    DeviceTaskService deviceTaskService;

    @Autowired
    DeviceTaskSubService deviceTaskSubService;

    @Autowired
    RdMeterReadsLpMapper readsLpMapper;

    @Autowired
    RedisUtils redisUtils;

    @Autowired
    @Qualifier("taskRequestThreadPool")
    ThreadPoolExecutor taskThreadPool;

    @Autowired
    RecollectTaskService recollectTaskService;

    @Autowired
    @Qualifier("taskResponseThreadPool")
    ThreadPoolExecutor taskSendThreadPool;

    @Autowired
    @Qualifier("taskCONInbound")
    Counter conTaskInboundCounter;

    /**
     * Handles TASK_SEND_TO_UTE. The topic carries two protobuf payload shapes:
     * each record is first tried as a {@code TaskProto.Task} (recognized by a
     * non-null comId after conversion) and is otherwise treated as a
     * {@code ConnectorTaskResultProto.ConnectorTaskResult}.
     */
    @KafkaListener(topics = TaskKafkaTopic.TASK_SEND_TO_UTE, containerFactory = BATCH_FACTORY, autoStartup = FALSE)
    public void receive(List<ConsumerRecord<String, byte[]>> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        for (ConsumerRecord<String, byte[]> data : dataList) {
            byte[] payload = data.value();
            // 处理下发到Connector的任务 (task pushed down to the connector)
            boolean isTaskEntry = false;
            try {
                TaskProto.Task task = TaskProto.Task.parseFrom(payload);
                Task incomeTask = ProtoBufferUtil.fromProto(task, Task.class);
                if (incomeTask != null && incomeTask.getComId() != null) {
                    isTaskEntry = true;
                    taskThreadPool.submit(() -> executeTask(incomeTask));
                }
            } catch (InvalidProtocolBufferException e) {
                // Deliberate format sniffing: not a Task payload, fall through to
                // the ConnectorTaskResult branch below.
                log.debug("TASK_SEND_TO_UTE: payload is not a Task, trying ConnectorTaskResult");
            }
            // 处理Connector范围的任务 (connector-scoped task result)
            if (!isTaskEntry) {
                ConnectorTaskResultProto.ConnectorTaskResult connectorTaskResult = null;
                try {
                    connectorTaskResult = ConnectorTaskResultProto.ConnectorTaskResult.parseFrom(payload);
                } catch (InvalidProtocolBufferException e) {
                    // Neither payload shape matched — record is skipped, but no longer silently.
                    log.error("TASK_SEND_TO_UTE: unparseable payload, record skipped", e);
                }
                if (connectorTaskResult != null) {
                    ConnectorTaskResult result = ProtoBufferUtil.fromProto(connectorTaskResult, ConnectorTaskResult.class);
                    if (result != null) {
                        taskSendThreadPool.execute(() -> {
                            try {
                                conTaskInboundCounter.increment();
                                taskService.atomicTaskResponse(result);
                            } catch (Exception e) {
                                // Fixed: the old call passed the throwable as a "{}" argument
                                // and lost the stack trace.
                                log.error("atomic task response failed", e);
                            }
                        });
                    }
                }
            }
        }
        log.debug("TASK_SEND_TO_UTE: end push in pool, size={} cost={}ms", dataList.size(), SystemClock.now() - start);
        acknowledgment.acknowledge();
    }

    /** Runs a single inbound task and persists its end status; never lets exceptions escape the pool. */
    private void executeTask(Task incomeTask) {
        try {
            log.info("ExecuteTask:comId:{}, meterId:{}, taskNo:{},taskType:{}", incomeTask.getComId(), incomeTask.getMeterId(), incomeTask.getTaskNo(), incomeTask.getTaskType());
            TaskEndStatusPo taskEndStatusPo = taskService.doStartTask(incomeTask);
            if (taskEndStatusPo != null) {
                taskService.handleTaskEndPo(taskEndStatusPo, incomeTask);
            }
        } catch (Exception e) {
            log.error("unhandled exception: {}", JSONUtil.toJsonStr(incomeTask), e);
        }
    }

    /**
     * Decodes every record value as a UTF-8 JSON document bound to {@code type}.
     * Shared by the JSON-payload batch listeners below.
     */
    private <T> List<T> decodeJsonBatch(List<ConsumerRecord<String, byte[]>> dataList, Class<T> type) {
        List<T> dtoList = new ArrayList<>(dataList.size());
        for (ConsumerRecord<String, byte[]> data : dataList) {
            String json = new String(data.value(), StandardCharsets.UTF_8);
            log.debug("{}: msg={}", type.getSimpleName(), json);
            dtoList.add(JSONUtil.toBean(json, type));
        }
        return dtoList;
    }

    /** Persists a batch of task status updates (JSON-encoded {@link TaskStatusDto}). */
    @KafkaListener(topics = TaskKafkaTopic.TASK_DEVICE_STATUS_UPDATE, containerFactory = BATCH_TASK_FACTORY, autoStartup = FALSE)
    public void taskStatusChange(List<ConsumerRecord<String, byte[]>> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        List<TaskStatusDto> dtoList = decodeJsonBatch(dataList, TaskStatusDto.class);
        deviceTaskService.batchInsertTaskProcess(dtoList);
        log.debug("task status: update size={}, cost={}", dtoList.size(), SystemClock.now() - start);
        acknowledgment.acknowledge();
    }

    /** Persists a batch of sub-task status updates (JSON-encoded {@link SubTaskStatusDto}). */
    @KafkaListener(topics = TaskKafkaTopic.SUB_TASK_DEVICE_STATUS_UPDATE, containerFactory = BATCH_TASK_FACTORY, autoStartup = FALSE)
    public void subtaskStatusChange(List<ConsumerRecord<String, byte[]>> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        List<SubTaskStatusDto> dtoList = decodeJsonBatch(dataList, SubTaskStatusDto.class);
        deviceTaskService.batchInsertSubTaskStatus(dtoList);
        log.debug("subtask status: update size={}, cost={}", dtoList.size(), SystemClock.now() - start);
        acknowledgment.acknowledge();
    }

    /** Creates a batch of device tasks from JSON-encoded {@link DeviceTask} payloads. */
    @KafkaListener(topics = TaskKafkaTopic.TASK_CREATE, containerFactory = BATCH_TASK_FACTORY, autoStartup = FALSE)
    public void deviceTaskCreate(List<ConsumerRecord<String, byte[]>> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        List<DeviceTask> dtoList = decodeJsonBatch(dataList, DeviceTask.class);
        deviceTaskService.batchAddDeviceTask(dtoList);
        log.debug("deviceTask create number={},cost={}", dtoList.size(), SystemClock.now() - start);
        acknowledgment.acknowledge();
    }

    /** Creates a batch of device sub-tasks from JSON-encoded {@link DeviceTaskSub} payloads. */
    @KafkaListener(topics = TaskKafkaTopic.SUB_TASK_CREATE, containerFactory = BATCH_TASK_FACTORY, autoStartup = FALSE)
    public void deviceSubTaskCreate(List<ConsumerRecord<String, byte[]>> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        List<DeviceTaskSub> dtoList = decodeJsonBatch(dataList, DeviceTaskSub.class);
        deviceTaskSubService.batchAddDeviceSubTask(dtoList);
        log.debug("subDeviceTask create number={},cost={}", dtoList.size(), SystemClock.now() - start);
        acknowledgment.acknowledge();
    }

    /** Saves meter collection-progress updates (JSON-encoded {@link MeterProgressDTO}). */
    @KafkaListener(topics = TaskKafkaTopic.COLLECT_PROGRESS_PLAN_UPDATE, containerFactory = BATCH_TASK_FACTORY, autoStartup = FALSE)
    public void meterPlanUpdate(List<ConsumerRecord<String, byte[]>> dataList, Acknowledgment acknowledgment) {
        List<MeterProgressDTO> dtoList = decodeJsonBatch(dataList, MeterProgressDTO.class);
        log.info("update meter progress number: {}", dtoList.size());
        collMeterTaskPlanService.batchSaveMeterCollectProgress(dtoList);
        acknowledgment.acknowledge();
    }

    /** Saves DCU/terminal collection-progress updates (JSON-encoded {@link TermProgressDTO}). */
    @KafkaListener(topics = TaskKafkaTopic.COLLECT_DCU_PROGRESS_PLAN_UPDATE, containerFactory = BATCH_TASK_FACTORY, autoStartup = FALSE)
    public void termPlanUpdate(List<ConsumerRecord<String, byte[]>> dataList, Acknowledgment acknowledgment) {
        List<TermProgressDTO> dtoList = decodeJsonBatch(dataList, TermProgressDTO.class);
        // Fixed copy-paste: this listener handles terminal progress, not meter progress.
        log.info("update term progress number: {}", dtoList.size());
        collTermTaskPlanService.batchSaveTermCollectProgress(dtoList);
        acknowledgment.acknowledge();
    }

    /**
     * Handles TASK_RESULT_BACK: protobuf-encoded atomic task results coming back
     * from the connector, dispatched onto the response thread pool.
     */
    @KafkaListener(topics = TaskKafkaTopic.TASK_RESULT_BACK, containerFactory = BATCH_FACTORY, autoStartup = FALSE)
    public void receiveTaskRunMessage(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        for (ConsumerRecord data : dataList) {
            try {
                ConnectorTaskResultProto.ConnectorTaskResult connectorTaskResult = ConnectorTaskResultProto.ConnectorTaskResult.parseFrom((byte[]) data.value());
                ConnectorTaskResult result = ProtoBufferUtil.fromProto(connectorTaskResult, ConnectorTaskResult.class);
                if (result == null) {
                    log.error("TASK_RESULT_BACK: result conversion returned null, record skipped");
                    continue;
                }
                log.debug("TASK_RESULT_BACK: atomic task {} , result {} {}", result.getAtomicTaskNo(), result.getResultMessage(), result.getResultData());
                taskSendThreadPool.execute(() -> {
                    try {
                        taskService.atomicTaskResponse(result);
                    } catch (Exception e) {
                        log.error("atomic task response failed", e);
                    }
                });
            } catch (InvalidProtocolBufferException e) {
                // Log the cause instead of the raw byte[] (whose toString is useless).
                log.error("parse obj fail", e);
            }
        }
        log.debug("TASK_RESULT_BACK: end push in pool, size={} cost={}ms", dataList.size(), SystemClock.now() - start);
        acknowledgment.acknowledge();
    }

    /** Persists auto-recollect task configuration updates (JSON-encoded {@link ConfigAutoRecollectTask}). */
    @KafkaListener(topics = TaskKafkaTopic.RECOLLECT_TASK_UPDATE, containerFactory = DataTransferListener.BATCH_FACTORY, autoStartup = FALSE)
    public void recollectTaskUpdate(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        List<ConfigAutoRecollectTask> dtoList = new ArrayList<>(dataList.size());
        for (ConsumerRecord data : dataList) {
            String configAutoRecollectTaskJson = new String((byte[]) data.value(), StandardCharsets.UTF_8);
            dtoList.add(JSONUtil.toBean(configAutoRecollectTaskJson, ConfigAutoRecollectTask.class));
        }
        log.info("RECOLLECT_TASK_UPDATE: end Input, size={} cost={}ms", dataList.size(), SystemClock.now() - start);
        recollectTaskService.persistRecollectTask(dtoList);
        acknowledgment.acknowledge();
    }

    /**
     * Generates re-collect jobs from JSON-encoded {@link RecollectInfo} payloads.
     * Records with any unset field (time window, data item or interval still at
     * their zero defaults) are considered incomplete and skipped.
     */
    @KafkaListener(topics = TaskKafkaTopic.RECOLLECT_TASK_GEN, containerFactory = RECOLLECT_FACTORY, autoStartup = FALSE)
    public void recollectJobGenerate(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        for (ConsumerRecord data : dataList) {
            String json = new String((byte[]) data.value(), StandardCharsets.UTF_8);
            RecollectInfo recollectInfo = JSONUtil.toBean(json, RecollectInfo.class);
            if (recollectInfo.getStartTv() == 0L || recollectInfo.getEndTv() == 0L || recollectInfo.getLpDataItemId() == 0L || recollectInfo.getInterval() == 0) {
                continue;
            }
            recollectTaskService.createRecollectTask(recollectInfo);
        }
        log.info("auto recollect: end generate, size={} cost={}ms", dataList.size(), SystemClock.now() - start);
        acknowledgment.acknowledge();
    }

    /** Triggers a scan for missing collection data points; the payload itself is ignored. */
    @KafkaListener(topics = TaskKafkaTopic.RECOLLECT_ABSENT_CHECK, containerFactory = BATCH_FACTORY, autoStartup = FALSE)
    public void recollectDataAbsentCheck(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        log.info("start absent point check");
        taskService.autoRecollectCheck();
        acknowledgment.acknowledge();
    }

    /** Handles protobuf-encoded device notifications pushed by the connector. */
    @KafkaListener(topics = TaskKafkaTopic.CON_DEVICE_NOTIFICATION, containerFactory = BATCH_FACTORY, autoStartup = FALSE)
    public void receiveDeviceNotification(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        for (ConsumerRecord data : dataList) {
            ConnectorDeviceNotification notification = null;
            try {
                notification = ProtoBufferUtil.fromProto(ConnectorDeviceNotificationProto.ConnectorDeviceNotification.parseFrom((byte[]) data.value()), ConnectorDeviceNotification.class);
            } catch (InvalidProtocolBufferException e) {
                log.error("result byte array parse fail", e);
            }
            if (notification == null) {
                log.error("notification unreadable");
                continue;
            }
            deviceNotificationService.handleConnectorNotification(notification);
        }
        acknowledgment.acknowledge();
    }

    /**
     * Deletes all load-profile readings of a meter when the upstream system
     * requests it; the payload is the meter id as plain text.
     * NOTE(review): the method name "phaseChangeEvent" does not match what the
     * body does (LP data deletion) — kept for interface compatibility.
     */
    @KafkaListener(topics = TaskKafkaTopic.LP_DATA_TO_UTE_DELETE, autoStartup = "false", containerFactory = BATCH_FACTORY)
    public void phaseChangeEvent(String payload, Acknowledgment acknowledgment) {
        log.info("receive utg delete lp data message {}", payload);
        try {
            Long meterId = Convert.toLong(payload);
            int delSize = readsLpMapper.delete(Wrappers.<RdMeterReadsLp>lambdaQuery().eq(RdMeterReadsLp::getMeterId, meterId));
            log.info("delete meter lp data: meterId {}, size {}", meterId, delSize);
        } catch (Exception e) {
            log.error("delete lp data:  error", e);
        }
        acknowledgment.acknowledge();
    }
}
