package com.smsc.headend.task.engine.message;

import cn.hutool.core.collection.CollUtil;
import cn.hutool.core.date.SystemClock;
import cn.hutool.core.util.BooleanUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.JSONUtil;
import com.google.protobuf.InvalidProtocolBufferException;
import com.smsc.headend.common.utils.ProtoBufferUtil;
import com.smsc.headend.module.data.dto.ClearLocalCacheDto;
import com.smsc.headend.module.data.dto.FtpCollectionDTO;
import com.smsc.headend.module.data.dto.LpDdsDTO;
import com.smsc.headend.module.data.dto.ReCalcDailyEnergyDTO;
import com.smsc.headend.module.data.entity.RdMeterReadsLp;
import com.smsc.headend.module.data.entity.RdPaymentModeToPostLpData;
import com.smsc.headend.module.data.proto.FtpCollectionDtoProto;
import com.smsc.headend.module.event.entity.RdEventDcu;
import com.smsc.headend.module.event.entity.RdEventMeter;
import com.smsc.headend.module.mdm.dds.dto.MdmAnonyTerminalDto;
import com.smsc.headend.module.task.consts.TaskKafkaTopic;
import com.smsc.headend.module.task.dto.DeviceStatusDTO;
import com.smsc.headend.module.task.dto.TaskEventData;
import com.smsc.headend.module.task.dto.TaskReadData;
import com.smsc.headend.module.task.dto.TaskResult;
import com.smsc.headend.module.task.proto.TaskEventDataProto;
import com.smsc.headend.module.task.proto.TaskReadDataProto;
import com.smsc.headend.task.engine.config.KafkaConfig;
import com.smsc.headend.task.engine.manager.DdsFilterMdmDataManager;
import com.smsc.headend.task.engine.service.CollMeterTaskPlanService;
import com.smsc.headend.task.engine.service.DataProcessService;
import com.smsc.headend.task.engine.service.FeignAssetManagementService;
import com.smsc.headend.task.engine.service.RdVirtualProfileService;
import com.smsc.headend.task.engine.service.asset.AssetService;
import com.smsc.headend.task.engine.service.calc.RdEnergyDailyService;
import com.smsc.headend.task.engine.service.collection.IntegrityService;
import com.smsc.headend.task.engine.service.dds.DdsService;
import com.smsc.headend.task.engine.service.push.EventService;
import com.smsc.headend.task.engine.service.switchmode.RdPaymentModeToPostLpDataService;
import com.smsc.headend.task.engine.utils.DdsAnonymousTerminalStatusGeneUtil;
import com.smsc.headend.task.engine.utils.DdsBatchEventGenerateUtil;
import com.smsc.headend.task.engine.utils.DdsDeviceStatusGenerateUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;

import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;


@Component
@Slf4j
@ConfigurationProperties
public class DataTransferListener {

    public static final String FALSE = "false";
    public static final String BATCH_DDS_FACTORY = "batchDdsFactory";
    public static final String BATCH_DDS_FOR_GENERATE_FILE_FACTORY = "batchDdsForGenerateFileFactory";
    public static final String BATCH_FACTORY = "batchFactory";

    @Autowired
    EventService eventService;
    @Autowired
    DdsBatchEventGenerateUtil ddsBatchEventGenerateUtil;
    @Autowired
    DdsDeviceStatusGenerateUtil ddsDeviceStatusGenerateUtil;
    @Autowired
    DdsAnonymousTerminalStatusGeneUtil ddsAnonymousTerminalStatusGeneUtil;
    @Autowired
    DataProcessService dataProcessService;
    @Autowired
    CollMeterTaskPlanService collMeterTaskPlanService;
    @Autowired
    FeignAssetManagementService assetManagementService;
    @Autowired
    IntegrityService integrityService;
    @Autowired
    DdsService ddsService;
    @Autowired
    AssetService assetService;
    @Autowired
    RdVirtualProfileService rdVirtualProfileService;
    @Autowired
    DdsFilterMdmDataManager ddsFilterMdmDataManager;
    @Autowired
    RdEnergyDailyService rdEnergyDailyService;
    @Autowired
    RdPaymentModeToPostLpDataService paymentModeToPostLpDataService;

    /**
     * Decodes a batch of protobuf-encoded {@link TaskReadData} records.
     * Records that fail to parse (or deserialize to {@code null}) are skipped
     * and logged with their partition/offset instead of silently dropped.
     *
     * @param dataList raw Kafka records whose values are protobuf byte arrays
     * @return the successfully decoded payloads (possibly empty, never null)
     */
    private List<TaskReadData> parseTaskReadData(List<ConsumerRecord> dataList) {
        List<TaskReadData> results = new ArrayList<>(dataList.size());
        for (ConsumerRecord data : dataList) {
            try {
                TaskReadData result = ProtoBufferUtil.fromProto(
                        TaskReadDataProto.TaskReadData.parseFrom((byte[]) data.value()), TaskReadData.class);
                if (result == null) {
                    log.error("TaskReadData deserialized to null, partition={}, offset={}", data.partition(), data.offset());
                    continue;
                }
                results.add(result);
            } catch (InvalidProtocolBufferException e) {
                log.error("TaskReadData protobuf parse failed, partition={}, offset={}", data.partition(), data.offset(), e);
            }
        }
        return results;
    }

    /**
     * Flattens the meter reads of every decoded payload into one de-duplicated list.
     * Null read lists are tolerated (skipped) to guard against partially-populated payloads.
     */
    private List<RdMeterReadsLp> flattenMeterReads(List<TaskReadData> results) {
        return results.stream()
                .map(TaskReadData::getTaskMeterReads)
                .filter(Objects::nonNull)
                .flatMap(Collection::stream)
                .distinct()
                .collect(Collectors.toList());
    }

    /**
     * Share the same topic {@link TaskKafkaTopic#DEVICE_READ_TRANSFER},different message groups {@link KafkaConfig#ENGINE_DDS_GROUP and "${spring.kafka.consumer.group-id}"}
     * this container group is {@link KafkaConfig#ENGINE_DDS_GROUP}
     *
     * @param dataList       protobuf-encoded {@link TaskReadData} batch
     * @param acknowledgment manual offset commit handle; acknowledged after transformation
     */
    @KafkaListener(topics = TaskKafkaTopic.DEVICE_READ_TRANSFER, containerFactory = BATCH_DDS_FOR_GENERATE_FILE_FACTORY, autoStartup = FALSE)
    public void receiveForDds(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        long start = SystemClock.now();
        List<RdMeterReadsLp> allReadLp = flattenMeterReads(parseTaskReadData(dataList));
        ddsService.transformMeterReadResult(allReadLp);
        log.debug("consume dds record end, cost={}ms", SystemClock.now() - start);
        acknowledgment.acknowledge();
    }

    /**
     * Persists load-profile reads and, on success, updates collection-progress integrity.
     * The offset is only acknowledged after a successful save, so a failed batch is
     * redelivered by Kafka (same behavior as before this refactor).
     *
     * @param dataList       protobuf-encoded {@link TaskReadData} batch
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.DEVICE_READ_TRANSFER, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE, concurrency = "${collect.persist.thread.coreSize:4}")
    public void receiveProfileData(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        try {
            long start = SystemClock.now();
            List<RdMeterReadsLp> allReadLp = flattenMeterReads(parseTaskReadData(dataList));
            long composeEnd = SystemClock.now();
            try {
                Boolean saveFlag = dataProcessService.saveLoadProfileRecord(allReadLp);
                long savePoint = SystemClock.now();
                // BooleanUtil.isTrue guards against a null Boolean from the service,
                // which previously would have thrown an NPE on unboxing.
                if (BooleanUtil.isTrue(saveFlag)) {
                    collMeterTaskPlanService.dataProgressIntegrityUpdate(allReadLp);
                }
                log.info("save collection: result={}, pollSize={}, size={}, composeCost={}ms, saveCost={}ms, calculate={}ms",
                        saveFlag, dataList.size(), allReadLp.size(), composeEnd - start, savePoint - composeEnd, SystemClock.now() - composeEnd);
                acknowledgment.acknowledge();
            } catch (Exception e) {
                // no acknowledge: let Kafka redeliver the batch
                log.error("save fail", e);
            }
        } catch (Exception e) {
            log.error("save lp read data error", e);
        }
    }

    /**
     * Consumes meter/DCU event payloads, forwards each record to the DDS batch-event
     * generator, then bulk-inserts the collected events. Always acknowledges unless an
     * unexpected error escapes the insert handling (same contract as the original).
     *
     * @param dataList       protobuf-encoded {@link TaskEventData} batch
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.DEVICE_EVENT_TRANSFER, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void receiveEventData(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        List<TaskEventData> results = new ArrayList<>(dataList.size());
        for (ConsumerRecord data : dataList) {
            try {
                TaskEventData result = ProtoBufferUtil.fromProto(
                        TaskEventDataProto.TaskEventData.parseFrom((byte[]) data.value()), TaskEventData.class);
                if (result == null) {
                    log.error("TaskEventData deserialized to null, partition={}, offset={}", data.partition(), data.offset());
                    continue;
                }
                results.add(result);
                ddsBatchEventGenerateUtil.insertRecord(result);
            } catch (InvalidProtocolBufferException e) {
                log.error("TaskEventData protobuf parse failed, partition={}, offset={}", data.partition(), data.offset(), e);
            }
        }
        List<RdEventMeter> list = results.stream()
                .map(TaskEventData::getRdEventMeter)
                .filter(o -> !CollectionUtils.isEmpty(o))
                .flatMap(Collection::stream)
                .collect(Collectors.toList());
        List<RdEventDcu> dcuList = results.stream()
                .map(TaskEventData::getRdEventDcu)
                .filter(o -> !CollectionUtils.isEmpty(o))
                .flatMap(Collection::stream)
                .collect(Collectors.toList());
        try {
            try {
                eventService.batchInsertEvent(list);
                eventService.batchInsertDcuEvent(dcuList);
                // Sizes at INFO; full payloads only at DEBUG to avoid flooding the log.
                log.info("save collection: meterEvents={}, dcuEvents={}", list.size(), dcuList.size());
                log.debug("save collection detail: RdEventMeter={}, RdEventDcu={}", JSONUtil.toJsonStr(list), JSONUtil.toJsonStr(dcuList));
            } catch (Exception e) {
                log.error("insert error", e);
            }
        } catch (Exception e) {
            log.error("save event unexpect error", e);
            return;
        }
        acknowledgment.acknowledge();
    }

    /**
     * Consumes JSON arrays of device-status changes and feeds them into the DDS
     * device-status generator.
     *
     * @param dataList       UTF-8 JSON array records of {@link DeviceStatusDTO}
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.DEVICE_STATUS_CHANGE_TRANSFER, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void receiveDeviceStatusData(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        for (ConsumerRecord consumerRecord : dataList) {
            String jsonStr = new String((byte[]) consumerRecord.value(), StandardCharsets.UTF_8);
            List<DeviceStatusDTO> results = JSONUtil.toList(JSONUtil.parseArray(jsonStr), DeviceStatusDTO.class);
            if (CollUtil.isNotEmpty(results)) {
                ddsDeviceStatusGenerateUtil.insertRecord(results);
            }
        }
        acknowledgment.acknowledge();
    }

    /**
     * Consumes anonymous-terminal status JSON objects and records them for DDS.
     *
     * @param dataList       UTF-8 JSON records of {@link MdmAnonyTerminalDto}
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.ANONYMOUS_DEVICE_STATUS_TRANSFER, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void receiveAnonymousDeviceStatusData(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        for (ConsumerRecord consumerRecord : dataList) {
            String jsonStr = new String((byte[]) consumerRecord.value(), StandardCharsets.UTF_8);
            MdmAnonyTerminalDto result = JSONUtil.toBean(jsonStr, MdmAnonyTerminalDto.class);
            if (result != null) {
                ddsAnonymousTerminalStatusGeneUtil.insertRecord(result);
            }
        }
        acknowledgment.acknowledge();
    }

    /**
     * Consumes protobuf-encoded FTP collection payloads and persists them for the
     * DLMS gateway. Per-record save failures are logged (with stack trace) but do
     * not block the rest of the batch; the batch is always acknowledged.
     *
     * @param dataList       protobuf-encoded {@link FtpCollectionDTO} batch
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.FTP_COLLECTION_DATA_TO_UTE, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void receiveFtpCollectionData(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        for (ConsumerRecord data : dataList) {
            FtpCollectionDTO result = null;
            try {
                result = ProtoBufferUtil.fromProto(
                        FtpCollectionDtoProto.FtpCollectionDto.parseFrom((byte[]) data.value()), FtpCollectionDTO.class);
            } catch (InvalidProtocolBufferException e) {
                log.error("result byte array parse fail", e);
            }
            if (result == null) {
                log.error("FtpCollectionDTO deserialized to null, partition={}, offset={}", data.partition(), data.offset());
                continue;
            }
            log.debug("ftp collect: {}", JSONUtil.toJsonStr(result));
            try {
                dataProcessService.saveDLMSGatewayFtpCollectionData(result);
            } catch (Exception e) {
                log.error("save ftp collection data error", e);
            }
        }
        acknowledgment.acknowledge();
    }

    /**
     * Consumes per-meter load-profile timestamp maps (meter key -> JSON array of
     * time values) and caches them for integrity calculation. Each record is
     * processed independently; a failure on one record does not stop the batch.
     *
     * @param dataList       UTF-8 JSON records, each a map of meter key to timestamp array
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.LP_DATA_INTEGRITY_CALCULATE, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void loadProfileIntegrityCalculate(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        for (ConsumerRecord data : dataList) {
            String jsonStr = new String((byte[]) data.value(), StandardCharsets.UTF_8);
            Map<String, JSONArray> meterLpDataTvMap = JSONUtil.toBean(jsonStr, Map.class);
            try {
                long savePoint = SystemClock.now();
                Map<String, Set<Long>> lpMap = new HashMap<>(meterLpDataTvMap.size());
                meterLpDataTvMap.forEach((meterKey, tvArray) ->
                        lpMap.put(meterKey, new HashSet<>(JSONUtil.toList(tvArray, Long.class))));
                integrityService.meterLpDataSaveToCache(lpMap);
                log.info("loadProfileIntegrityCalculate:  size={}, calculate={}ms", lpMap.size(), SystemClock.now() - savePoint);
            } catch (Exception e) {
                log.error("calculate integrity failed, {}", jsonStr, e);
            }
        }
        acknowledgment.acknowledge();
    }

    /**
     * Consumes load-profile export requests and pushes the requested profile data
     * to DDS. Failures are logged with the full stack trace; the batch is always
     * acknowledged.
     *
     * @param dataList       UTF-8 JSON records of {@link LpDdsDTO}
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.EXPORT_LP_DATA, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void exportLpData(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        for (ConsumerRecord data : dataList) {
            String payload = new String((byte[]) data.value(), StandardCharsets.UTF_8);
            LpDdsDTO lpDdsDTO = JSONUtil.toBean(payload, LpDdsDTO.class);
            try {
                dataProcessService.exportLoadProfileToDds(lpDdsDTO);
                log.info("export load profile,startTime:{},endTime:{},lpDataItemId:{},meterSize:{}",
                        lpDdsDTO.getStartTime(), lpDdsDTO.getEndTime(), lpDdsDTO.getLpDataItemId(), lpDdsDTO.getMeterIds().size());
            } catch (Exception e) {
                log.error("export load profile to dds error", e);
            }
        }
        acknowledgment.acknowledge();
    }

    /**
     * Broadcast listener that clears node-local caches for a DCU, meter, com
     * channel, or scaler, depending on which fields of the payload are set.
     * Always acknowledges, even on failure (cache clearing is best-effort).
     *
     * @param payload        UTF-8 JSON of {@link ClearLocalCacheDto}
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.CLEAR_DEVICE_LOCAL_CACHE, autoStartup = "false", containerFactory = "broadcastFactory")
    public void clearDeviceLocalCache(byte[] payload, Acknowledgment acknowledgment) {
        log.debug("receive clear device local cache");
        String data = new String(payload, StandardCharsets.UTF_8);
        ClearLocalCacheDto dto = JSONUtil.toBean(data, ClearLocalCacheDto.class);
        try {
            if (null != dto.getDcuId()) {
                log.debug("clear local cache for dcu ,comId:{}", dto.getDcuId());
                assetService.clearDcuCacheById(dto.getDcuId());
            }
            if (null != dto.getMeterId()) {
                log.debug("clear local cache for meter ,meterId:{}", dto.getMeterId());
                assetService.clearMeterCacheById(dto.getMeterId());
            }
            if (null != dto.getComId()) {
                assetService.updateComCache(dto);
            }
            if (BooleanUtil.isTrue(dto.getClearScaler())) {
                log.debug("clear local cache for scaler");
                ddsFilterMdmDataManager.clearScalerCache();
            }
        } catch (Exception e) {
            log.error("clear device local cache error", e);
        }
        acknowledgment.acknowledge();
    }

    /**
     * Consumes a single virtual-profile task result (JSON) and forwards the read
     * data to the adapter. The offset is always acknowledged so a poison message
     * cannot block the partition.
     *
     * @param payload        UTF-8 JSON of {@link TaskResult}
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.VIRTUAL_PROFILE_READ_TRANSFER, containerFactory = "vpSaveDataFactory", autoStartup = FALSE, concurrency = "${collect.persist.thread.coreSize:4}")
    public void receiveVirtualProfileData(String payload, Acknowledgment acknowledgment) {
        try {
            TaskResult taskResult = JSONUtil.toBean(payload, TaskResult.class);
            try {
                long start = SystemClock.now();
                rdVirtualProfileService.sendRdDataToAdapter(taskResult);
                long savePoint = SystemClock.now();
                log.info("save vp read data: saveCost={}ms, calculate={}ms", savePoint - start, SystemClock.now() - savePoint);
            } catch (Exception e) {
                log.error("save fail", e);
            }
        } catch (Exception e) {
            log.error("save vp read data error", e);
        } finally {
            acknowledgment.acknowledge();
        }
    }

    /**
     * Consumes daily-energy read data and triggers the daily-energy interval
     * calculation. Always acknowledges, even on failure.
     *
     * @param dataList       protobuf-encoded {@link TaskReadData} batch
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.DAILY_ENERGY_DATA, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void receiveDailyEnergyData(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        try {
            List<RdMeterReadsLp> dailyEnergyList = flattenMeterReads(parseTaskReadData(dataList));
            rdEnergyDailyService.calcDailyEnergyInterval(dailyEnergyList, true);
        } catch (Exception e) {
            log.error("calc daily energy interval error", e);
        } finally {
            acknowledgment.acknowledge();
        }
    }

    /**
     * Consumes a daily-energy recalculation request (JSON) and re-runs the
     * interval calculation. Always acknowledges.
     *
     * @param payload        UTF-8 JSON of {@link ReCalcDailyEnergyDTO}
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.DAILY_ENERGY_DATA_RE_CAL, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void receiveDailyEnergyData(String payload, Acknowledgment acknowledgment) {
        try {
            ReCalcDailyEnergyDTO reCalcDailyEnergyDTO = JSONUtil.toBean(payload, ReCalcDailyEnergyDTO.class);
            rdEnergyDailyService.reCalcDailyEnergyInterval(reCalcDailyEnergyDTO);
        } catch (Exception e) {
            log.error("reCalc daily energy interval error", e);
        }
        acknowledgment.acknowledge();
    }

    /**
     * Consumes prepaid-to-postpaid payment-mode switch LP data (JSON arrays) and
     * merges it. The offset is only acknowledged when the whole batch succeeds,
     * so a failed batch is redelivered (same contract as the original).
     *
     * @param dataList       UTF-8 JSON array records of {@link RdPaymentModeToPostLpData}
     * @param acknowledgment manual offset commit handle
     */
    @KafkaListener(topics = TaskKafkaTopic.PAYMENT_MODE_SWITCH_TO_POST, containerFactory = BATCH_DDS_FACTORY, autoStartup = FALSE)
    public void receivePaymentModeSwitchData(List<ConsumerRecord> dataList, Acknowledgment acknowledgment) {
        log.info("start save paymentModeSwitchData");
        try {
            for (ConsumerRecord consumerRecord : dataList) {
                String jsonStr = new String((byte[]) consumerRecord.value(), StandardCharsets.UTF_8);
                List<RdPaymentModeToPostLpData> results = JSONUtil.toList(JSONUtil.parseArray(jsonStr), RdPaymentModeToPostLpData.class);
                if (CollUtil.isNotEmpty(results)) {
                    paymentModeToPostLpDataService.mergeLpData(results);
                }
            }
            acknowledgment.acknowledge();
        } catch (Exception e) {
            log.error("save paymentModeSwitchData error", e);
        }
    }
}
