package com.ikas.ai.server.kafka;

import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.map.MapUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.fastjson.JSONValidator;
import com.google.common.collect.Lists;
import com.ikas.ai.consts.Consts;
import com.ikas.ai.consts.TableDictConsts;
import com.ikas.ai.enums.CategoryTypeEnum;
import com.ikas.ai.enums.MeteTypeEnum;
import com.ikas.ai.enums.redis.RedisKeyEnum;
import com.ikas.ai.model.KafkaMeteData;
import com.ikas.ai.server.kafka.strategy.Kafka104DataConvertMeteDataStrategyContext;
import com.ikas.ai.server.module.data.enums.DataMeteTypeEnum;
import com.ikas.ai.server.module.data.model.BaseMete;
import com.ikas.ai.server.module.data.model.DataMete;
import com.ikas.ai.server.module.data.model.TripMete;
import com.ikas.ai.server.module.data.model.dto.Kafka104MeteData;
import com.ikas.ai.server.module.data.model.dto.RealDataHis;
import com.ikas.ai.server.module.data.tdDao.TdDataHisMapper;
import com.ikas.ai.utils.RedisUtils;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;

import javax.annotation.Resource;
import java.beans.Beans;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;


/**
 * 处理104服务发送过来数据
 * <p>
 * Consumes measurement batches published by the 104-protocol gateway,
 * persists history rows into TDengine and fans the data out to the
 * downstream Kafka pipelines (t_data, per-machine work-flow topics and
 * the machine-trip topic).
 */
@Slf4j
@Component
public class Kafka104MeteDataConsumerService {

    @Resource
    private TdDataHisMapper tdDataHisMapper;

    // Dedicated pool so slow TDengine inserts never block Kafka consumption.
    @Resource(name = "td")
    private ThreadPoolTaskExecutor tdExecutor;
    @Resource
    private KafkaTemplate<String, Object> kafkaTemplate;

    @Autowired
    Kafka104DataConvertMeteDataStrategyContext strategyContext;

    /**
     * 消费监听 — batch listener; offsets are committed manually only after the
     * whole batch has been processed.
     * <p>
     * FIX: the original annotation declared both {@code topics} and
     * {@code topicPattern = "0"}; Spring Kafka treats those attributes as
     * mutually exclusive, so the redundant {@code topicPattern} was removed
     * (the explicit {@code topics} list is kept, behavior is unchanged).
     */
    @KafkaListener(topics = {KafkaInitialConfiguration.TOPIC_104}, groupId = KafkaInitialConfiguration.TOPIC_104_GROUP)
    public void onMessage104(List<ConsumerRecord<String, String>> list, Acknowledgment ack) {
        processKafkaMessage(list);
        //手动提交
        ack.acknowledge();
    }

    /**
     * Parses each record (a JSON object or a JSON array of
     * {@link Kafka104MeteData}) and dispatches the YC (telemetry) and YX
     * (signal) groups.
     * <p>
     * FIX: previously an empty YC base-mete map hit a {@code return} inside
     * the lambda and silently dropped the YX portion of the same record; the
     * two category branches are now handled independently.
     */
    private void processKafkaMessage(List<ConsumerRecord<String, String>> list) {
        //处理数据发送kafka保存td
        list.forEach(record -> {
            log.debug("104--kafkaconsumer>topic:{},partition：{},offset:{},key:{},data:{}", record.topic(), record.partition(), record.offset(),
                    record.key(), record.value());
            if (record.value() == null) {
                return;
            }
            try {
                List<Kafka104MeteData> meteDataList = parseMeteDataList(record.value());
                if (CollectionUtil.isEmpty(meteDataList)) {
                    return;
                }
                Map<Integer, List<Kafka104MeteData>> meteDataGroupMap = meteDataList.stream()
                        .filter(Objects::nonNull)
                        .collect(Collectors.groupingBy(Kafka104MeteData::getCategoryType));
                if (MapUtil.isEmpty(meteDataGroupMap)) {
                    log.info("meteDataGroupMap is null");
                    return;
                }
                processYcData(meteDataGroupMap.get(CategoryTypeEnum.YC.getCode()));
                processYxData(meteDataGroupMap.get(CategoryTypeEnum.YX.getCode()));
            } catch (Exception e) {
                log.error("=====104协议测点数据处理错误======", e);
            }
        });
    }

    /** Tolerates both payload shapes: a JSON array or a single JSON object. */
    private List<Kafka104MeteData> parseMeteDataList(String payload) {
        final JSONValidator.Type type = JSONValidator.from(payload).getType();
        if (type == JSONValidator.Type.Array) {
            return JSONArray.parseArray(payload, Kafka104MeteData.class);
        }
        if (type == JSONValidator.Type.Object) {
            return Collections.singletonList(JSONObject.parseObject(payload, Kafka104MeteData.class));
        }
        return new ArrayList<>();
    }

    /** 遥测 handling: asynchronous TDengine write + forward to the t_data topic. */
    private void processYcData(List<Kafka104MeteData> ycList) {
        log.debug("遥测List:{}", ycList);
        if (CollectionUtils.isEmpty(ycList)) {
            return;
        }
        Map<Integer, BaseMete> baseMeteMap = convertYcBaseMeteMap(ycList);
        if (ObjectUtils.isEmpty(baseMeteMap)) {
            log.info("104--kafka consumer-yc BaseMete baseMeteMap is null");
            return;
        }
        // 1、写入tdengine
        CompletableFuture.runAsync(() -> save104MeteDataToTdengine(ycList, TableDictConsts.DATA_HIS_PREFIX_YC, baseMeteMap), tdExecutor);
        //2、发送data topic存入t_data表
        sendMeteDataToDataKafka(ycList, baseMeteMap);
    }

    /** 遥信 handling: TDengine write, t_data forward, work-flow and trip fan-out. */
    private void processYxData(List<Kafka104MeteData> yxList) {
        log.debug("遥信List:{}", yxList);
        if (CollectionUtils.isEmpty(yxList)) {
            return;
        }
        Map<Integer, BaseMete> baseMeteMap = convertYxBaseMeteMap(yxList);
        if (ObjectUtils.isEmpty(baseMeteMap)) {
            log.info("104--kafka consumer-yx BaseMete baseMeteMap is null");
            return;
        }
        //104收到数据只要满足basedata表条件
        // 1、写入tdengine
        CompletableFuture.runAsync(() -> save104MeteDataToTdengine(yxList, TableDictConsts.DATA_HIS_PREFIX_YX, baseMeteMap), tdExecutor);
        //2、发送data topic存入t_data表
        sendMeteDataToDataKafka(yxList, baseMeteMap);
        //处理工况所需数据
        processWorkFlowData(yxList);
        //处理跳机测点
        processTripMete(yxList);
    }

    /**
     * Builds the work-flow (工况) payload: resolves the DataMete configuration
     * for the reported addresses, de-duplicates per (machineNo, meteId) and
     * orders the entries by type before publishing.
     */
    private void processWorkFlowData(List<Kafka104MeteData> kafka104MeteDataList) {
        List<DataMete> dataMeteList = convertDataMeteMap(kafka104MeteDataList);
        log.info("dataMeteList:{}", JSON.toJSONString(dataMeteList));

        // De-duplicate by machine number + mete id (first occurrence wins).
        dataMeteList = dataMeteList.stream().filter(Objects::nonNull).collect(Collectors.collectingAndThen(Collectors.toCollection(() ->
                new TreeSet<>(Comparator.comparing(k -> k.getMachineNo() + ";" + k.getMeteId()))), ArrayList::new));
        log.info("去重dataMeteList:{}", JSON.toJSONString(dataMeteList));
        /*
         * t_data_mete types: 0=execute item, 1=response item, 2=step, 3=flow.
         * The required publish order after sorting is 3 2 0 1.
         */
        List<DataMete> typeSortList = dataMeteList.stream().filter(p -> DataMeteTypeEnum.EXECUTE.getCode() == p.getType() || DataMeteTypeEnum.RESPONSE.getCode() == p.getType())
                .sorted(Comparator.comparingInt(DataMete::getType)).collect(Collectors.toList());

        List<DataMete> typeReversedList = dataMeteList.stream().filter(p -> DataMeteTypeEnum.STEP.getCode() == p.getType() || DataMeteTypeEnum.FLOW.getCode() == p.getType())
                .sorted(Comparator.comparingInt(DataMete::getType).reversed()).collect(Collectors.toList());

        typeReversedList.addAll(typeSortList);

        log.info("排序后dataMeteList:{}", JSON.toJSONString(typeReversedList));
        // Publish to the per-machine work-flow topics.
        sendMeteDataToFlowKafka(kafka104MeteDataList, typeReversedList);
    }

    /**
     * 跳机测点发送kafka,值为1的才发
     * <p>
     * FIX: when the trip-mete cache was empty (or null) the original code
     * only logged and then streamed over the list anyway (NPE risk); it also
     * invoked the downstream strategy/send with an empty batch. Both cases now
     * return early.
     *
     * @param signalList YX signals from the current record
     */
    private void processTripMete(List<Kafka104MeteData> signalList) {
        List<Object> tripMeteList = RedisUtils.List.all(RedisKeyEnum.ALL_TRIP_METE_DATA.key());
        if (CollectionUtils.isEmpty(tripMeteList)) {
            log.info("缓存中未取到跳机测点数据:{}", RedisKeyEnum.ALL_TRIP_METE_DATA.key());
            return;
        }
        List<TripMete> meteList = tripMeteList.stream().map(i -> JSON.parseObject(i.toString(), TripMete.class)).collect(Collectors.toList());
        List<Long> tripIdList = meteList.stream().map(TripMete::getMeteId).collect(Collectors.toList());
        //筛选值为1的并且在跳机测点列表的
        List<Kafka104MeteData> appearMeteDataList = signalList.stream().filter(i -> Consts.VALID_METE_VALUE.equals(i.getCategoryValue())).filter(i -> tripIdList.contains((long) i.getAddress())).collect(Collectors.toList());
        if (CollectionUtils.isEmpty(appearMeteDataList)) {
            log.info("104上报数据无跳机测点");
            return;
        }
        Map<Integer, TripMete> tripMeteMap = meteList.stream().collect(Collectors.toMap(i -> Math.toIntExact(i.getMeteId()), mete -> mete, (a, b) -> a));
        sendTripMeteDataToTripKafka(appearMeteDataList, tripMeteMap);
    }

    /**
     * Converts trip signals through the TripMeteStrategy, de-duplicates by
     * (meteId, generateTime), groups by timestamp and sends one trip-topic
     * message per timestamp in ascending order.
     */
    private void sendTripMeteDataToTripKafka(List<Kafka104MeteData> appearMeteDataList, Map<Integer, TripMete> tripMeteMap) {
        if (MapUtil.isEmpty(tripMeteMap)) {
            log.info("tripMeteMap is null");
            return;
        }
        List<KafkaMeteData> kafkaMeteDataList = strategyContext.getResource("TripMeteStrategy").doOperation(appearMeteDataList, tripMeteMap);
        List<KafkaMeteData> kafkaList = kafkaMeteDataList.stream().filter(Objects::nonNull).collect(Collectors.collectingAndThen(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(k -> k.getMeteId() + ";" + k.getGenerateTime()))), ArrayList::new));
        Map<Long, List<KafkaMeteData>> timestampMap = kafkaList.stream().collect(Collectors.groupingBy(KafkaMeteData::getGenerateTime));
        //根据时间分组发送消息
        timestampMap.keySet().stream().sorted().forEach(
                key -> {
                    log.info("========TRIP============>sendkafka-triptopic:{},data:{}", KafkaInitialConfiguration.TOPIC_MACHINE_TRIP, JSON.toJSONString(timestampMap.get(key)));
                    kafkaTemplate.send(KafkaInitialConfiguration.TOPIC_MACHINE_TRIP, JSON.toJSONString(timestampMap.get(key)));
                });
    }


    /**
     * 保存遥测数据至TDEngine
     * 遥信和遥测可能mete_id相同，但是测点编码不同
     * <p>
     * Groups history rows per address and batch-inserts each group into its
     * per-mete table ({@code tableName + meteId}).
     *
     * @param kafka104MeteDataList raw 104 measurements
     * @param tableName            table-name prefix (YC or YX history prefix)
     * @param meteMap              base-mete configuration keyed by address
     */
    public void save104MeteDataToTdengine(List<Kafka104MeteData> kafka104MeteDataList, String tableName, Map<Integer, BaseMete> meteMap) {
        Map<String, List<RealDataHis>> map = new HashMap<>();
        kafka104MeteDataList.forEach(item -> {
            BaseMete baseMete = meteMap.get(item.getAddress());
            if (Objects.isNull(baseMete)) {
                // Address reported by the gateway has no configured mete; skip it.
                log.error("104--this address is not exist:{}", item.getAddress());
                return;
            }
            DateTime date = DateUtil.date(item.getGenerateTime());
            RealDataHis realDataHis = new RealDataHis();
            realDataHis.setTablename(tableName + baseMete.getMeteId());
            realDataHis.setMeteId(baseMete.getMeteId());
            realDataHis.setMeteCode(baseMete.getMeteCode());
            realDataHis.setMeteName(baseMete.getSignalCnName());
            realDataHis.setValue(String.valueOf(item.getCategoryValue()));
            realDataHis.setIdTime(date);
            realDataHis.setCreateTime(new Date());
            realDataHis.setMeteType(MeteTypeEnum.convert(item.getCategoryType()).getNum());
            // computeIfAbsent replaces the manual get/put grouping of the original.
            map.computeIfAbsent(String.valueOf(item.getAddress()), k -> new ArrayList<>()).add(realDataHis);
        });
        long begin = System.currentTimeMillis();
        for (List<RealDataHis> realDataHisList : map.values()) {
            List<RealDataHis> list = realDataHisList.stream().filter(realData -> Objects.nonNull(realData.getMeteId())).collect(Collectors.toList());
            if (CollectionUtils.isNotEmpty(list)) {
                tdDataHisMapper.batchInsert(list.get(0).getTablename(), list);
            }
        }
        log.info("104--SAVETDENGINE-TIME:{}", System.currentTimeMillis() - begin);
    }


    /**
     * Resolves the DataMete configuration for the addresses in the batch
     * (also covers virtual function metes such as "VIRT_152").
     */
    private List<DataMete> convertDataMeteMap(List<Kafka104MeteData> kafka104MeteDataList) {
        List<Long> addressList = kafka104MeteDataList.stream().filter(Objects::nonNull).map(i -> (long) i.getAddress()).distinct().collect(Collectors.toList());
        log.info("work-flow-addressList:{}", JSON.toJSONString(addressList));
        return getDataMetesCacheByMeteIds(addressList);
    }

    /**
     * 从缓存获取dataMete
     * <p>
     * FIX: returns an empty list when the cache is empty/null instead of
     * streaming over a possibly-null list.
     *
     * @return list of DataMete
     */
    private List<DataMete> getDataMetesCache() {
        List<String> dataMeteJsonList = RedisUtils.List.all(RedisKeyEnum.DATA_METE_TABLE_ENTITY.key());
        if (CollectionUtils.isEmpty(dataMeteJsonList)) {
            log.info("缓存获取工况流程相关测点未获取到值:{}", RedisKeyEnum.DATA_METE_TABLE_ENTITY.key());
            return new ArrayList<>();
        }
        return dataMeteJsonList.stream().map(a -> JSON.parseObject(a, DataMete.class)).collect(Collectors.toList());
    }

    /**
     * 从缓存获取dataMete 并过滤
     *
     * @param addressList 过滤条件
     * @return list of DataMete filter by meteIds
     */
    private List<DataMete> getDataMetesCacheByMeteIds(List<Long> addressList) {
        if (CollectionUtils.isEmpty(addressList)) {
            log.info("params meteIds list is empty");
            return new ArrayList<>();
        }
        return getDataMetesCache().stream().filter(Objects::nonNull).filter(
                m -> addressList.contains(m.getMeteId())
        ).collect(Collectors.toList());
    }

    /** Loads YC base-mete entries from Redis; missing keys are skipped. */
    private List<BaseMete> getBaseMeteYcCache(List<Long> addressList) {
        List<String> keys = addressList.stream().map(RedisKeyEnum.BASE_METE_ENTITY_YC::key).collect(Collectors.toList());
        List<String> list = RedisUtils.multiGet(keys);
        // multiGet returns null entries for absent keys; drop them before parsing.
        return list.stream().filter(Objects::nonNull).map(i -> JSON.parseObject(i, BaseMete.class)).collect(Collectors.toList());
    }

    /** Loads YX base-mete entries from Redis; missing keys are skipped. */
    private List<BaseMete> getBaseMeteYxCache(List<Long> addressList) {
        List<String> keys = addressList.stream().map(RedisKeyEnum.BASE_METE_ENTITY_YX::key).collect(Collectors.toList());
        List<String> list = RedisUtils.multiGet(keys);
        // multiGet returns null entries for absent keys; drop them before parsing.
        return list.stream().filter(Objects::nonNull).map(i -> JSON.parseObject(i, BaseMete.class)).collect(Collectors.toList());
    }

    /**
     * 从缓存获取dataMete 并过滤
     *
     * @param addressList 过滤条件
     * @param baseMeteList cached base metes to filter
     * @return list of BaseMete filter by meteIds
     */
    private List<BaseMete> getBaseMeteAllMeteIdList(List<Long> addressList, List<BaseMete> baseMeteList) {
        if (CollectionUtils.isEmpty(addressList)) {
            log.info("params meteIds is empty");
            return new ArrayList<>();
        }
        if (CollectionUtils.isEmpty(baseMeteList)) {
            log.info("baseMete meteIds is empty");
            return new ArrayList<>();
        }
        log.info("addressList:{},baseMeteList:{}", JSON.toJSONString(addressList), JSON.toJSONString(baseMeteList));
        return baseMeteList.stream().filter(Objects::nonNull).filter(
                m -> addressList.contains(m.getMeteId())
        ).collect(Collectors.toList());
    }

    /**
     * Builds an address -> BaseMete map for YC data.
     * <p>
     * FIX: addresses are now de-duplicated and {@code toMap} got a merge
     * function — previously a batch containing the same mete id twice made
     * {@code Collectors.toMap} throw {@code IllegalStateException}.
     */
    private Map<Integer, BaseMete> convertYcBaseMeteMap(List<Kafka104MeteData> kafka104MeteDataList) {
        List<Long> addressList = kafka104MeteDataList.stream().filter(Objects::nonNull)
                .map(i -> (long) i.getAddress()).distinct().collect(Collectors.toList());
        List<BaseMete> baseMeteYcCache = getBaseMeteYcCache(addressList);
        log.info("该YC测点缓存:{}", JSON.toJSONString(baseMeteYcCache));
        List<BaseMete> baseMeteList = getBaseMeteAllMeteIdList(addressList, baseMeteYcCache);
        return baseMeteList.stream().collect(Collectors.toMap(i -> Math.toIntExact(i.getMeteId()), mete -> mete, (a, b) -> a));
    }

    /**
     * Builds an address -> BaseMete map for YX data.
     * <p>
     * FIX: same duplicate-key hardening as {@code convertYcBaseMeteMap}.
     */
    private Map<Integer, BaseMete> convertYxBaseMeteMap(List<Kafka104MeteData> kafka104MeteDataList) {
        List<Long> addressList = kafka104MeteDataList.stream().filter(Objects::nonNull)
                .map(i -> (long) i.getAddress()).distinct().collect(Collectors.toList());
        List<BaseMete> baseMeteYxCache = getBaseMeteYxCache(addressList);
        log.info("该YX测点缓存:{}", JSON.toJSONString(baseMeteYxCache));
        List<BaseMete> baseMeteList = getBaseMeteAllMeteIdList(addressList, baseMeteYxCache);
        return baseMeteList.stream().collect(Collectors.toMap(i -> Math.toIntExact(i.getMeteId()), mete -> mete, (a, b) -> a));
    }

    /**
     * 发送数据到kafka 工况流程所需
     * <p>
     * Publishes one message per distinct (meteId, generateTime) pair to the
     * topic named after the entry's machine number.
     *
     * @param kafka104MeteDataList raw 104 measurements of the batch
     * @param meteList             work-flow metes, already ordered for publishing
     */
    private void sendMeteDataToFlowKafka(List<Kafka104MeteData> kafka104MeteDataList, List<DataMete> meteList) {
        if (CollectionUtils.isEmpty(meteList)) {
            log.info("104上报数据无工况流程测点");
            return;
        }

        meteList.forEach(p -> {
            Map<Integer, DataMete> meteMap = new HashMap<>();
            meteMap.put(Math.toIntExact(p.getMeteId()), p);
            List<KafkaMeteData> kafkaMeteDataList = strategyContext.getResource("DataMeteStrategy").doOperation(kafka104MeteDataList, meteMap);

            // Drop nulls and de-duplicate by meteId + generateTime before sending.
            kafkaMeteDataList.stream().filter(Objects::nonNull).collect(Collectors.collectingAndThen(Collectors.toCollection(() ->
                    new TreeSet<>(Comparator.comparing(k -> k.getMeteId() + ";" + k.getGenerateTime()))), ArrayList::new)).forEach(kafkaMeteData -> {
                String machineNo = kafkaMeteData.getMachineNo();
                log.info("========WORK-FLOW============>sendkafka--topic:{},data:{}", machineNo, JSON.toJSONString(kafkaMeteData));
                kafkaTemplate.send(machineNo, JSON.toJSONString(kafkaMeteData));
            });
        });
    }

    /**
     * 遥信和遥测可能mete_id相同，但是测点编码不同
     * 将测点发送至t_data kafka 存入t_data
     * <p>
     * Converts via the BaseMeteStrategy, de-duplicates by
     * (meteId, generateTime), groups by timestamp and publishes one u_data
     * message per timestamp in ascending order.
     *
     * @param kafka104MeteDataList raw 104 measurements
     * @param meteMap              base-mete configuration keyed by address
     */
    private void sendMeteDataToDataKafka(List<Kafka104MeteData> kafka104MeteDataList, Map<Integer, BaseMete> meteMap) {
        List<KafkaMeteData> kafkaMeteDataList = strategyContext.getResource("BaseMeteStrategy").doOperation(kafka104MeteDataList, meteMap);
        List<KafkaMeteData> kafkaList = kafkaMeteDataList.stream().filter(Objects::nonNull).collect(Collectors.collectingAndThen(Collectors.toCollection(() -> new TreeSet<>(Comparator.comparing(k -> k.getMeteId() + ";" + k.getGenerateTime()))), ArrayList::new));
        Map<Long, List<KafkaMeteData>> timestampMap = kafkaList.stream().collect(Collectors.groupingBy(KafkaMeteData::getGenerateTime));
        //根据时间分组发送消息
        timestampMap.keySet().stream().sorted().forEach(
                key -> {
                    log.info("========UDATA============>sendkafka--topic:{},data:{}", KafkaInitialConfiguration.TOPIC_U_DATA, JSON.toJSONString(timestampMap.get(key)));
                    kafkaTemplate.send(KafkaInitialConfiguration.TOPIC_U_DATA, JSON.toJSONString(timestampMap.get(key)));
                });
    }
}




