package com.ikas.ai.server.kafka;

import cn.hutool.core.date.DatePattern;
import cn.hutool.core.date.DateUtil;
import cn.hutool.json.JSONUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.ikas.ai.consts.MachineAndWorkConditionConst;
import com.ikas.ai.model.KafkaMeteData;
import com.ikas.ai.server.module.condition.dto.WorkConditionFlowDTO;
import com.ikas.ai.server.module.condition.dto.WorkConditionStepDTO;
import com.ikas.ai.server.module.condition.dto.WorkConditionStepDetailDTO;
import com.ikas.ai.server.module.condition.service.WorkConditionFlowService;
import com.ikas.ai.server.module.data.enums.DataMeteTypeEnum;
import com.ikas.ai.server.module.data.model.DataMete;
import com.ikas.ai.server.module.instance.enums.*;
import com.ikas.ai.server.module.instance.model.DorisWorkFlowInstance;
import com.ikas.ai.server.module.instance.model.DorisWorkInstanceStep;
import com.ikas.ai.server.module.instance.model.DorisWorkInstanceStepDetail;
import com.ikas.ai.server.module.instance.service.DorisWorkFlowInstanceService;
import com.ikas.ai.server.module.instance.service.DorisWorkInstanceStepDetailService;
import com.ikas.ai.server.module.instance.service.DorisWorkInstanceStepService;
import com.ikas.ai.server.module.machine.model.po.MachineInfo;
import com.ikas.ai.server.module.machine.service.MachineInfoService;
import com.ikas.ai.utils.CollectionUtil;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

import java.sql.Timestamp;
import java.util.*;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

/**
 * @author hu.bowei@ikasinfo.com
 * @date 2023-05-16- 11:12
 */
@Component
@Slf4j
public class WorkInstanceService {
    @Autowired
    private RedisDataOperation initDataRedisListener;
    @Autowired
    private DorisWorkFlowInstanceService dorisWorkFlowInstanceService;
    @Autowired
    private DorisWorkInstanceStepDetailService dorisWorkInstanceStepDetailService;
    @Autowired
    private DorisWorkInstanceStepService dorisWorkInstanceStepService;
    @Autowired
    private WorkConditionFlowService workConditionFlowService;
    @Autowired
    private MachineInfoService machineInfoService;

    @Autowired
    KafkaTemplate<String, Object> kafkaTemplate;


    /**
     * Entry point for a single mete (measurement-point) message pulled from Kafka.
     * One mete code may be configured in several work-condition flows/steps, so the
     * cached configuration is de-duplicated per flow name and each flow is handled
     * independently: FLOW-typed metes start/finish flow instances, STEP-typed metes
     * drive step state, and all other metes feed detail records and pre-caches.
     *
     * @param kafkaMeteData the incoming mete message
     * @param offset        kafka offset of the message, forwarded to detail processing
     */
    @Transactional(rollbackFor = Exception.class)
    public void processKafkaMeteData(KafkaMeteData kafkaMeteData, long offset) {
        //1个测点出现在多个工况流程或者多个步骤中
        List<DataMete> cacheDataMeteList = initDataRedisListener.getCacheList(kafkaMeteData.getMachineNo(), kafkaMeteData.getMeteCode());
        if (CollectionUtil.isEmpty(cacheDataMeteList)) {
            log.info("meteCode:{},cacheDataMeteList: null,缓存中未获取到t_data_mete中数据", kafkaMeteData.getMeteCode());
            return;
        }
        // Keep the first cached entry per flow name; later duplicates are dropped.
        Map<String, DataMete> cacheWfDataMeteMap = cacheDataMeteList.stream().collect(Collectors.toMap(DataMete::getWfName, t -> t, (entity1, entity2) -> entity1));
        if (CollectionUtil.isEmpty(cacheWfDataMeteMap)) {
            log.info("meteCode:{},cacheWfDataMeteMap: null,缓存中未获取到t_data_mete中数据", kafkaMeteData.getMeteCode());
            return;
        }
        List<DorisWorkFlowInstance> instanceList = dorisWorkFlowInstanceService.getProcessingList(kafkaMeteData.getMachineNo());
        cacheWfDataMeteMap.forEach((wfname, cacheMete) -> {
            // Objects.equals avoids the boxed-Integer identity trap of '==' (values
            // outside the integer cache would compare false even when equal).
            if (Objects.equals(cacheMete.getType(), DataMeteTypeEnum.FLOW.getCode())) {
                //同时存在两个不同工况流程情况 tb60和tb62
                if (cacheMete.getValidMeteValue().equals(kafkaMeteData.getValue())) {
                    // Valid value = start signal; refuse to start a second instance of the same flow.
                    if (instanceList.stream().anyMatch(p -> p.getWfName().equalsIgnoreCase(wfname))) {
                        log.error("{}已存在运行中工况", wfname);
                    } else {
                        createFlow(kafkaMeteData, cacheMete);
                    }
                } else {
                    // Invalid value = end signal; close every matching running instance.
                    instanceList.stream()
                            .filter(instance -> instance.getMeteCode().equalsIgnoreCase(cacheMete.getMeteCode())
                                    && WorkFlowInstanceStateEnum.PROCESSING.getCode().equals(instance.getState()))
                            .forEach(instance -> updateFlow(kafkaMeteData, cacheMete, instance));
                }
            } else if (Objects.equals(cacheMete.getType(), DataMeteTypeEnum.STEP.getCode())) {
                if (!CollectionUtil.isEmpty(instanceList)) {
                    instanceList.forEach(instance -> processInstanceStep(kafkaMeteData, cacheMete, instance));
                }
                // Pre-cache the step value while its flow is not running yet, so that
                // createFlow can seed the step from it later.
                if (cacheMete.getValidMeteValue().equals(kafkaMeteData.getValue())
                        && instanceList.stream().noneMatch(instance -> instance.getWfName().equalsIgnoreCase(wfname))) {
                    cacheMete.setValue(kafkaMeteData.getValue());
                    cacheMete.setGenerateTime(kafkaMeteData.getGenerateTime());
                    initDataRedisListener.cacheFlowStep(cacheMete);
                }
            } else {
                if (cacheMete.getValidMeteValue().equals(kafkaMeteData.getValue())) {
                    //保存配置为需要提前缓存在数据库中缓存中(创建流程给提前缓存的值赋值使用)
                    if (WorkInstanceStepDetailEarlyCacheEnum.EARLY.getCode().equals(cacheMete.getEarlyCache())) {
                        cacheMete.setValue(kafkaMeteData.getValue());
                        cacheMete.setGenerateTime(kafkaMeteData.getGenerateTime());
                        initDataRedisListener.catchEarlyMeteCode(cacheMete);
                    }
                    //保存流程启动令到缓存中(批次时间和启动时间使用)
                    if (initDataRedisListener.existsWorkFlowStartMete(cacheMete.getMachineNo(), cacheMete.getWfName(), cacheMete.getMeteCode())) {
                        cacheMete.setValue(kafkaMeteData.getValue());
                        cacheMete.setGenerateTime(kafkaMeteData.getGenerateTime());
                        initDataRedisListener.catchStartMeteCode(cacheMete);
                    }
                    //处理运行中工况流程数据
                    if (!CollectionUtil.isEmpty(instanceList)
                            && instanceList.stream().anyMatch(p -> p.getWfName().equalsIgnoreCase(wfname))) {
                        processMeteDetail(kafkaMeteData, instanceList, cacheMete, offset);
                    }
                } else {
                    log.info("忽略无效值测点======{}", kafkaMeteData);
                }
            }
        });
    }

    /**
     * Finishes a running work-flow instance from its end-signal mete message:
     * stamps end/cost times, marks the instance SUCCESS, persists it, clears the
     * machine-level flow caches and refreshes the machine status row.
     */
    private void updateFlow(KafkaMeteData kafkaMeteData, DataMete cacheMete, DorisWorkFlowInstance instance) {
        long endTime = kafkaMeteData.getGenerateTime();
        instance.setEndTime(endTime);
        instance.setEndDate(new Timestamp(endTime));
        instance.setCostTime(Math.abs(endTime - instance.getStartTime()));
        instance.setState(WorkFlowInstanceStateEnum.SUCCESS.getCode());
        instance.setDesc(JSONUtil.toJsonStr(kafkaMeteData));
        instance.setUpdateTime(new Date());
        dorisWorkFlowInstanceService.updateById(instance);
        clearWorkFlowCache(cacheMete.getMachineNo());
        saveOrUpdateMachineInfo(instance, null);
    }

    /**
     * Applies an incoming mete value to every matching NOT_STARTED detail of each
     * running flow instance. Matching details are processed in sort order; a detail
     * carrying the SORT_FIRST update policy stops the scan so that only the
     * lowest-sort occurrence is updated.
     *
     * @param kafkaMeteData incoming mete message
     * @param instanceList  running flow instances for the machine
     * @param cacheMete     cached mete configuration
     * @param offset        kafka offset (currently unused; kept for the call contract)
     */
    private void processMeteDetail(KafkaMeteData kafkaMeteData, List<DorisWorkFlowInstance> instanceList, DataMete cacheMete, long offset) {
        //该运行中的工况是否包含该测点
        for (DorisWorkFlowInstance instance : instanceList) {
            List<DorisWorkInstanceStepDetail> instanceStepDetailList = dorisWorkInstanceStepDetailService.getListByInstanceId(instance.getId());
            if (CollectionUtil.isEmpty(instanceStepDetailList)) {
                // NOTE(review): this returns from the whole method, skipping the remaining
                // instances too — kept as-is to preserve behavior, but `continue` may have
                // been intended.
                return;
            }
            // All pending records of this mete inside the flow, in sort order.
            List<DorisWorkInstanceStepDetail> detailList = instanceStepDetailList.stream()
                    .filter(p -> cacheMete.getMeteCode().equalsIgnoreCase(p.getMeteCode())
                            && WorkInstanceStepDetailStateEnum.NOT_STARTED.getCode().equals(p.getState())
                            && kafkaMeteData.getValue().equalsIgnoreCase(p.getValidMeteValue().toString())
                            && WorkInstanceStepDetailLabelTypeEnum.METE_CODE.getCode().equals(p.getLabelType()))
                    .sorted(Comparator.comparing(DorisWorkInstanceStepDetail::getSort))
                    .collect(Collectors.toList());
            // A mete may occur several times within one flow. The previous 'flag'
            // variable was always true when read, so it has been removed: each detail
            // is updated until (and including) the first SORT_FIRST one.
            for (DorisWorkInstanceStepDetail detail : detailList) {
                detail.setMeteValue(kafkaMeteData.getValue());
                detail.setStartTime(kafkaMeteData.getGenerateTime());
                detail.setStartDate(new Timestamp(detail.getStartTime()));
                detail.setState(WorkInstanceStepDetailStateEnum.COMPLETE.getCode());
                detail.setUpdateTime(new Date());
                setDetailCostTime(instanceStepDetailList, instance, detail);
                dorisWorkInstanceStepDetailService.updateById(detail);
                if (WorkInstanceStepDetailUpdatePolicyEnum.SORT_FIRST.getCode().equals(detail.getUpdatePolicy())) {
                    break;
                }
            }
            sendDataToKafkaWorkInstanceDetailCostTime(detailList);
            log.info("工况详情测点处理完成!!!");
        }
    }

    /**
     * Pushes completed details that carry a measured positive cost time to Kafka for
     * state evaluation and degradation alerting (状态评价和劣化告警).
     *
     * @param detailList candidate detail records (may be null or empty)
     * @return true when a message was actually published
     */
    private boolean sendDataToKafkaWorkInstanceDetailCostTime(List<DorisWorkInstanceStepDetail> detailList) {
        if (CollectionUtil.isEmpty(detailList)) {
            return false;
        }
        List<DorisWorkInstanceStepDetail> payload = detailList.stream()
                .filter(detail -> detail.getCostTime() != null && detail.getCostTime() > 0)
                .collect(Collectors.toList());
        // Fix: previously an empty JSON array was still published (and 'true' returned)
        // when every detail was filtered out; skip the send in that case.
        if (payload.isEmpty()) {
            return false;
        }
        kafkaTemplate.send(KafkaInitialConfiguration.WORK_INSTANCE_DETAIL_DELAY, JSON.toJSONString(payload));
        return true;
    }


    /**
     * Derives a detail's cost time from the start time of its configured predecessor
     * mete(s). {@code detail.getPMeteCode()} maps a step name to the mete codes in
     * that step whose start time serves as the baseline; when several predecessors
     * are COMPLETE the last one scanned wins.
     */
    private void setDetailCostTime(List<DorisWorkInstanceStepDetail> instanceStepDetailList, DorisWorkFlowInstance instance, DorisWorkInstanceStepDetail detail) {
        Map<String, List<String>> pMeteCodeMap = detail.getPMeteCode();
        if (CollectionUtil.isEmpty(pMeteCodeMap)) {
            return;
        }
        List<DorisWorkInstanceStep> stepList = dorisWorkInstanceStepService.getListByInstanceId(instance.getId());
        if (CollectionUtil.isEmpty(stepList)) {
            return;
        }
        for (Map.Entry<String, List<String>> entry : pMeteCodeMap.entrySet()) {
            String stepName = entry.getKey();
            List<String> predecessorCodes = entry.getValue();
            // Locate the step instance this predecessor reference points at.
            DorisWorkInstanceStep matchedStep = null;
            for (DorisWorkInstanceStep candidate : stepList) {
                if (candidate.getStepName().equalsIgnoreCase(stepName)) {
                    matchedStep = candidate;
                    break;
                }
            }
            if (matchedStep == null) {
                continue;
            }
            // Scan that step's details for completed predecessors with a start time.
            for (DorisWorkInstanceStepDetail candidate : instanceStepDetailList) {
                if (!matchedStep.getId().equals(candidate.getStepInstanceId())
                        || !predecessorCodes.contains(candidate.getMeteCode())) {
                    continue;
                }
                if (WorkInstanceStepDetailStateEnum.COMPLETE.getCode().equals(candidate.getState())
                        && candidate.getStartTime() != null) {
                    detail.setCostTime(Math.abs(detail.getStartTime() - candidate.getStartTime()));
                }
            }
        }
    }

    /**
     * Drives the state of the step owned by the given running flow instance for this
     * mete: a valid mete value marks the step started, an invalid value marks it
     * finished.
     * <p>
     * Rules visible in the code:
     * <ul>
     *   <li>no step row for this mete, or the step is in ERROR state → ignore;</li>
     *   <li>value equals the step's valid value → (re)start: record value/start time,
     *       set PROCESSING and persist (overwrites any previously cached value);</li>
     *   <li>otherwise, if the step is PROCESSING and the message is not older than the
     *       step's start time → complete: record end time, delay, set COMPLETE and,
     *       when the update persisted, forward the step for average-cost computation;</li>
     *   <li>anything else is logged and dropped.</li>
     * </ul>
     * The machine status row is refreshed in every non-ignored case.
     *
     * @param kafkaMeteData incoming mete message
     * @param mete          cached mete configuration for this step
     * @param instance      the running work-flow instance owning the step
     */
    private void processInstanceStep(KafkaMeteData kafkaMeteData, DataMete mete, DorisWorkFlowInstance instance) {
        LambdaQueryWrapper<DorisWorkInstanceStep> queryWrapper = new LambdaQueryWrapper<>();
        queryWrapper.eq(DorisWorkInstanceStep::getWfInstanceId, instance.getId());
        queryWrapper.eq(DorisWorkInstanceStep::getMeteCode, mete.getMeteCode());
        DorisWorkInstanceStep instanceStep = dorisWorkInstanceStepService.getOne(queryWrapper);
        if (null == instanceStep || WorkInstanceStepStateEnum.ERROR.getCode().equals(instanceStep.getState())) {
            return;
        }
        // Valid value: start the step, overwriting any value cached earlier.
        if (instanceStep.getValidMeteValue().equalsIgnoreCase(kafkaMeteData.getValue())) {
            instanceStep.setMeteValue(kafkaMeteData.getValue());
            instanceStep.setStartTime(kafkaMeteData.getGenerateTime());
            instanceStep.setStartDate(new Timestamp(instanceStep.getStartTime()));
            instanceStep.setState(WorkInstanceStepStateEnum.PROCESSING.getCode());
            instanceStep.setUpdateTime(new Date());
            dorisWorkInstanceStepService.updateById(instanceStep);
        } else if (WorkInstanceStepStateEnum.PROCESSING.getCode().equals(instanceStep.getState())
                && kafkaMeteData.getGenerateTime() >= instanceStep.getStartTime()) {
            // Invalid value while running: the end signal. The step's mete value is
            // deliberately NOT overwritten here (see commented-out line below).
//            instanceStep.setMeteValue(kafkaMeteData.getValue());
            instanceStep.setEndTime(kafkaMeteData.getGenerateTime());
            instanceStep.setEndDate(new Timestamp(instanceStep.getEndTime()));
            instanceStep.setState(WorkInstanceStepStateEnum.COMPLETE.getCode());
            instanceStep.setDelay(kafkaMeteData.getGenerateTime() - instanceStep.getStartTime());
            instanceStep.setUpdateTime(new Date());
            boolean flag = dorisWorkInstanceStepService.updateById(instanceStep);
            if (flag) {
                // Only a successfully persisted completion is forwarded downstream.
                sendDataToKafkaForStepAvgCompute(instanceStep);
            }
        } else {
            log.error("忽略步骤测点======{}", mete);
        }
        saveOrUpdateMachineInfo(instance, instanceStep);
    }

    /**
     * Publishes a completed step to Kafka for average-cost computation. Steps with a
     * null or non-positive delay carry no usable timing information and are skipped.
     *
     * @param instanceStep the step just marked COMPLETE
     */
    private void sendDataToKafkaForStepAvgCompute(DorisWorkInstanceStep instanceStep) {
        Long delay = instanceStep.getDelay();
        if (delay == null || delay <= 0) {
            return;
        }
        kafkaTemplate.send(KafkaInitialConfiguration.WORK_INSTANCE_STEP_DELAY, JSON.toJSONString(instanceStep));
    }

    /**
     * Creates a complete work-condition flow instance from its configuration: the
     * instance row, one step row per configured step (seeded from pre-cached step
     * values where available) and the detail rows beneath each step. Afterwards,
     * cost times are back-filled for details already COMPLETE, the machine status
     * row is refreshed and the per-machine caches are cleared.
     *
     * @param kafkaMeteData the flow start-signal mete message
     * @param cacheMete     cached FLOW-typed mete configuration
     */
    public void createFlow(KafkaMeteData kafkaMeteData, DataMete cacheMete) {
        // Create the work-condition batch from the configured flow definition.
        WorkConditionFlowDTO flow = workConditionFlowService.findWorkConditionFlowConfiguration(cacheMete.getMachineNo(), cacheMete.getWfName());
        List<WorkConditionStepDTO> stepList = flow.getSteps();
        if (CollectionUtil.isEmpty(stepList)) {
            log.error("未配置工况流程步骤");
            return;
        }

        stepList.sort(Comparator.comparing(WorkConditionStepDTO::getSort));
        DorisWorkFlowInstance instance = convertInstance(kafkaMeteData, flow);
        dorisWorkFlowInstanceService.save(instance);
        // Step values cached before the flow started (see processKafkaMeteData).
        List<DataMete> catchStepList = initDataRedisListener.getCacheFlowStep(flow.getMachineNo(), flow.getWfName());

        // currentStep captures the step seeded as PROCESSING (if any); map shares
        // already-resolved early-cache metes across steps.
        AtomicReference<DorisWorkInstanceStep> currentStep = new AtomicReference<>();
        Map<String, DataMete> map = new HashMap<>();
        stepList.stream().forEach(conditionStep -> {
            DorisWorkInstanceStep step = convertInstanceStep(instance, catchStepList, currentStep, conditionStep);
            dorisWorkInstanceStepService.save(step);
            List<WorkConditionStepDetailDTO> detailList = conditionStep.getDetails();
            List<DorisWorkInstanceStepDetail> instanceStepDetailList = convertInstanceStepDetailList(instance, map, step, detailList);
            if (!CollectionUtil.isEmpty(instanceStepDetailList)) {
                dorisWorkInstanceStepDetailService.saveBatch(instanceStepDetailList);
            }
        });
        // Back-fill execution duration for details already COMPLETE from cached values.
        List<DorisWorkInstanceStepDetail> instanceStepDetailList = dorisWorkInstanceStepDetailService.getListByInstanceId(instance.getId());
        List<DorisWorkInstanceStepDetail> detailList = instanceStepDetailList.stream()
                .filter(p -> WorkInstanceStepDetailStateEnum.COMPLETE.getCode().equals(p.getState()))
                .sorted(Comparator.comparing(DorisWorkInstanceStepDetail::getSort)).collect(Collectors.toList());
        for (DorisWorkInstanceStepDetail detail : detailList) {
            setDetailCostTime(instanceStepDetailList, instance, detail);
            dorisWorkInstanceStepDetailService.updateById(detail);
        }

        saveOrUpdateMachineInfo(instance, currentStep.get());
        clearWorkFlowCache(cacheMete.getMachineNo());
        log.info("工况实例创建完成!!!");
    }

    /**
     * Drops all per-machine flow-related Redis caches once a flow instance has been
     * created or closed, so stale values cannot seed the next instance.
     *
     * @param machineNo machine whose caches are cleared
     */
    private void clearWorkFlowCache(String machineNo) {
        // Early-cached mete values (early_cache flag in the config).
        initDataRedisListener.removeEarlyMeteCode(machineNo);
        // Flow start tokens (start_mete in the config table).
        initDataRedisListener.removeStartMeteCode(machineNo);
        // Pre-cached step mete values.
        initDataRedisListener.removeFlowStep(machineNo);
    }

    /**
     * Builds a new PROCESSING flow-instance entity from the flow configuration and
     * the triggering mete message. The batch number (and possibly the start date)
     * is filled in by {@code setBatch} as the final step.
     */
    private DorisWorkFlowInstance convertInstance(KafkaMeteData kafkaMeteData, WorkConditionFlowDTO flow) {
        DorisWorkFlowInstance instance = new DorisWorkFlowInstance();
        // Identity / configuration fields copied from the flow definition.
        instance.setMachineId(flow.getMachineId());
        instance.setMachineNo(flow.getMachineNo());
        instance.setWfId(flow.getId());
        instance.setWfName(flow.getWfName());
        instance.setCurrWc(flow.getCurrWc());
        instance.setNextWc(flow.getNextWc());
        instance.setMeteCode(flow.getMeteCode());
        instance.setMeteName(flow.getMeteName());
        instance.setValidMeteValue(flow.getValidMeteValue());
        instance.setStartMete(flow.getStartMete());
        instance.setEndMete(flow.getEndMete());
        // Runtime state derived from the triggering message.
        instance.setStartTime(getStartTime(kafkaMeteData, flow));
        instance.setStartDate(new Timestamp(instance.getStartTime()));
        instance.setState(WorkFlowInstanceStateEnum.PROCESSING.getCode());
        instance.setMeteValue(kafkaMeteData.getValue());
        instance.setMeteTime(kafkaMeteData.getGenerateTime());
        instance.setUpdateTime(new Date());
        instance.setCreateTime(new Date());
        instance.setCreator(1L);
        // Batch number plus flow-to-condition linkage; may overwrite startDate.
        setBatch(kafkaMeteData, flow, instance);
        return instance;
    }


    /**
     * Resolves the start time for a new flow instance. An earlier strategy that
     * preferred the cached start-mete time has been retired (its commented-out code
     * is removed); the triggering mete's own generate time is used directly.
     *
     * @param kafkaMeteData the flow start-signal message
     * @param flow          flow configuration (unused; kept for the call contract)
     * @return the instance start time in epoch milliseconds
     */
    private Long getStartTime(KafkaMeteData kafkaMeteData, WorkConditionFlowDTO flow) {
        return kafkaMeteData.getGenerateTime();
    }

    /**
     * Assigns the batch number and start date of a freshly created instance from the
     * triggering mete's generate time. Two earlier fallback strategies — the cached
     * start-mete time, then the previous linked instance's batch/workId — have been
     * retired (their commented-out code is removed).
     *
     * @param kafkaMeteData the flow start-signal message
     * @param flow          flow configuration (unused; kept for the call contract)
     * @param instance      the instance being populated
     */
    private void setBatch(KafkaMeteData kafkaMeteData, WorkConditionFlowDTO flow, DorisWorkFlowInstance instance) {
        // Batch label is the formatted generate time (yyyy-MM-dd HH:mm:ss).
        instance.setBatch(DateUtil.date(kafkaMeteData.getGenerateTime()).toString(DatePattern.NORM_DATETIME_PATTERN));
        instance.setStartDate(DateUtil.date(kafkaMeteData.getGenerateTime()).toTimestamp());
    }


    /**
     * Builds a step-instance row from the step configuration. If a value for this
     * step's mete was cached before the flow started and is no more than 5 minutes
     * apart from the flow's trigger time, the step starts out PROCESSING and is
     * recorded as the current step.
     */
    private DorisWorkInstanceStep convertInstanceStep(DorisWorkFlowInstance instance, List<DataMete> catchStepList, AtomicReference<DorisWorkInstanceStep> currentStep, WorkConditionStepDTO conditionStep) {
        DorisWorkInstanceStep step = new DorisWorkInstanceStep();
        step.setWfInstanceId(instance.getId());
        step.setWfId(conditionStep.getWfId());
        step.setWfName(conditionStep.getWfName());
        step.setStepId(conditionStep.getId());
        step.setStepName(conditionStep.getStepName());
        step.setMeteCode(conditionStep.getMeteCode());
        step.setMeteName(conditionStep.getMeteName());
        step.setType(conditionStep.getType());
        step.setDefaultDelay(conditionStep.getDefaultDelay());
        step.setSort(conditionStep.getSort());
        step.setValidMeteValue(conditionStep.getValidMeteValue());
        step.setUpdateTime(new Date());
        step.setCreator(1L);
        // Look for a pre-cached value of this step's mete.
        DataMete cached = null;
        for (DataMete candidate : catchStepList) {
            if (candidate.getMeteCode().equalsIgnoreCase(conditionStep.getMeteCode())) {
                cached = candidate;
                break;
            }
        }
        if (cached != null) {
            // Honor the cached value only when it is at most 5 minutes from the
            // flow-triggering mete's time.
            long gapMinutes = Math.abs(instance.getMeteTime() - cached.getGenerateTime()) / 1000 / 60;
            if (gapMinutes <= 5) {
                step.setMeteValue(cached.getValue());
                step.setStartTime(cached.getGenerateTime());
                step.setState(WorkInstanceStepStateEnum.PROCESSING.getCode());
                step.setStartDate(new Timestamp(step.getStartTime()));
                currentStep.set(step);
            }
        }
        return step;
    }

    /**
     * Converts the configured detail DTOs of one step into detail-instance rows.
     * Each detail starts NOT_STARTED unless a value is already available:
     * <ul>
     *   <li>if this mete was resolved for an earlier detail (present in {@code map})
     *       AND the detail's update policy is ALL, the shared value is reused and the
     *       detail is created COMPLETE;</li>
     *   <li>otherwise the machine's early-cache (filtered to this flow) is consulted;
     *       a cached value within 5 minutes of the flow's trigger time completes the
     *       detail and is recorded into {@code map} for subsequent details.</li>
     * </ul>
     * Note: {@code map} is mutated as a side effect and shared across the steps of
     * one createFlow run.
     *
     * @param instance   the owning flow instance
     * @param map        mete code → already-resolved cached mete (mutated here)
     * @param step       the step-instance row these details belong to
     * @param detailList configured detail DTOs for the step
     * @return one detail-instance row per DTO, in the same order
     */
    private List<DorisWorkInstanceStepDetail> convertInstanceStepDetailList(DorisWorkFlowInstance instance, Map<String, DataMete> map, DorisWorkInstanceStep
            step, List<WorkConditionStepDetailDTO> detailList) {
        List<DataMete> catchStep0MeteCodeList = initDataRedisListener.getCacheEarlyMeteCode(instance.getMachineNo()).stream()
                .filter(p -> p.getWfName().equalsIgnoreCase(step.getWfName())).collect(Collectors.toList());
        List<DorisWorkInstanceStepDetail> instanceStepDetailList = detailList.stream().map(d -> {
            DorisWorkInstanceStepDetail detail = new DorisWorkInstanceStepDetail();
            detail.setState(WorkInstanceStepDetailStateEnum.NOT_STARTED.getCode());
            if (map.get(d.getMeteCode()) != null) {
                if (WorkInstanceStepDetailUpdatePolicyEnum.ALL.getCode().equals(d.getUpdatePolicy())) {
                    detail.setMeteValue(String.valueOf(map.get(d.getMeteCode()).getValue()));
                    detail.setStartTime(map.get(d.getMeteCode()).getGenerateTime());
                    detail.setState(WorkInstanceStepDetailStateEnum.COMPLETE.getCode());
                    detail.setStartDate(new Timestamp(detail.getStartTime()));
                }
            } else {
                Optional<DataMete> optional = catchStep0MeteCodeList.stream().filter(x -> x.getMeteCode().equalsIgnoreCase(d.getMeteCode())).findFirst();
                if (optional.isPresent()) {
                    DataMete cache = optional.get();
                    // Cached value must be within 5 minutes of the flow instance's mete time.
                    if (Math.abs((instance.getMeteTime() - cache.getGenerateTime())) / 1000 / 60 <= 5) {
                        detail.setMeteValue(cache.getValue());
                        detail.setStartTime(cache.getGenerateTime());
                        detail.setState(WorkInstanceStepDetailStateEnum.COMPLETE.getCode());
                        detail.setStartDate(new Timestamp(detail.getStartTime()));
                        map.put(cache.getMeteCode(), cache);
                    }

                }
            }
            detail.setWfId(step.getWfId());
            detail.setDetailId(d.getId());
            detail.setWfInstanceId(step.getWfInstanceId());
            detail.setWfName(step.getWfName());
            detail.setStepInstanceId(step.getId());
            detail.setStepName(step.getStepName());
            detail.setMeteCode(d.getMeteCode());
            detail.setMeteName(d.getMeteName());
            detail.setSubDetailIds(d.getSubDetailIds());
            detail.setSubLogicalOperator(d.getSubLogicalOperator());
            detail.setPDetailId(d.getPDetailId());
            detail.setLeaf(d.getLeaf());
            detail.setType(d.getType());
            detail.setPMeteCode(d.getPMeteCode());
            detail.setLabel(d.getLabel());
            detail.setLabelType(d.getLabelType());
            detail.setUpdatePolicy(d.getUpdatePolicy());
            detail.setGroupNo(d.getGroupNo());
            detail.setGroupStepId(d.getGroupStepId());
            detail.setGroupState(WorkInstanceStepDetailGroupStateEnum.NOT_STARTED.getCode());
            detail.setSort(d.getSort());
            detail.setEarlyCache(d.getEarlyCache());
            detail.setValidMeteValue(d.getValidMeteValue());
            detail.setUpdateTime(new Date());
            return detail;
        }).collect(Collectors.toList());
        return instanceStepDetailList;
    }

    /**
     * Creates or refreshes the per-machine realtime status row (current flow, step
     * and work condition). Synchronized because flow creation and step/flow updates
     * can race on the same machine row within this service.
     *
     * @param instance    flow instance whose state drives the machine status
     * @param currentStep the step currently in progress, or null when unknown
     */
    public synchronized void saveOrUpdateMachineInfo(DorisWorkFlowInstance instance, DorisWorkInstanceStep currentStep) {
        MachineInfo machineInfo = machineInfoService.getMachineInfo(instance.getMachineNo());
        if (machineInfo == null) {
            machineInfo = new MachineInfo();
        }
        machineInfo.setMachineNo(instance.getMachineNo());
        machineInfo.setCurrWf(instance.getWfName());
        // Untangled from a nested ternary; precedence: explicit current step, then
        // the existing value, then the STEP0 default (the original's self-assignment
        // branch is dropped as a no-op).
        if (null != currentStep) {
            machineInfo.setCurrStep(currentStep.getStepName());
        } else if (StringUtils.isBlank(machineInfo.getCurrStep())) {
            machineInfo.setCurrStep(WorkInstanceStepEnum.STEP0.getName());
        }
        if (WorkFlowInstanceStateEnum.SUCCESS.getCode().equals(instance.getState())) {
            // Finished flow: the machine moves on to the next work condition.
            machineInfo.setCurrWc(instance.getNextWc());
        } else if (WorkFlowInstanceStateEnum.ERROR.getCode().equals(instance.getState())) {
            machineInfo.setCurrWc(MachineAndWorkConditionConst.CONDITION_S);
        } else {
            machineInfo.setCurrWc(instance.getCurrWc());
        }
        machineInfo.setStartTime(instance.getStartTime());
        machineInfo.setUpdateTime(new Date());
        machineInfoService.saveOrUpdate(machineInfo);
    }


}
