package com.ikas.ai.server.kafka;
// NOTE(review): this file's entire contents (class MysqlWorkInstanceService, below) are
// commented out and compile to nothing but this package declaration. Per the original
// author's Javadoc (see ~L39-41), this was a MySQL-backed service kept "in case" and was
// meant to be renamed and have callers retargeted before reuse. If it has been permanently
// superseded, prefer deleting the file and relying on version-control history over keeping
// dead code; if it may be revived, restore it on a branch and complete the rename first.
//
//import cn.hutool.core.date.DatePattern;
//import cn.hutool.core.date.DateUtil;
//import cn.hutool.json.JSONUtil;
//import com.alibaba.fastjson.JSON;
//import com.alibaba.fastjson.JSONObject;
//import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
//import com.ikas.ai.consts.MachineAndWorkConditionConst;
//import com.ikas.ai.model.KafkaMeteData;
//import com.ikas.ai.server.module.condition.dto.WorkConditionFlowDTO;
//import com.ikas.ai.server.module.condition.dto.WorkConditionStepDTO;
//import com.ikas.ai.server.module.condition.dto.WorkConditionStepDetailDTO;
//import com.ikas.ai.server.module.condition.service.WorkConditionFlowService;
//import com.ikas.ai.server.module.data.enums.DataMeteTypeEnum;
//import com.ikas.ai.server.module.data.model.DataMete;
//import com.ikas.ai.server.module.instance.enums.*;
//import com.ikas.ai.server.module.instance.model.WorkFlowInstance;
//import com.ikas.ai.server.module.instance.model.WorkInstanceStep;
//import com.ikas.ai.server.module.instance.model.WorkInstanceStepDetail;
//import com.ikas.ai.server.module.instance.service.WorkFlowInstanceService;
//import com.ikas.ai.server.module.instance.service.WorkInstanceStepDetailService;
//import com.ikas.ai.server.module.instance.service.WorkInstanceStepService;
//import com.ikas.ai.server.module.machine.model.po.MachineInfo;
//import com.ikas.ai.server.module.machine.service.MachineInfoService;
//import com.ikas.ai.utils.CollectionUtil;
//import lombok.extern.slf4j.Slf4j;
//import org.apache.commons.lang3.StringUtils;
//import org.springframework.beans.factory.annotation.Autowired;
//import org.springframework.kafka.core.KafkaTemplate;
//import org.springframework.stereotype.Component;
//import org.springframework.transaction.annotation.Transactional;
//
//import java.sql.Timestamp;
//import java.util.*;
//import java.util.concurrent.atomic.AtomicReference;
//import java.util.stream.Collectors;
//
///**
// * @author hu.bowei@ikasinfo.com mysql对应service，需要修改名字并替换使用方的类名
// * @date 2023-05-16- 11:12
// */
//@Component
//@Slf4j
//public class MysqlWorkInstanceService {
//    @Autowired
//    private RedisDataOperation initDataRedisListener;
//    @Autowired
//    private WorkFlowInstanceService workFlowInstanceService;
//    @Autowired
//    private WorkInstanceStepDetailService workInstanceStepDetailService;
//    @Autowired
//    private WorkInstanceStepService workInstanceStepService;
//    @Autowired
//    private WorkConditionFlowService workConditionFlowService;
//    @Autowired
//    private MachineInfoService machineInfoService;
//
//    @Autowired
//    KafkaTemplate<String, Object> kafkaTemplate;
//
//
//    @Transactional(rollbackFor = Exception.class)
//    public void processKafkaMeteData(KafkaMeteData kafkaMeteData) {
//        //1个测点出现在多个工况流程或者多个步骤中
//        List<DataMete> cacheDataMeteList = initDataRedisListener.getCacheList(kafkaMeteData.getMachineNo(), kafkaMeteData.getMeteCode());
//        log.info("meteCode:{},cacheDataMeteList:{}", kafkaMeteData.getMeteCode(), JSONObject.toJSON(cacheDataMeteList));
//        if (CollectionUtil.isEmpty(cacheDataMeteList)) {
//            log.info("meteCode:{},cacheDataMeteList: null,缓存中未获取到t_data_mete中数据", kafkaMeteData.getMeteCode());
//            return;
//        }
////        Map<String, List<DataMete>> cacheWfDataMeteMap = cacheDataMeteList.stream().collect(Collectors.groupingBy(DataMete::getWfName));
//        Map<String, DataMete> cacheWfDataMeteMap = cacheDataMeteList.stream().collect(Collectors.toMap(DataMete::getWfName, t -> t, (entity1, entity2) -> entity1));
//        log.info("meteCode:{},cacheWfDataMeteMap:{}", kafkaMeteData.getMeteCode(), JSONObject.toJSON(cacheWfDataMeteMap));
//        if (CollectionUtil.isEmpty(cacheWfDataMeteMap)) {
//            log.info("meteCode:{},cacheWfDataMeteMap: null,缓存中未获取到t_data_mete中数据", kafkaMeteData.getMeteCode());
//            return;
//        }
//        List<WorkFlowInstance> instanceList = workFlowInstanceService.getProcessingList(kafkaMeteData.getMachineNo());
//        cacheWfDataMeteMap.forEach((wfname, cacheMete) -> {
//            if (cacheMete.getType() == DataMeteTypeEnum.FLOW.getCode()) {
//                //同时存在两个不同工况流程情况 tb60和tb62
//                if (cacheMete.getValidMeteValue().equals(kafkaMeteData.getValue())) {
//                    if (instanceList.stream().filter(p -> p.getWfName().equalsIgnoreCase(wfname)).count() > 0L) {
//                        log.error("{}已存在运行中工况", wfname);
//                    } else {
//                        createFlow(kafkaMeteData, cacheMete);
//                    }
//                } else {
//                    instanceList.stream().filter(instance -> {
//                        return instance.getMeteCode().equalsIgnoreCase(cacheMete.getMeteCode()) && WorkFlowInstanceStateEnum.PROCESSING.getCode().equals(instance.getState());
//                    }).forEach(instance -> {
//                        instance.setState(WorkFlowInstanceStateEnum.SUCCESS.getCode());
//                        instance.setUpdateTime(new Date());
//                        instance.setEndTime(kafkaMeteData.getGenerateTime());
//                        instance.setEndDate(new Timestamp(instance.getEndTime()));
//                        instance.setCostTime(Math.abs(instance.getEndTime() - instance.getStartTime()));
//                        instance.setDesc(JSONUtil.toJsonStr(kafkaMeteData));
//                        workFlowInstanceService.updateById(instance);
//                        saveOrUpdateMachineInfo(instance, null);
//                    });
//                }
//            } else if (cacheMete.getType() == DataMeteTypeEnum.STEP.getCode()) {
//                if (!CollectionUtil.isEmpty(instanceList)) {
//                    instanceList.forEach(instance -> {
//                        processInstanceStep(kafkaMeteData, cacheMete, instance);
//                    });
//                }
//                if (cacheMete.getValidMeteValue().equals(kafkaMeteData.getValue())
//                        && instanceList.stream().filter(instance -> instance.getWfName().equalsIgnoreCase(wfname)).count() <= 0) {
//                    cacheMete.setValue(kafkaMeteData.getValue());
//                    cacheMete.setGenerateTime(kafkaMeteData.getGenerateTime());
//                    initDataRedisListener.cacheFlowStep(cacheMete);
//                }
//            } else {
//                if (cacheMete.getValidMeteValue().equals(kafkaMeteData.getValue())) {
//                    //保存配置为需要提前缓存在数据库中缓存中(创建流程给提前缓存的值赋值使用)
//                    if (WorkInstanceStepDetailEarlyCacheEnum.EARLY.getCode().equals(cacheMete.getEarlyCache())) {
//                        cacheMete.setValue(kafkaMeteData.getValue());
//                        cacheMete.setGenerateTime(kafkaMeteData.getGenerateTime());
//                        initDataRedisListener.catchEarlyMeteCode(cacheMete);
//                    }
//                    //保存流程启动令到缓存中(批次时间和启动时间使用)
//                    if (initDataRedisListener.existsWorkFlowStartMete(cacheMete.getMachineNo(), cacheMete.getWfName(), cacheMete.getMeteCode())) {
//                        cacheMete.setValue(kafkaMeteData.getValue());
//                        cacheMete.setGenerateTime(kafkaMeteData.getGenerateTime());
//                        initDataRedisListener.catchStartMeteCode(cacheMete);
//                    }
//                    if (!CollectionUtil.isEmpty(instanceList)) {
//                        //处理运行中工况流程数据
//                        if (instanceList.stream().filter(p -> p.getWfName().equalsIgnoreCase(wfname)).count() > 0) {
//                            processMeteDetail(kafkaMeteData, instanceList, cacheMete);
//                        }
//                    }
//                } else {
//                    log.info("忽略无效值测点======{}", kafkaMeteData);
//                }
//            }
//        });
//    }
//
//    private void processMeteDetail(KafkaMeteData kafkaMeteData, List<WorkFlowInstance> instanceList, DataMete cacheMete) {
//        //该运行中的工况是否包含该测点
//        for (WorkFlowInstance instance : instanceList) {
//            List<WorkInstanceStepDetail> instanceStepDetailList = workInstanceStepDetailService.getListByInstanceId(instance.getId());
//            if (CollectionUtil.isEmpty(instanceStepDetailList)) {
//                return;
//            }
//            //查询出该工况流程当前测点所有记录
//            List<WorkInstanceStepDetail> detailList = instanceStepDetailList.stream().
//                    filter(p -> cacheMete.getMeteCode().equalsIgnoreCase(p.getMeteCode())
//                            && WorkInstanceStepDetailStateEnum.NOT_STARTED.getCode().equals(p.getState())
//                            && kafkaMeteData.getValue().equalsIgnoreCase(p.getValidMeteValue().toString())
//                            && WorkInstanceStepDetailLabelTypeEnum.METE_CODE.getCode().equals(p.getLabelType())
//                    )
//                    .sorted(Comparator.comparing(WorkInstanceStepDetail::getSort)).collect(Collectors.toList());
//            //该测点在一个工况流程中出现多次
//            boolean flag = true;
//            for (WorkInstanceStepDetail detail : detailList) {
//                if (flag) {
//                    detail.setMeteValue(kafkaMeteData.getValue());
//                    detail.setStartTime(kafkaMeteData.getGenerateTime());
//                    detail.setStartDate(new Timestamp(detail.getStartTime()));
//                    detail.setState(WorkInstanceStepDetailStateEnum.COMPLETE.getCode());
//                    detail.setUpdateTime(new Date());
//                    setDetailCostTime(instanceStepDetailList, instance, detail);
//                    workInstanceStepDetailService.updateById(detail);
//                }
//                if (WorkInstanceStepDetailUpdatePolicyEnum.SORT_FIRST.getCode().equals(detail.getUpdatePolicy())) {
//                    log.info("结束当前工况流程测点操作");
//                    flag = false;
//                    break;
//                }
//                flag = true;
//            }
//            sendDataToKafkaWorkInstanceDetailCostTime(detailList);
//            //测点为工况流程稳态则修改稳态状态
////            if (instance.getEndMete().contains(kafkaMeteData.getMeteCode())) {
////                instance.setEndTime(kafkaMeteData.getGenerateTime());
////                instance.setEndDate(DateUtil.date(instance.getEndTime()));
////                instance.setCostTime(Math.abs(instance.getEndTime() - instance.getStartTime()));
////                instance.setState(WorkFlowInstanceStateEnum.STEADY.getCode());
////                workFlowInstanceService.updateById(instance);
////            }
//            log.info("工况详情测点执行完成!!!");
//        }
//    }
//
//    /**
//     * 推送消息至kafka以进行状态评价和劣化告警处理
//     */
//    private boolean sendDataToKafkaWorkInstanceDetailCostTime(List<WorkInstanceStepDetail> detailList) {
//        if (CollectionUtil.isEmpty(detailList)) {
//            return false;
//        }
//        detailList = detailList.stream()
//                .filter(detail -> detail.getCostTime() != null)
//                .filter(detail -> detail.getCostTime() > 0)
//                .collect(Collectors.toList());
//        kafkaTemplate.send(KafkaInitialConfiguration.WORK_INSTANCE_DETAIL_DELAY, JSON.toJSONString(detailList));
//        return true;
//    }
//
//
//    private void setDetailCostTime(List<WorkInstanceStepDetail> instanceStepDetailList, WorkFlowInstance instance, WorkInstanceStepDetail detail) {
//        Map<String, List<String>> pMeteCodeMap = detail.getPMeteCode();
//        if (CollectionUtil.isEmpty(pMeteCodeMap)) {
//            return;
//        }
//        List<WorkInstanceStep> stepList = workInstanceStepService.getListByInstanceId(instance.getId());
//        if (CollectionUtil.isEmpty(stepList)) {
//            return;
//        }
//        pMeteCodeMap.forEach((k, v) -> {
//            Optional<WorkInstanceStep> workInstanceStepOptional = stepList.stream().filter(s -> s.getStepName().equalsIgnoreCase(k)).findFirst();
//            if (!workInstanceStepOptional.isPresent()) {
//                return;
//            }
//            WorkInstanceStep workInstanceStep = workInstanceStepOptional.get();
//            List<WorkInstanceStepDetail> stepDetailList =
//                    instanceStepDetailList.stream().filter(i -> workInstanceStep.getId().equals(i.getStepInstanceId()))
//                            .filter(i -> v.contains(i.getMeteCode())).collect(Collectors.toList());
//            if (CollectionUtil.isEmpty(stepDetailList)) {
//                return;
//            }
//            stepDetailList.forEach(s -> {
//                if (WorkInstanceStepDetailStateEnum.COMPLETE.getCode().equals(s.getState()) && s.getStartTime() != null) {
//                    detail.setCostTime(Math.abs(detail.getStartTime() - s.getStartTime()));
//                }
//            });
//        });
//    }
//
//    /**
//     * 步骤有效值标识开始
//     * 步骤无效值标识结束
//     *
//     * @param kafkaMeteData
//     * @param mete
//     * @param instance
//     */
//    private void processInstanceStep(KafkaMeteData kafkaMeteData, DataMete mete, WorkFlowInstance instance) {
//        LambdaQueryWrapper<WorkInstanceStep> queryWrapper = new LambdaQueryWrapper<>();
//        queryWrapper.eq(WorkInstanceStep::getWfInstanceId, instance.getId());
//        queryWrapper.eq(WorkInstanceStep::getMeteCode, mete.getMeteCode());
//        WorkInstanceStep instanceStep = workInstanceStepService.getOne(queryWrapper);
//        if (null == instanceStep || WorkInstanceStepStateEnum.ERROR.getCode().equals(instanceStep.getState())) {
//            return;
//        }
//        //覆盖更新缓存中更新的值
//        if (instanceStep.getValidMeteValue().equalsIgnoreCase(kafkaMeteData.getValue())) {
//            instanceStep.setMeteValue(kafkaMeteData.getValue());
//            instanceStep.setStartTime(kafkaMeteData.getGenerateTime());
//            instanceStep.setStartDate(new Timestamp(instanceStep.getStartTime()));
//            instanceStep.setState(WorkInstanceStepStateEnum.PROCESSING.getCode());
//            instanceStep.setUpdateTime(new Date());
//            workInstanceStepService.updateById(instanceStep);
//        } else if (WorkInstanceStepStateEnum.PROCESSING.getCode().equals(instanceStep.getState())
//                && kafkaMeteData.getGenerateTime() >= instanceStep.getStartTime()) {
////            instanceStep.setMeteValue(kafkaMeteData.getValue());
//            instanceStep.setEndTime(kafkaMeteData.getGenerateTime());
//            instanceStep.setEndDate(new Timestamp(instanceStep.getEndTime()));
//            instanceStep.setState(WorkInstanceStepStateEnum.COMPLETE.getCode());
//            instanceStep.setDelay(kafkaMeteData.getGenerateTime() - instanceStep.getStartTime());
//            instanceStep.setUpdateTime(new Date());
//            boolean flag = workInstanceStepService.updateById(instanceStep);
//            if (flag) {
//                sendDataToKafkaForStepAvgCompute(instanceStep);
//            }
//        } else {
//            log.error("忽略步骤测点======{}", mete);
//        }
//        saveOrUpdateMachineInfo(instance, instanceStep);
//
////        //收到step0的无效值作为工况实例结束逻辑
////        if (!instanceStep.getValidMeteValue().equalsIgnoreCase(kafkaMeteData.getValue())
////                && instance.getState() == WorkFlowInstanceStateEnum.PROCESSING.getCode()
////                && WorkInstanceStepEnum.STEP0.getName().equalsIgnoreCase(mete.getStepName())) {
////            instance.setState(WorkFlowInstanceStateEnum.SUCCESS.getCode());
////            instance.setUpdateTime(new Date());
////            instance.setEndTime(kafkaMeteData.getGenerateTime());
////            instance.setEndDate(DateUtil.date(instance.getEndTime()));
////            instance.setCostTime(Math.abs(instance.getEndTime() - instance.getStartTime()));
////            instance.setDesc(JSONUtil.toJsonStr(kafkaMeteData));
////            workFlowInstanceService.updateById(instance);
////            saveOrUpdateMachineInfo(instance, null);
////        }
//
//
//    }
//
//    /**
//     * 将消息发往kafka以进行平均耗时计算
//     *
//     * @param instanceStep
//     */
//    private void sendDataToKafkaForStepAvgCompute(WorkInstanceStep instanceStep) {
//        if (instanceStep.getDelay() != null && instanceStep.getDelay() > 0) {
//            kafkaTemplate.send(KafkaInitialConfiguration.WORK_INSTANCE_STEP_DELAY, JSON.toJSONString(instanceStep));
//        }
//    }
//
//    /**
//     * @param kafkaMeteData
//     * @param cacheMete
//     */
//    public void createFlow(KafkaMeteData kafkaMeteData, DataMete cacheMete) {
//        //创建工况批次
//        WorkConditionFlowDTO flow = workConditionFlowService.findWorkConditionFlowConfiguration(cacheMete.getMachineNo(), cacheMete.getWfName());
//        List<WorkConditionStepDTO> stepList = flow.getSteps();
//        if (CollectionUtil.isEmpty(stepList)) {
//            log.error("未配置工况流程步骤");
//            return;
//        }
//
//        stepList.sort(Comparator.comparing(WorkConditionStepDTO::getSort));
//        WorkFlowInstance instance = convertInstance(kafkaMeteData, flow);
//        workFlowInstanceService.save(instance);
//        List<DataMete> catchStepList = initDataRedisListener.getCacheFlowStep(flow.getMachineNo(), flow.getWfName());
//
//        AtomicReference<WorkInstanceStep> currentStep = new AtomicReference<>();
//        Map<String, DataMete> map = new HashMap<>();
//        stepList.stream().forEach(conditionStep -> {
//            WorkInstanceStep step = convertInstanceStep(instance, catchStepList, currentStep, conditionStep);
//            workInstanceStepService.save(step);
//            List<WorkConditionStepDetailDTO> detailList = conditionStep.getDetails();
//            List<WorkInstanceStepDetail> instanceStepDetailList = convertInstanceStepDetailList(instance, map, step, detailList);
//            if (!CollectionUtil.isEmpty(instanceStepDetailList)) {
//                workInstanceStepDetailService.saveBatch(instanceStepDetailList);
//            }
//        });
//        //设置请求执行时长
//        List<WorkInstanceStepDetail> instanceStepDetailList = workInstanceStepDetailService.getListByInstanceId(instance.getId());
//        List<WorkInstanceStepDetail> detailList = instanceStepDetailList.stream()
//                .filter(p -> WorkInstanceStepDetailStateEnum.COMPLETE.getCode().equals(p.getState()))
//                .sorted(Comparator.comparing(WorkInstanceStepDetail::getSort)).collect(Collectors.toList());
//        for (WorkInstanceStepDetail detail : detailList) {
//            setDetailCostTime(instanceStepDetailList, instance, detail);
//            workInstanceStepDetailService.updateById(detail);
//        }
//
//        saveOrUpdateMachineInfo(instance, currentStep.get());
//        //清除数据early_cache的值
//        initDataRedisListener.removeEarlyMeteCode(cacheMete.getMachineNo());
//        //清除缓存工况流程启动令的值,config表中start_mete
//        initDataRedisListener.removeStartMeteCode(cacheMete.getMachineNo());
//        //提前缓存步骤测点值
//        initDataRedisListener.removeFlowStep(cacheMete.getMachineNo(), cacheMete.getWfName());
//        log.info("工况实例创建完成!!!");
//    }
//
//    private WorkFlowInstance convertInstance(KafkaMeteData kafkaMeteData, WorkConditionFlowDTO flow) {
//
//        WorkFlowInstance instance = new WorkFlowInstance();
//        instance.setStartTime(getStartTime(kafkaMeteData, flow));
//        instance.setCurrWc(flow.getCurrWc());
//        instance.setNextWc(flow.getNextWc());
//        instance.setStartDate(new Timestamp(instance.getStartTime()));
//        instance.setMachineId(flow.getMachineId());
//        instance.setMachineNo(flow.getMachineNo());
//        instance.setWfId(flow.getId());
//        instance.setWfName(flow.getWfName());
//        instance.setState(WorkFlowInstanceStateEnum.PROCESSING.getCode());
//        instance.setMeteCode(flow.getMeteCode());
//        instance.setMeteName(flow.getMeteName());
//        instance.setMeteValue(kafkaMeteData.getValue());
//        instance.setValidMeteValue(flow.getValidMeteValue());
//        instance.setStartMete(flow.getStartMete());
//        instance.setEndMete(flow.getEndMete());
//        instance.setMeteTime(kafkaMeteData.getGenerateTime());
//        instance.setUpdateTime(new Date());
//        instance.setCreateTime(new Date());
//        instance.setCreator(1L);
//
//
//        //设置批次号和工况流程与工况关联关系
//        setBatch(kafkaMeteData, flow, instance);
//        return instance;
//    }
//
//
//    private Long getStartTime(KafkaMeteData kafkaMeteData, WorkConditionFlowDTO flow) {
//        List<DataMete> cacheDataMeteList = initDataRedisListener.getCatchStartMeteCode(kafkaMeteData.getMachineNo()).stream()
//                .filter(p -> p.getWfName().equalsIgnoreCase(flow.getWfName())).collect(Collectors.toList());
//        if (!CollectionUtil.isEmpty(cacheDataMeteList)) {
//            cacheDataMeteList.sort(Comparator.comparing(DataMete::getGenerateTime));
//            DataMete dataMete = cacheDataMeteList.get(0);
//            log.info("GET-CATCH work-instance setStartTime:{}", dataMete);
//            return dataMete.getGenerateTime();
//        }
//        return kafkaMeteData.getGenerateTime();
//    }
//
//    /**
//     * 1、先查找缓存 启动令时间
//     * 2、查找关联的前一个工况流程的启动时间
//     * 3、工况流程测点时间
//     *
//     * @param kafkaMeteData
//     * @param flow
//     * @return
//     */
//    private void setBatch(KafkaMeteData kafkaMeteData, WorkConditionFlowDTO flow, WorkFlowInstance instance) {
////        List<DataMete> cacheDataMeteList = initDataRedisListener.getCatchStartMeteCode(kafkaMeteData.getMachineNo())
////                .stream().filter(p -> p.getWfName().equalsIgnoreCase(flow.getWfName())).collect(Collectors.toList());
////        if (!CollectionUtil.isEmpty(cacheDataMeteList)) {
////            cacheDataMeteList.sort(Comparator.comparing(DataMete::getGenerateTime));
////            DataMete dataMete = cacheDataMeteList.get(0);
////            instance.setBatch(DateUtil.date(dataMete.getGenerateTime()).toString());
////            log.info("GET-CATCH work-instance setBatch:{},instance:{}", dataMete, instance);
////            return;
////        }
////        LambdaQueryWrapper<WorkFlowInstance> wrapper = new LambdaQueryWrapper<>();
////        wrapper.eq(WorkFlowInstance::getMachineNo, kafkaMeteData.getMachineNo()).eq(WorkFlowInstance::getNextWc, flow.getCurrWc()).orderByDesc(WorkFlowInstance::getId);
////        wrapper.last(" limit 1");
////        WorkFlowInstance preInstance = workFlowInstanceService.getOne(wrapper);
////        if (null != preInstance) {
////            instance.setBatch(preInstance.getBatch());
////            if (null != preInstance.getWorkId()) {
////                instance.setWorkId(preInstance.getWorkId());
////            } else {
////                instance.setWorkId(preInstance.getId());
////            }
////            log.info("GET-PRE-INSTANCE work-instance setBatch:{}", instance);
////            return;
////        }
//        instance.setBatch(DateUtil.date(kafkaMeteData.getGenerateTime()).toString(DatePattern.NORM_DATETIME_PATTERN));
//    }
//
//
//    private WorkInstanceStep convertInstanceStep(WorkFlowInstance instance, List<DataMete> catchStepList, AtomicReference<WorkInstanceStep> currentStep, WorkConditionStepDTO conditionStep) {
//        WorkInstanceStep step = new WorkInstanceStep();
//        //todo id生成
//        step.setWfInstanceId(instance.getId());
//        step.setWfId(conditionStep.getWfId());
//        step.setWfName(conditionStep.getWfName());
//        step.setStepId(conditionStep.getId());
//        step.setStepName(conditionStep.getStepName());
//        step.setMeteCode(conditionStep.getMeteCode());
//        step.setMeteName(conditionStep.getMeteName());
//        step.setType(conditionStep.getType());
//        step.setDefaultDelay(conditionStep.getDefaultDelay());
//        step.setSort(conditionStep.getSort());
//        step.setValidMeteValue(conditionStep.getValidMeteValue());
//        step.setUpdateTime(new Date());
//        step.setCreator(1L);
//        Optional<DataMete> stepOptional = catchStepList.stream().filter(x -> x.getMeteCode().equalsIgnoreCase(conditionStep.getMeteCode())).findFirst();
//        if (stepOptional.isPresent()) {
//            DataMete cache = stepOptional.get();
//            if (Math.abs((instance.getMeteTime() - cache.getGenerateTime())) / 1000 / 60 <= 5) {
//                step.setMeteValue(cache.getValue());
//                step.setStartTime(cache.getGenerateTime());
//                step.setState(WorkInstanceStepStateEnum.PROCESSING.getCode());
//                step.setStartDate(new Timestamp(step.getStartTime()));
//                currentStep.set(step);
//            }
//        }
//        return step;
//    }
//
//    private List<WorkInstanceStepDetail> convertInstanceStepDetailList(WorkFlowInstance instance, Map<String, DataMete> map, WorkInstanceStep
//            step, List<WorkConditionStepDetailDTO> detailList) {
//        List<DataMete> catchStep0MeteCodeList = initDataRedisListener.getCacheEarlyMeteCode(instance.getMachineNo()).stream()
//                .filter(p -> p.getWfName().equalsIgnoreCase(step.getWfName())).collect(Collectors.toList());
//        List<WorkInstanceStepDetail> instanceStepDetailList = detailList.stream().map(d -> {
//            WorkInstanceStepDetail detail = new WorkInstanceStepDetail();
//            detail.setState(WorkInstanceStepDetailStateEnum.NOT_STARTED.getCode());
//            if (map.get(d.getMeteCode()) != null) {
//                if (WorkInstanceStepDetailUpdatePolicyEnum.ALL.getCode().equals(d.getUpdatePolicy())) {
//                    detail.setMeteValue(String.valueOf(map.get(d.getMeteCode()).getValue()));
//                    detail.setStartTime(map.get(d.getMeteCode()).getGenerateTime());
//                    detail.setState(WorkInstanceStepDetailStateEnum.COMPLETE.getCode());
//                    detail.setStartDate(new Timestamp(detail.getStartTime()));
//                }
//            } else {
//                Optional<DataMete> optional = catchStep0MeteCodeList.stream().filter(x -> x.getMeteCode().equalsIgnoreCase(d.getMeteCode())).findFirst();
//                if (optional.isPresent()) {
//                    DataMete cache = optional.get();
//                    //缓存数据和工况实例数据相差不大于于5分钟
//                    if (Math.abs((instance.getMeteTime() - cache.getGenerateTime())) / 1000 / 60 <= 5) {
//                        detail.setMeteValue(cache.getValue());
//                        detail.setStartTime(cache.getGenerateTime());
//                        detail.setState(WorkInstanceStepDetailStateEnum.COMPLETE.getCode());
//                        detail.setStartDate(new Timestamp(detail.getStartTime()));
//                        map.put(cache.getMeteCode(), cache);
//                    }
//
//                }
//            }
//            detail.setWfId(step.getWfId());
//            detail.setDetailId(d.getId());
//            detail.setWfInstanceId(step.getWfInstanceId());
//            detail.setWfName(step.getWfName());
//            detail.setStepInstanceId(step.getId());
//            detail.setStepName(step.getStepName());
//            detail.setMeteCode(d.getMeteCode());
//            detail.setMeteName(d.getMeteName());
//            detail.setSubDetailIds(d.getSubDetailIds());
//            detail.setSubLogicalOperator(d.getSubLogicalOperator());
//            detail.setPDetailId(d.getPDetailId());
//            detail.setLeaf(d.getLeaf());
//            detail.setType(d.getType());
//            detail.setPMeteCode(d.getPMeteCode());
//            detail.setLabel(d.getLabel());
//            detail.setLabelType(d.getLabelType());
//            detail.setUpdatePolicy(d.getUpdatePolicy());
//            detail.setGroupNo(d.getGroupNo());
//            detail.setGroupStepId(d.getGroupStepId());
//            detail.setGroupState(WorkInstanceStepDetailGroupStateEnum.NOT_STARTED.getCode());
//            detail.setSort(d.getSort());
//            detail.setEarlyCache(d.getEarlyCache());
//            detail.setValidMeteValue(d.getValidMeteValue());
//            detail.setUpdateTime(new Date());
//            return detail;
//        }).collect(Collectors.toList());
//        return instanceStepDetailList;
//    }
//
//    public synchronized void saveOrUpdateMachineInfo(WorkFlowInstance instance, WorkInstanceStep currentStep) {
//        //修改机组实时数据表
//        MachineInfo machineInfo = machineInfoService.getMachineInfo(instance.getMachineNo());
//        machineInfo = machineInfo == null ? new MachineInfo() : machineInfo;
//        machineInfo.setMachineNo(instance.getMachineNo());
//        machineInfo.setCurrWf(instance.getWfName());
//        machineInfo.setCurrStep(null != currentStep ? currentStep.getStepName() : StringUtils.isBlank(machineInfo.getCurrStep()) ? WorkInstanceStepEnum.STEP0.getName() : machineInfo.getCurrStep());
//        if (WorkFlowInstanceStateEnum.SUCCESS.getCode().equals(instance.getState())) {
//            machineInfo.setCurrWc(instance.getNextWc());
//        } else if (WorkFlowInstanceStateEnum.ERROR.getCode().equals(instance.getState())) {
//            machineInfo.setCurrWc(MachineAndWorkConditionConst.CONDITION_S);
//        } else {
//            machineInfo.setCurrWc(instance.getCurrWc());
//        }
//        machineInfo.setStartTime(instance.getStartTime());
//        machineInfo.setUpdateTime(new Date());
//        machineInfoService.saveOrUpdate(machineInfo);
//    }
//
//
//}
