package com.ikas.ai.server.module.condition.service;

import cn.hutool.core.date.DateTime;
import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSON;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.conditions.query.LambdaQueryChainWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.ikas.ai.consts.TableDictConsts;
import com.ikas.ai.enums.redis.RedisKeyEnum;
import com.ikas.ai.server.module.condition.dao.WorkFlowInstanceResultDao;
import com.ikas.ai.server.module.condition.model.WorkFlowInstanceResult;
import com.ikas.ai.server.module.data.model.BaseMete;
import com.ikas.ai.server.module.data.model.tdengine.RealDataHis;
import com.ikas.ai.server.module.data.tdDao.DataHisTdMapper;
import com.ikas.ai.server.module.instance.dorisDao.DorisWorkFlowInstanceDao;
import com.ikas.ai.server.module.instance.enums.WorkFlowInstanceStateEnum;
import com.ikas.ai.server.module.instance.model.DorisWorkFlowInstance;
import com.ikas.ai.server.module.instance.service.DorisWorkFlowInstanceService;
import com.ikas.ai.server.module.morningpaper.dao.MorningPaperConstsDao;
import com.ikas.ai.server.module.morningpaper.model.po.MorningPaperConsts;
import com.ikas.ai.utils.RedisUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.math.BigDecimal;
import java.math.RoundingMode;
import java.time.Duration;
import java.time.Instant;
import java.util.*;
import java.util.stream.Collectors;

/**
 * @Description: Working-condition flow service — syncs daily condition statistics into the
 *               result table, seeds the result table from flow instances, and auto-closes
 *               flow instances that have been running too long.
 * @Author: qi.yilai
 * @Date: 2023/4/13 10:04
 * @Version: 1.0.0
 **/
@Service
@Slf4j
public class WorkFlowInstanceResultService extends ServiceImpl<WorkFlowInstanceResultDao, WorkFlowInstanceResult> {

    @Autowired
    private DorisWorkFlowInstanceDao dorisWorkFlowInstanceDao;
    @Autowired
    private DorisWorkFlowInstanceService dorisWorkFlowInstanceService;

    @Autowired
    private RedisUtil redisUtil;

    @Autowired
    private DataHisTdMapper dataHisTdMapper;

    @Autowired
    private MorningPaperConstsDao baseMeteConstsDao;

    /** Milliseconds per hour; divisor when converting accumulated millis into hours. */
    private static final BigDecimal DIVIDE_FACTOR = new BigDecimal(1000 * 60 * 60);

    /**
     * Max age (ms) a PROCESSING flow instance may reach before it is force-completed.
     * Defaults to 12 hours (43_200_000 ms).
     */
    @Value("${workFlowInstance.schedule.state.time:43200000}")
    private Long millSeconds;

    /**
     * Scheduled job: computes yesterday's working-condition statistics for every
     * type-0 {@link MorningPaperConsts} row and upserts them into the result table.
     * Rows that already exist for yesterday's date are kept as-is so the batch
     * upsert does not create duplicates.
     */
    @Transactional(rollbackFor = Exception.class)
    public void syncWorkFlowInstanceResult() {
        // Hoisted: the same statistical date is used both for computing new rows
        // and for looking up existing ones (the original recomputed it twice).
        Date statisticalDate = getStartDateTime().toSqlDate();

        List<MorningPaperConsts> conditionConsts =
                new LambdaQueryChainWrapper<>(baseMeteConstsDao)
                        .eq(MorningPaperConsts::getType, 0)
                        .list();

        // Freshly computed results, keyed by the consts row itself (identity keys — no collisions).
        Map<MorningPaperConsts, WorkFlowInstanceResult> resultByConsts = conditionConsts.stream().collect(
                Collectors.toMap(
                        consts -> consts,
                        consts -> {
                            Integer validMeteValue = getValidMeteValue(consts.getMeteId());
                            List<RealDataHis> meteHis = getMeteHis(consts.getMeteId());
                            return fillWorkInstanceResult(
                                    consts.getMachineNo(),
                                    consts.getWcName(),
                                    consts.getMeteCode(),
                                    statisticalDate,
                                    getConditionCount(validMeteValue, meteHis),
                                    getConditionHours(validMeteValue, meteHis)
                            );
                        }
                )
        );

        // meteCode -> consts lookup. FIX: the original toMap had no merge function, so a
        // duplicate meteCode crashed the whole job with IllegalStateException; keep the first.
        Map<String, MorningPaperConsts> constsByMeteCode = conditionConsts.stream()
                .collect(Collectors.toMap(MorningPaperConsts::getMeteCode, f -> f, (first, second) -> first));

        // Existing rows for the date replace the freshly computed entries, so the upsert
        // leaves them untouched instead of inserting duplicates.
        // NOTE(review): this keeps the OLD stored numbers for rows that already exist rather
        // than refreshing them with the recomputed values — confirm that is the intent.
        resultByConsts.putAll(
                list(
                        new LambdaQueryWrapper<WorkFlowInstanceResult>()
                                .eq(WorkFlowInstanceResult::getStatisticalDate, statisticalDate)
                ).stream().collect(
                        Collectors.toMap(
                                existing -> constsByMeteCode.get(existing.getMeteCode()),
                                existing -> existing,
                                // FIX: merge function so duplicate existing rows no longer throw.
                                (first, second) -> second
                        )
                )
        );
        saveOrUpdateBatch(resultByConsts.values());
    }

    /**
     * Builds a new (unsaved) result row with audit fields stamped to "now" and a fixed
     * system operator id (1).
     *
     * @param machineNo  machine number the condition belongs to
     * @param wcName     working-condition name
     * @param meteCode   mete (measurement point) code
     * @param date       statistical date the row covers
     * @param successSum number of readings that matched the valid mete value
     * @param ranHours   accumulated hours spent in the valid state
     * @return populated, not-yet-persisted {@link WorkFlowInstanceResult}
     */
    public WorkFlowInstanceResult fillWorkInstanceResult(String machineNo, String wcName, String meteCode, Date date, Integer successSum, Double ranHours) {
        WorkFlowInstanceResult workConditionResult = new WorkFlowInstanceResult();
        workConditionResult.setMachineNo(machineNo);
        workConditionResult.setWcName(wcName);
        workConditionResult.setMeteCode(meteCode);
        workConditionResult.setStatisticalDate(date);
        workConditionResult.setSuccessNum(successSum);
        workConditionResult.setRanHours(ranHours);
        workConditionResult.setCreator(1L);
        workConditionResult.setUpdater(1L);
        // Single timestamp so create/update times are identical on insert.
        Date now = new Date();
        workConditionResult.setCreateTime(now);
        workConditionResult.setUpdateTime(now);
        return workConditionResult;
    }

    /** @return the last instant of yesterday (23:59:59.999). */
    private DateTime getEndDateTime() {
        // Idiom: hutool offsetDay replaces the 5-line Calendar dance.
        return DateUtil.endOfDay(DateUtil.offsetDay(new Date(), -1));
    }

    /** @return the first instant of yesterday (00:00:00.000). */
    private DateTime getStartDateTime() {
        return DateUtil.beginOfDay(DateUtil.offsetDay(new Date(), -1));
    }

    /**
     * Reads the mete entity from the Redis cache and returns its valid value.
     * Fails fast with a diagnostic message on a cache miss (the original threw a
     * bare NPE with no context).
     */
    private Integer getValidMeteValue(Long meteId) {
        BaseMete baseMete = redisUtil.getBean(RedisKeyEnum.BASE_METE_ENTITY_YX.key(meteId), BaseMete.class);
        Objects.requireNonNull(baseMete, () -> "BaseMete not found in redis cache for meteId=" + meteId);
        return baseMete.getValidMeteValue();
    }

    /**
     * Loads yesterday's history readings for the mete from TDengine, ordered by time
     * ascending. Best-effort: a missing per-mete table (or any query failure) yields an
     * empty list rather than failing the sync job.
     */
    public List<RealDataHis> getMeteHis(Long meteId) {
        try {
            return dataHisTdMapper.selectByTableNameTimeAsc(
                    TableDictConsts.DATA_HIS_PREFIX_YX + meteId, getStartDateTime(), getEndDateTime());
        } catch (Exception e) {
            // FIX: parameterized logging instead of eager String.format, and the exception
            // is attached so the cause is no longer silently discarded.
            log.info("meteId：{}在TdEngine中没有找到对应的表或者表内没有数据", meteId, e);
            return new ArrayList<>();
        }
    }

    /**
     * Sums the hours the mete spent in the valid state, rounded half-up to 2 decimals.
     * An interval starts at the FIRST reading equal to {@code validMeteValue} and ends at
     * the next reading with a different value.
     *
     * @param validMeteValue the value that marks the "running" state
     * @param meteHis        readings ordered by time ascending
     */
    private Double getConditionHours(Integer validMeteValue, List<RealDataHis> meteHis) {
        long sumMillis = 0L;
        Instant intervalStart = null;
        for (RealDataHis data : meteHis) {
            if (data.getIdTime() == null) {
                // Defensive: a row without a timestamp cannot bound an interval. The original
                // null-checked only the closing branch and would NPE on a valid-valued row.
                continue;
            }
            if (validMeteValue.equals(Integer.valueOf(data.getValue()))) {
                // FIX: only record the FIRST valid reading of a run. The original reset the
                // start on every consecutive valid reading and therefore undercounted —
                // measuring only from the last valid sample to the state change.
                if (intervalStart == null) {
                    intervalStart = data.getIdTime().toInstant();
                }
            } else if (intervalStart != null) {
                sumMillis += Duration.between(intervalStart, data.getIdTime().toInstant()).toMillis();
                intervalStart = null;
            }
        }
        // NOTE(review): an interval still open at the end of the day is not counted —
        // same as the original; confirm whether it should be closed at end-of-day.
        // FIX: RoundingMode.HALF_UP replaces the deprecated int constant BigDecimal.ROUND_HALF_UP.
        return new BigDecimal(sumMillis).divide(DIVIDE_FACTOR, 2, RoundingMode.HALF_UP).doubleValue();
    }

    /**
     * Counts the readings whose value equals {@code validMeteValue}.
     * {@link Math#toIntExact} replaces the silent {@code Long.intValue()} truncation.
     */
    protected Integer getConditionCount(Integer validMeteValue, List<RealDataHis> meteHis) {
        return Math.toIntExact(
                meteHis.stream()
                        .filter(mete -> validMeteValue.equals(Integer.valueOf(mete.getValue())))
                        .count());
    }

    /**
     * 驾驶舱首页统计工况启动失败表
     * One-shot initializer: seeds the result table with one row per Doris flow instance
     * (machine number + next working condition only; statistics are filled by the sync job).
     */
    public void initWorkFlowInstanceResult() {
        List<DorisWorkFlowInstance> instances =
                new LambdaQueryChainWrapper<>(dorisWorkFlowInstanceDao).list();
        Date now = new Date();
        List<WorkFlowInstanceResult> workConditionResultList = instances.stream().map(workFlowInstance -> {
            WorkFlowInstanceResult workConditionResult = new WorkFlowInstanceResult();
            workConditionResult.setMachineNo(workFlowInstance.getMachineNo());
            workConditionResult.setWcName(workFlowInstance.getNextWc());
            workConditionResult.setCreator(1L);
            workConditionResult.setUpdater(1L);
            workConditionResult.setCreateTime(now);
            workConditionResult.setUpdateTime(now);
            return workConditionResult;
        }).collect(Collectors.toList());
        this.saveBatch(workConditionResultList);
    }

    /**
     * Force-completes PROCESSING flow instances older than {@link #millSeconds}:
     * state becomes SUCCESS, end time/date are stamped, and a description notes the
     * automatic closure.
     */
    public void updateWorkFlowInstance() {
        List<DorisWorkFlowInstance> processing =
                new LambdaQueryChainWrapper<>(dorisWorkFlowInstanceDao)
                        .eq(DorisWorkFlowInstance::getState, WorkFlowInstanceStateEnum.PROCESSING.getCode())
                        .list();
        // Hoisted: one "now" for the whole batch so every closed instance gets a
        // consistent timestamp (the original called DateTime.now() per field, per row).
        DateTime now = DateTime.now();
        List<DorisWorkFlowInstance> staleInstances = new ArrayList<>();
        for (DorisWorkFlowInstance instance : processing) {
            if (now.getTime() - instance.getStartTime() > millSeconds) {
                instance.setState(WorkFlowInstanceStateEnum.SUCCESS.getCode());
                instance.setDesc(String.format("超过%s毫秒，未结束，自动修改为状态为已完成", millSeconds));
                instance.setUpdateTime(new Date(now.getTime()));
                instance.setEndTime(now.getTime());
                instance.setEndDate(now.toTimestamp());
                staleInstances.add(instance);
            }
        }
        log.info("需要将状态从0改为2的工况流程实例集合:{}", JSON.toJSONString(staleInstances));
        // FIX: skip the batch update when nothing is stale (some MyBatis-Plus versions
        // reject an empty batch).
        if (!staleInstances.isEmpty()) {
            dorisWorkFlowInstanceService.updateBatchById(staleInstances);
        }
    }
}
