package cn.piesat.scanning.business.common.algorithm.processAlgorithm;

import com.alibaba.fastjson.JSON;
import cn.piesat.scanning.business.dq1043.flow.FlowMessageParam;
import cn.piesat.scanning.dto.*;
import cn.piesat.scanning.service.*;
import cn.piesat.scanning.utils.RedisUtil;
import cn.piesat.scanning.utils.TaskRecordUtils;
import cn.piesat.scanning.vo.flow.FlowVO;
import cn.piesat.scanning.vo.flow.LineVO;
import cn.piesat.scanning.vo.flow.NodeVO;
import cn.piesat.scanning.vo.flow.RecordVO;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.transaction.annotation.Transactional;
import redis.clients.jedis.Jedis;

import java.util.*;
import java.util.stream.Collectors;

/**
 * @author xhl
 */
/**
 * Abstract base class for flow-driven scanning tasks.
 *
 * <p>Subclasses implement the data lookup / assembly hooks
 * ({@link #parseRuleAndFindData}); this class owns the shared orchestration:
 * resolving the first-level algorithm parameters from the flow template,
 * redoing a single file record, and publishing execution-state and
 * execution messages to Kafka while mirroring per-record state into Redis.
 *
 * @author xhl
 */
public abstract class FlowTaskAbstract {

    @Autowired
    private HtScheduleJobService htScheduleJobService;
    @Autowired
    private DbDmsAlgorithmFlowManageService dbDmsAlgorithmFlowManageService;
    @Autowired
    private FlowAlgorithmParamsService flowAlgorithmParamsService;
    @Autowired
    private DbDmsSchedulerTaskFileRecordService dbDmsSchedulerTaskFileRecordService;
    @Autowired
    private HtAlgorithmConfigurationService htAlgorithmConfigurationService;
    @Autowired
    private DbDmsSchedulerTaskRecordLogService dbDmsSchedulerTaskRecordLogService;
    @Autowired
    private DbDmsSchedulerTaskFileRecordFlowService dbDmsSchedulerTaskFileRecordFlowService;
    @Autowired
    private KafkaTemplate<String,String> kafkaTemplate;

    /**
     * Identifier of the flow template's start node.
     */
    private static final String START_NODE_ID = "nodeA";

    /** Time-to-live for per-record Redis keys: 48 hours, in seconds. */
    private static final int RECORD_KEY_TTL_SECONDS = 3600 * 48;

    /** State code on a flow-node child record that marks a failed execution. */
    private static final int CHILD_STATE_FAILED = 3;

    /** Kafka topic for task execution-state messages. */
    @Value("${spring.kafka.producer.dms_task_exec_state.topic}")
    private String taskExecState;

    /** Kafka topic for algorithm execution messages. */
    @Value("${spring.kafka.producer.dmstaskexec.topic}")
    private String execTopic;

    /**
     * Looks up the source data, parses the parameters and assembles the data.
     *
     * @param algorithmParams parameter info for all algorithms on the same flow level
     * @param taskDTO         the task being executed
     */
    public abstract void parseRuleAndFindData(List<FlowAlgorithmParam> algorithmParams, HtScheduleJobDTO taskDTO);

    /**
     * Parses the parameters and assembles the data for an explicitly supplied
     * group of data items.
     *
     * @param algorithmParams parameter info for all algorithms on the same flow level
     * @param taskDTO         the task being executed
     * @param data            caller-supplied data identifiers (e.g. view numbers to redo)
     */
    public abstract void parseRuleAndFindData(List<FlowAlgorithmParam> algorithmParams, HtScheduleJobDTO taskDTO, List<String> data);

    /**
     * Invoked from the scan-engine callback when an algorithm reports a redo
     * state; re-executes the given main file for the current algorithm/flow.
     *
     * @param algorithmId id of the algorithm requesting the redo
     * @param taskDTO     the task being executed
     * @param metadataDTO metadata of the main file to re-execute
     */
    public abstract void singleFileRedo(String algorithmId, HtScheduleJobDTO taskDTO, HtScanningMetadataDTO metadataDTO);

    /**
     * Entry point: loads the task, resolves the algorithms directly reachable
     * from the flow's start node, and hands them to the subclass hook.
     *
     * @param taskId task id
     */
    public void process(String taskId) {
        HtScheduleJobDTO taskDTO = htScheduleJobService.findById(taskId);
        List<FlowAlgorithmParam> sameLevelAlgorithmParamList = findStartAlgorithmParameter(taskDTO);
        parseRuleAndFindData(sameLevelAlgorithmParamList, taskDTO);
    }

    /**
     * Parses the flow template and collects the parameter info of every
     * algorithm directly connected to the start node.
     *
     * @param taskDTO the task whose flow template is inspected
     * @return one {@link FlowAlgorithmParam} per first-level algorithm; empty
     *         list when the flow or its content is missing
     */
    private List<FlowAlgorithmParam> findStartAlgorithmParameter(HtScheduleJobDTO taskDTO) {
        String flowId = taskDTO.getFlowId();
        DbDmsAlgorithmFlowManageDTO flowManage = dbDmsAlgorithmFlowManageService.getById(flowId);
        if (flowManage == null || StringUtils.isBlank(flowManage.getFlowContent())) {
            return new ArrayList<>();
        }
        FlowVO flowVO = JSON.parseObject(flowManage.getFlowContent(), FlowVO.class);
        List<LineVO> lineList = flowVO.getLineList();
        List<NodeVO> nodeList = flowVO.getNodeList();
        // Graph-node id -> nodeId. NOTE(review): nodeId appears to hold the
        // algorithm id (it is used as algorithmId below) — confirm with the
        // flow template schema. Duplicate keys keep the last value.
        Map<String, String> nodeHash = nodeList.stream()
                .filter(nodeVO -> StringUtils.isNotBlank(nodeVO.getNodeId()))
                .collect(Collectors.toMap(NodeVO::getId, NodeVO::getNodeId, (k1, k2) -> k2));
        // One parameter bundle per edge leaving the start node.
        return lineList.stream()
                .filter(line -> START_NODE_ID.equals(line.getFrom()))
                .map(line -> {
                    FlowAlgorithmParam param = new FlowAlgorithmParam();
                    String algorithmId = nodeHash.get(line.getTo());
                    param.setAlgorithmId(algorithmId);
                    Map<String, Object> inputParam = flowAlgorithmParamsService.findDynamicParams(flowId, algorithmId);
                    param.setFlowInputParam(inputParam);
                    Map<String, Object> ruleParam = flowAlgorithmParamsService.findFixedParams(flowId, algorithmId);
                    param.setFlowRuleParam(ruleParam);
                    return param;
                }).collect(Collectors.toList());
    }

    /**
     * Redo preparation for a single flow record: resets the record, restores
     * the task's failure counter, wipes the old log/flow/Redis state, then
     * re-executes the record's file starting from the flow's first level.
     *
     * @param recordId id of the file record to redo
     */
    @Transactional(rollbackFor = Exception.class)
    public void redo(String recordId) {
        DbDmsSchedulerTaskFileRecordDTO record = dbDmsSchedulerTaskFileRecordService.findById(recordId);
        if (record == null || record.getState() < 1) {
            // A missing record, or one still pending/running, cannot be redone.
            return;
        }
        // Reset the record back to the "waiting" state.
        record.setState(0);
        record.setStartTime(null);
        record.setEndTime(null);
        record.setCreateTime(new Date());
        dbDmsSchedulerTaskFileRecordService.save(record);
        // Walk every node of the recorded flow, count the failed children and
        // give that many executions back to the task's failure counter.
        DbDmsSchedulerTaskFileRecordFlowDTO recordFlowDTO = dbDmsSchedulerTaskFileRecordFlowService.findByRid(record.getId());
        if (recordFlowDTO != null) {
            int failCount = countFailedChildren(recordFlowDTO.getFlowInfo());
            String hkey = TaskRecordUtils.getHkey(record.getTaskId());
            TaskRecordUtils.increaseFailCount(hkey, failCount);
        }
        // Drop the old log, flow and Redis state for this record.
        dbDmsSchedulerTaskRecordLogService.deleteByRid(record.getId());
        dbDmsSchedulerTaskFileRecordFlowService.deleteByRid(record.getId());
        clearRedisFlowKeys(record);
        // Resolve the first-level algorithms and re-run just this record's file.
        // BUG FIX: the task was previously looked up with record.getId(); the
        // task id lives in record.getTaskId() (as used for the Redis keys above).
        HtScheduleJobDTO taskDTO = htScheduleJobService.findById(record.getTaskId());
        List<FlowAlgorithmParam> sameLevelAlgorithmParamList = findStartAlgorithmParameter(taskDTO);
        List<String> viewNumberList = new ArrayList<>();
        viewNumberList.add(record.getFileName());
        parseRuleAndFindData(sameLevelAlgorithmParamList, taskDTO, viewNumberList);
    }

    /**
     * Counts the child records in a serialized flow whose state marks a
     * failed execution.
     *
     * @param flowInfo JSON-serialized {@link FlowVO}
     * @return number of failed children across all nodes
     */
    private int countFailedChildren(String flowInfo) {
        int failCount = 0;
        FlowVO flowVO = JSON.parseObject(flowInfo, FlowVO.class);
        for (NodeVO node : flowVO.getNodeList()) {
            List<RecordVO> childrenList = node.getChildren();
            if (childrenList == null) {
                continue;
            }
            for (RecordVO recordVO : childrenList) {
                if (recordVO.getState() == CHILD_STATE_FAILED) {
                    failCount++;
                }
            }
        }
        return failCount;
    }

    /**
     * Deletes every Redis key tied to the given record's flow execution
     * ({@code flow:*}, {@code recordCount:*}, {@code node:*}).
     *
     * @param record the record whose Redis state is cleared
     */
    private void clearRedisFlowKeys(DbDmsSchedulerTaskFileRecordDTO record) {
        Jedis jedis = RedisUtil.getJedis();
        try {
            if (jedis != null) {
                String viewNumber = record.getFileName();
                jedis.del(String.format("flow:%s:%s", record.getTaskId(), viewNumber));
                // NOTE(review): KEYS blocks Redis on large keyspaces — consider SCAN.
                String recordCountKeys = String.format("recordCount:%s:%s:*", record.getTaskId(), viewNumber);
                for (String key : jedis.keys(recordCountKeys)) {
                    jedis.del(key);
                }
                jedis.del(String.format("node:%s:%s", record.getTaskId(), viewNumber));
            }
        } finally {
            RedisUtil.close(jedis);
        }
    }

    /**
     * Updates the file record, mirrors the per-algorithm execution state into
     * Redis, and publishes the state / execution messages to Kafka.
     *
     * @param taskPlanId  plan id used when a new record must be created
     * @param fileName    the DB column is FileName but it really stores the batch
     *                    rule key, e.g. the view number for project 1043
     * @param taskDTO     the task being executed
     * @param execParam   message parameters, one per (sub-)execution
     * @param algorithmId id of the algorithm these executions belong to
     */
    public void updateRecordAndSendMessage(String taskPlanId,String fileName, HtScheduleJobDTO taskDTO, List<FlowMessageParam> execParam,
                                           String algorithmId) {
        DbDmsSchedulerTaskFileRecordDTO record = dbDmsSchedulerTaskFileRecordService.findByTaskIdAndFileName(taskDTO.getId(), fileName);
        // Only status == 0 entries will actually be dispatched for execution.
        List<FlowMessageParam> canExecParam = execParam.stream()
                .filter(param -> param.getStatus() == 0)
                .collect(Collectors.toList());
        if (record == null) {
            record = dbDmsSchedulerTaskFileRecordService.prepareDataByFileName(taskPlanId, taskDTO.getId(), fileName, canExecParam.size());
        } else {
            // An existing record means a same-level algorithm re-entered this
            // method: bump the pending-execution counter instead.
            TaskRecordUtils.increaseWaitCount(TaskRecordUtils.getHkey(taskDTO.getId()), canExecParam.size());
        }
        cacheExecStateInRedis(record, fileName, taskDTO, execParam, algorithmId);
        HtAlgorithmConfigurationDTO algorithmDTO = htAlgorithmConfigurationService.findById(algorithmId);
        // BUG FIX: this loop was guarded by "if (jedis != null)" on a connection
        // that had already been closed and that the loop never uses — it made
        // Kafka publishing silently depend on Redis availability.
        for (FlowMessageParam flowMessageParam : execParam) {
            Integer status = flowMessageParam.getStatus();
            Map<String, Object> messageMap = flowMessageParam.getExecParam();
            // Execution-state message.
            Map<String, Object> stateMap = new HashMap<>();
            stateMap.put("status", status);
            stateMap.put("taskId", taskDTO.getId());
            stateMap.put("algorithmId", algorithmId);
            stateMap.put("flowId", taskDTO.getFlowId());
            stateMap.put("viewNumber", fileName);
            //TODO reconsider how to attach a custom log entry here
            stateMap.put("log", flowMessageParam.getExecParam().get("log"));
            // The main file may legitimately be absent.
            if (flowMessageParam.getMainFileData() != null) {
                stateMap.put("fileName", flowMessageParam.getMainFileData().getFileName());
            }
            kafkaTemplate.send(taskExecState, JSON.toJSONString(stateMap));
            if (status == 0) {
                // Execution message for entries that are actually dispatched.
                // NOTE(review): assumes mainFileData is non-null whenever
                // status == 0 — confirm with the callers.
                Map<String, Object> resultMap = new HashMap<>();
                Map<String, Object> basic = new HashMap<>();
                basic.put("algorithmIdentify", algorithmDTO.getAlgorithmIdentify());
                basic.put("hasFlow", true);
                basic.put("taskId", taskDTO.getId());
                basic.put("fileTaskID", record.getId());
                basic.put("mainFileName", flowMessageParam.getMainFileData().getFileName());
                resultMap.put("basic", basic);
                resultMap.put("algorithmParameter", flowMessageParam.getAlgorithmParam());
                resultMap.put("other", messageMap);
                kafkaTemplate.send(execTopic, JSON.toJSONString(resultMap));
            }
        }
    }

    /**
     * Mirrors the record and each execution's state into Redis so the scan
     * engine callback can track per-main-file progress and redo counts.
     *
     * @param record      the (possibly just created) file record
     * @param fileName    batch rule key (e.g. view number)
     * @param taskDTO     the task being executed
     * @param execParam   all execution parameters, dispatched or not
     * @param algorithmId id of the algorithm these executions belong to
     */
    private void cacheExecStateInRedis(DbDmsSchedulerTaskFileRecordDTO record, String fileName, HtScheduleJobDTO taskDTO,
                                       List<FlowMessageParam> execParam, String algorithmId) {
        Jedis jedis = RedisUtil.getJedis();
        try {
            if (jedis != null) {
                String recordKey = String.format("record:%s:%s", taskDTO.getId(), fileName);
                if (StringUtils.isBlank(jedis.get(recordKey))) {
                    jedis.set(recordKey, JSON.toJSONString(record));
                    jedis.expire(recordKey, RECORD_KEY_TTL_SECONDS);
                }
                for (FlowMessageParam flowMessageParam : execParam) {
                    String mainFileName;
                    if (flowMessageParam.getMainFileData() != null) {
                        mainFileName = flowMessageParam.getMainFileData().getFileName();
                    } else {
                        // With no main file data there is only a single state entry.
                        mainFileName = "noMainFile";
                    }
                    String key = String.format("recordCount:%s:%s:%s:%s", taskDTO.getId(), fileName, algorithmId, mainFileName);
                    if (!jedis.exists(key)) {
                        jedis.hset(key, "redo_count", "0");
                        jedis.expire(key, RECORD_KEY_TTL_SECONDS);
                    }
                    jedis.hset(key, "state", flowMessageParam.getStatus() + "");
                }
            }
        } finally {
            RedisUtil.close(jedis);
        }
    }

}
