package com.casic.ops.handler.pipeline;

import cn.hutool.core.lang.Assert;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.casic.ops.consts.user.UserHolder;
import com.casic.ops.dao.pipeline.PipelineDAO;
import com.casic.ops.dao.pipeline.PipelineLoggerDAO;
import com.casic.ops.entity.domain.pipeline.PipelineDO;
import com.casic.ops.entity.domain.pipeline.PipelineLoggerDO;
import com.casic.ops.entity.dto.pipeline.datastructure.Logger.NodeLogger;
import com.casic.ops.entity.dto.pipeline.datastructure.context.DefaultResult;
import com.casic.ops.entity.dto.pipeline.datastructure.context.Result;
import com.casic.ops.entity.dto.pipeline.datastructure.enums.NodeEnum;
import com.casic.ops.entity.dto.pipeline.datastructure.enums.NodeExecuteStatus;
import com.casic.ops.entity.dto.pipeline.datastructure.event.EventBusI;
import com.casic.ops.entity.dto.pipeline.datastructure.event.PipelineNodeRefreshEvent;
import com.casic.ops.entity.dto.pipeline.datastructure.executecontext.PipelineExecuteContext;
import com.casic.ops.entity.dto.pipeline.datastructure.graph.Graph;
import com.casic.ops.entity.dto.pipeline.datastructure.graph.Nodes;
import com.casic.ops.entity.dto.pipeline.datastructure.manager.PipelineNodeManager;
import com.casic.ops.entity.dto.pipeline.datastructure.node.PipelineNodeConfigInfo;
import com.casic.ops.entity.vo.pipeline.response.Response;
import com.casic.ops.entity.vo.pipeline.response.SingleResponse;
import com.casic.ops.service.api.pipeline.PipelineLoggerService;
import com.casic.ops.service.api.pipeline.PipelineNodeInfoService;
import com.casic.ops.service.api.pipeline.PipelineNodeLoggerService;
import com.casic.ops.service.api.pipeline.PipelineService;
import com.casic.ops.utils.OpsSnowFlake;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * 流水线执行器，根据流水线ID来执行流水线
 *
 * @author cww
 * @date 2021-12-28 22:57:10
 */
@Slf4j
@Service
public class PipelineExecutor implements DisposableBean {

    /**
     * Shared worker pool used both to kick off a pipeline run asynchronously and to
     * fan out to downstream nodes (2 core / 3 max threads, 5s keep-alive, unbounded queue).
     */
    private static final ThreadPoolExecutor executor =
            new ThreadPoolExecutor(2, 3, 5L, TimeUnit.SECONDS, new LinkedBlockingDeque<>());

    /**
     * Nodes currently executing. Key = executionRecordUuid + "&" + nodeUuid.
     * The reference is never reassigned, so {@code final} suffices (volatile was
     * unnecessary); ConcurrentHashMap itself handles concurrent access.
     */
    private static final ConcurrentHashMap<String, NodeLogger> executingNodeMap = new ConcurrentHashMap<>(2 << 4);

    @Resource
    private PipelineLoggerService pipelineLoggerService;
    @Resource
    private PipelineService pipelineService;
    @Resource
    private PipelineNodeInfoService pipelineNodeInfoService;
    @Resource
    private PipelineNodeManager pipelineNodeManager;
    @Resource
    private PipelineNodeLoggerService pipelineNodeLoggerService;
    @Resource
    private EventBusI eventBus;

    /**
     * Pipeline execution entry point: loads the pipeline by id, builds the execution
     * context (which also persists the execution record), then runs the graph
     * asynchronously starting from the START node.
     *
     * @param id primary key of the pipeline to execute
     * @return response wrapping the already-initialised {@link PipelineLoggerDO},
     *         so the caller can start polling progress immediately
     */
    public Response execute(Long id) {

        PipelineDO pipelineDO = pipelineService.selectById(id);

        // Validate BEFORE building the context: buildContext dereferences pipelineDO,
        // so asserting after the call (as the original did) would NPE first and the
        // user-facing message would never be shown.
        Assert.isTrue(Objects.nonNull(pipelineDO), "请先保存流水线");

        PipelineExecuteContext context = buildContext(pipelineDO);

        // Execution is asynchronous; the returned logger DO is already initialised.
        executor.submit(() -> {
            try {
                // Short delay so the HTTP response (with the logger uuid) reaches the
                // caller before node-refresh events start being published.
                Thread.sleep(1000);
            } catch (InterruptedException e) {
                // Restore the interrupt flag instead of swallowing it.
                Thread.currentThread().interrupt();
                log.warn("pipeline execute delay interrupted, pipeline id : {}", id, e);
            }
            execute(context, context.getStart());
        });
        return SingleResponse.of(context.getLogger());
    }

    /**
     * Creates and persists the execution record for one pipeline run.
     *
     * @param pipelineDO the pipeline being executed (must be non-null)
     * @return the persisted execution record, with a freshly generated snowflake uuid
     */
    private PipelineLoggerDO logger(PipelineDO pipelineDO) {
        PipelineLoggerDO logger = new PipelineLoggerDO();
        logger.setCreator(UserHolder.get().getUsername());
        logger.setModifier(UserHolder.get().getUsername());
        logger.setGmtCreate(new Date());
        logger.setGmtModified(new Date());
        logger.setPipelineUuid(pipelineDO.getUuid());
        logger.setExecuteStartTime(new Date());
        // Snapshot the graph definition at the moment execution starts.
        logger.setGraphContent(JSON.toJSONString(pipelineDO.getPipelineContext()));
        logger.setTriggerWay(pipelineDO.getTriggerWay());
        logger.setUuid(OpsSnowFlake.getInstance().nextId());
        pipelineLoggerService.add(logger);
        return logger;
    }

    /**
     * Builds the execution context for one run: persists the execution record,
     * parses the graph, loads per-node config, creates a "pending" logger for every
     * node (except END), and precomputes the source/target adjacency maps used for
     * routing.
     *
     * @param pipelineDO the pipeline to execute (must be non-null)
     * @return a fully initialised execution context with the START node resolved
     */
    private PipelineExecuteContext buildContext(PipelineDO pipelineDO) {
        // Create the pipeline execution record first; every node logger links to it.
        PipelineLoggerDO pipelineLoggerDO = logger(pipelineDO);
        PipelineExecuteContext context = new PipelineExecuteContext();
        context.setLogger(pipelineLoggerDO);

        // Parse the stored graph definition.
        Graph graph = JSONObject.parseObject(pipelineDO.getPipelineContext(), Graph.class);
        List<Nodes> nodes = graph.getNodes();
        context.setNodes(nodes);
        context.setGraph(graph);

        for (Nodes node : nodes) {
            // The END node needs neither config nor a pre-created logger here.
            if (NodeEnum.END.getName().equals(node.getName())) {
                continue;
            }
            PipelineNodeConfigInfo nodeConfigInfo = pipelineNodeInfoService.selectByNodeUuid(node.getId());
            if (Objects.isNull(nodeConfigInfo)) {
                log.error("node : {},config node info data is null", node);
            } else {
                context.setAttributes(node.getId(), nodeConfigInfo);
            }
            // Initialise each node's execution record; while the context is being
            // built every node is in the pending ("待执行") state.
            NodeLogger nodeLogger = new NodeLogger();
            nodeLogger.init();
            nodeLogger.setFinalStatus("待执行");
            nodeLogger.setNodeUuid(node.getId());
            nodeLogger.setNodeName(node.getNodeName());
            nodeLogger.setLoggerUuid(pipelineLoggerDO.getUuid());
            context.setAttributes(node.getId() + "logger", nodeLogger);
            pipelineNodeLoggerService.insertByNodeLogger(nodeLogger);
        }

        // Locate the START node; fail fast with a clear message if the graph lacks one
        // (the original used Optional.get(), which would throw a bare NoSuchElementException).
        // NOTE(review): this compares against NodeEnum.START.toString() while the END
        // check above uses getName() — confirm the two are equivalent for this enum.
        context.setStart(nodes.stream()
                .filter(node -> NodeEnum.START.toString().equals(node.getName()))
                .findFirst()
                .orElseThrow(() -> new IllegalStateException(
                        "pipeline has no start node, pipeline uuid=" + pipelineDO.getUuid())));

        // Map each connection-point uuid back to its owning node.
        for (Nodes n : nodes) {
            // Points where node n is the target of an edge.
            n.getPoints().getTargets().forEach(uuid -> context.getTargetMap().put(uuid.replace("target-", ""), n));
            // Points where node n is the source of an edge.
            n.getPoints().getSources().forEach(uuid -> context.getSourceMap().put(uuid.replace("source-", ""), n));
        }

        // Build the execution routes: each edge is "source-X&&target-Y"; record Y as a
        // downstream of X and X as an upstream of Y.
        for (String edge : graph.getEdges()) {
            String[] lines = edge.split("&&");
            String source = lines[0].replace("source-", "");
            String target = lines[1].replace("target-", "");
            context.getTargetLineMap().computeIfAbsent(source, k -> new LinkedList<>()).add(target);
            context.getSourceLineMap().computeIfAbsent(target, k -> new LinkedList<>()).add(source);
        }

        return context;
    }


    /**
     * Executes one node of the pipeline, then fans out to its downstream nodes.
     * Re-entrant via the executor: each downstream submission re-checks its own
     * preconditions in {@link #executable}.
     *
     * @param context shared execution context for this run
     * @param node    the node to execute
     */
    public void execute(PipelineExecuteContext context, Nodes node) {

        // Skip unless all upstream nodes have completed successfully.
        if (!executable(context, node)) {
            return;
        }
        try {
            log.info("执行的节点：{}", node.getName());
            // Aggregated result holder for this node.
            Result result = new DefaultResult();
            // Mark the node as in-progress before running it.
            node.refreshStatus(NodeExecuteStatus.LOADING);
            // Record the node execution start time.
            NodeLogger logger = (NodeLogger) context.getAttributes(node.getId() + "logger");
            if (logger == null) {
                // The END node is skipped while building the context and therefore has
                // no pre-created logger; give it a fresh one.
                logger = new NodeLogger();
            }
            logger.setExecuteStartTime(new Date());
            executingNodeMap.put(String.format("%s&%s", context.getLogger().getUuid(), node.getId()), logger);
            // Broadcast the LOADING state together with the upstream edge map.
            String loggerUuid = Long.toString(context.getLogger().getUuid());
            eventBus.publish(new PipelineNodeRefreshEvent(loggerUuid, node, context.getSourceLineMap(), NodeExecuteStatus.LOADING));
            context.setAttributes(node.getName() + "logger-uuid", context.getLogger().getUuid());
            // Resolve the node's actual executor by node name and run it. Could later
            // be replaced by ApplicationContextUtil.getBean(String name).
            Result ret = pipelineNodeManager.get(node.getName()).execute(context, node.getId());
            result.add(node.getName(), ret);
            int statusCode = (int) ret.get("status");
            if (statusCode != 0) {
                // Node execution failed: mark both node and run as failed.
                node.refreshStatus(NodeExecuteStatus.FAILED);
                context.getLogger().setFinalStatus("失败");
                log.error("execute error , cur node : {}", node);
            } else {
                node.refreshStatus(NodeExecuteStatus.SUCCESS);
                // Fan out: submit every downstream node reachable from this node's
                // source points; each submission re-validates its own preconditions.
                node.getPoints().getSources().forEach(sce -> {
                    String next = sce.replace("source-", "");
                    context.getTargetLineMap().getOrDefault(next, Collections.emptyList())
                            .forEach(v -> executor.submit(() -> execute(context, context.getTargetMap().get(v))));
                });
            }
        } catch (Exception e) {
            // Any unexpected error fails the node and the run; log with the throwable
            // instead of printStackTrace().
            node.refreshStatus(NodeExecuteStatus.FAILED);
            context.getLogger().setFinalStatus("失败");
            log.error("execute error , cur node : {}", node, e);
        }
        // Done (success or failure): drop from the in-flight cache, persist the
        // refreshed graph snapshot, then broadcast the node state with its outgoing edges.
        executingNodeMap.remove(String.format("%s&%s", context.getLogger().getUuid(), node.getId()));
        PipelineLoggerDO pipelineLoggerDO = context.getLogger();
        context.getGraph().setNodes(context.getNodes());
        pipelineLoggerDO.setGraphContent(JSON.toJSONString(context.getGraph()));
        pipelineLoggerService.updateByLoggerDO(pipelineLoggerDO);
        String loggerUuid = Long.toString(context.getLogger().getUuid());
        eventBus.publish(new PipelineNodeRefreshEvent(loggerUuid, node, context.getTargetLineMap()));
    }

    /**
     * Checks whether the given node may run now: it must not already have succeeded,
     * and every upstream (source) node feeding any of its target points must have
     * finished successfully. Synchronized so concurrent fan-out submissions evaluate
     * preconditions one at a time.
     *
     * @param context shared execution context for this run
     * @param node    the candidate node
     * @return {@code true} if the node is ready to execute
     */
    private synchronized boolean executable(PipelineExecuteContext context, Nodes node) {
        try {
            log.info("检查是否所有输入节点都已经执行完成:{}", node.getName());
            // Never execute the same node twice.
            if (NodeExecuteStatus.SUCCESS.getName().equals(node.getData().getNodeState())) {
                return false;
            }
            // Every upstream node must be in a terminal-success state; a blank state
            // means "not started" (commons-lang isBlank also covers null, so the
            // original's separate null check was redundant).
            for (String t : node.getPoints().getTargets()) {
                String targetUUID = t.replace("target-", "");
                List<String> sourceUUIDList = context.getSourceLineMap().getOrDefault(targetUUID, Collections.emptyList());
                for (String sourceUUID : sourceUUIDList) {
                    Nodes sourceNode = context.getSourceMap().get(sourceUUID);
                    if (Objects.isNull(sourceNode)) {
                        continue;
                    }
                    String state = sourceNode.getData().getNodeState();
                    if (StringUtils.isBlank(state)
                            || NodeExecuteStatus.LOADING.getName().equals(state)
                            || NodeExecuteStatus.FAILED.getName().equals(state)) {
                        log.info("输入节点:{} 未执行完成，放弃执行当前节点 {}", sourceNode, node);
                        return false;
                    }
                }
            }
        } catch (Exception e) {
            // Treat any inspection error as "not executable"; log with the throwable.
            log.error("executable check error, node : {}", node, e);
            return false;
        }
        return true;
    }

    /**
     * Spring shutdown hook: stop accepting new tasks and let queued ones finish.
     */
    @Override
    public void destroy() {
        if (!executor.isShutdown()) {
            executor.shutdown();
        }
    }

    /**
     * Looks up an in-flight node logger.
     *
     * @param key executionRecordUuid + "&" + nodeUuid
     * @return the logger of the executing node, or {@code null} if not running
     */
    public NodeLogger getExecuteNode(String key) {
        return executingNodeMap.get(key);
    }
}
