package cn.dataling.dag.execute;

import cn.dataling.dag.enums.DagWorkflowNodeState;
import cn.dataling.dag.enums.DagWorkflowState;
import cn.dataling.dag.listener.DagNodeStateListener;
import cn.dataling.dag.listener.DagWorkflowListener;
import cn.dataling.dag.listener.DefaultDagNodeStateListener;
import cn.dataling.dag.listener.DefaultDagWorkflowListener;
import cn.dataling.dag.pojo.DagWorkflow;
import cn.dataling.dag.pojo.DagWorkflowEdge;
import cn.dataling.dag.pojo.DagWorkflowNode;
import cn.dataling.dag.repository.DagWorkflowNodeRepository;
import cn.dataling.dag.repository.DagWorkflowNodeRepositoryInMemory;
import cn.dataling.dag.repository.DagWorkflowRepository;
import cn.dataling.dag.repository.DagWorkflowRepositoryInMemory;
import cn.dataling.dag.util.ThreadPoolUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
import java.util.stream.Collectors;

/**
 * 工作流执行器
 */
/**
 * DAG workflow executor.
 *
 * <p>Builds the predecessor graph from the workflow's edges, then submits every node
 * whose predecessors have all completed to the configured {@link Executor}. Completion
 * of a node triggers scheduling of its successors; the workflow is reported complete
 * once every node is COMPLETED or CANCELED.
 *
 * <p>Thread-safety: the race where two parents finish concurrently and both try to
 * schedule the same successor is resolved with a per-node monitor in
 * {@link #executeNodes(List)} combined with a synchronous READY -&gt; RUNNING
 * transition in {@link #executeNode(DagWorkflowNode)}, so each node is submitted
 * at most once.
 */
public class DagWorkflowExecutor {

    /**
     * The workflow being executed.
     */
    private final DagWorkflow dagWorkflow;

    /**
     * Listener for workflow lifecycle events (start / pause / proceed / complete).
     */
    private final DagWorkflowListener dagWorkflowListener;

    /**
     * Listener for individual node state transitions.
     */
    private final DagNodeStateListener dagNodeStateListener;

    /**
     * Workflow repository.
     * NOTE(review): not referenced by the executor logic below — presumably reserved
     * for persistence; confirm before removing.
     */
    private final DagWorkflowRepository dagWorkflowRepository;
    /**
     * Workflow node repository.
     * NOTE(review): not referenced by the executor logic below — presumably reserved
     * for persistence; confirm before removing.
     */
    private final DagWorkflowNodeRepository dagWorkflowNodeRepository;

    /**
     * Latest known state of every node, keyed by node id. Used by
     * {@link #executeNodes(List)} to decide whether the whole workflow is done.
     */
    private final Map<String, DagWorkflowNodeState> dagNodeStateEnumMap = new ConcurrentHashMap<>();

    /**
     * Executor that runs node tasks.
     */
    private final Executor executor;

    /**
     * All nodes of the workflow; populated in {@link #start()}.
     */
    private List<DagWorkflowNode> dagWorkflowNodes;

    /**
     * Periodic task that prints the DAG tree for monitoring.
     */
    private ScheduledFuture<?> dagTreePrintFuture;

    /**
     * Ensures the completion callback fires at most once even if several branches
     * finish concurrently.
     */
    private final AtomicBoolean workflowCompleted = new AtomicBoolean(false);

    private static final Logger LOGGER = LoggerFactory.getLogger(DagWorkflowExecutor.class);

    private DagWorkflowExecutor(Builder builder) {
        this.dagWorkflowListener = builder.dagWorkflowListener;
        this.dagNodeStateListener = builder.dagNodeStateListener;
        this.executor = builder.executor;
        this.dagWorkflowRepository = builder.dagWorkflowRepository;
        this.dagWorkflowNodeRepository = builder.dagWorkflowNodeRepository;
        this.dagWorkflow = builder.dagWorkflow;
    }

    /**
     * Starts executing the workflow: wires predecessor links from the edges,
     * snapshots initial node states, prints the DAG tree, submits the root-eligible
     * nodes and starts the periodic monitor.
     *
     * @throws IllegalStateException if an edge references a node id that does not
     *                               exist in the workflow (the original code would
     *                               fail here with an uninformative NPE)
     */
    public void start() {
        dagWorkflowListener.onStart(dagWorkflow);

        dagWorkflow.setDagWorkflowState(DagWorkflowState.RUNNING);

        // Index nodes by id. Note: duplicate node ids would make Collectors.toMap throw.
        Map<String, DagWorkflowNode> dagWorkflowNodeMap = dagWorkflow.getDagWorkflowNodes()
                .stream()
                .collect(Collectors.toMap(DagWorkflowNode::getId, Function.identity()));

        List<DagWorkflowEdge> dagWorkflowEdges = dagWorkflow.getDagWorkflowEdges();

        // Derive predecessor links from the edges, validating both endpoints.
        for (DagWorkflowEdge dagWorkflowEdge : dagWorkflowEdges) {
            String sourceNodeId = dagWorkflowEdge.getSourceNodeId();
            String targetNodeId = dagWorkflowEdge.getTargetNodeId();

            DagWorkflowNode sourceNode = dagWorkflowNodeMap.get(sourceNodeId);
            DagWorkflowNode targetNode = dagWorkflowNodeMap.get(targetNodeId);
            if (sourceNode == null || targetNode == null) {
                throw new IllegalStateException(
                        "Edge references unknown node: source=" + sourceNodeId + ", target=" + targetNodeId);
            }
            targetNode.prevNodes(sourceNode);
        }

        Collection<DagWorkflowNode> dagWorkflowNodes = dagWorkflowNodeMap.values();
        for (DagWorkflowNode dagWorkflowNode : dagWorkflowNodes) {
            dagNodeStateEnumMap.put(dagWorkflowNode.getId(), dagWorkflowNode.getState());
        }
        this.dagWorkflowNodes = new ArrayList<>(dagWorkflowNodes);

        printDagTree(this.dagWorkflowNodes);

        executeNodes(this.dagWorkflowNodes);

        monitor();
    }

    /**
     * Prints the DAG as an ASCII tree, one subtree per root node
     * (a root is a node with no predecessors).
     * author: deepseek
     */
    private void printDagTree(List<DagWorkflowNode> allNodes) {
        // 1. Find all roots (nodes without predecessors).
        List<DagWorkflowNode> roots = allNodes.stream()
                .filter(node -> node.getPrevNodes().isEmpty())
                .collect(Collectors.toList());

        // 2. Print the subtree of each root.
        for (DagWorkflowNode root : roots) {
            printNode(root, "", true, new java.util.HashSet<>());
        }
    }

    /**
     * Recursively prints one node and its successors. A per-branch copy of
     * {@code visited} guards against infinite recursion on cyclic graphs while
     * still showing shared nodes once per branch.
     * NOTE(review): writes to System.out on purpose (human-readable tree);
     * consider routing through the logger if console output is undesired.
     */
    private void printNode(DagWorkflowNode node, String prefix, boolean isTail, java.util.Set<String> visited) {
        // Guard against infinite recursion caused by cyclic dependencies.
        if (visited.contains(node.getId())) {
            System.out.println(prefix + (isTail ? "└── " : "├── ") + node.getId() + " (循环依赖)");
            return;
        }
        visited.add(node.getId());

        // Print the current node with its state.
        System.out.println(prefix + (isTail ? "└── " : "├── ") + node.getId() + " [" + node.getState() + "]");

        // Recurse into successors.
        List<DagWorkflowNode> children = node.getNextNodes();
        for (int i = 0; i < children.size(); i++) {
            boolean isLastChild = (i == children.size() - 1);
            String newPrefix = prefix + (isTail ? "    " : "│   ");
            printNode(children.get(i), newPrefix, isLastChild, new java.util.HashSet<>(visited));
        }
    }

    /**
     * Pauses the workflow: every node still in READY state is moved to PAUSED so
     * {@link #executeNode(DagWorkflowNode)} refuses to run it. Nodes already
     * RUNNING are allowed to finish.
     */
    public void pause() {
        LOGGER.info("暂停工作流");
        dagWorkflow.setDagWorkflowState(DagWorkflowState.PAUSED);
        List<DagWorkflowNode> pausedNodes = this.dagWorkflowNodes.stream()
                .filter(e -> e.getState().equals(DagWorkflowNodeState.READY))
                .collect(Collectors.toList());
        pausedNodes.forEach(node -> {
            node.setState(DagWorkflowNodeState.PAUSED);
            // Keep the state cache consistent (previously only the node was mutated,
            // leaving dagNodeStateEnumMap stale).
            dagNodeStateEnumMap.put(node.getId(), DagWorkflowNodeState.PAUSED);
        });
        dagNodeStateListener.onStateChange(pausedNodes);
        dagWorkflowListener.onPaused(this.dagWorkflow);
    }

    /**
     * Resumes a paused workflow: PAUSED nodes go back to READY and eligible nodes
     * are re-submitted.
     */
    public void proceed() {
        LOGGER.info("恢复工作流");
        dagWorkflow.setDagWorkflowState(DagWorkflowState.RUNNING);
        List<DagWorkflowNode> resumedNodes = this.dagWorkflowNodes.stream()
                .filter(e -> e.getState().equals(DagWorkflowNodeState.PAUSED))
                .collect(Collectors.toList());
        resumedNodes.forEach(node -> {
            node.setState(DagWorkflowNodeState.READY);
            // Keep the state cache consistent (see pause()).
            dagNodeStateEnumMap.put(node.getId(), DagWorkflowNodeState.READY);
        });
        dagNodeStateListener.onStateChange(resumedNodes);
        dagWorkflowListener.onProceed(this.dagWorkflow);

        executeNodes(this.dagWorkflowNodes);
    }

    /**
     * Submits every node in {@code dagNodes} whose predecessors are all COMPLETED.
     * An empty list means one execution branch finished; if the cached states show
     * every node COMPLETED or CANCELED, the whole workflow is reported complete
     * (exactly once, guarded by {@link #workflowCompleted}).
     */
    private void executeNodes(List<DagWorkflowNode> dagNodes) {

        // An empty list means this branch is done; other branches may still run.
        if (dagNodes == null || dagNodes.isEmpty()) {
            // The workflow is complete once every cached node state is terminal.
            boolean completed = dagNodeStateEnumMap.values().stream().allMatch(e -> {
                return e.equals(DagWorkflowNodeState.COMPLETED) || e.equals(DagWorkflowNodeState.CANCELED);
            });
            if (completed && workflowCompleted.compareAndSet(false, true)) {
                this.dagWorkflowNodes.clear();
                this.dagNodeStateEnumMap.clear();
                dagWorkflowListener.onCompleted(dagWorkflow);
            }
            return;
        }

        for (DagWorkflowNode dagNode : dagNodes) {
            // A node may run only after ALL of its predecessors completed.
            List<DagWorkflowNode> prevNodes = dagNode.getPrevNodes();
            boolean allCompleted = prevNodes.stream().allMatch(e -> e.getState().equals(DagWorkflowNodeState.COMPLETED));
            if (!allCompleted) {
                continue;
            }
            // Two parents finishing concurrently may both reach this point for the
            // same successor. The per-node monitor, together with executeNode's
            // synchronous READY -> RUNNING transition, guarantees a single
            // submission. (The previous code claimed "double-checked locking" in a
            // comment but held no lock, so the node could be submitted twice.)
            synchronized (dagNode) {
                if (dagNode.getState().equals(DagWorkflowNodeState.READY)) {
                    this.executeNode(dagNode);
                }
            }
        }
    }


    /**
     * Runs a single node asynchronously on {@link #executor}, handling failure with
     * a bounded retry and scheduling successors on success. Also invoked directly
     * for retries (node state RETRY at that point).
     */
    private void executeNode(DagWorkflowNode dagWorkflowNode) {

        if (dagWorkflowNode.getState().equals(DagWorkflowNodeState.PAUSED)) {
            LOGGER.info("{} DAG工作流暂停", dagWorkflow.getId());
            return;
        }

        // Transition to RUNNING *before* submitting, on the calling thread: this is
        // what makes the claim in executeNodes atomic — a concurrent caller holding
        // the node monitor will no longer observe READY.
        updateDagNodeState(dagWorkflowNode, DagWorkflowNodeState.RUNNING);

        CompletableFuture.supplyAsync(() -> {
            dagWorkflowNode.getDagWorkflowNodeTask().run(dagWorkflowNode);
            return dagWorkflowNode;
        }, executor).handle((result, exception) -> {
            if (exception != null) {
                // Log with the throwable instead of printStackTrace().
                LOGGER.error("[{}:{}] : dag节点执行失败", dagWorkflowNode.getId(), dagWorkflowNode.getName(), exception);
                updateDagNodeState(dagWorkflowNode, DagWorkflowNodeState.RETRY);
                // Bounded retry: getAndDecrement returns the remaining budget.
                int retryTime = dagWorkflowNode.getRetryTimes().getAndDecrement();
                if (retryTime > 0) {
                    LOGGER.info("[{}:{}] : dag节点执行失败，重试次数：{}", dagWorkflowNode.getId(), dagWorkflowNode.getName(), retryTime);
                    // Retrying is simply re-running executeNode.
                    this.executeNode(dagWorkflowNode);
                } else {
                    updateDagNodeState(dagWorkflowNode, DagWorkflowNodeState.FAILED);
                }
            } else {
                // Inspect the state the task itself left on the node.
                switch (dagWorkflowNode.getState()) {
                    case COMPLETED:
                        updateDagNodeState(dagWorkflowNode, DagWorkflowNodeState.COMPLETED);
                        this.executeNodes(dagWorkflowNode.getNextNodes());
                        break;
                    case CANCELED:
                        // cancelDagNodeTask updates this node's state itself; the
                        // previous code updated it here first as well, firing the
                        // state-change listener twice for the same transition.
                        cancelDagNodeTask(dagWorkflowNode);
                        break;
                    case FAILED:
                        updateDagNodeState(dagWorkflowNode, DagWorkflowNodeState.FAILED);
                        break;
                    default:
                        LOGGER.warn("节点 [{}:{}] 完成后状态异常: {}",
                                dagWorkflowNode.getId(),
                                dagWorkflowNode.getName(),
                                dagWorkflowNode.getState());
                }
            }
            return result;
        });
    }

    /**
     * Cancels a node and, recursively, every downstream node — once a node is
     * canceled none of its successors can ever satisfy the all-predecessors-completed
     * condition.
     */
    private void cancelDagNodeTask(DagWorkflowNode dagWorkflowNode) {
        updateDagNodeState(dagWorkflowNode, DagWorkflowNodeState.CANCELED);
        for (DagWorkflowNode nextNode : dagWorkflowNode.getNextNodes()) {
            this.cancelDagNodeTask(nextNode);
        }
    }

    /**
     * Updates a node's state in one place: the node itself, the state cache, and
     * the state-change listener.
     */
    private void updateDagNodeState(DagWorkflowNode dagWorkflowNode, DagWorkflowNodeState state) {
        dagWorkflowNode.setState(state);
        this.dagNodeStateEnumMap.put(dagWorkflowNode.getId(), state);
        dagNodeStateListener.onStateChange(dagWorkflowNode);
    }


    /**
     * Schedules a periodic task (initial delay 30s, period 10s) that prints the DAG
     * tree, and cancels itself once the workflow has been torn down. The 30s initial
     * delay also guarantees {@link #dagTreePrintFuture} is assigned before the lambda
     * first reads it.
     */
    private void monitor() {
        dagTreePrintFuture = ThreadPoolUtil.getScheduledExecutorService()
                .scheduleAtFixedRate(
                        () -> {
                            try {
                                if (this.dagWorkflowNodes == null || this.dagWorkflowNodes.isEmpty() || dagNodeStateEnumMap.isEmpty()) {
                                    LOGGER.info("DAG工作流任务为空 退出任务状态监控");
                                    dagTreePrintFuture.cancel(false);
                                    // Do not print after cancelling (previously the
                                    // tree was still printed on a cleared node list).
                                    return;
                                }
                                printDagTree(this.dagWorkflowNodes);
                            } catch (Exception e) {
                                // Log instead of printStackTrace(); an uncaught
                                // exception would silently kill the periodic task.
                                LOGGER.error("DAG tree monitor tick failed", e);
                            }
                        }
                        , 30
                        , 10
                        , TimeUnit.SECONDS
                );
    }

    /**
     * Builder for {@link DagWorkflowExecutor}. The workflow and executor are
     * required; listeners and repositories have in-memory/no-op defaults.
     */
    public static class Builder {
        /**
         * The workflow to execute (required).
         */
        private final DagWorkflow dagWorkflow;
        /**
         * Executor that runs node tasks (required).
         */
        private final Executor executor;
        // Optional parameters (with defaults).
        /**
         * Workflow lifecycle listener.
         */
        private DagWorkflowListener dagWorkflowListener = new DefaultDagWorkflowListener();
        /**
         * Node state-change listener.
         */
        private DagNodeStateListener dagNodeStateListener = new DefaultDagNodeStateListener();
        /**
         * Workflow repository.
         */
        private DagWorkflowRepository dagWorkflowRepository = new DagWorkflowRepositoryInMemory();
        /**
         * Node repository.
         */
        private DagWorkflowNodeRepository dagWorkflowNodeRepository = new DagWorkflowNodeRepositoryInMemory();

        /**
         * @param dagWorkflow the workflow to execute; must not be null
         * @param executor    executor for node tasks; must not be null
         * @throws IllegalArgumentException if either argument is null
         */
        public Builder(DagWorkflow dagWorkflow, Executor executor) {
            if (dagWorkflow == null || executor == null) {
                throw new IllegalArgumentException("DagWorkflow and Executor must not be null");
            }
            this.dagWorkflow = dagWorkflow;
            this.executor = executor;
        }

        public Builder withWorkflowListener(DagWorkflowListener listener) {
            this.dagWorkflowListener = listener;
            return this;
        }

        public Builder withNodeStateListener(DagNodeStateListener listener) {
            this.dagNodeStateListener = listener;
            return this;
        }

        public Builder withWorkflowRepository(DagWorkflowRepository repository) {
            this.dagWorkflowRepository = repository;
            return this;
        }

        public Builder withNodeRepository(DagWorkflowNodeRepository repository) {
            this.dagWorkflowNodeRepository = repository;
            return this;
        }

        public DagWorkflowExecutor build() {
            return new DagWorkflowExecutor(this);
        }
    }

}
