package org.hgg.workflow.core;


import java.util.Set;
import java.util.concurrent.*;

/**
 * Executes the nodes of a {@link Dag} depth-first on a single-threaded
 * execution engine: each ready node is submitted as a {@link Task}, its
 * outcome is recorded in the shared {@link ExecutorState}, and then the
 * node's children are processed recursively.
 *
 * @param <T> the task input type
 * @param <R> the task result type
 * @author Jarvis
 * @date 2023/12/28 0:28
 */
public class TaskExecutor<T, R> extends DefaultExecutor<T, R> {

    // Shared execution state: tracks which nodes are ready, done, or failed.
    private final ExecutorState<T, R> state = new DefaultExecutorState<>();
    private final Dag<T, R> dag;
    // Single worker thread: nodes are therefore executed strictly one at a time.
    private final ExecutorService executionEngine = Executors.newFixedThreadPool(1);
    // NOTE(review): the two retry executors below are never initialized or used
    // anywhere in this class — confirm whether retry support is still planned.
    private ExecutorService immediatelyRetryExecutor;
    private ScheduledExecutorService scheduledRetryExecutor;

    public TaskExecutor(Dag<T, R> dag) {
        this.dag = dag;
    }

    /**
     * Submits each ready node to the execution engine, blocks for its result,
     * records success or failure in {@link #state}, then recurses into the
     * node's children. Nodes that are not ready are skipped entirely
     * (children included), matching the original control flow.
     */
    private void doProcessNodes(final Set<Node<T, R>> nodes) {
        for (Node<T, R> node : nodes) {
            // The original "while (...) { ... break; }" executed at most one
            // iteration, so a plain condition is equivalent and clearer.
            if (!state.isReady(node)) {
                continue;
            }
            Future<ExecutionResult<T, R>> future = executionEngine.submit(new Task<>(node));

            ExecutionResult<T, R> executionResult;
            try {
                executionResult = future.get();
            } catch (InterruptedException e) {
                // Restore the interrupt flag so callers up the stack can
                // observe the interruption; swallowing it loses the signal.
                Thread.currentThread().interrupt();
                executionResult = failureResult(node, e);
            } catch (ExecutionException e) {
                executionResult = failureResult(node, e);
            }
            if (executionResult.isSuccess()) {
                state.markProcessingDone(node);
            } else {
                state.markProcessingFailed(node, executionResult);
            }
            // Continue with this node's children.
            doExecute(node.getChildren());
        }
    }

    /**
     * Builds a failed result carrying the exception message as the payload.
     * The unchecked cast mirrors the pre-existing contract that {@code R} is
     * compatible with {@link String} for error reporting — TODO(review):
     * consider a dedicated error field on {@code ExecutionResult} instead.
     */
    @SuppressWarnings("unchecked")
    private ExecutionResult<T, R> failureResult(Node<T, R> node, Exception e) {
        return new ExecutionResult<>(node, (R) e.getMessage(), false);
    }

    /** Processes the given child nodes with the same depth-first strategy. */
    private void doExecute(Set<Node<T, R>> children) {
        doProcessNodes(children);
    }

    /**
     * Runs the whole DAG starting from its root nodes, then shuts down the
     * execution engine.
     */
    @Override
    public void execute() {
        try {
            doProcessNodes(dag.getRootNodes());
        } finally {
            // Always release the worker thread, even if processing throws.
            executionEngine.shutdown();
        }
    }
}
