package avicit.bdp.dds.server.master.runner;

import avicit.bdp.common.utils.DateUtils;
import avicit.bdp.common.utils.SpringApplicationContext;
import avicit.bdp.common.utils.redis.RedisCacheHelper;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dds.api.service.ProcessResourceConfigService;
import avicit.bdp.dds.common.Constants;
import avicit.bdp.dds.common.graph.DAG;
import avicit.bdp.dds.common.thread.Stopper;
import avicit.bdp.dds.common.thread.ThreadUtils;
import avicit.bdp.dds.common.utils.CommonUtils;
import avicit.bdp.dds.common.utils.LoggerUtils;
import avicit.bdp.dds.common.utils.OSUtils;
import avicit.bdp.dds.common.utils.ParameterUtils;
import avicit.bdp.dds.common.utils.TaskParametersUtils;
import avicit.bdp.dds.dao.entity.ProcessDefinition;
import avicit.bdp.dds.dao.entity.ProcessInstance;
import avicit.bdp.dds.dao.entity.ProcessResourceSettings;
import avicit.bdp.dds.dao.entity.Schedule;
import avicit.bdp.dds.dao.entity.TaskInstance;
import avicit.bdp.dds.dao.utils.DagHelper;
import avicit.bdp.dds.dispatch.enums.CommandType;
import avicit.bdp.dds.dispatch.enums.DataType;
import avicit.bdp.dds.dispatch.enums.DependResult;
import avicit.bdp.dds.dispatch.enums.Direct;
import avicit.bdp.dds.dispatch.enums.ExecutionStatus;
import avicit.bdp.dds.dispatch.enums.FailureStrategy;
import avicit.bdp.dds.dispatch.enums.Flag;
import avicit.bdp.dds.dispatch.enums.Priority;
import avicit.bdp.dds.dispatch.enums.TaskDependType;
import avicit.bdp.dds.dispatch.enums.TaskType;
import avicit.bdp.dds.dispatch.model.TaskNode;
import avicit.bdp.dds.dispatch.model.TaskNodeRelation;
import avicit.bdp.dds.dispatch.model.TwoTuple;
import avicit.bdp.dds.dispatch.process.ProcessDag;
import avicit.bdp.dds.dispatch.process.Property;
import avicit.bdp.dds.dispatch.task.AbstractParameters;
import avicit.bdp.dds.dispatch.task.conditions.ConditionsParameters;
import avicit.bdp.dds.remote.NettyRemotingClient;
import avicit.bdp.dds.server.master.config.MasterConfig;
import avicit.bdp.dds.server.utils.AlertManager;
import avicit.bdp.dds.service.process.ProcessService;
import avicit.bdp.dds.service.quartz.cron.CronUtils;
import avicit.platform6.commons.utils.ComUtil;
import com.alibaba.fastjson2.JSON;
import com.alibaba.fastjson2.JSONObject;
import com.alibaba.fastjson2.TypeReference;
import com.google.common.collect.Lists;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.redis.core.StringRedisTemplate;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;

import static avicit.bdp.dds.common.Constants.CMDPARAM_COMPLEMENT_DATA_END_DATE;
import static avicit.bdp.dds.common.Constants.CMDPARAM_COMPLEMENT_DATA_START_DATE;
import static avicit.bdp.dds.common.Constants.CMDPARAM_RECOVERY_START_NODE_STRING;
import static avicit.bdp.dds.common.Constants.CMDPARAM_START_NODE_NAMES;
import static avicit.bdp.dds.common.Constants.DEFAULT_WORKER_GROUP;
import static avicit.bdp.dds.common.Constants.SEC_2_MINUTES_TIME_UNIT;

/** master exec thread,split dag */
public class MasterExecThread implements Runnable {

  /** Logger for this master exec thread. */
  private static final Logger logger = LoggerFactory.getLogger(MasterExecThread.class);

  /** The process instance being executed; reassigned when refreshed from the database. */
  private ProcessInstance processInstance;

  /** Currently running task nodes: exec thread -> future of (finished flag, result string). */
  private final Map<MasterBaseTaskExecThread, Future<TwoTuple<Boolean, String>>> activeTaskNode =
      new ConcurrentHashMap<>();

  /** Thread pool that runs the individual task exec threads. */
  private final ExecutorService taskExecService;

  // NOTE(review): injected and stored but not used in the visible portion of this class
  private final StringRedisTemplate redisTemplate;

  /** True once any task failed to submit; contributes to hasFailedTask(). */
  private boolean taskFailedSubmit = false;

  /** Task instances to recover, parsed from the command parameters. */
  private List<TaskInstance> recoverNodeIdList = new ArrayList<>();

  /** Tasks that failed permanently (no retries left), keyed by task name. */
  private Map<String, TaskInstance> errorTaskList = new ConcurrentHashMap<>();

  /** Tasks that have completed, keyed by task name. */
  private Map<String, TaskInstance> completeTaskList = new ConcurrentHashMap<>();

  /** Tasks queued and ready to be submitted, keyed by task name. */
  private Map<String, TaskInstance> readyToSubmitTaskList = new ConcurrentHashMap<>();

  /** Tasks whose dependencies failed, keyed by task name. */
  private Map<String, TaskInstance> dependFailedTask = new ConcurrentHashMap<>();

  /** Task nodes forbidden from running, keyed by node name. */
  private Map<String, TaskNode> forbiddenTaskList = new ConcurrentHashMap<>();

  /** Task nodes marked to be skipped, keyed by node name. */
  private Map<String, TaskNode> skipTaskNodeList = new ConcurrentHashMap<>();

  /** Recover-tolerance-fault task list. */
  private List<TaskInstance> recoverToleranceFaultTaskList = new ArrayList<>();

  /** Alert manager used to send process-instance notifications (see endProcess). */
  private AlertManager alertManager = new AlertManager();

  /** The DAG of task nodes for the current run; built by buildFlowDag(). */
  private DAG<String, TaskNode, TaskNodeRelation> dag;

  /** Data access service for processes and tasks. */
  private ProcessService processService;

  /** Master server configuration (sizes the task exec pool). */
  private MasterConfig masterConfig;

  /** Netty remoting client used for task dispatch. */
  private NettyRemotingClient nettyRemotingClient;

  /** Service for per-process resource settings, consumed when building the DAG. */
  private final ProcessResourceConfigService processResourceConfigService;

  /**
   * Task execution result cache. Original note says key = task instance name, but lookups in
   * setTaskNodeLocalParam use param key names — TODO confirm which it is.
   */
  private final Map<String, Property> resultCache = new ConcurrentHashMap<>();

  /**
   * Creates a MasterExecThread bound to a single process instance.
   *
   * @param processInstance the process instance to execute
   * @param processService data access service for process/task persistence
   * @param processResourceConfigService service for per-process resource settings
   * @param nettyRemotingClient remoting client used for task dispatch
   * @param redisTemplate redis template for string operations
   */
  public MasterExecThread(
      ProcessInstance processInstance,
      ProcessService processService,
      ProcessResourceConfigService processResourceConfigService,
      NettyRemotingClient nettyRemotingClient,
      StringRedisTemplate redisTemplate) {

    this.processInstance = processInstance;
    this.processService = processService;
    this.processResourceConfigService = processResourceConfigService;
    this.nettyRemotingClient = nettyRemotingClient;
    this.redisTemplate = redisTemplate;
    this.masterConfig = SpringApplicationContext.getBean(MasterConfig.class);
    // size the task-exec pool from master configuration
    int execThreadCount = masterConfig.getMasterExecTaskNum();
    this.taskExecService =
        ThreadUtils.newDaemonFixedThreadExecutor("Master-Task-Exec-Thread", execThreadCount);
  }

  /** Entry point: dispatches the process instance to the matching execution strategy. */
  @Override
  public void run() {

    // nothing to do without a process instance
    if (processInstance == null) {
      logger.info("process instance is not exists");
      return;
    }

    // already finished: do not re-run
    if (ExecutionStatus.of(processInstance.getState()).typeIsFinished()) {
      logger.info("process instance is done : {}", processInstance.getId());
      return;
    }

    try {
      ProcessDefinition processDefinition =
          processService.findProcessDefineById(processInstance.getProcessDefinitionId());
      String datasourceType = processDefinition.getDatasourceId();
      boolean isSparkFlow =
          datasourceType != null
              && (datasourceType.startsWith(ProcessDefinition.DATASOURCE_TYPE_SPARK)
                  || datasourceType.startsWith(ProcessDefinition.DATASOURCE_TYPE_PHM));
      if (isSparkFlow) {
        // spark/phm flows are executed as a single dedicated task
        executeSparkFlowProcess();
        return;
      }

      boolean isMainComplement =
          processInstance.isComplementData()
              && Flag.NO.getCode() == processInstance.getIsSubProcess();
      if (isMainComplement) {
        // complement (backfill) run over a date range; sub-processes use the normal path
        executeComplementProcess();
      } else {
        logger.info("开始执行流程");
        executeProcess();
      }
    } catch (Exception e) {
      logger.error("master exec thread exception", e);
      logger.error("process execute failed, process id:{}", processInstance.getId());
      processInstance.setState(ExecutionStatus.FAILURE.getCode());
      processInstance.setEndTime(new Date());
      processService.updateProcessInstance(processInstance);
    } finally {
      taskExecService.shutdown();
      postHandle();
    }
  }

  /**
   * Executes a spark-flow / phm process as one synthetic SPARKFLOW task and polls the
   * process-instance state until it reaches a stop state. The task instance is cached in
   * redis for 48 hours. The final instance state is set by the SparkFlowTask execution node.
   *
   * @throws Exception if building or submitting the task fails
   */
  private void executeSparkFlowProcess() throws Exception {
    TaskInstance taskInstance = new TaskInstance();
    taskInstance.setId(ComUtil.getId());
    taskInstance.setTaskType(TaskType.SPARKFLOW.name());
    taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS.getCode());
    taskInstance.setTaskInstancePriority(Priority.MEDIUM.getCode());
    taskInstance.setName("sparkFlowTask");
    taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId());
    taskInstance.setProcessInstanceId(processInstance.getId());
    // cache the task instance; expires after two days
    RedisCacheHelper.getInstance()
        .set("bdp:dds:task:" + taskInstance.getId(), JSONUtils.toJson(taskInstance));
    RedisCacheHelper.getInstance().expire("bdp:dds:task:" + taskInstance.getId(), 60 * 60 * 48);

    boolean submitted = false;
    while (!processInstance.isProcessInstanceStop()) {
      // submit exactly once, when capacity allows
      if (!submitted && canSubmitTaskToQueue()) {
        MasterBaseTaskExecThread execThread = new SparkFlowTaskExecThread(taskInstance);
        taskExecService.submit(execThread);
        submitted = true;
      }

      // BUGFIX: the original loop `continue`d past both the sleep and the state refresh
      // while the task future was not done, busy-spinning the CPU and never observing a
      // stop request; always throttle and refresh here instead.
      try {
        Thread.sleep(Constants.SLEEP_TIME_MILLIS);
      } catch (InterruptedException e) {
        logger.error(e.getMessage(), e);
        // restore the interrupt flag so the thread's owner can observe it
        Thread.currentThread().interrupt();
      }

      // refresh the instance from the DB and log state transitions
      ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
      ExecutionStatus state = ExecutionStatus.of(instance.getState());
      if (processInstance.getState() != state.getCode()) {
        logger.info(
            "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}",
            processInstance.getId(),
            processInstance.getName(),
            ExecutionStatus.of(processInstance.getState()),
            state,
            processInstance.getCommandType());
        instance.setProcessDefinition(processInstance.getProcessDefinition());
        processInstance = instance;
      }
    }

    endProcess();
    // the sparkFlow instance end state is controlled by the SparkFlowTask execution node
  }

  /**
   * Executes a normal (non-complement) run: build the DAG and task queues, run the DAG
   * to completion, then finalize the process instance.
   *
   * @throws Exception exception
   */
  private void executeProcess() throws Exception {
    prepareProcess();
    runProcess();
    endProcess();
  }

  /**
   * Executes a complement (backfill) run: iterates over the dates between the start/end
   * dates given in the command parameters and runs the full DAG once per date. With a
   * released schedule, the dates are the schedule's fire dates; otherwise it steps one
   * day at a time. Stops early if any iteration does not end in success.
   *
   * @throws Exception exception
   */
  private void executeComplementProcess() throws Exception {

    Map<String, String> cmdParam = JSONUtils.toMap(processInstance.getCommandParam());

    // complement date range comes from the command parameters
    Date startDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_START_DATE));
    Date endDate = DateUtils.getScheduleDate(cmdParam.get(CMDPARAM_COMPLEMENT_DATA_END_DATE));
    processService.saveProcessInstance(processInstance);

    // collect fire dates from all released schedules of this definition
    String processDefinitionId = processInstance.getProcessDefinitionId();
    List<Schedule> schedules =
        processService.queryReleaseSchedulerListByProcessDefinitionId(processDefinitionId);
    List<Date> listDate = Lists.newLinkedList();
    if (!CollectionUtils.isEmpty(schedules)) {
      for (Schedule schedule : schedules) {
        listDate.addAll(CronUtils.getSelfFireDateList(startDate, endDate, schedule.getCrontab()));
      }
    }
    // pick the first date: first fire date when schedules exist, otherwise the instance's
    // schedule time (falling back to the range start) with day-by-day stepping later
    Iterator<Date> iterator = null;
    Date scheduleDate;
    if (!CollectionUtils.isEmpty(listDate)) {
      iterator = listDate.iterator();
      scheduleDate = iterator.next();
      processInstance.setScheduleTime(scheduleDate);
      processService.updateProcessInstance(processInstance);
    } else {
      scheduleDate = processInstance.getScheduleTime();
      if (scheduleDate == null) {
        scheduleDate = startDate;
      }
    }

    while (Stopper.isRunning()) {

      logger.info(
          "process {} start to complement {} data",
          processInstance.getId(),
          DateUtils.dateToString(scheduleDate));
      // prepare dag and other info
      prepareProcess();

      if (dag == null) {
        logger.error(
            "process {} dag is null, please check out parameters", processInstance.getId());
        processInstance.setState(ExecutionStatus.SUCCESS.getCode());
        processService.updateProcessInstance(processInstance);
        return;
      }

      // execute the DAG, waiting until it ends
      runProcess();

      endProcess();
      // process instance failure: stop complementing further dates
      if (!ExecutionStatus.of(processInstance.getState()).typeIsSuccess()) {
        logger.info(
            "process {} state {}, complement not completely!",
            processInstance.getId(),
            processInstance.getState());
        break;
      }
      // current iteration succeeded: advance to the next date
      if (null == iterator) {
        // no schedule: loop by day
        scheduleDate = DateUtils.getSomeDay(scheduleDate, 1);
        if (scheduleDate.after(endDate)) {
          // all success
          logger.info("process {} complement completely!", processInstance.getId());
          break;
        }
      } else {
        // loop by schedule fire date
        if (!iterator.hasNext()) {
          // all success
          logger.info("process {} complement completely!", processInstance.getId());
          break;
        }
        scheduleDate = iterator.next();
      }
      // reset the instance so the next iteration runs as a fresh process instance
      processInstance.setScheduleTime(scheduleDate);
      if (cmdParam.containsKey(Constants.CMDPARAM_RECOVERY_START_NODE_STRING)) {
        // recovery start nodes only apply to the first complement iteration
        cmdParam.remove(Constants.CMDPARAM_RECOVERY_START_NODE_STRING);
        processInstance.setCommandParam(JSONUtils.toJson(cmdParam));
      }

      processInstance.setState(ExecutionStatus.RUNNING_EXEUTION.getCode());
      processInstance.setGlobalParams(
          ParameterUtils.curingGlobalParams(
              processInstance.getProcessDefinition().getGlobalParamMap(),
              processInstance.getProcessDefinition().getGlobalParamList(),
              CommandType.COMPLEMENT_DATA.getCode(),
              processInstance.getScheduleTime()));
      // NOTE(review): clearing the id presumably makes saveProcessInstance insert a new
      // row for the next iteration — confirm in ProcessService
      processInstance.setId("");
      processInstance.setStartTime(new Date());
      processInstance.setEndTime(null);
      processService.saveProcessInstance(processInstance);
    }
  }

  /**
   * Prepares a single run: resets the task bookkeeping queues and builds the DAG from the
   * process instance json.
   *
   * @throws Exception exception
   */
  private void prepareProcess() throws Exception {
    // reset queues and reload completed/failed tasks from the database
    initTaskQueue();

    // generate the process DAG
    logger.info("开始生成DAG图");
    buildFlowDag();
    logger.info("prepare process :{} end", processInstance.getId());
  }

  /**
   * Finalizes the process instance: persists the end time, queues a recovery command for
   * instances parked in waiting-thread state, and sends alert notifications for the run's
   * valid task instances.
   */
  private void endProcess() {
    processInstance.setEndTime(new Date());
    processService.updateProcessInstance(processInstance);
    if (ExecutionStatus.of(processInstance.getState()).typeIsWaitingThread()) {
      processService.createRecoveryWaitingThreadCommand(null, processInstance);
    }

    List<TaskInstance> taskInstances =
        processService.findValidTaskListByProcessId(processInstance.getId());
    if (CollectionUtils.isNotEmpty(taskInstances)) {
      try {
        alertManager.sendAlertProcessInstance(processInstance, taskInstances);
      } catch (Exception e) {
        // BUGFIX: was e.printStackTrace(); alert failures must not break finalization,
        // but they should be logged with context instead of dumped to stderr
        logger.error(
            "send alert for process instance {} failed", processInstance.getId(), e);
      }
    }
  }

  /**
   * Builds the in-memory DAG for this process instance from its definition json, honoring
   * recovery/start-node command parameters, forbidden tasks, and resource settings.
   *
   * @throws Exception exception
   */
  private void buildFlowDag() throws Exception {
    String commandParam = processInstance.getCommandParam();
    String instanceJson = processInstance.getProcessInstanceJson();

    recoverNodeIdList = getStartTaskInstanceList(commandParam);
    forbiddenTaskList = DagHelper.getForbiddenTaskNodeMaps(instanceJson);

    // generate the process dag limited to the requested start/recovery nodes
    List<String> recoveryNodeNames = getRecoveryNodeNameList();
    List<String> startNodeNames = parseStartNodeName(commandParam);
    ProcessDag processDag =
        generateFlowDag(
            instanceJson,
            startNodeNames,
            recoveryNodeNames,
            TaskDependType.of(processInstance.getTaskDependType()));
    if (processDag == null) {
      logger.error("processDag is null");
      return;
    }

    // attach per-process resource settings while building the graph
    List<ProcessResourceSettings> resourceSettings =
        processResourceConfigService.getByProcessDefinitionId(
            processInstance.getProcessDefinitionId());
    dag = DagHelper.buildDagGraph(processDag, resourceSettings);
  }

  /** Resets all bookkeeping maps and reloads completed/permanently-failed tasks from the DB. */
  private void initTaskQueue() {
    taskFailedSubmit = false;
    activeTaskNode.clear();
    dependFailedTask.clear();
    completeTaskList.clear();
    errorTaskList.clear();

    for (TaskInstance taskInstance :
        processService.findValidTaskListByProcessId(processInstance.getId())) {
      if (taskInstance.isTaskComplete()) {
        completeTaskList.put(taskInstance.getName(), taskInstance);
      }
      // failed with no retries left counts as a hard error
      boolean failedForGood =
          ExecutionStatus.of(taskInstance.getState()).typeIsFailure()
              && !taskInstance.taskCanRetry();
      if (failedForGood) {
        errorTaskList.put(taskInstance.getName(), taskInstance);
      }
    }
  }

  /** Post-run cleanup: evicts the redis definition key and removes the exec dir (non-dev only). */
  private void postHandle() {
    logger.info("develop mode is: {}", CommonUtils.isDevelopMode());
    RedisCacheHelper.getInstance()
        .del("bdp:dds:processDefinitionId:" + processInstance.getProcessDefinitionId());
    if (CommonUtils.isDevelopMode()) {
      // keep the exec dir around for debugging in develop mode
      return;
    }
    // delete the local execution directory of this run
    String execLocalPath =
        avicit.bdp.dds.common.utils.FileUtils.getProcessExecDir(
            processInstance.getProcessDefinition().getProjectId(),
            processInstance.getProcessDefinitionId(),
            processInstance.getId());
    try {
      FileUtils.deleteDirectory(new File(execLocalPath));
    } catch (IOException e) {
      logger.error("delete exec dir failed ", e);
    }
  }

  /**
   * Wraps the task instance in the matching exec-thread type and submits it to the pool,
   * registering the pair in the active task map.
   *
   * @param taskInstance task instance
   * @return the task instance held by the created exec thread
   */
  private TaskInstance submitTaskExec(TaskInstance taskInstance) {
    final MasterBaseTaskExecThread execThread;
    if (taskInstance.isSubProcess()) {
      execThread = new SubProcessTaskExecThread(taskInstance);
    } else if (taskInstance.isDependTask()) {
      execThread = new DependentTaskExecThread(taskInstance);
    } else if (taskInstance.isConditionsTask()) {
      execThread = new ConditionsTaskExecThread(taskInstance);
    } else {
      execThread = new MasterTaskExecThread(taskInstance);
    }
    activeTaskNode.putIfAbsent(execThread, taskExecService.submit(execThread));
    return execThread.getTaskInstance();
  }

  /**
   * Looks up an existing valid task instance by name, so that the same named task is not
   * submitted more than once at the same time.
   *
   * @param taskName task name
   * @return the matching task instance, or {@code null} if none exists
   */
  private TaskInstance findTaskIfExists(String taskName) {
    return processService.findValidTaskListByProcessId(this.processInstance.getId()).stream()
        .filter(taskInstance -> taskInstance.getName().equals(taskName))
        .findFirst()
        .orElse(null);
  }

  /**
   * Finds or creates the task instance for the given node. An existing valid instance with
   * the same name is reused; otherwise a new instance is built from the task node definition.
   *
   * @param processInstance process instance
   * @param nodeName node name
   * @param taskNode task node definition from the DAG
   * @return TaskInstance
   */
  private TaskInstance createTaskInstance(
      ProcessInstance processInstance, String nodeName, TaskNode taskNode) {

    TaskInstance taskInstance = findTaskIfExists(nodeName);
    if (taskInstance == null) {
      taskInstance = new TaskInstance();
      // task name
      taskInstance.setName(nodeName);
      // process definition id
      taskInstance.setProcessDefinitionId(processInstance.getProcessDefinitionId());
      // task instance state
      taskInstance.setState(ExecutionStatus.SUBMITTED_SUCCESS.getCode());
      // process instance id
      taskInstance.setProcessInstanceId(processInstance.getId());
      // hand process-instance global params (e.g. time params) to the node's local params
      setTaskNodeLocalParam(processInstance, taskInstance, taskNode);
      // task instance node json (serialized after local-param rewriting above)
      taskInstance.setTaskJson(JSON.toJSONString(taskNode));
      // task instance type
      taskInstance.setTaskType(taskNode.getType());
      // whether an alert has been sent for this task instance
      taskInstance.setAlertFlag(Flag.NO.getCode());

      // task instance start time
      taskInstance.setStartTime(new Date());

      // task instance flag
      taskInstance.setFlag(Flag.YES.getCode());

      // task instance retry times
      taskInstance.setRetryTimes(0);

      // max task instance retry times
      taskInstance.setMaxRetryTimes(taskNode.getMaxRetryTimes());

      // retry task instance interval
      taskInstance.setRetryInterval(taskNode.getRetryInterval());

      // task instance priority defaults to MEDIUM when the node sets none
      if (taskNode.getTaskInstancePriority() == null) {
        taskInstance.setTaskInstancePriority(Priority.MEDIUM.getCode());
      } else {
        taskInstance.setTaskInstancePriority(taskNode.getTaskInstancePriority().getCode());
      }

      // worker group: task-level group wins when set; a non-default process-level group
      // overrides a default task-level group
      String processWorkerGroup = processInstance.getWorkerGroup();
      processWorkerGroup =
          StringUtils.isBlank(processWorkerGroup) ? DEFAULT_WORKER_GROUP : processWorkerGroup;
      String taskWorkerGroup =
          StringUtils.isBlank(taskNode.getWorkerGroup())
              ? processWorkerGroup
              : taskNode.getWorkerGroup();
      if (!processWorkerGroup.equals(DEFAULT_WORKER_GROUP)
          && taskWorkerGroup.equals(DEFAULT_WORKER_GROUP)) {
        taskInstance.setWorkerGroup(processWorkerGroup);
      } else {
        taskInstance.setWorkerGroup(taskWorkerGroup);
      }
    }
    return taskInstance;
  }

  /**
   * Dynamically rewrites the task node's local parameters at run time (originally used to
   * replace the HDFS_FILE_PATH value): merges the process instance's extra params and any
   * upstream results from the result cache into the node's local parameter map, then writes
   * the merged params back onto the node.
   *
   * <p>When the node declares required param key names and none of them hits the result
   * cache, the node's run flag is set to FORBIDDEN so it will not be submitted.
   */
  private void setTaskNodeLocalParam(ProcessInstance processInstance, TaskInstance taskInstance, TaskNode taskNode) {
    // forbidden nodes are never executed, so their params need no rewriting
    if (taskNode.isForbidden()) {
      return;
    }
    AbstractParameters params = TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams());
    if (params != null) {
      taskInstance.setNeedReturnResult(params.isNeedReturnResult());
      Map<String, Property> localParametersMap = params.getLocalParametersMap();
      if (localParametersMap == null) {
        localParametersMap = new HashMap<>(16);
      }

      if (logger.isDebugEnabled()) {
        logger.debug("before replace, localParametersMap:{}", JSONObject.toJSONString(localParametersMap));
      }

      String extraParams = processInstance.getExtraParams();
      if (StringUtils.isNotBlank(extraParams)) {
        // merge the instance's extra params into the node's local params
        List<Property> properties = JSONObject.parseObject(extraParams,
                new TypeReference<List<Property>>() {
                });
        if (CollectionUtils.isNotEmpty(properties)) {
          Map<String, Property> globalParamsMap = parseListToMap(properties);
          if (globalParamsMap != null && !globalParamsMap.isEmpty()) {
            for (Map.Entry<String, Property> entry : globalParamsMap.entrySet()) {
              localParametersMap.put(entry.getKey(), entry.getValue());
              logger.debug("entry.getKey:{}", entry.getKey());
            }
          }
        }
      }

      // if the node needs params from the execution context, pull them from the result
      // cache; cached values override same-named params merged above. When none of the
      // declared keys hits the cache, forbid the node from running.
      boolean isHitCache = false;
      List<String> paramKeyNames = params.getParamKeyNames();
      if (CollectionUtils.isNotEmpty(paramKeyNames)) {
        for (String paramKeyName : paramKeyNames) {
          if (resultCache.containsKey(paramKeyName)) {
            localParametersMap.put(paramKeyName, resultCache.get(paramKeyName));
            isHitCache = true;
          }
        }
        if (!isHitCache) {
          taskNode.setRunFlag("FORBIDDEN");
        }
      }

      if (!localParametersMap.isEmpty()) {
        // write the merged params back so they are serialized into the task json
        Collection<Property> values = localParametersMap.values();
        params.setLocalParams(new ArrayList<>(values));
        taskNode.setParams(JSONUtils.toJsonString(params));
        if (logger.isDebugEnabled()) {
          logger.debug("after replace, localParametersMap:{}", JSONObject.toJSONString(localParametersMap));
        }
      }
    }
  }

  /**
   * A task node should be skipped when it has dependencies and every one of them is
   * already in the skip list.
   *
   * @param taskNode the task node to check
   * @return true if the node should be skipped
   */
  private boolean isTaskNodeNeedSkip(TaskNode taskNode) {
    List<String> dependencies = taskNode.getDepList();
    if (CollectionUtils.isEmpty(dependencies)) {
      return false;
    }
    return dependencies.stream().allMatch(skipTaskNodeList::containsKey);
  }

  /**
   * Marks the given nodes as skipped, then recursively propagates the skip flag to any
   * downstream node whose dependencies are now all skipped.
   *
   * @param taskNodesSkipList names of nodes to mark as skipped
   */
  private void setTaskNodeSkip(List<String> taskNodesSkipList) {
    for (String nodeName : taskNodesSkipList) {
      skipTaskNodeList.putIfAbsent(nodeName, dag.getNode(nodeName));
      // collect downstream nodes that become skippable because of this skip
      List<String> downstreamSkips = new ArrayList<>();
      for (String postName : DagHelper.getStartVertex(nodeName, dag, completeTaskList)) {
        if (isTaskNodeNeedSkip(dag.getNode(postName))) {
          downstreamSkips.add(postName);
        }
      }
      setTaskNodeSkip(downstreamSkips);
    }
  }

  /**
   * Resolves which branch of a conditions task to follow based on its completed state, and
   * marks the other branch as skipped. A node that is not a conditions task, or whose
   * completed instance cannot be found, yields an empty list; an unfinished conditions
   * task yields itself.
   *
   * @param nodeName name of the conditions node
   * @return the node names of the branch to execute
   */
  private List<String> parseConditionTask(String nodeName) {
    TaskNode conditionsNode = dag.getNode(nodeName);
    if (!conditionsNode.isConditionsTask()) {
      return new ArrayList<>();
    }
    ConditionsParameters conditionsParameters =
        JSONUtils.parseObject(conditionsNode.getConditionResult(), ConditionsParameters.class);

    TaskInstance completedInstance = completeTaskList.get(nodeName);
    if (completedInstance == null) {
      logger.error("task instance {} cannot find, please check it!", nodeName);
      return new ArrayList<>();
    }

    ExecutionStatus state = ExecutionStatus.of(completedInstance.getState());
    if (state.typeIsSuccess()) {
      // success: follow the success branch, skip the failure branch
      setTaskNodeSkip(conditionsParameters.getFailedNode());
      return conditionsParameters.getSuccessNode();
    }
    if (state.typeIsFailure()) {
      // failure: follow the failure branch, skip the success branch
      setTaskNodeSkip(conditionsParameters.getSuccessNode());
      return conditionsParameters.getFailedNode();
    }
    // not finished yet: return the conditions node itself
    List<String> pending = new ArrayList<>();
    pending.add(nodeName);
    return pending;
  }

  /**
   * Resolves the nodes to run after the given node. A conditions node resolves to its
   * chosen branch; successors that already succeeded are traversed through to their own
   * successors; successors whose dependencies were all skipped are marked skipped.
   *
   * @param previousNodeName previousNodeName
   * @return post node list
   */
  private List<String> parsePostNodeList(String previousNodeName) {
    List<String> postNodeList = new ArrayList<>();

    TaskNode taskNode = dag.getNode(previousNodeName);
    if (taskNode != null && taskNode.isConditionsTask()) {
      return parseConditionTask(previousNodeName);
    }
    Collection<String> postNodeCollection =
        DagHelper.getStartVertex(previousNodeName, dag, completeTaskList);
    List<String> postSkipList = new ArrayList<>();
    // skip past completed nodes and parse their successors;
    // for conditions nodes:
    //  1. parse the branch chosen according to the conditions setting
    //  2. set the skip flag on the other branch
    for (String postNode : postNodeCollection) {
      if (completeTaskList.containsKey(postNode)) {
        TaskInstance postTaskInstance = completeTaskList.get(postNode);
        if (dag.getNode(postNode).isConditionsTask()) {
          // recurse into the branch chosen by the conditions node
          List<String> conditionTaskNodeList = parseConditionTask(postNode);
          for (String conditions : conditionTaskNodeList) {
            postNodeList.addAll(parsePostNodeList(conditions));
          }
        } else if (ExecutionStatus.of(postTaskInstance.getState()).typeIsSuccess()) {
          // already succeeded: continue with its successors
          postNodeList.addAll(parsePostNodeList(postNode));
        } else {
          // completed but not successful: re-queue the node itself
          postNodeList.add(postNode);
        }

      } else if (isTaskNodeNeedSkip(dag.getNode(postNode))) {
        // every dependency of this node was skipped: propagate the skip
        postSkipList.add(postNode);
        setTaskNodeSkip(postSkipList);
        postSkipList.clear();
      } else {
        postNodeList.add(postNode);
      }
    }
    return postNodeList;
  }

  /**
   * Computes the downstream nodes of the given parent and queues those that are ready to
   * run (not already queued, completed, paused or cancelled) on the stand-by list.
   *
   * @param parentNodeName parent node name
   */
  private void submitPostNode(String parentNodeName) {

    List<String> submitTaskNodeList = parsePostNodeList(parentNodeName);

    List<TaskInstance> taskInstances = new ArrayList<>();
    for (String taskNodeName : submitTaskNodeList) {

      // TODO when taskNode.getRunFlag() equals FORBIDDEN, the node must not execute;
      // TODO used for semi-structured collection: if the file-path node returns no files,
      //      the downstream datax node is not executed
      TaskNode taskNode = dag.getNode(taskNodeName);
      // some nodes omit the run flag; default to NORMAL for compatibility
      if (taskNode.getRunFlag() == null) {
        taskNode.setRunFlag("NORMAL");
      }
      // note: createTaskInstance may itself set the flag to FORBIDDEN (see setTaskNodeLocalParam)
      TaskInstance instance = createTaskInstance(processInstance, taskNodeName, taskNode);
      if (!"FORBIDDEN".equals(taskNode.getRunFlag())) {
        taskInstances.add(instance);
      }
    }

    // if the previous node succeeded, submit the post nodes
    for (TaskInstance task : taskInstances) {
      if (readyToSubmitTaskList.containsKey(task.getName())) {
        continue;
      }

      if (completeTaskList.containsKey(task.getName())) {
        logger.info("task {} has already run success", task.getName());
        continue;
      }
      if (ExecutionStatus.of(task.getState()).typeIsPause()
          || ExecutionStatus.of(task.getState()).typeIsCancel()) {
        logger.info("task {} stopped, the state is {}", task.getName(), task.getState());
      } else {
        addTaskToStandByList(task);
      }
    }
  }

  /**
   * Determines whether all dependencies of the given task are satisfied.
   *
   * @param taskName name of the task node to check
   * @return SUCCESS when every relevant dependency completed successfully (or the node is a
   *     start vertex), FAILED when a dependency failed for good, WAITING otherwise
   */
  private DependResult isTaskDepsComplete(String taskName) {

    // start vertices have no dependencies by definition
    if (dag.getBeginNode().contains(taskName)) {
      return DependResult.SUCCESS;
    }

    for (String depName : dag.getNode(taskName).getDepList()) {
      // dependencies outside the dag, forbidden, or skipped do not count
      boolean irrelevant =
          !dag.containsNode(depName)
              || forbiddenTaskList.containsKey(depName)
              || skipTaskNodeList.containsKey(depName);
      if (irrelevant) {
        continue;
      }
      TaskInstance depTask = completeTaskList.get(depName);
      if (depTask == null) {
        // dependency has not finished yet
        return DependResult.WAITING;
      }
      ExecutionStatus depState = ExecutionStatus.of(depTask.getState());
      // a failed dependency blocks the task unless a conditions node consumes the failure
      if (depState.typeIsFailure()
          && !DagHelper.haveConditionsAfterNode(depName, dag)
          && !dag.getNode(depName).isConditionsTask()) {
        return DependResult.FAILED;
      }
      if (depState.typeIsPause() || depState.typeIsCancel()) {
        return DependResult.WAITING;
      }
    }

    logger.info(
        "taskName: {} completeDependTaskList: {}",
        taskName,
        Arrays.toString(completeTaskList.keySet().toArray()));

    return DependResult.SUCCESS;
  }

  /**
   * Collects completed task instances whose state code equals the given state.
   *
   * @param state state code to match
   * @return matching task instances (possibly empty, never null)
   */
  private List<TaskInstance> getCompleteTaskByState(Integer state) {
    List<TaskInstance> matched = new ArrayList<>();
    for (TaskInstance taskInstance : completeTaskList.values()) {
      if (taskInstance.getState().equals(state)) {
        matched.add(taskInstance);
      }
    }
    return matched;
  }

  /**
   * Resolve the instance state while there are ongoing tasks.
   *
   * <p>READY_STOP, READY_PAUSE and WAITTING_THREAD are transitional states that must be kept as
   * long as running tasks have not completed; anything else maps to RUNNING_EXEUTION.
   *
   * @param state current instance state
   * @return ExecutionStatus to report while tasks are still active
   */
  private ExecutionStatus runningState(ExecutionStatus state) {
    boolean keepCurrentState =
        state == ExecutionStatus.READY_STOP
            || state == ExecutionStatus.READY_PAUSE
            || state == ExecutionStatus.WAITTING_THREAD;
    return keepCurrentState ? state : ExecutionStatus.RUNNING_EXEUTION;
  }

  /**
   * Whether any failure has been recorded: a submit failure, an execution failure (after retries),
   * or a dependency failure.
   *
   * @return true if at least one kind of task failure exists
   */
  private boolean hasFailedTask() {
    return this.taskFailedSubmit
        || !this.errorTaskList.isEmpty()
        || !this.dependFailedTask.isEmpty();
  }

  /**
   * Whether the process instance as a whole has failed, taking the failure strategy into account.
   *
   * <p>With strategy END a single task failure fails the instance immediately; with CONTINUE the
   * instance only fails once no tasks are left to submit or run.
   *
   * @return true if the process instance should be considered failed
   */
  private boolean processFailed() {
    if (!hasFailedTask()) {
      return false;
    }
    if (processInstance.getFailureStrategy() == FailureStrategy.END.getCode()) {
      return true;
    }
    if (processInstance.getFailureStrategy() == FailureStrategy.CONTINUE.getCode()) {
      return readyToSubmitTaskList.isEmpty() || activeTaskNode.isEmpty();
    }
    return false;
  }

  /**
   * Whether any completed task ended in the WAITTING_THREAD state.
   *
   * @return true if at least one task is waiting for a thread
   */
  private boolean hasWaitingThreadTask() {
    return !getCompleteTaskByState(ExecutionStatus.WAITTING_THREAD.getCode()).isEmpty();
  }

  /**
   * Resolve the instance state when a pause has been requested.
   *
   * <p>Rules: (1) a failed task waiting for retry in the standby queue turns the pause into a
   * failure; (2) paused tasks, an unfinished complement, or pending submissions keep the instance
   * PAUSE; (3) otherwise the instance is SUCCESS.
   *
   * @return ExecutionStatus FAILURE, PAUSE or SUCCESS
   */
  private ExecutionStatus processReadyPause() {
    if (hasRetryTaskInStandBy()) {
      return ExecutionStatus.FAILURE;
    }

    boolean stillPausing =
        !getCompleteTaskByState(ExecutionStatus.PAUSE.getCode()).isEmpty()
            || !isComplementEnd()
            || !readyToSubmitTaskList.isEmpty();
    return stillPausing ? ExecutionStatus.PAUSE : ExecutionStatus.SUCCESS;
  }

  /**
   * Generate the latest process instance status from the states of its tasks.
   *
   * <p>The checks are evaluated in strict priority order: active/retrying tasks keep the instance
   * running; then failure, waiting-thread, ready-pause and ready-stop are resolved; finally a
   * running instance with an empty standby queue is considered successful. Reordering these
   * branches would change the reported state.
   *
   * @return process instance execution status
   */
  private ExecutionStatus getProcessInstanceState() {
    // re-read the instance so the decision is based on the persisted state
    ProcessInstance instance = processService.findProcessInstanceById(processInstance.getId());
    ExecutionStatus state = ExecutionStatus.of(instance.getState());

    if (activeTaskNode.size() > 0 || retryTaskExists()) {
      // active task and retry task exists
      return runningState(state);
    }
    // process failure
    if (processFailed()) {
      return ExecutionStatus.FAILURE;
    }

    // waiting thread
    if (hasWaitingThreadTask()) {
      return ExecutionStatus.WAITTING_THREAD;
    }

    // pause
    if (state == ExecutionStatus.READY_PAUSE) {
      return processReadyPause();
    }

    // stop: stopped/killed tasks or an unfinished complement keep the STOP state
    if (state == ExecutionStatus.READY_STOP) {
      List<TaskInstance> stopList = getCompleteTaskByState(ExecutionStatus.STOP.getCode());
      List<TaskInstance> killList = getCompleteTaskByState(ExecutionStatus.KILL.getCode());
      if (CollectionUtils.isNotEmpty(stopList)
          || CollectionUtils.isNotEmpty(killList)
          || !isComplementEnd()) {
        return ExecutionStatus.STOP;
      } else {
        return ExecutionStatus.SUCCESS;
      }
    }

    // success
    if (state == ExecutionStatus.RUNNING_EXEUTION) {
      List<TaskInstance> killTasks = getCompleteTaskByState(ExecutionStatus.KILL.getCode());
      if (readyToSubmitTaskList.size() > 0) {
        // tasks currently pending submission, no retries, indicating that depend is waiting to
        // complete
        return ExecutionStatus.RUNNING_EXEUTION;
      } else if (CollectionUtils.isNotEmpty(killTasks)) {
        // tasks maybe killed manually
        return ExecutionStatus.FAILURE;
      } else {
        //  if the waiting queue is empty and the status is in progress, then success
        return ExecutionStatus.SUCCESS;
      }
    }

    // no rule matched: keep the persisted state unchanged
    return state;
  }

  /**
   * Whether the standby (ready-to-submit) list contains failed tasks awaiting retry.
   *
   * <p>Delegates to {@link #hasRetryTaskInStandBy()}, which implements the identical check; the
   * previous hand-rolled loop over {@code readyToSubmitTaskList} duplicated that logic.
   *
   * @return true if any standby task is in a failure state
   */
  private boolean retryTaskExists() {
    return hasRetryTaskInStandBy();
  }

  /**
   * Whether a complement (backfill) run has reached its configured end date.
   *
   * @return true for non-complement runs, or when the schedule time equals the complement end
   *     date; false when the complement is still in progress or the end date cannot be parsed
   */
  private boolean isComplementEnd() {
    // non-complement runs have no end date to reach
    if (!processInstance.isComplementData()) {
      return true;
    }

    try {
      Map<String, String> commandParams = JSONUtils.toMap(processInstance.getCommandParam());
      Date complementEndDate =
          DateUtils.getScheduleDate(commandParams.get(CMDPARAM_COMPLEMENT_DATA_END_DATE));
      return processInstance.getScheduleTime().equals(complementEndDate);
    } catch (Exception e) {
      logger.error("complement end failed ", e);
      return false;
    }
  }

  /**
   * Refresh the process instance state after each batch of tasks has been processed.
   *
   * <p>Computes the latest state from the task states and, only when it differs from the current
   * one, persists it and replaces the in-memory instance with the refreshed copy.
   */
  private void updateProcessInstanceState() {
    ExecutionStatus latestState = getProcessInstanceState();
    if (processInstance.getState() == latestState.getCode()) {
      return;
    }
    logger.info(
        "work flow process instance [id: {}, name:{}], state change from {} to {}, cmd type: {}",
        processInstance.getId(),
        processInstance.getName(),
        processInstance.getState(),
        latestState,
        processInstance.getCommandType());

    // re-read, mutate and persist, then adopt the refreshed row as the current instance
    ProcessInstance refreshed = processService.findProcessInstanceById(processInstance.getId());
    refreshed.setState(latestState.getCode());
    refreshed.setProcessDefinition(processInstance.getProcessDefinition());
    processService.updateProcessInstance(refreshed);
    processInstance = refreshed;
  }

  /**
   * Get the dependency result for a task instance.
   *
   * <p>Thin wrapper over {@link #isTaskDepsComplete(String)}, keyed by the task's name.
   *
   * @param taskInstance task instance
   * @return DependResult SUCCESS / WAITING / FAILED
   */
  private DependResult getDependResultForTask(TaskInstance taskInstance) {
    return isTaskDepsComplete(taskInstance.getName());
  }

  /**
   * Add a task to the standby (ready-to-submit) list.
   *
   * <p>Uses putIfAbsent, so a task already waiting under the same name is not replaced.
   *
   * @param taskInstance task instance
   */
  private void addTaskToStandByList(TaskInstance taskInstance) {
    logger.info("add task to stand by list: {}", taskInstance.getName());
    readyToSubmitTaskList.putIfAbsent(taskInstance.getName(), taskInstance);
  }

  /**
   * Remove a task from the standby (ready-to-submit) list, keyed by task name.
   *
   * @param taskInstance task instance
   */
  private void removeTaskFromStandbyList(TaskInstance taskInstance) {
    logger.info("remove task from stand by list: {}", taskInstance.getName());
    readyToSubmitTaskList.remove(taskInstance.getName());
  }

  /**
   * Whether the standby list contains any task in a failure state (i.e. waiting for retry).
   *
   * @return true if at least one standby task failed
   */
  private boolean hasRetryTaskInStandBy() {
    return readyToSubmitTaskList.values().stream()
        .anyMatch(task -> ExecutionStatus.of(task.getState()).typeIsFailure());
  }

  /**
   * Submit and watch the tasks until the work flow stops.
   *
   * <p>Main scheduling loop: submits the start nodes, then repeatedly polls the active task
   * threads, promotes successors of finished tasks, applies the failure strategy, submits standby
   * tasks when resources allow, and refreshes the process instance state until
   * {@code processInstance.isProcessInstanceStop()} becomes true.
   */
  private void runProcess() {
    logger.info("开始提交和监控任务");
    // submit start node
    submitPostNode(null);
    boolean sendTimeWarning = false;
    while (!processInstance.isProcessInstanceStop()) {

      // send warning email if process time out (at most once per run).
      if (!sendTimeWarning && checkProcessTimeOut(processInstance)) {
        alertManager.sendProcessTimeoutAlert(
            processInstance,
            processService.findProcessDefineById(processInstance.getProcessDefinitionId()));
        sendTimeWarning = true;
      }
      for (Map.Entry<MasterBaseTaskExecThread, Future<TwoTuple<Boolean, String>>> entry :
          activeTaskNode.entrySet()) {
        Future<TwoTuple<Boolean, String>> future = entry.getValue();
        TaskInstance task = entry.getKey().getTaskInstance();

        if (!future.isDone()) {
          continue;
        }

        // node monitor thread complete: re-read the task instance from storage
        task = this.processService.findTaskInstanceById(task.getId());

        if (task == null) {
          // instance vanished from storage -- treat as a submit failure
          this.taskFailedSubmit = true;
          activeTaskNode.remove(entry.getKey());
          continue;
        }
        ExecutionStatus state = ExecutionStatus.of(task.getState());
        // node monitor thread complete
        if (state.typeIsFinished()) {
          activeTaskNode.remove(entry.getKey());
          // print the task-finished summary log
          printFinishTaskLog(task);
        }

        logger.info(
            "task :{}, id:{} complete, state is {} ",
            task.getName(),
            task.getId(),
            task.getState());
        // node success , post node submit
        if (state == ExecutionStatus.SUCCESS) {
          TaskNode taskNode = dag.getNode(task.getName());
          AbstractParameters params =
              TaskParametersUtils.getParameters(taskNode.getType(), taskNode.getParams());
          if (params != null && params.isNeedReturnResult()) {
            String resultParamName = params.getResultKeyName();
            try {
              String executeResult = future.get().getSecond();
              if (StringUtils.isNotEmpty(executeResult)) {
                Property resultProperty =
                    new Property(resultParamName, Direct.OUT, DataType.VARCHAR, executeResult);
                resultCache.put(resultParamName, resultProperty);
              }
            } catch (Exception e) {
              // fix: route through the class logger instead of printStackTrace(), so the
              // failure is captured by the logging framework with its cause
              logger.error("get task execute result failed, task: {}", task.getName(), e);
            }
          }
          completeTaskList.put(task.getName(), task);
          submitPostNode(task.getName());

          // TODO notify the DCS collection system of the task execution result
          this.processService.updateDCSTaskStatus(task);

          // update the data quality task status
          this.processService.updateDgsQualityTaskStatus(task);

          continue;
        }
        // node fails, retry first, and then execute the failure process
        if (state.typeIsFailure()) {
          if (state == ExecutionStatus.NEED_FAULT_TOLERANCE) {
            this.recoverToleranceFaultTaskList.add(task);
          }
          if (task.taskCanRetry()) {
            addTaskToStandByList(task);
          } else {
            completeTaskList.put(task.getName(), task);
            if (task.isConditionsTask() || DagHelper.haveConditionsAfterNode(task.getName(), dag)) {
              // a conditions node downstream absorbs the failure; keep going
              submitPostNode(task.getName());
            } else {
              errorTaskList.put(task.getName(), task);
              if (processInstance.getFailureStrategy() == FailureStrategy.END.getCode()) {
                killTheOtherTasks();
              }
            }
          }

          // update the data quality task status
          this.processService.updateDgsQualityTaskStatus(task);

          continue;
        }
        // other status stop/pause
        completeTaskList.put(task.getName(), task);
      }
      // send alert
      if (CollectionUtils.isNotEmpty(this.recoverToleranceFaultTaskList)) {
        alertManager.sendAlertWorkerToleranceFault(processInstance, recoverToleranceFaultTaskList);
        this.recoverToleranceFaultTaskList.clear();
      }
      // updateProcessInstance completed task status
      // failure priority is higher than pause
      // if a task fails, other suspended tasks need to be reset kill
      if (errorTaskList.size() > 0) {
        for (Map.Entry<String, TaskInstance> entry : completeTaskList.entrySet()) {
          TaskInstance completeTask = entry.getValue();
          if (completeTask.getState() == ExecutionStatus.PAUSE.getCode()) {
            completeTask.setState(ExecutionStatus.KILL.getCode());
            completeTaskList.put(entry.getKey(), completeTask);
            processService.updateTaskInstance(completeTask);
          }
        }
      }
      if (canSubmitTaskToQueue()) {
        submitStandByTask();
      }
      try {
        Thread.sleep(Constants.SLEEP_TIME_MILLIS);
      } catch (InterruptedException e) {
        // NOTE(review): the interrupt flag is deliberately not restored here -- re-interrupting
        // would make every subsequent sleep in this loop throw immediately; confirm intended
        logger.error(e.getMessage(), e);
      }
      updateProcessInstanceState();
    }

    logger.info("process:{} end, state :{}", processInstance.getId(), processInstance.getState());
  }

  /**
   * Emit a unified "task finished" summary line for a task.
   *
   * <p>Builds a task-scoped logger (so the message lands in the task's own log stream) and writes
   * the summary asynchronously on a short-lived daemon thread.
   *
   * @param task the finished task instance
   */
  private void printFinishTaskLog(TaskInstance task) {
    // custom logger keyed by process definition / process instance / task ids
    Logger taskLogger =
        LoggerFactory.getLogger(
            LoggerUtils.buildTaskId(
                LoggerUtils.TASK_LOGGER_INFO_PREFIX,
                task.getProcessDefinitionId(),
                task.getProcessInstanceId(),
                task.getId()));
    String loggerThreadName =
        String.format(LoggerUtils.TASK_LOGGER_THREAD_NAME + "-%s", task.getId());
    StringBuilder summary = new StringBuilder();
    summary
        .append("\r\n\n\t任务节点名称：")
        .append(task.getName())
        .append("\r\n\t任务节点类型：")
        .append(task.getTaskType())
        .append("\r\n\t任务执行状态：")
        .append(ExecutionStatus.of(task.getState()).getDescp())
        .append("\r\n\t任务开始时间：")
        .append(DateUtils.format(task.getStartTime()))
        .append("\r\n\t任务结束时间：")
        .append(DateUtils.format(task.getEndTime()));
    String summaryText = summary.toString();
    // log on a dedicated daemon thread, then release the executor immediately
    ExecutorService logExecutor = ThreadUtils.newDaemonSingleThreadExecutor(loggerThreadName);
    logExecutor.submit(() -> taskLogger.info(summaryText));
    logExecutor.shutdown();
  }

  /**
   * Whether the process instance has exceeded its configured timeout.
   *
   * @param processInstance the process instance to check
   * @return true when the running time (minutes) exceeds the configured timeout; false when no
   *     timeout is configured (timeout == 0)
   */
  private boolean checkProcessTimeOut(ProcessInstance processInstance) {
    // timeout of 0 means no timeout configured
    if (processInstance.getTimeout() == 0) {
      return false;
    }

    long elapsedMinutes = DateUtils.diffMin(new Date(), processInstance.getStartTime());
    return elapsedMinutes > processInstance.getTimeout();
  }

  /**
   * Whether a task may be submitted to the queue right now.
   *
   * <p>Delegates to an OS resource check against the master's configured max CPU load average and
   * reserved memory.
   *
   * @return true if the host currently has enough free resources
   */
  private boolean canSubmitTaskToQueue() {
    return OSUtils.checkResource(
        masterConfig.getMasterMaxCpuloadAvg(), masterConfig.getMasterReservedMemory());
  }

  /**
   * Kill every still-running task of this process instance.
   *
   * <p>Tasks whose persisted state is already finished, or whose monitor future has completed,
   * are left untouched.
   */
  private void killTheOtherTasks() {
    logger.info(
        "kill called on process instance id: {}, num: {}",
        processInstance.getId(),
        activeTaskNode.size());
    for (Map.Entry<MasterBaseTaskExecThread, Future<TwoTuple<Boolean, String>>> activeEntry :
        activeTaskNode.entrySet()) {
      MasterBaseTaskExecThread execThread = activeEntry.getKey();
      Future<TwoTuple<Boolean, String>> execFuture = activeEntry.getValue();

      // re-read the persisted state; an already-finished task needs no kill
      TaskInstance latest =
          processService.findTaskInstanceById(execThread.getTaskInstance().getId());
      if (latest != null && ExecutionStatus.of(latest.getState()).typeIsFinished()) {
        continue;
      }
      if (execFuture.isDone()) {
        continue;
      }

      // record kill info
      logger.info(
          "kill process instance, id: {}, task: {}",
          processInstance.getId(),
          execThread.getTaskInstance().getId());

      //  kill node
      execThread.kill();
    }
  }

  /**
   * Whether the retry interval of a failed task has elapsed, i.e. the task may be resubmitted.
   *
   * @param taskInstance task instance
   * @return true when the task is not in FAILURE state, has no retry configuration, or its
   *     configured interval since failure has passed; false while it must keep waiting
   */
  private boolean retryTaskIntervalOverTime(TaskInstance taskInstance) {
    // only failed tasks are throttled by the retry interval
    if (taskInstance.getState() != ExecutionStatus.FAILURE.getCode()) {
      return true;
    }
    boolean noWaitRequired =
        StringUtils.isEmpty(taskInstance.getId())
            || taskInstance.getMaxRetryTimes() == 0
            || taskInstance.getRetryInterval() == 0;
    if (noWaitRequired) {
      return true;
    }
    // resubmit only after the interval (minutes, converted to seconds) has elapsed
    long secondsSinceFailure = DateUtils.differSec(new Date(), taskInstance.getEndTime());
    return taskInstance.getRetryInterval() * SEC_2_MINUTES_TIME_UNIT < secondsSinceFailure;
  }

  /**
   * Handle the standby (ready-to-submit) task list.
   *
   * <p>Tasks whose dependencies succeeded are submitted (subject to the retry interval) and
   * removed from the list; tasks with a failed dependency are moved to the dependency-failed map
   * without ever being submitted. Tasks still WAITING remain in the list for the next pass.
   */
  private void submitStandByTask() {
    for (Map.Entry<String, TaskInstance> standbyEntry : readyToSubmitTaskList.entrySet()) {
      TaskInstance standbyTask = standbyEntry.getValue();
      DependResult dependResult = getDependResultForTask(standbyTask);
      if (dependResult == DependResult.SUCCESS) {
        // honour the retry interval before resubmitting a previously failed task
        if (retryTaskIntervalOverTime(standbyTask)) {
          submitTaskExec(standbyTask);
          removeTaskFromStandbyList(standbyTask);
        }
      } else if (dependResult == DependResult.FAILED) {
        // a failed dependency means this node can never run; mark it dependency-failed
        dependFailedTask.put(standbyEntry.getKey(), standbyTask);
        removeTaskFromStandbyList(standbyTask);
        logger.info(
            "task {},id:{} depend result : {}",
            standbyTask.getName(),
            standbyTask.getId(),
            dependResult);
      }
    }
  }

  /**
   * Fetch a recovery task instance by id.
   *
   * <p>Idiom fix: the blank-id guard previously read {@code !StringUtils.isNotEmpty(taskId)};
   * the double negative is replaced by the equivalent {@code StringUtils.isEmpty(taskId)}.
   *
   * @param taskId task id; blank/null yields {@code null}
   * @return the task instance, or {@code null} when the id is blank, not found, or lookup fails
   */
  private TaskInstance getRecoveryTaskInstance(String taskId) {
    if (StringUtils.isEmpty(taskId)) {
      return null;
    }
    try {
      TaskInstance task = processService.findTaskInstanceById(taskId);
      if (task == null) {
        logger.error("start node id cannot be found: {}", taskId);
      } else {
        return task;
      }
    } catch (Exception e) {
      logger.error("get recovery task instance failed ", e);
    }
    return null;
  }

  /**
   * Build the list of task instances to recover from the command parameters.
   *
   * <p>Reads the recovery node id list from the command param (comma separated) and resolves each
   * id to a task instance, skipping ids that cannot be resolved.
   *
   * @param cmdParam command param json
   * @return resolved recovery task instances (possibly empty, never null)
   */
  private List<TaskInstance> getStartTaskInstanceList(String cmdParam) {
    List<TaskInstance> recoveryInstances = new ArrayList<>();
    Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
    if (paramMap == null || !paramMap.containsKey(CMDPARAM_RECOVERY_START_NODE_STRING)) {
      return recoveryInstances;
    }
    for (String nodeId :
        paramMap.get(CMDPARAM_RECOVERY_START_NODE_STRING).split(Constants.COMMA)) {
      TaskInstance recovered = getRecoveryTaskInstance(nodeId);
      if (recovered != null) {
        recoveryInstances.add(recovered);
      }
    }
    return recoveryInstances;
  }

  /**
   * Parse the start node name list from the command param.
   *
   * @param cmdParam command param json
   * @return start node names (comma separated in the param), or an empty list when absent
   */
  private List<String> parseStartNodeName(String cmdParam) {
    Map<String, String> paramMap = JSONUtils.toMap(cmdParam);
    if (paramMap == null || !paramMap.containsKey(CMDPARAM_START_NODE_NAMES)) {
      return new ArrayList<>();
    }
    return Arrays.asList(paramMap.get(CMDPARAM_START_NODE_NAMES).split(Constants.COMMA));
  }

  /**
   * Collect the names of the recovery nodes.
   *
   * @return names of all tasks in {@code recoverNodeIdList}, or an empty list when there are none
   */
  private List<String> getRecoveryNodeNameList() {
    if (CollectionUtils.isEmpty(recoverNodeIdList)) {
      return new ArrayList<>();
    }
    List<String> recoveryNames = new ArrayList<>(recoverNodeIdList.size());
    for (TaskInstance recoveryTask : recoverNodeIdList) {
      recoveryNames.add(recoveryTask.getName());
    }
    return recoveryNames;
  }

  /**
   * Generate the flow DAG for this process.
   *
   * <p>Thin delegate to {@link DagHelper#generateFlowDag}; kept as an instance method so callers
   * of this thread do not depend on the helper directly.
   *
   * @param processDefinitionJson process definition json
   * @param startNodeNameList start node name list
   * @param recoveryNodeNameList recovery node name list
   * @param depNodeType depend node type
   * @return ProcessDag process dag
   * @throws Exception when the definition json cannot be turned into a dag
   */
  public ProcessDag generateFlowDag(
      String processDefinitionJson,
      List<String> startNodeNameList,
      List<String> recoveryNodeNameList,
      TaskDependType depNodeType)
      throws Exception {
    return DagHelper.generateFlowDag(
        processDefinitionJson, startNodeNameList, recoveryNodeNameList, depNodeType);
  }

  /**
   * Convert a property list into a map keyed by property name.
   *
   * @param list property list, may be null or empty
   * @return LinkedHashMap preserving list order; {@code null} when the list is null/empty
   *     (callers rely on the null return, so it is kept)
   */
  private Map<String, Property> parseListToMap(List<Property> list) {
    if (CollectionUtils.isEmpty(list)) {
      return null;
    }
    Map<String, Property> propertyMap = new LinkedHashMap<>();
    for (Property item : list) {
      propertyMap.put(item.getProp(), item);
    }
    return propertyMap;
  }
}
