package com.niodata.dp.monitor.core.task;

import com.niodata.dp.log.LogUtil;
import com.niodata.dp.monitor.common.configure.MonitorConfigurationKeys;
import com.niodata.dp.monitor.common.util.JsonUtil;
import com.niodata.dp.monitor.components.task.dao.TaskExecutionMapper;
import com.niodata.dp.monitor.core.task.az.AzFlowExecutionHistory;
import com.niodata.dp.monitor.core.task.az.AzkabanApi;
import com.niodata.dp.monitor.core.task.az.Executor;
import com.niodata.dp.monitor.core.task.az.ExecutorInfo;
import com.niodata.dp.monitor.core.task.az.FlowExecutionDetail;
import com.niodata.dp.monitor.core.task.dao.JobLogMappingMonitorMapper;
import com.niodata.dp.monitor.core.task.dao.TaskMonitorMapper;
import com.niodata.dp.monitor.core.task.entity.ExpectObj;
import com.niodata.dp.monitor.core.task.entity.FlowExecutionHistory;
import com.niodata.dp.monitor.core.task.entity.JobLogMapping;
import com.niodata.dp.monitor.core.task.entity.LogType;
import com.niodata.dp.monitor.core.task.entity.TaskInfo;
import com.niodata.dp.monitor.sharelib.entity.az.FlowExecutor;
import com.niodata.dp.monitor.sharelib.entity.task.TaskExecution;
import com.niodata.dp.util.TimeUtil;
import jakarta.annotation.PostConstruct;
import java.io.File;
import java.io.IOException;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.log4j.Logger;
import org.quartz.TriggerUtils;
import org.quartz.impl.triggers.CronTriggerImpl;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.ObjectUtils;

@Service("taskManagerImpl")
@Transactional
public class TaskManagerImpl implements TaskExecutionManager {

  private static final Logger logger = Logger.getLogger(TaskManagerImpl.class);
  // Interval (seconds) between periodic task-cache refreshes.
  private final int taskRefreshSeconds = 60;
  private AzkabanApi azkabanApi;
  @Value("${" + MonitorConfigurationKeys.AZ_SERVER_URL + "}")
  private String azServerUrl;
  @Value("${" + MonitorConfigurationKeys.AZ_USER + "}")
  private String azUser;
  @Value("${" + MonitorConfigurationKeys.AZ_PASSWORD + "}")
  private String azUserPassword;
  @Value("${" + MonitorConfigurationKeys.AZ_EXEC_HOME + "}")
  private String azExecHome;
  @Value("${" + MonitorConfigurationKeys.API_SERVER + "}")
  private String apiServer;
  @Value("${" + MonitorConfigurationKeys.API_SERVER_PORT + "}")
  private String apiServerPort;
  @Value("${" + MonitorConfigurationKeys.GAVIAL_URL + "}")
  private String dpUrl;
  @Value("${" + MonitorConfigurationKeys.BASE_AZ_LOG_DIR + "}")
  private String baseAzLogDir;
  @Autowired
  private TaskMonitorMapper taskMapper;
  @Autowired
  private TaskExecutionMapper executionMapper;
  @Autowired
  private JobLogMappingMonitorMapper jobLogMappingMapper;
  // Executions currently RUNNING/PREPARING, keyed by Azkaban exec id.
  private Map<Integer, TaskExecution> runningExecutions;
  // taskName -> TaskInfo cache, refreshed by taskInfoRefreshThread.
  private Map<String, Object> taskCache = new HashMap<>();
  private ScheduledExecutorService taskInfoRefreshThread;

  // taskName -> per-execution alert bookkeeping for timeout alerts.
  private Map<String, Map<String, String>> timeoutAlertMap;

  // taskName -> expected fire-time sequence computed from the task's cron.
  private Map<String, List<ExpectObj>> allExpectMap;

  // Azkaban executor id -> Executor, refreshed at most every 5 minutes.
  private Map<Integer, Executor> executorMap;

  private volatile long executorFreshTime = 0;

  @Autowired
  @Qualifier("TaskMonitorAlert")
  private TaskMonitorAlert taskMonitorAlert;


  // 1970-01-02 00:00:00
  // it is a database bug
  // mysql> ALTER TABLE TASK_EXECUTION MODIFY
  // COLUMN END_TIME timestamp not null default '1970-01-01 00:00:00';
  // ERROR 1067 (42000): Invalid default value for 'END_TIME'
  private long defaultMinTime = 24 * 3600;

  public TaskManagerImpl() {
  }

  public TaskManagerImpl(String azserverUrl, String azuser, String azuserPassword) {
    this.azServerUrl = azserverUrl;
    this.azUser = azuser;
    this.azUserPassword = azuserPassword;
    init();
  }

  /**
   * init,load running tasks into memory,start a check thread to check executions left in db.
   */
  @PostConstruct
  public void init() {
    taskInfoRefreshThread = Executors.newSingleThreadScheduledExecutor();
    taskInfoRefreshThread.scheduleWithFixedDelay(
          () -> {
            synchronized (taskCache) {
              try {
                logger.info("refresh task cache,old size:" + taskCache.size());
                taskCache.clear();
                taskMapper
                      .getAllTasks()
                      .forEach(taskInfo -> taskCache.put(taskInfo.getName(), taskInfo));
                logger.info("refresh task cache,new size:" + taskCache.size());
              } catch (Exception e) {
                logger.error("refresh task cache error", e);
              }
            }
          },
          20,
          taskRefreshSeconds,
          TimeUnit.SECONDS);
    this.azkabanApi = new AzkabanApi(this.azServerUrl, this.azUser, this.azUserPassword);
    // Prime the cache synchronously so lookups work before the first refresh tick.
    taskMapper
          .getAllTasks()
          .forEach(taskInfo -> taskCache.put(taskInfo.getName(), taskInfo));

    this.runningExecutions = new ConcurrentHashMap<>();
    this.timeoutAlertMap = new ConcurrentHashMap<>();
    this.allExpectMap = new ConcurrentHashMap<>();

    // Resume tracking of executions that were still running when we last shut down.
    executionMapper
          .getRunningTaskExecution()
          .forEach(taskExecution ->
                runningExecutions.put(taskExecution.getExecId(), taskExecution));

    checkExecutionStatusInDb();
    addExpectList();
  }


  /**
   * Check, on a background thread, the real Azkaban status of every execution that the DB
   * still marks as running, and persist the corrected status/times.
   */
  public void checkExecutionStatusInDb() {
    final List<TaskExecution> executions = this.getAllRunningExecutions();
    new Thread() {
      public void run() {
        for (TaskExecution execution : executions) {
          FlowExecutionDetail detail = azkabanApi.getFlowExecution(execution.getExecId());
          Date endTime = execution.getEndTime();
          // NOTE: substring match — only statuses contained in "FAILED SUCCEEDED" count
          // as finished here (e.g. KILLED is deliberately not matched).
          if ("FAILED SUCCEEDED".indexOf(detail.getStatus()) >= 0) {
            endTime = detail.getUpdateTime();
          }
          //database bug to see {defaultMinTime}
          if (detail.getStartTime().getTime() < defaultMinTime) {
            detail.setStartTime(new Date(defaultMinTime));
          }
          if (endTime.getTime() < defaultMinTime) {
            endTime = new Date(defaultMinTime);
          }
          updateExecutionStatus(
                execution.getExecId(), detail.getStatus(), detail.getStartTime(), endTime);
        }
      }
    }.start();
  }

  /**
   * If the execution's start time falls inside an interval of the expected fire-time
   * sequence, remove the smaller node of that interval (the expectation it fulfilled).
   *
   * @param execution execution
   */
  public void removeTimeList(TaskExecution execution) {
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    logger.info("fetch:" + execution.getTaskName() + "," + sdf.format(execution.getStartTime()));
    // Expected fire-time sequence for this task.
    List<ExpectObj> list = this.allExpectMap.get(execution.getTaskName());
    if (null == list || list.size() < 1) {
      return;
    }
    List<ExpectObj> arrList = new ArrayList<>(list);
    Integer index = null;

    if (arrList.size() == 1) {
      if (execution.getStartTime().after(arrList.get(0).getExpectDate())
            || execution.getStartTime().equals(arrList.get(0).getExpectDate())) {
        index = 0;
      }
    } else {
      // Find the interval [expect(i), expect(i+1)) containing the start time.
      // NOTE(review): a start time at/after the LAST expected date is intentionally
      // not matched here — confirm this boundary is desired.
      for (int i = 0; i < arrList.size() - 1; i++) {
        if ((execution.getStartTime().before(arrList.get(i + 1).getExpectDate())
              && execution.getStartTime().after(arrList.get(i).getExpectDate()))
              || execution.getStartTime().equals(arrList.get(i).getExpectDate())) {
          index = i;
        }
      }
    }

    if (null != index) {
      ExpectObj removeObj = arrList.remove(index.intValue());
      try {
        logger.info("fetch remove:" + execution.getTaskName() + JsonUtil.toJson(removeObj));
      } catch (IOException e) {
        LogUtil.logStackTrace(logger, e);
      }
    }

    this.allExpectMap.put(execution.getTaskName(), arrList);
  }

  @Override
  public void checkExecutionStatusInMemory() {
    final List<TaskExecution> executions = this.getAllRunningExecutions();
    for (TaskExecution execution : executions) {
      FlowExecutionDetail detail = azkabanApi.getFlowExecution(execution.getExecId());
      execution.setStartTime(detail.getStartTime());
      Date endTime = execution.getEndTime();
      if ("FAILED SUCCEEDED".indexOf(detail.getStatus()) >= 0) {
        endTime = detail.getUpdateTime();
      }
      updateExecutionStatus(
            execution.getExecId(), detail.getStatus(), detail.getStartTime(), endTime);
    }
  }

  /**
   * Pull new executions from Azkaban history (newest first, paged) until we reach the
   * newest exec id already stored in the DB, inserting any that are missing.
   */
  @Override
  public synchronized void syncExecutionList() {
    List<TaskExecution> recentList = executionMapper.getRecentlyExecutions();
    int maxExecId =
          recentList == null || recentList.size() == 0 ? 0 : recentList.get(0).getExecId();
    int offset = 0;
    int page = 100;
    List<FlowExecutionHistory> list = this.getExecutionHistory(offset, page);
    while (true) {
      // FIX: stop when Azkaban has no more history; the original looped forever on
      // the same page because the list was never re-fetched.
      if (list == null || list.isEmpty()) {
        break;
      }
      int minId = 0;
      List<TaskExecution> tobeInsert = new ArrayList<>();
      for (FlowExecutionHistory hist : list) {
        minId = hist.getExecutionId();
        if (minId <= maxExecId) {
          break;
        } else {
          TaskInfo taskInfo = this.getTaskByName(hist.getProjectName());
          if (taskInfo == null) {
            // FIX: defensive — unknown project would have NPE'd below.
            logger.error("no task found for project " + hist.getProjectName());
            continue;
          }
          TaskExecution execution = new TaskExecution();
          execution.setEndTime(hist.getEndTime());
          logger.info("job execution end time:" + execution.getEndTime().getTime());
          if (execution.getEndTime().getTime() < 10) {
            // change default endtime(1970-01-01 00:00:00 to 1970-01-02 00:00:00)
            execution.setEndTime(TimeUtil.addDays(new Date(0), 2));
          }
          execution.setStartTime(hist.getStartTime());
          if (execution.getStartTime().getTime() < 10) {
            execution.setStartTime(new Date());
          }
          execution.setStatus(hist.getStatus());
          execution.setExecId(hist.getExecutionId());
          execution.setFlowName(hist.getFlow());
          execution.setExecutorId(hist.getExecutorId());
          execution.setOwnerId(taskInfo.getOwnerId());
          execution.setOwnerType(taskInfo.getOwnerType());
          execution.setSuccess("SUCCEEDED".equals(hist.getStatus()));
          execution.setCreateTime(hist.getSubmitTime());
          execution.setTaskId(taskInfo.getTaskId());
          execution.setTaskName(taskInfo.getName());
          tobeInsert.add(execution);
          logger.info("insert job exec:" + execution.getExecId() + ", start=" + execution.getStartTime() + ",end=" + execution.getEndTime());
          //check
          try {
            this.taskMonitorAlert.sendAlertIfNeeded(execution);
          } catch (Exception e) {
            LogUtil.logStackTrace(logger, e);
          }

          insertJobLogMapping(execution, true, null);
          removeTimeList(execution);
          if ("RUNNING PREPARING".indexOf(execution.getStatus()) >= 0) {
            if (!this.runningExecutions.containsKey(execution.getExecId())) {
              this.runningExecutions.put(execution.getExecId(), execution);
            }
          }
        }
      }
      if (tobeInsert.size() > 0) {
        this.executionMapper.insert(tobeInsert);
      }
      if (minId <= maxExecId) {
        break;
      }
      offset = offset + list.size();
      // FIX: fetch the next page; the original never advanced past the first one.
      list = this.getExecutionHistory(offset, page);
    }
  }


  /**
   * Build a JobLogMapping record pointing at the Azkaban executor host and log dir
   * for the given execution.
   */
  private JobLogMapping buildJobLogMapping(TaskExecution execution) {
    Executor executor = this.getExecutor(execution.getExecutorId());
    JobLogMapping jobLog = new JobLogMapping();
    jobLog.setLogType(LogType.TASKLOG);
    jobLog.setHostAddr(executor.getHost());
    jobLog.setHostName(executor.getHost());
    jobLog.setJobName(execution.getFlowName());
    jobLog.setTaskName(execution.getTaskName());
    jobLog.setExecId(String.valueOf(execution.getExecId()));
    ///home/azkaban/data/executions/2927/_job.2927.SparkTest.log
    String logDir = baseAzLogDir + File.separator + execution.getExecId();
    jobLog.setLogDir(logDir);
    jobLog.setCreateTime(new Timestamp(new Date().getTime()));
    return jobLog;
  }

  @Override
  public Map<String, String> getTimeOutMap(String taskName) {
    return this.timeoutAlertMap.get(taskName);
  }

  @Override
  public void putToTimeOutMap(String taskName, Map<String, String> execMap) {
    this.timeoutAlertMap.put(taskName, execMap);
  }

  @Override
  public void removeTimeOutMap(String taskName) {
    this.timeoutAlertMap.remove(taskName);
  }

  /**
   * Alert on NOT FOUND: any expected fire time more than 2 minutes in the past that is
   * still present in the expectation list was never matched by a real execution.
   */
  @Override
  public void checkNotFound() {
    Date now = new Date();
    SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    for (String taskName : this.allExpectMap.keySet()) {
      TaskInfo taskInfo = this.getTaskByName(taskName);
      if (null == taskInfo || !taskInfo.isScheduled()) {
        continue;
      }
      // Expected execution sequence.
      List<ExpectObj> expectList = this.allExpectMap.get(taskName);
      List<ExpectObj> dateList = new ArrayList<>(expectList);
      List<Integer> indexs = new ArrayList<>();
      int i = 0;
      for (ExpectObj date : dateList) {
        if (date.getExpectDate().before(DateUtils.addMinutes(now, -2))) {
          // Remaining nodes older than (now - 2min) mean the task never fired: NOT FOUND.
          String msg = "[点击查看](" + dpUrl + "#/flowmanage/executions?taskId="
                + taskInfo.getTaskId() + ")";
          Map<Object, Object> map = new HashMap<>();
          map.put("taskName", taskInfo.getName());
          map.put("status", "NOT FOUND");
          map.put("msg", msg);
          map.put("expectTime", sdf.format(date.getExpectDate()));
          try {
            this.taskMonitorAlert.sendMsg("NOT_FOUND", taskInfo,
                  "Dp任务状态监控", map);
          } catch (Exception e) {
            LogUtil.logStackTrace(logger, e);
          }
          indexs.add(i);
        }
        i++;
      }

      // FIX: remove in DESCENDING index order. Removing ascending shifted every
      // later index after each removal and deleted the wrong elements.
      for (int k = indexs.size() - 1; k >= 0; k--) {
        ExpectObj remDate = dateList.remove(indexs.get(k).intValue());
        try {
          logger.error("remove...." + taskName + "-->" + JsonUtil.toJson(remDate));
        } catch (IOException e) {
          LogUtil.logStackTrace(logger, e);
        }
      }
      this.allExpectMap.put(taskName, dateList);
    }
  }

  /**
   * add expect time sequence .
   */
  public void addExpectList() {
    logger.info("addExpectList start ...");
    List<TaskInfo> taskInfos = this.getAllTasks();

    for (TaskInfo taskInfo : taskInfos) {
      if (!taskInfo.isScheduled()) {
        if (!ObjectUtils.isEmpty(this.allExpectMap.get(taskInfo.getName()))) {
          this.allExpectMap.remove(taskInfo.getName());
        }
        continue;
      }

      Date now = new Date();
      Date start = now;
      Date stop = DateUtils.addMinutes(start, 20);
      boolean splicing = false;
      CronTriggerImpl cronTriggerImpl = new CronTriggerImpl();
      List<Date> dates = new ArrayList<>();
      // Fire times to be spliced onto the existing sequence.
      List<Date> newDateList = new ArrayList<>();
      List<ExpectObj> lastExpect = this.allExpectMap.get(taskInfo.getName());
      if (null != lastExpect) {
        int size = lastExpect.size();
        if (size > 0) {
          // If the cron expression changed, regenerate the sequence and overwrite the
          // old one. FIX: the original compared the cron String against the ExpectObj
          // itself (always unequal); compare against the stored cron.
          if (!taskInfo.getScheduleCron().equals(lastExpect.get(size - 1).getCron())) {
            try {
              cronTriggerImpl.setCronExpression(taskInfo.getScheduleCron());
            } catch (ParseException e) {
              LogUtil.logStackTrace(logger, e);
            }
            dates = TriggerUtils
                  .computeFireTimesBetween(cronTriggerImpl, null,
                        start, stop);
          } else {
            // Cron unchanged: continue from the end of the last window and append.
            start = lastExpect.get(size - 1).getLastCronEndTime();
            stop = DateUtils.addMinutes(start, 10);
            // FIX: the original never set the cron expression on this branch, so the
            // trigger computed fire times with no schedule.
            try {
              cronTriggerImpl.setCronExpression(taskInfo.getScheduleCron());
            } catch (ParseException e) {
              LogUtil.logStackTrace(logger, e);
            }
            newDateList = TriggerUtils
                  .computeFireTimesBetween(cronTriggerImpl, null,
                        start, stop);
            splicing = true;
          }
        }

      } else {
        // No previous sequence: generate a fresh one.
        try {
          cronTriggerImpl.setCronExpression(taskInfo.getScheduleCron());
        } catch (ParseException e) {
          LogUtil.logStackTrace(logger, e);
        }
        dates = TriggerUtils
              .computeFireTimesBetween(cronTriggerImpl, null,
                    start, stop);
      }

      List<ExpectObj> expectObjList = new ArrayList<>();
      // Splice the new window onto the existing list.
      if (splicing) {
        List<ExpectObj> newExpectObjList = new ArrayList<>();
        for (Date date : newDateList) {
          ExpectObj expectObj = new ExpectObj();
          expectObj.setCron(taskInfo.getScheduleCron());
          expectObj.setExpectDate(date);
          expectObj.setLastCronEndTime(stop);
          newExpectObjList.add(expectObj);
        }
        lastExpect.addAll(newExpectObjList);
        expectObjList = lastExpect;
      } else {
        for (Date date : dates) {
          ExpectObj expectObj = new ExpectObj();
          expectObj.setCron(taskInfo.getScheduleCron());
          expectObj.setExpectDate(date);
          expectObj.setLastCronEndTime(stop);
          expectObjList.add(expectObj);
        }
      }
      this.allExpectMap.put(taskInfo.getName(), expectObjList);
    }
  }


  @Override
  public TaskInfo getTaskByName(String taskName) {
    TaskInfo taskInfo = (TaskInfo) this.taskCache.get(taskName);
    if (taskInfo == null) {
      // Cache miss: fall through to the DB and back-fill the cache.
      synchronized (taskCache) {
        taskInfo = taskMapper.getTaskByName(taskName);
        if (null != taskInfo) {
          taskCache.put(taskInfo.getName(), taskInfo);
        }
      }
    }
    return taskInfo;
  }

  @Override
  public List<TaskInfo> getAllTasks() {
    return taskMapper.getAllTasks();
  }

  /**
   * get execution history.
   *
   * @param offset fetch offset
   * @param count max count
   * @return history list (entries whose project has no registered task are skipped)
   */
  @Override
  public List<FlowExecutionHistory> getExecutionHistory(int offset, int count) {
    List<AzFlowExecutionHistory> list = azkabanApi.getFlowExecutionHistory(offset, count);
    List<FlowExecutionHistory> executions = new ArrayList<>(list.size());

    for (AzFlowExecutionHistory flow : list) {
      FlowExecutionHistory exec = new FlowExecutionHistory();
      TaskInfo taskInfo = this.taskMapper.getTaskByName(flow.getProjectName());
      if (taskInfo != null) {
        exec.setEndTime(flow.getEndTime());
        exec.setStartTime(flow.getStartTime());
        exec.setExecutionId(flow.getExecutionId());
        exec.setFlow(flow.getFlow());
        exec.setProjectId(flow.getProjectId());
        exec.setProjectName(flow.getProjectName());
        exec.setTaskId(taskInfo.getTaskId());
        exec.setTaskInfo(taskInfo);
        exec.setSubmitTime(flow.getSubmitTime());
        exec.setLastModifiedTimestamp(flow.getLastModifiedTimestamp());
        exec.setVersion(flow.getVersion());
        exec.setScheduleId(flow.getScheduleId());
        exec.setStatus(flow.getStatus());
        exec.setExecutionPath(flow.getExecutionPath());
        exec.setExecutorId(flow.getExecutorId());
        executions.add(exec);
      }
    }
    return executions;
  }

  @Override
  public FlowExecutionDetail getTaskExecutionDetail(int execid) {
    return this.azkabanApi.getFlowExecution(execid);
  }

  /**
   * Resolve an executor by id, refreshing the executor map from Azkaban at most every
   * 5 minutes (double-checked under "this"). Returns a placeholder with host "NA" when
   * the id is unknown.
   */
  private Executor getExecutor(int id) {
    if (this.executorMap == null || System.currentTimeMillis() - this.executorFreshTime > 300000) {
      synchronized (this) {
        if (this.executorMap == null || System.currentTimeMillis()
              - this.executorFreshTime > 300000) {
          List<Executor> executors = this.azkabanApi.getAllActiveExecutors();
          if (null != executors && executors.size() > 0) {
            Map<Integer, Executor> map = new HashMap<>();
            executors.forEach(executor -> map.put(executor.getId(), executor));
            this.executorMap = map;
            this.executorFreshTime = System.currentTimeMillis();
          }
        }
      }
    }
    // FIX: executorMap can still be null when Azkaban reports no active executors;
    // the original NPE'd here instead of returning the "NA" placeholder.
    Executor executor = this.executorMap == null ? null : this.executorMap.get(id);
    if (executor == null) {
      logger.error("no executor found for id " + id);
      executor = new Executor();
      executor.setActive(false);
      executor.setExecutorInfo(null);
      executor.setHost("NA");
      executor.setId(id);
      executor.setPort(0);
    }
    return executor;
  }


  @Override
  public List<FlowExecutionHistory> getRecentlyFinished() {
    List<FlowExecutionHistory> list = azkabanApi.getRecentlyFinished();
    for (FlowExecutionHistory hist : list) {
      TaskInfo taskInfo = this.getTaskByName(hist.getProjectName());
      if (null != taskInfo) {
        hist.setTaskId(taskInfo.getTaskId());
        hist.setTaskInfo(taskInfo);
      }
    }
    return list;
  }

  @Override
  public List<TaskExecution> getAllRunningExecutions() {
    return new ArrayList<>(this.runningExecutions.values());
  }

  @Override
  public List<FlowExecutor> getFlowExecutors() throws IOException {
    List<Executor> executors = this.azkabanApi.getAllActiveExecutors();
    return executors
          .stream()
          .map(executor -> {
            FlowExecutor flowExecutor = new FlowExecutor();
            BeanUtils.copyProperties(executor, flowExecutor);
            ExecutorInfo executorInfo = executor.getExecutorInfo();
            if (executorInfo == null) {
              try {
                executorInfo = azkabanApi.getExecutorInfo(executor);
              } catch (IOException e) {
                // Best effort: keep the executor entry without stats.
                LogUtil.logStackTrace(logger, e);
              }
            }
            if (executorInfo != null) {
              BeanUtils.copyProperties(executorInfo, flowExecutor);
            }
            return flowExecutor;
          })
          .collect(Collectors.toList());
  }


  @Override
  public List<TaskExecution> getPreparingExecutions() {
    Set<Integer> keys = runningExecutions.keySet();
    List<TaskExecution> list = new ArrayList<>(10);
    for (Integer execId : keys) {
      TaskExecution execution = runningExecutions.get(execId);
      if (execution != null && "PREPARING".equals(execution.getStatus())) {
        list.add(execution);
      }
    }
    return list;
  }

  /**
   * Update an in-memory running execution with the status reported by Azkaban, persist
   * the change, and fire any alerts. No-op if the exec id is not tracked.
   */
  @Override
  public void updateExecutionStatus(int execId, String status, Date startTime, Date endTime) {
    TaskExecution execution = this.runningExecutions.get(execId);
    if (execution == null) {
      return;
    }
    boolean updateExecutorId = execution.getExecutorId() == 0;
    if (updateExecutorId) {
      FlowExecutionDetail detail = azkabanApi.getFlowExecution(execId);
      execution.setExecutorId(detail.getExecutorId());
    }
    // insert joblog mapping which last status is PREPARING
    execution = insertJobLogMapping(execution, false, status);

    if (!status.equals("PREPARING") && !status.equals("RUNNING")) {
      runningExecutions.remove(execId);
      execution.setStatus(status);
      // only if finished ,update end time
      execution.setEndTime(endTime);
    }
    //database bug to see {defaultMinTime}
    if (startTime.getTime() < defaultMinTime) {
      startTime = new Date(defaultMinTime);
    }
    // NOTE(review): assumes execution.getEndTime() is never null here — DB column is
    // NOT NULL with a default; confirm for in-memory entries.
    if (execution.getEndTime().getTime() < defaultMinTime) {
      execution.setEndTime(new Date(defaultMinTime));
    }
    if (updateExecutorId) {
      this.executionMapper.updateWithExecutorId(execId, execution.getExecutorId(),
        status, startTime, execution.getEndTime());
      logger.info("update execution executorId:" + execId + " , " + execution.getExecutorId());
    } else {
      this.executionMapper.update(execId, status, startTime, execution.getEndTime());
    }
    execution.setStartTime(startTime);
    execution.setStatus(status);
    removeTimeList(execution);
    try {
      this.taskMonitorAlert.sendAlertIfNeeded(execution);
    } catch (Exception e) {
      LogUtil.logStackTrace(logger, e);
    }
  }

  /**
   * Insert the job-log mapping. On first fetch, insert directly when the executor host is
   * not "NA"; on later fetches, only when the previous recorded status was PREPARING —
   * then the Azkaban running-flows (or recently-finished) API is queried for the executor.
   *
   * @param execution taskExecution in DB
   * @param firstFetched is first fetched
   * @param nowStatus nowStatus (may be null when firstFetched is true)
   * @return TaskExecution
   */
  private TaskExecution insertJobLogMapping(TaskExecution execution, boolean firstFetched,
        String nowStatus) {
    logger.info("prepare insert job log mapping:" + execution.getExecId()
          + "," + execution.getExecutorId());
    boolean insert = false;

    if (firstFetched) {
      insert = true;
    } else {
      List<FlowExecutionHistory> flows = new ArrayList<>();

      if (!nowStatus.equals("RUNNING") && !nowStatus.equals("PREPARING")) {
        flows = azkabanApi.getRecentlyFinished();
      } else {
        // last status is PREPARING or not
        if (!execution.getStatus().equals("PREPARING")) {
          return execution;
        }
        flows = azkabanApi.getRunningFlows();
      }
      for (FlowExecutionHistory flow : flows) {
        if (flow.getExecutionId() == execution.getExecId()) {
          insert = true;
          execution.setExecutorId(flow.getExecutorId());
        }
      }
    }

    // NOTE(review): `insert` is computed but not consulted below — the original code
    // also only gated on jobLog==null and host!="NA"; confirm whether `insert` should
    // additionally guard the DB write.
    JobLogMapping jobLog = jobLogMappingMapper
          .getJobLogByExecId(String.valueOf(execution.getExecId()), execution.getFlowName());
    JobLogMapping jobLogMapping = this.buildJobLogMapping(execution);
    if (jobLog == null) {
      if (!jobLogMapping.getHostAddr().equals("NA")) {
        jobLogMappingMapper.insert(jobLogMapping);
      }
    }
    return execution;
  }

}
