package com.niodata.dp.task.service.impl;

import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.niodata.dp.core.ConfigurationKeys;
import com.niodata.dp.core.DpConfiguration;
import com.niodata.dp.core.OwnerType;
import com.niodata.dp.core.exception.InternalException;
import com.niodata.dp.core.exception.InvalidStatusException;
import com.niodata.dp.core.exception.ResourceNotFoundException;
import com.niodata.dp.core.resource.AbstractResourceDetailResolver;
import com.niodata.dp.core.resource.ResourceDetail;
import com.niodata.dp.core.resource.ResourceType;
import com.niodata.dp.core.usermgr.model.GroupInfo;
import com.niodata.dp.core.usermgr.model.UserGroup;
import com.niodata.dp.core.usermgr.model.UserInfo;
import com.niodata.dp.core.usermgr.op.GroupInfoService;
import com.niodata.dp.core.usermgr.op.UserService;
import com.niodata.dp.log.LogUtil;
import com.niodata.dp.task.JobTypeParamSchemas;
import com.niodata.dp.task.JobTypeParamSchemasV2;
import com.niodata.dp.task.az.AzFlowExecutionHistory;
import com.niodata.dp.task.az.AzkabanApi;
import com.niodata.dp.task.az.Executor;
import com.niodata.dp.task.az.ExecutorInfo;
import com.niodata.dp.task.az.FlowDetailNode;
import com.niodata.dp.task.az.FlowExecuteOptions;
import com.niodata.dp.task.az.FlowExecuteResponse;
import com.niodata.dp.task.az.FlowExecution;
import com.niodata.dp.task.az.FlowExecutionDetail;
import com.niodata.dp.task.az.FlowExecutions;
import com.niodata.dp.task.az.FlowSchedule;
import com.niodata.dp.task.az.ProjectFlows;
import com.niodata.dp.task.dao.JobMapper;
import com.niodata.dp.task.dao.PluginPackageMapperV2;
import com.niodata.dp.task.dao.TaskMapper;
import com.niodata.dp.task.entity.FlowExecutionHistory;
import com.niodata.dp.task.entity.FlowExecutor;
import com.niodata.dp.task.entity.JobDepInfo;
import com.niodata.dp.task.entity.JobExecData;
import com.niodata.dp.task.entity.JobInfo;
import com.niodata.dp.task.entity.JobLog;
import com.niodata.dp.task.entity.JobState;
import com.niodata.dp.task.entity.PackageDefaultParams;
import com.niodata.dp.task.entity.PackageParam;
import com.niodata.dp.task.entity.TaskExecution;
import com.niodata.dp.task.entity.TaskExecutionDetail;
import com.niodata.dp.task.entity.TaskInfo;
import com.niodata.dp.task.entity.TaskSchedule;
import com.niodata.dp.task.entity.v2.PluginPackageV2;
import com.niodata.dp.task.service.PluginManagerV2;
import com.niodata.dp.task.service.TaskManager;
import com.niodata.dp.util.FileUtil;
import com.niodata.dp.util.JsonUtil;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import javax.annotation.PostConstruct;

import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.support.CronExpression;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

@Service("azTaskManager")
@Transactional(rollbackFor = Exception.class)
public class AzBasedTaskManagerV2 extends AbstractResourceDetailResolver implements TaskManager {

  // Default retry policy for jobs whose plugin has fail-retry enabled (see getJobParams).
  private static final int JOB_RETRY_TIMES = 3;
  private static final int JOB_RETRY_INTERVAL_TIME = 60 * 1000; //millisec
  private static Logger logger = Logger.getLogger(AzBasedTaskManagerV2.class);
  // REST client for the Azkaban server; built in init() after property injection.
  private AzkabanApi azkabanApi;
  // Azkaban connection settings (server URL + credentials), injected from configuration.
  @Value("${" + ConfigurationKeys.AZKABAN_SERVER + "}")
  private String azServerUrl;
  @Value("${" + ConfigurationKeys.AZKABAN_USER + "}")
  private String azUser;
  @Value("${" + ConfigurationKeys.AZKABAN_PASSWORD + "}")
  private String azUserPassword;
  // Home directory of the Azkaban executor; execution working dirs live under it.
  @Value("${" + ConfigurationKeys.AZKABAN_EXECUTOR_HOME + "}")
  private String azExecHome;
  // Address of this task API server, written into each job's runtime.properties.
  @Value("${" + ConfigurationKeys.TASK_API_SERVER + "}")
  private String apiServer;
  @Value("${" + ConfigurationKeys.TASK_API_SERVER_PORT + "}")
  private String apiServerPort;
  // Persistence for task and job metadata.
  @Autowired
  private TaskMapper taskMapper;
  @Autowired
  private JobMapper jobMapper;
  @Autowired
  private PluginPackageMapperV2 pluginPackageMapper;

  // Plugin package store: local/remote paths, usage tracking.
  @Autowired
  @Qualifier("PluginManagerV2Impl")
  private PluginManagerV2 packageManager;

  // User/group services used to resolve the HDFS principal (and keytab) a task runs as.
  @Autowired
  @Qualifier("DpUserMgrUser")
  private UserService userOperation;

  @Autowired
  @Qualifier("DpUserMgrGroup")
  private GroupInfoService groupOperation;

  /** Default constructor for Spring; {@link #init()} builds the Azkaban client afterwards. */
  public AzBasedTaskManagerV2() {
  }

  /**
   * Convenience constructor for non-Spring usage: sets the Azkaban connection
   * settings directly and initializes the client immediately.
   */
  public AzBasedTaskManagerV2(String azserverUrl, String azuser, String azuserPassword) {
    this.azServerUrl = azserverUrl;
    this.azUser = azuser;
    this.azUserPassword = azuserPassword;
    init();
  }

  /** Builds the Azkaban API client once the @Value configuration fields are injected. */
  @PostConstruct
  public void init() {
    this.azkabanApi = new AzkabanApi(this.azServerUrl, this.azUser, this.azUserPassword);
  }

  /**
   * Creates a task: persists task/job metadata and dependency edges, creates the
   * matching Azkaban project, uploads the flow definition and optionally schedules it.
   *
   * <p>If anything fails after the Azkaban project was confirmed created, the project
   * is deleted again so the task engine is left clean. (The previous implementation
   * initialized its rollback flag to {@code true}, so a failure occurring before
   * {@code createProject} still attempted to delete a project that never existed.)
   *
   * @param taskInfo task to create, including its job list
   * @throws Exception when persistence or any task-engine call fails
   */
  @Override
  public void createTask(TaskInfo taskInfo) throws Exception {
    // Set only after the project is confirmed on Azkaban, so the rollback below
    // never deletes a project that was never created.
    boolean projectCreated = false;
    try {
      // Save task basic info with a single consistent timestamp.
      Date now = new Date();
      taskInfo.setScheduledTime(now);
      taskInfo.setCreateTime(now);
      taskInfo.setModifyTime(now);
      this.taskMapper.saveTaskInfo(taskInfo);
      List<JobInfo> jobInfoList = taskInfo.getJobInfoList();
      // Save each job and its dependency edges.
      for (JobInfo jobInfo : jobInfoList) {
        this.jobMapper.saveJobInfo(jobInfo);
        List<JobInfo> depJobs = jobInfo.getDepJobs();
        if (depJobs != null && !depJobs.isEmpty()) {
          for (JobInfo depJob : depJobs) {
            this.jobMapper.saveJobDepInfo(
                taskInfo.getTaskId(), jobInfo.getJobId(), depJob.getJobId());
          }
        }
      }
      this.azkabanApi.createProject(taskInfo.getName(), taskInfo.getDescription());
      ProjectFlows flows = this.azkabanApi.getProjectFlows(taskInfo.getName());
      if (flows == null || flows.getProjectId() == null) {
        // Creation did not take effect on the engine; nothing to roll back there.
        throw new RuntimeException("create task on task engine failed");
      }
      projectCreated = true;
      taskInfo.setAzProjectId(Integer.parseInt(flows.getProjectId()));
      taskMapper.updateTaskInfo(taskInfo);
      this.submitTask(taskInfo.getTaskId());
      // Notify the plugin manager that these packages gained a consumer.
      this.packageManager.onPluginUseAgeChanged(
          jobInfoList.stream().map(JobInfo::getPkgId).collect(Collectors.toList()));
      if (taskInfo.isScheduled()) {
        this.scheduleTask(taskInfo.getTaskId());
      }
    } catch (Exception e) {
      if (projectCreated) {
        this.azkabanApi.deleteProject(taskInfo.getName());
      }
      throw e;
    }
  }

  /**
   * Validates that a job may be removed: rejects the request when another job
   * still depends on it.
   *
   * <p>NOTE(review): despite the name, no delete is performed here — only the
   * dependency check. Confirm callers perform the actual removal elsewhere.
   */
  @Override
  public void deleteJobInfo(String taskId, String jobId) {
    JobInfo jobInfo = this.jobMapper.getById(jobId);
    if (jobInfo == null) {
      return;
    }
    if (this.jobMapper.getPostJob(jobId) != null) {
      throw new RuntimeException("job is dependent by another one [" + jobId + "]");
    }
  }

  /**
   * Deletes a task end to end: refuses while executions are running, removes the
   * schedule, kills anything that started in between, drops the Azkaban project,
   * and cleans up local job/task/dependency records.
   *
   * @param taskId id of the task to delete
   * @return the deleted task's metadata
   */
  @Override
  public TaskInfo deleteTaskById(String taskId) {
    List<JobInfo> jobs = this.jobMapper.getJobsOfTask(taskId);
    TaskInfo task = taskMapper.getTaskById(taskId);
    // First pass: fail fast if anything is currently running.
    checkRunningOrKillFlows(task.getAzProjectId(), false);
    if (task.isScheduled()) {
      this.unscheduleTask(taskId);
    }
    // Second pass: kill executions triggered between the check and the unschedule.
    checkRunningOrKillFlows(task.getAzProjectId(), true);
    this.azkabanApi.deleteProject(task.getName());
    jobMapper.deleteJobTasks(taskId);
    taskMapper.deleteTask(taskId);
    jobMapper.deleteJobDeps(taskId);
    // These packages lost a consumer; let the plugin manager recount usage.
    this.packageManager.onPluginUseAgeChanged(
        jobs.stream().map(JobInfo::getPkgId).collect(Collectors.toList()));
    return task;
  }

  /**
   * Scans Azkaban's running executions for the given project and either cancels
   * them ({@code kill} true) or rejects the operation ({@code kill} false).
   */
  private void checkRunningOrKillFlows(int azProjectId, boolean kill) {
    for (AzFlowExecutionHistory running : this.azkabanApi.getRunningFlows()) {
      if (running.getProjectId() != azProjectId) {
        continue;
      }
      if (!kill) {
        throw new InvalidStatusException("有正在执行的任务不能删除，请先【终止任务】");
      }
      this.azkabanApi.cancelFlowExecution(running.getExecutionId());
    }
  }

  /**
   * Deletes a task looked up by name; a no-op when no such task exists.
   * Unschedules first, then removes the Azkaban project and local records.
   */
  @Override
  public void deleteTaskByName(String name) {
    TaskInfo task = taskMapper.getTaskByName(name);
    if (task == null) {
      return;
    }
    if (task.isScheduled()) {
      this.unscheduleTask(task.getTaskId());
    }
    this.azkabanApi.deleteProject(task.getName());
    String taskId = task.getTaskId();
    jobMapper.deleteJobTasks(taskId);
    taskMapper.deleteTask(taskId);
    jobMapper.deleteJobDeps(taskId);
  }

  /** Looks up a task by its (unique) name; delegates to the task mapper. */
  @Override
  public TaskInfo getTaskByName(String taskname) {
    return taskMapper.getTaskByName(taskname);
  }

  /**
   * Loads the plugin package's defaultParam.properties into a map; returns an
   * empty map when the file does not exist.
   *
   * @param jobInfo job whose package path is consulted
   * @throws IOException when the properties file cannot be read
   */
  private Map<String, String> getDefaultParams(JobInfo jobInfo) throws IOException {
    File file = new File(jobInfo.getPkgPath() + File.separator + "defaultParam.properties");
    Map<String, String> map = new HashMap<>();
    if (!file.exists()) {
      return map;
    }
    Properties properties = new Properties();
    // try-with-resources: the previous version leaked the FileInputStream.
    try (FileInputStream in = new FileInputStream(file)) {
      properties.load(in);
    }
    for (String key : properties.stringPropertyNames()) {
      map.put(key, properties.getProperty(key));
    }
    return map;
  }

  /**
   * Writes the per-job runtime.properties into the job's bundle directory, and —
   * when Kerberos is enabled — the job's keytab file alongside it.
   *
   * @throws IOException when any of the files cannot be written
   */
  private void writeJobPkgParams(TaskInfo taskInfo,
      JobInfo jobInfo, String workDir, String jobDir, String principal,
      byte[] keytab, String ownerType, Long ownerId) throws IOException {
    new File(jobDir).mkdirs();
    // Same key order as before; CRLF line endings preserved.
    StringBuilder props = new StringBuilder();
    props.append("az.executions.dir=").append(azExecHome).append("/executions").append("\r\n");
    props.append("job.type=").append(jobInfo.getJobType()).append("\r\n");
    props.append("job.name=").append(jobInfo.getJobName()).append("\r\n");
    props.append("hadoop.enabled=").append(DpConfiguration.isHadoopEnabled()).append("\r\n");
    props.append("api.server=").append(this.apiServer).append("\r\n");
    props.append("api.server.port=").append(this.apiServerPort).append("\r\n");
    props.append("taskid=").append(jobInfo.getTaskId()).append("\r\n");
    props.append("jobid=").append(jobInfo.getJobId()).append("\r\n");
    props.append("ownerId=").append(ownerId).append("\r\n");
    props.append("ownerType=").append(ownerType).append("\r\n");
    props.append("principal=").append(principal).append("\r\n");
    // try-with-resources: the previous version leaked the stream when a write failed,
    // and mixed platform-default encoding with UTF-8; now everything is UTF-8.
    try (FileOutputStream out =
        new FileOutputStream(jobDir + File.separator + "runtime.properties")) {
      out.write(props.toString().getBytes(StandardCharsets.UTF_8));
    }
    if (DpConfiguration.isHadoopKerberosEnabled()) {
      File keyTabFile = new File(jobDir + File.separator + jobInfo.getJobName() + ".keytab");
      try (FileOutputStream keytabOut = new FileOutputStream(keyTabFile)) {
        keytabOut.write(keytab);
      }
    }
  }

  /**
   * Resolves the hive-site.xml path: the explicitly configured location if set,
   * otherwise the conventional location under SPARK_HOME.
   */
  private String getHiveSite() {
    String configured = DpConfiguration.getString(ConfigurationKeys.HIVE_SITE_PATH);
    if (configured != null) {
      return configured;
    }
    return DpConfiguration.getString(ConfigurationKeys.SPARK_HOME) + "/conf/hive-site.xml";
  }

  // Injects the dp-yarn-logagent javaagent (plus redis connection settings) into the
  // job's driver-java-options so YARN driver logs can be forwarded.
  private void setYarnAgentParamIfNeeded(JobInfo jobInfo) {
    // NOTE(review): this is a substring test — it matches any jobType that is a
    // substring of "sparkhadoopJava" (e.g. "spark", "hadoopJava", but also "ark",
    // "khado", or the empty string). Presumably intended as an explicit
    // {spark, hadoopJava} membership check — confirm before tightening.
    if ("sparkhadoopJava".indexOf(jobInfo.getJobType()) < 0) {
      return;
    }
    String sparkHome =
        DpConfiguration.getString(ConfigurationKeys.SPARK_HOME,
            "/usr/local/dp/spark");
    String agentJar = sparkHome + "/jars/" + "dp-yarn-logagent.jar";
    Map<String, Object> params = jobInfo.getParams().getParams();
    // Already injected — avoid adding the agent twice.
    if (params.get("driver-java-options") != null
        && params.get("driver-java-options").toString().contains(agentJar)) {
      return;
    }
    String driverOptions =
        params.get("driver-java-options") == null ? ""
            : params.get("driver-java-options").toString();
    // Strip the surrounding quotes from the existing value, if any.
    if (driverOptions.startsWith("\"")) {
      driverOptions = driverOptions.substring(1, driverOptions.lastIndexOf("\""));
    }
    // Prepend the agent, then append the redis connection properties it needs.
    driverOptions = "-javaagent:" + agentJar + " " + driverOptions;
    driverOptions = driverOptions.trim();
    driverOptions =
        driverOptions + " -Dredis.ip=" + DpConfiguration.getString(ConfigurationKeys.REDIS_IP)
            + " -Dredis.port=" + DpConfiguration.getString(ConfigurationKeys.REDIS_PORT);
    if (DpConfiguration.getString(ConfigurationKeys.REDIS_PASSWORD,null) != null) {
      driverOptions = driverOptions
          + " -Dredis.password=" + DpConfiguration.getString(ConfigurationKeys.REDIS_PASSWORD);
    }
    // Re-quote the combined options.
    driverOptions = "\"" + driverOptions + "\"";
    params.put("driver-java-options", driverOptions);
  }

  /**
   * Writes the Azkaban .job file for one job: merges the effective parameters,
   * assembles the jar list and classpath, records dependencies and appends the
   * result as {@code key=value} CRLF lines (appending to any existing file).
   *
   * @throws IOException when the .job file cannot be written
   */
  private void writeJobParams(TaskInfo taskInfo, JobInfo jobInfo, String workdir, String principal)
      throws IOException {
    File jobFile = new File(workdir + File.separator + jobInfo.getJobName() + ".job");
    // try-with-resources: the previous version leaked the stream whenever any of
    // the parameter assembly below threw.
    try (FileOutputStream fos = new FileOutputStream(jobFile, true)) {
      Map<String, Object> params = this.getJobParams(jobInfo);
      params.put("principal", principal);
      params.put("name", jobInfo.getJobName());
      params.put("type", jobInfo.getJobType());
      setYarnAgentParamIfNeeded(jobInfo);
      // Jar list always ships runtime.properties; entries are comma-terminated.
      StringBuilder jars = new StringBuilder("runtime.properties,");
      String hiveSite = getHiveSite();
      if (hiveSite != null) {
        jars.append(hiveSite).append(",");
      }
      if (DpConfiguration.isHadoopKerberosEnabled()) {
        params.put("keytab", jobInfo.getJobName() + ".keytab");
        jars.append(jobInfo.getJobName()).append(".keytab,");
      }
      String name = jobInfo.getPkgName();
      String version = jobInfo.getPkgVersion();
      String localPath = Joiner.on(File.separator)
          .join(packageManager.getLocalStorePath(), name, version, name + "-" + version);
      String remotePath = Joiner.on(File.separator)
          .join(packageManager.getRemoteStorePath(), name, version, name + "-" + version);
      File[] libJars = new File(localPath + "/lib").listFiles(File::isFile);
      if (libJars != null && libJars.length > 0 && packageManager.exists(name, version)) {
        if (packageManager.isRemoteStoreDfs() && jobInfo.getJobType().equals("spark")) {
          // Spark with a DFS package store: reference jars by their remote path.
          for (File jarFile : libJars) {
            jars.append(Joiner.on(File.separator).join(remotePath, "lib", jarFile.getName()))
                .append(",");
          }
        } else {
          // Otherwise jars are shipped inside the bundle under lib/.
          for (File jarFile : libJars) {
            jars.append("lib/").append(jarFile.getName()).append(",");
          }
        }
      }
      if ("spark".equals(jobInfo.getJobType())) {
        params.put("jars", jars.toString());
      }
      String workingDir = String.format(
          "%s/executions/${azkaban.flow.execid}/%s", azExecHome, jobInfo.getJobName());
      params.put("working.dir", workingDir);
      if (DpConfiguration.getBoolean(ConfigurationKeys.TASK_RUN_AS_USER_ENABLED, false)) {
        params.put("user.to.proxy", principal);
      } else {
        params.remove("user.to.proxy");
      }
      // Prefix every relative classpath entry with ${working.dir}.
      if (params.containsKey("classpath") && params.get("classpath") != null) {
        String classpath = String.valueOf(params.get("classpath"));
        List<String> rewritten = new ArrayList<>();
        for (String path : Splitter.on(":").trimResults().omitEmptyStrings().split(classpath)) {
          rewritten.add(path.startsWith("/") ? path : "${working.dir}/" + path);
        }
        params.put("classpath", Joiner.on(":").join(rewritten));
      }
      // Guarding against an empty list fixes the StringIndexOutOfBoundsException the
      // old deleteCharAt(length-1) produced for a non-null but empty dependency list.
      if (jobInfo.getDepJobs() != null && !jobInfo.getDepJobs().isEmpty()) {
        params.put("dependencies",
            jobInfo.getDepJobs().stream()
                .map(JobInfo::getJobName)
                .collect(Collectors.joining(",")));
      }
      if (taskInfo.isBindExecutor()) {
        params.put("useExecutor", taskInfo.getExecutorId());
      }
      if (!DpConfiguration.isHadoopKerberosEnabled()) {
        params.remove("keytab");
        params.put("proxy-user", principal);
        params.remove("principal");
      } else {
        params.remove("proxy-user");
      }
      List<PackageParam> schemas =
          JobTypeParamSchemas.getJobTypeParamSchemas(jobInfo.getJobType(), jobInfo.getPkgLang());
      // Only write params that are declared in the job type's schema.
      for (Map.Entry<String, Object> entry : params.entrySet()) {
        boolean declared = false;
        for (PackageParam schema : schemas) {
          if (schema.getName().equals(entry.getKey())) {
            declared = true;
            break;
          }
        }
        if (declared) {
          String line = entry.getKey() + "=" + entry.getValue() + "\r\n";
          fos.write(line.getBytes(StandardCharsets.UTF_8));
        }
      }
      // extraProps carries raw "key=value" fragments separated by commas; the old
      // null-check was dead (value defaulted to "") and wrote a spurious blank line.
      String extraProps =
          params.get("extraProps") == null ? "" : params.get("extraProps").toString();
      if (!extraProps.isEmpty()) {
        for (String line : extraProps.split(",")) {
          fos.write((line + "\r\n").getBytes(StandardCharsets.UTF_8));
        }
      }
    }
  }

  /**
   * Builds the Azkaban upload bundle for a task and uploads it: resolves the run-as
   * principal (and keytab under Kerberos) from the task owner, copies each job's plugin
   * files into a per-job directory, writes the runtime/job property files, zips the
   * work directory and uploads it to Azkaban, then deletes the temporary files.
   *
   * @param taskId id of the task to package and upload
   * @throws IOException when writing or zipping the bundle files fails
   */
  @Override
  public void submitTask(String taskId) throws IOException {
    TaskInfo taskInfo = this.getTaskDetailInfo(taskId);
    // Bundle is assembled under <cwd>/tmp/<taskName>.
    String tmp = System.getProperty("user.dir") + File.separator + "tmp";
    String workdir = tmp + File.separator + taskInfo.getName();
    File dir = new File(workdir);
    dir.mkdirs();
    // Resolve the HDFS principal from the owning user or group; keytab only
    // matters when Kerberos is enabled.
    String principal = null;
    byte[] keytab = new byte[0];
    if (taskInfo.getOwnerType().equals(OwnerType.USER.name())) {
      UserInfo userInfo = userOperation.getUserInfo(taskInfo.getOwnerId());
      principal = userInfo.getHdfsUser();
      if (DpConfiguration.isHadoopKerberosEnabled()) {
        keytab = this.userOperation.getUserKeytab(userInfo.getHdfsUser());
      }
    } else if (taskInfo.getOwnerType().equals(OwnerType.GROUP.name())) {
      GroupInfo groupInfo = groupOperation.getGroupInfo(taskInfo.getOwnerId());
      principal = groupInfo.getHdfsUser();
      if (DpConfiguration.isHadoopKerberosEnabled()) {
        keytab = userOperation.getUserKeytab(groupInfo.getHdfsUser());
      }
    }
    for (JobInfo jobInfo : taskInfo.getJobInfoList()) {
      String name = jobInfo.getPkgName();
      String version = jobInfo.getPkgVersion();
      // Local plugin layout: <localStore>/<name>/<version>/<name>-<version>.
      String localPath = Joiner.on(File.separator).join(
          new String[]{
              packageManager.getLocalStorePath(),
              name,
              version,
              name + "-" + version
          }
      );
      // Pull the plugin files from the remote store if they are not cached locally.
      if (!packageManager.localExists(name, version)) {
        packageManager.copyPluginFilesToLocal(name, version);
      }
      // Everything except the package metadata files goes into the bundle.
      File[] files =
          new File(localPath)
              .listFiles(
                  new FileFilter() {
                    @Override
                    public boolean accept(File pathname) {
                      return !pathname.getName().equals("meta.json")
                          && !pathname.getName().equals("defaultParam.properties");
                    }
                  });
      File jobDir = new File(workdir + File.separator + jobInfo.getJobName());
      jobDir.mkdirs();
      if (jobInfo.getPkgType().equals("spark")
          && packageManager.exists(name, version)
          && packageManager.isRemoteStoreDfs()) {
        //do nothing
        //spark job read pkg files from dfs
      } else {
        for (File file : files) {
          FileUtil.copyFileOrDirectory(file, jobDir.getAbsolutePath(), file.getName());
        }
      }
      // runtime.properties (+ keytab) for this job.
      this.writeJobPkgParams(
          taskInfo,
          jobInfo,
          workdir,
          jobDir.getAbsolutePath(),
          principal,
          keytab,
          taskInfo.getOwnerType(),
          taskInfo.getOwnerId());
      boolean hasDep = jobInfo.getDepJobs() != null && jobInfo.getDepJobs().size() > 0;
      logger.info("job has deps " + hasDep + " " + jobInfo.getJobName());
      // The Azkaban .job definition file.
      this.writeJobParams(taskInfo, jobInfo, workdir, principal);
      // Persist the derived credentials back onto the job's stored params.
      if (DpConfiguration.isHadoopKerberosEnabled()) {
        jobInfo.getParams().setParam("keytab", jobInfo.getJobName() + ".keytab");
      }
      jobInfo.getParams().setParam("principal", principal);
      jobMapper.updateJobParams(jobInfo.getJobId(), jobInfo.getParams());
    }
    // Zip, upload, then clean up the temporary work dir and archive.
    String zipFile = tmp + File.separator + taskInfo.getName() + ".zip";
    FileUtil.zipFolderContent(new File(workdir), new File(zipFile));
    this.azkabanApi.uploadProject(zipFile, taskInfo.getName());
    FileUtil.deleteFileOrDir(dir);
    FileUtil.deleteFileOrDir(new File(zipFile));
  }

  /**
   * Recreates the task's Azkaban project from scratch (delete + create) and
   * re-uploads the flow definition.
   *
   * @throws IOException when rebuilding or uploading the bundle fails
   */
  @Override
  public void reSubmitTask(String taskId) throws IOException {
    TaskInfo task = this.getTaskDetailInfo(taskId);
    this.azkabanApi.deleteProject(task.getName());
    this.azkabanApi.createProject(task.getName(), task.getDescription());
    ProjectFlows projectFlows = this.azkabanApi.getProjectFlows(task.getName());
    if (projectFlows == null || projectFlows.getProjectId() == null) {
      throw new RuntimeException("create task on az failed");
    }
    task.setAzProjectId(Integer.parseInt(projectFlows.getProjectId()));
    taskMapper.updateTaskInfo(task);
    submitTask(taskId);
  }

  /**
   * Maps task entities to flat result maps for listing, including the next cron
   * trigger time.
   *
   * <p>Fixes: the previous version passed a {@link LocalDateTime} to
   * {@code SimpleDateFormat.format}, which throws IllegalArgumentException at
   * runtime, and called {@code CronExpression.parse} on possibly null/invalid
   * cron strings. Now uses {@link DateTimeFormatter} and guards both cases
   * (emitting an empty {@code nextExecTime} instead of failing).
   */
  private List<Map<String, Object>> getTaskInfosResult(List<TaskInfo> taskInfos) {
    DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm");
    List<Map<String, Object>> result = new ArrayList<>(taskInfos.size());
    for (TaskInfo taskInfo : taskInfos) {
      Map<String, Object> tk = new HashMap<>();
      tk.put("name", taskInfo.getName());
      tk.put("ownerType", taskInfo.getOwnerType());
      tk.put("ownerId", taskInfo.getOwnerId());
      tk.put("taskId", taskInfo.getTaskId());
      tk.put("azProjectId", taskInfo.getAzProjectId());
      tk.put("createTime", taskInfo.getCreateTime());
      tk.put("creator", taskInfo.getCreator());
      tk.put("scheduled", taskInfo.isScheduled());
      tk.put("scheduleCron", taskInfo.getScheduleCron());
      tk.put("scheduledTime", taskInfo.getScheduledTime());
      tk.put("description", taskInfo.getDescription());
      // NOTE(review): lastStatus was hard-coded in the original as well — presumably
      // a placeholder until real status lookup is wired in; confirm.
      tk.put("lastStatus", "NotScheduled");
      String nextExecTime = "";
      String cron = taskInfo.getScheduleCron();
      if (cron != null && CronExpression.isValidExpression(cron)) {
        LocalDateTime nextTriggerTime = CronExpression.parse(cron).next(LocalDateTime.now());
        if (nextTriggerTime != null) {
          nextExecTime = formatter.format(nextTriggerTime);
        }
      }
      tk.put("nextExecTime", nextExecTime);
      result.add(tk);
    }
    return result;
  }

  /**
   * Builds the effective parameter map for a job, in increasing precedence:
   * job-type defaults, plugin package defaults, then the job's own parameters.
   * Also derives the "dependencies" entry and the retry policy.
   */
  private Map<String, Object> getJobParams(JobInfo jobInfo) {
    Map<String, Object> merged = new HashMap<>(
        JobTypeParamSchemas.getJobTypeDefaultParams(jobInfo.getJobType(), jobInfo.getPkgLang()));
    PluginPackageV2 plugin =
        this.pluginPackageMapper.getPluginByNameVersion(
            jobInfo.getPkgName(), jobInfo.getPkgVersion());
    PackageDefaultParams pluginDefaults = plugin.getDefaultParams();
    if (pluginDefaults != null) {
      merged.putAll(pluginDefaults.getParams());
    }
    merged.putAll(jobInfo.getParams().getParams());
    merged.put("type", jobInfo.getJobType());
    merged.put("name", jobInfo.getJobName());
    List<JobInfo> deps = jobInfo.getDepJobs();
    if (deps != null && !deps.isEmpty()) {
      merged.put("dependencies",
          deps.stream().map(JobInfo::getJobName).collect(Collectors.joining(",")));
    }
    if (plugin.isFailRetry()) {
      // Apply the default retry policy only where not explicitly configured.
      merged.putIfAbsent("retries", JOB_RETRY_TIMES);
      merged.putIfAbsent("retry.backoff", JOB_RETRY_INTERVAL_TIME);
    } else {
      merged.remove("retries");
      merged.remove("retry.backoff");
    }
    return merged;
  }

  /** Loads a task by id with its job list attached. */
  public TaskInfo getTaskById(String taskId) {
    TaskInfo task = this.taskMapper.getTaskById(taskId);
    task.setJobInfoList(this.jobMapper.getJobsOfTask(taskId));
    return task;
  }

  /** Persists updated task metadata; delegates to the task mapper. */
  @Override
  public void updateTask(TaskInfo taskInfo) {
    this.taskMapper.updateTaskInfo(taskInfo);
  }

  /**
   * Triggers an immediate (ad-hoc) execution of the task's flow, honoring the
   * task's executor binding when configured.
   */
  @Override
  public FlowExecuteResponse executeTask(String taskId) {
    TaskInfo task = taskMapper.getTaskById(taskId);
    String flowId = this.getFlow(task.getName());
    return this.azkabanApi.executeFlow(
        task.getName(), flowId, task.isBindExecutor(), task.getExecutorId(), null);
  }

  /**
   * Triggers an execution of the task's first flow with a specific runtime
   * config, without executor binding.
   */
  @Override
  public FlowExecuteResponse executeTask(String taskName, String configId) {
    // TODO
    ProjectFlows projectFlows = azkabanApi.getProjectFlows(taskName);
    String flowId = projectFlows.getFlows().get(0).get("flowId");
    FlowExecuteOptions executeOptions = new FlowExecuteOptions();
    executeOptions.setConfigId(configId);
    return azkabanApi.executeFlow(taskName, flowId, false, 0, executeOptions);
  }

  /**
   * Enables cron scheduling for the task on Azkaban and marks it scheduled
   * (with delete protection) in the local store. Verifies the schedule took
   * effect before persisting.
   */
  @Override
  public TaskInfo scheduleTask(String taskId) {
    TaskInfo task = taskMapper.getTaskById(taskId);
    task.setScheduled(true);
    task.setDelProtection(true);
    ProjectFlows projectFlows = this.azkabanApi.getProjectFlows(task.getName());
    String flowId = projectFlows.getFlows().get(0).get("flowId");
    String concurrentOption = "skip";
    if (task.getOptions() != null) {
      concurrentOption = task.getOptions().getConcurrentOption();
    }
    this.azkabanApi.scheduleCronFlow(
        projectFlows.getProject(),
        Integer.parseInt(projectFlows.getProjectId()),
        flowId,
        task.getScheduleCron(),
        task.isBindExecutor(),
        task.getExecutorId(),
        concurrentOption);
    if (this.azkabanApi.getFlowSchedule(task.getName(), flowId) == null) {
      throw new InvalidStatusException("Task Schedule Failed");
    }
    taskMapper.updateTaskInfo(task);
    return task;
  }

  /**
   * Returns the task's next scheduled execution time, or the empty string when
   * it has no active schedule.
   */
  @Override
  public String getNextExecuteTime(String taskId) {
    TaskInfo task = taskMapper.getTaskById(taskId);
    FlowSchedule schedule =
        this.azkabanApi.getFlowSchedule(task.getName(), this.getFlow(task.getName()));
    if (schedule == null) {
      return "";
    }
    return schedule.getNextExecTime();
  }

  /**
   * Reads the task's Azkaban schedule and maps it onto a {@link TaskSchedule};
   * returns {@code null} when the task is not scheduled.
   */
  @Override
  public TaskSchedule getTaskSchedule(String taskId) {
    TaskInfo task = taskMapper.getTaskById(taskId);
    String flowId = this.getFlow(task.getName());
    FlowSchedule flowSchedule = this.azkabanApi.getFlowSchedule(task.getName(), flowId);
    if (flowSchedule == null) {
      return null;
    }
    TaskSchedule result = new TaskSchedule();
    result.setTaskId(taskId);
    result.setProjectName(task.getName());
    result.setScheduleId(flowSchedule.getScheduleId());
    result.setCronExpression(flowSchedule.getCronExpression());
    result.setStatus(flowSchedule.getStatus());
    result.setSubmitTime(flowSchedule.getSubmitTime());
    result.setFirstSchedTime(flowSchedule.getFirstSchedTime());
    result.setLastModifyTime(flowSchedule.getLastModifyTime());
    result.setNextExecTime(flowSchedule.getNextExecTime());
    return result;
  }

  /**
   * Removes the task's cron schedule on Azkaban (if any), verifies removal took
   * effect, and persists the unscheduled state locally.
   */
  @Override
  public TaskInfo unscheduleTask(String taskId) {
    TaskInfo task = taskMapper.getTaskById(taskId);
    task.setScheduled(false);
    String flowId = this.getFlow(task.getName());
    FlowSchedule current = this.azkabanApi.getFlowSchedule(task.getName(), flowId);
    if (current == null) {
      // Nothing scheduled; local state is left untouched, as before.
      return task;
    }
    this.azkabanApi.unscheduleFlow(current.getScheduleId());
    if (this.azkabanApi.getFlowSchedule(task.getName(), flowId) != null) {
      throw new InvalidStatusException("Task Unschedule Failed");
    }
    taskMapper.updateTaskInfo(task);
    return task;
  }

  /**
   * Fetches one flow execution from Azkaban and maps it onto the task-level
   * execution detail model, including per-job node states.
   *
   * @param execid Azkaban execution id
   * @return TaskExecutionDetail mapped from the flow execution
   */
  @Override
  public TaskExecutionDetail getTaskExecutionDetail(int execid) {
    FlowExecutionDetail flowDetail = this.azkabanApi.getFlowExecution(execid);
    TaskExecutionDetail result = new TaskExecutionDetail();
    // As in the original mapping, taskId carries the Azkaban project name.
    result.setTaskId(flowDetail.getProject());
    result.setProject(flowDetail.getProject());
    result.setStatus(flowDetail.getStatus());
    result.setStartTime(flowDetail.getStartTime());
    result.setSubmitTime(flowDetail.getSubmitTime());
    result.setUpdateTime(flowDetail.getUpdateTime());
    result.setAttempt(flowDetail.getAttempt());
    result.setFlow(flowDetail.getFlow());
    result.setFlowId(flowDetail.getFlowId());
    result.setExecid(flowDetail.getExecid());
    result.setId(flowDetail.getId());
    result.setNestedId(flowDetail.getNestedId());

    List<TaskExecutionDetail.JobExecDetailNode> jobNodes =
        new ArrayList<>(flowDetail.getNodes().size());
    for (FlowDetailNode flowNode : flowDetail.getNodes()) {
      TaskExecutionDetail.JobExecDetailNode jobNode =
          new TaskExecutionDetail.JobExecDetailNode();
      jobNode.attempt = flowNode.getAttempt();
      jobNode.endTime = flowNode.getEndTime();
      jobNode.id = flowNode.id;
      jobNode.in = flowNode.in;
      jobNode.startTime = flowNode.startTime;
      jobNode.status = flowNode.status;
      jobNode.type = flowNode.type;
      jobNode.updateTime = flowNode.updateTime;
      jobNodes.add(jobNode);
    }
    result.setNodes(jobNodes);
    return result;
  }

  /**
   * get task executions.
   *
   * @param taskId task id
   * @param offset index offset
   * @param limit max count
   * @return task execution list
   */
  @Override
  public List<TaskExecution> getTaskExecutions(String taskId, int offset, int limit) {
    TaskInfo taskInfo = this.taskMapper.getTaskById(taskId);
    String flow = this.getFlow(taskInfo.getName());
    FlowExecutions executions =
        this.azkabanApi.getFlowExecutions(taskInfo.getName(), flow, offset, limit);
    List<TaskExecution> resultList = new ArrayList<>();
    for (FlowExecution execution : executions.getExecutions()) {
      TaskExecution taskExecution = new TaskExecution();
      // the original set taskId twice; once is sufficient
      taskExecution.setTaskId(taskId);
      taskExecution.setTaskName(taskInfo.getName());
      taskExecution.setFlowName(execution.getFlowId());
      taskExecution.setExecId(execution.getExecId());
      taskExecution.setStartTime(execution.getStartTime());
      taskExecution.setEndTime(execution.getEndTime());
      taskExecution.setStatus(execution.getStatus());
      // success is only meaningful once the execution has finished
      if (execution.getEndTime() != null) {
        taskExecution.setSuccess("SUCCEEDED".equals(execution.getStatus()));
      }
      // populate the executor id, consistent with the filtered overload
      taskExecution.setExecutorId(execution.getExecutorId());
      resultList.add(taskExecution);
    }
    return resultList;
  }

  /**
   * get task executions with optional status and completion-time filters.
   *
   * @param taskId task id
   * @param offset index offset
   * @param limit max count
   * @param status comma-separated status names to keep; blank keeps all
   * @param startTime inclusive lower bound on end time, or null for no bound
   * @param endTime inclusive upper bound on end time, or null for no bound
   * @return filtered task execution list
   */
  @Override
  public List<TaskExecution> getTaskExecutions(String taskId, int offset, int limit, String status,
      Date startTime, Date endTime) {

    Set<String> statusSet = new HashSet<>();
    if (!StringUtils.isBlank(status)) {
      Collections.addAll(statusSet, status.split(","));
    }

    TaskInfo taskInfo = this.taskMapper.getTaskById(taskId);
    String flow = this.getFlow(taskInfo.getName());
    FlowExecutions executions =
        this.azkabanApi.getFlowExecutions(taskInfo.getName(), flow, offset, limit);
    List<TaskExecution> resultList = new ArrayList<>();
    for (FlowExecution execution : executions.getExecutions()) {

      if (!statusSet.isEmpty() && !statusSet.contains(execution.getStatus())) {
        continue;
      }

      if (startTime != null && endTime != null) {
        // A still-running execution has no end time (see the success check
        // below); the original dereferenced it unconditionally and threw NPE.
        // Executions without an end time are excluded from a time-range query.
        Date end = execution.getEndTime();
        if (end == null || end.before(startTime) || end.after(endTime)) {
          continue;
        }
      }

      TaskExecution taskExecution = new TaskExecution();
      // the original set taskId twice; once is sufficient
      taskExecution.setTaskId(taskId);
      taskExecution.setTaskName(taskInfo.getName());
      taskExecution.setFlowName(execution.getFlowId());
      taskExecution.setExecId(execution.getExecId());
      taskExecution.setStartTime(execution.getStartTime());
      taskExecution.setEndTime(execution.getEndTime());
      taskExecution.setStatus(execution.getStatus());
      // success is only meaningful once the execution has finished
      if (execution.getEndTime() != null) {
        taskExecution.setSuccess("SUCCEEDED".equals(execution.getStatus()));
      }
      taskExecution.setExecutorId(execution.getExecutorId());
      resultList.add(taskExecution);
    }
    return resultList;
  }

  /**
   * Lists the currently running executions of a task, resolved against Azkaban.
   *
   * @param taskId task id
   * @return running executions; empty (never null) when none are running
   */
  @Override
  public List<TaskExecution> getRunningExecutions(String taskId) {
    TaskInfo taskInfo = this.taskMapper.getTaskById(taskId);
    String flow = this.getFlow(taskInfo.getName());
    List<Integer> execIds = this.azkabanApi.getRunningExecutions(taskInfo.getName(), flow);

    if (execIds == null) {
      // typed empty list instead of the raw Collections.EMPTY_LIST
      return Collections.emptyList();
    }
    List<TaskExecution> executions = new ArrayList<>(execIds.size());
    for (Integer execid : execIds) {
      FlowExecutionDetail detail = this.azkabanApi.getFlowExecution(execid);
      TaskExecution execution = new TaskExecution();
      execution.setTaskId(taskId);
      execution.setStatus(detail.getStatus());
      execution.setExecId(execid);
      execution.setStartTime(detail.getStartTime());
      executions.add(execution);
    }
    return executions;
  }

  /**
   * Lists running executions across a whole group.
   *
   * <p>NOTE(review): not implemented yet. Returns an empty list instead of the
   * original {@code null} so callers can iterate safely.
   *
   * @param groupName group name (currently unused)
   * @return an empty list
   */
  @Override
  public List<TaskExecution> getAllRunningExecutions(String groupName) {
    return Collections.emptyList();
  }

  /**
   * Returns up to {@code count} tasks of the given group, starting at
   * {@code offset}.
   *
   * @param groupId group identifier
   * @param offset row offset
   * @param count maximum number of rows
   * @return the group's tasks
   */
  @Override
  public List<TaskInfo> getTasks(long groupId, int offset, int count) {
    return taskMapper.getGroupTasks(groupId, offset, count);
  }

  /**
   * Lists all jobs belonging to a task.
   *
   * @param taskId task id
   * @return the task's jobs
   */
  @Override
  public List<JobInfo> getJobsOfTask(String taskId) {
    return jobMapper.getJobsOfTask(taskId);
  }


  /**
   * Lists all jobs belonging to the task with the given name.
   *
   * @param taskname task name
   * @return the task's jobs
   */
  @Override
  public List<JobInfo> getJobsOfTaskByName(String taskname) {
    return jobMapper.getJobsOfTaskByName(taskname);
  }

  /**
   * Stops a task execution by cancelling the corresponding flow execution on
   * Azkaban.
   *
   * @param executionId execution id
   */
  @Override
  public void stopExecution(int executionId) {
    azkabanApi.cancelFlowExecution(executionId);
  }

  /**
   * Reads a chunk of a job's execution log from Azkaban.
   *
   * <p>Never returns {@code null}: when Azkaban has no log for the execution,
   * an empty {@link JobLog} positioned at {@code offset} is returned, and a
   * {@code null} data payload is normalized to the empty string.
   *
   * @param taskExecutionId task execution id
   * @param jobname job name within the flow
   * @param offset offset
   * @param maxLength max length of log data
   * @return log data, never null
   */
  @Override
  public JobLog getTaskExecutionLog(int taskExecutionId, String jobname, int offset,
      int maxLength) {
    JobLog jobLog =
        this.azkabanApi.getExecutionJobLogs(taskExecutionId, jobname, offset, maxLength);
    if (jobLog == null) {
      jobLog = new JobLog();
      jobLog.setOffset(offset);
      jobLog.setLength(0);
      jobLog.setData("");
    } else if (jobLog.getData() == null) {
      jobLog.setData("");
    }
    return jobLog;
  }

  /**
   * Resolves the flow id of a task's Azkaban project.
   *
   * <p>The task name doubles as the Azkaban project name; when the project
   * exposes several flows, the last one listed is used.
   *
   * @param taskName task (project) name
   * @return the flow id, or {@code null} when the project has no flows
   */
  private String getFlow(String taskName) {
    List<Map<String, String>> flows = this.azkabanApi.getProjectFlows(taskName).getFlows();
    if (flows == null || flows.isEmpty()) {
      return null;
    }
    Map<String, String> lastFlow = flows.get(flows.size() - 1);
    return lastFlow.get("flowId");
  }

  /**
   * Looks up the Azkaban schedule id for a task.
   *
   * @param taskId task id
   * @return the schedule id rendered as a string, or {@code null} when the
   *     task has no schedule
   */
  private String getScheduleId(String taskId) {
    TaskInfo task = this.taskMapper.getTaskById(taskId);
    String flowId = this.getFlow(task.getName());
    FlowSchedule schedule = this.azkabanApi.getFlowSchedule(task.getName(), flowId);
    if (schedule == null) {
      return null;
    }
    return String.valueOf(schedule.getScheduleId());
  }

  /**
   * Resolves a task id into a generic {@link ResourceDetail}.
   *
   * @param resourceType requested resource type — NOTE(review): currently
   *     ignored; the result is always typed {@code ResourceType.Job}. Confirm
   *     this is intended for all callers.
   * @param path the task id
   * @return the resolved resource detail
   */
  @Override
  public ResourceDetail doResolve(ResourceType resourceType, String path) {
    TaskInfo taskInfo = this.taskMapper.getTaskById(path);
    ResourceDetail detail = new ResourceDetail();
    detail.setPath(path);
    // the original set the owner id twice; once is sufficient
    detail.setOwnerId(taskInfo.getOwnerId());
    detail.setOwnerType(OwnerType.valueOf(taskInfo.getOwnerType()));
    detail.setResourceType(ResourceType.Job);
    return detail;
  }

  /**
   * Reports whether this resolver handles the given resource-type class.
   *
   * @param clazz resource type class
   * @return {@code true} only for {@code ResourceType.JobType}
   */
  @Override
  public boolean supportResourceType(Class<? extends ResourceType> clazz) {
    boolean supported = ResourceType.JobType.class.equals(clazz);
    return supported;
  }

  /**
   * get execution history.
   *
   * <p>Each Azkaban history row is mapped to a {@link FlowExecutionHistory};
   * the owning task is resolved by project name, falling back to the literal
   * task id {@code "unknown"} when no matching task exists.
   *
   * @param offset fetch offset
   * @param count max count
   * @return history list
   */
  @Override
  public List<FlowExecutionHistory> getExecutionHistory(int offset, int count) {
    List<AzFlowExecutionHistory> azHistory = azkabanApi.getFlowExecutionHistory(offset, count);
    List<FlowExecutionHistory> result = new ArrayList<>(azHistory.size());

    for (AzFlowExecutionHistory azRow : azHistory) {
      FlowExecutionHistory row = new FlowExecutionHistory();
      row.setEndTime(azRow.getEndTime());
      row.setStartTime(azRow.getStartTime());
      row.setExecutionId(azRow.getExecutionId());
      row.setFlow(azRow.getFlow());
      row.setProjectId(azRow.getProjectId());
      row.setProjectName(azRow.getProjectName());
      // NOTE(review): one DB lookup per history row (N+1); acceptable for
      // small pages, worth batching if count grows.
      TaskInfo taskInfo = this.taskMapper.getTaskByName(azRow.getProjectName());
      row.setTaskId(taskInfo != null ? taskInfo.getTaskId() : "unknown");
      row.setTaskInfo(taskInfo);
      row.setSubmitTime(azRow.getSubmitTime());
      row.setLastModifiedTimestamp(azRow.getLastModifiedTimestamp());
      row.setVersion(azRow.getVersion());
      row.setScheduleId(azRow.getScheduleId());
      row.setStatus(azRow.getStatus());
      row.setExecutionPath(azRow.getExecutionPath());
      result.add(row);
    }
    return result;
  }

  /**
   * Pages through the tasks owned by a user, optionally filtered by app,
   * keyword and scheduled state.
   *
   * @param userId owner user id
   * @param appId optional app filter
   * @param keyWord optional name keyword
   * @param offset row offset
   * @param count maximum rows
   * @param scheduled optional scheduled-state filter
   * @return matching tasks
   */
  @Override
  public List<TaskInfo> getUserTasks(long userId, String appId, String keyWord,
      int offset, int count, String scheduled) {
    return this.taskMapper.findUserTasks(userId, appId, keyWord, offset, count, scheduled);
  }

  /**
   * Lists tasks of a group, restricted to callers that are the group's master
   * or owner; all other callers receive an empty list.
   *
   * @param userId calling user id
   * @param groupId group id
   * @param appId optional app filter
   * @param keyWord optional name keyword
   * @param offset row offset
   * @param count maximum rows
   * @param scheduled optional scheduled-state filter
   * @return matching tasks, or an empty list when not authorized
   */
  @Override
  public List<TaskInfo> getGroupTasks(long userId, long groupId, String appId, String keyWord,
      int offset, int count, String scheduled) {
    if (!groupOperation.isMasterOrOwner(userId, groupId)) {
      return new ArrayList<>();
    }
    return taskMapper.findUserAuthorizedGroupTasks(
        userId, groupId, appId, keyWord, offset, count, scheduled);
  }

  /**
   * Pages through all tasks the user may see: their own plus those of every
   * group they belong to.
   *
   * @param userId calling user id
   * @param appId optional app filter
   * @param keyWord optional name keyword
   * @param offset row offset
   * @param limit maximum rows
   * @param scheduled optional scheduled-state filter
   * @return matching tasks
   */
  @Override
  public List<TaskInfo> getUserAuthorizedTasks(long userId, String appId, String keyWord,
      int offset, int limit, String scheduled) {
    List<Long> groupIds = this.groupOperation.getUserGroups(userId).stream()
        .map(UserGroup::getGroupId)
        .collect(Collectors.toList());
    return taskMapper.findUserAuthorizedTasks(
        userId, appId, groupIds, keyWord, offset, limit, scheduled);
  }

  /**
   * Loads a task together with its jobs and the dependency links between them.
   *
   * <p>Each job's {@code depJobs} list is populated with the {@link JobInfo}
   * instances it depends on, resolved from the dependency table.
   *
   * @param taskId task id
   * @return the task with its job list and per-job dependencies attached
   * @throws ResourceNotFoundException when no task has the given id
   */
  @Override
  public TaskInfo getTaskDetailInfo(String taskId) {
    TaskInfo taskInfo = this.taskMapper.getTaskById(taskId);
    if (taskInfo == null) {
      throw new ResourceNotFoundException("任务不存在");
    }
    List<JobInfo> jobInfoList = jobMapper.getJobsOfTask(taskId);
    List<JobDepInfo> depInfos = jobMapper.getJobDepInfoOfTask(taskId);
    taskInfo.setJobInfoList(jobInfoList);
    Map<String, List<JobInfo>> depJobs = new HashMap<>(); // jobid->deplist
    for (JobDepInfo depInfo : depInfos) {
      // computeIfAbsent replaces the original get/null-check/put sequence
      List<JobInfo> list = depJobs.computeIfAbsent(depInfo.getJobId(), k -> new ArrayList<>(3));
      List<JobInfo> depList =
          jobInfoList.stream()
              .filter(jobInfo -> jobInfo.getJobId().equals(depInfo.getDepJobId()))
              .collect(Collectors.toList());
      list.addAll(depList);
    }
    jobInfoList.forEach(jobInfo -> {
      List<JobInfo> deps = depJobs.get(jobInfo.getJobId());
      if (deps != null) {
        jobInfo.setDepJobs(deps);
      }
    });

    return taskInfo;
  }

  /**
   * Resolves a task by name and returns its full detail (jobs + dependencies).
   *
   * @param taskName task name
   * @return detailed task info
   * @throws ResourceNotFoundException when no task has the given name
   */
  @Override
  public TaskInfo getTaskDetailInfoByName(String taskName) {
    TaskInfo taskInfo = this.taskMapper.getTaskByName(taskName);
    if (taskInfo == null) {
      // the original dereferenced a possibly-null lookup result and threw NPE
      throw new ResourceNotFoundException("task not found: " + taskName);
    }
    return this.getTaskDetailInfo(taskInfo.getTaskId());
  }

  /**
   * Merges user-settable parameters from {@code newParam} into
   * {@code oldParamMap}, in place.
   *
   * <p>Only keys the plugin package declares user-settable are copied, and
   * only when the incoming value is non-null. A {@code null} package leaves
   * the old map untouched.
   *
   * @param newParam incoming parameter values
   * @param oldParamMap existing parameter values (mutated and returned)
   * @param pluginPackage plugin metadata used to gate which keys may change
   * @return {@code oldParamMap} after merging
   */
  private Map<String, Object> mergeUpdateParam(Map<String, Object> newParam,
      Map<String, Object> oldParamMap, PluginPackageV2 pluginPackage) {
    if (pluginPackage == null) {
      return oldParamMap;
    }
    for (Map.Entry<String, Object> entry : newParam.entrySet()) {
      boolean settable = pluginPackage.isUserSetAbleParam(entry.getKey());
      if (settable && entry.getValue() != null) {
        oldParamMap.put(entry.getKey(), entry.getValue());
      }
    }
    return oldParamMap;
  }

  /**
   * Updates an existing task's metadata, job parameters and schedule.
   *
   * <p>Three kinds of change are detected and acted on after the per-job
   * updates:
   * <ul>
   *   <li>scheduled flag toggled → the task is scheduled/unscheduled;</li>
   *   <li>cron expression changed → the task is rescheduled;</li>
   *   <li>concurrency option changed → the task is rescheduled.</li>
   * </ul>
   * Job parameter changes are pushed to Azkaban; a package-version change or
   * an executor-binding change forces a project resubmit.
   *
   * @param taskInfo the incoming task state (matched to the DB copy by name)
   * @throws InternalException when resubmitting the Azkaban project fails
   */
  @Override
  public void updateTaskDetail(TaskInfo taskInfo) {
    TaskInfo taskInDb = this.getTaskDetailInfoByName(taskInfo.getName());
    taskInDb.setModifyTime(taskInfo.getModifyTime());
    taskInDb.setDescription(taskInfo.getDescription());
    // BUG FIX: the original computed concurrencyChanged AFTER
    // taskInDb.setOptions(taskInfo.getOptions()) — comparing an object with
    // itself — and without negation, so the flag was effectively always true
    // and every update of a scheduled task forced a needless reschedule.
    // Capture the old option before overwriting and negate the comparison.
    Object oldConcurrentOption =
        taskInDb.getOptions() == null ? null : taskInDb.getOptions().getConcurrentOption();
    boolean concurrencyChanged = taskInfo.getOptions() != null
        && !taskInfo.getOptions().getConcurrentOption().equals(oldConcurrentOption);
    taskInDb.setOptions(taskInfo.getOptions());
    taskInDb.setDelProtection(taskInfo.isDelProtection());
    taskInDb.setAlertSetting(taskInfo.getAlertSetting());
    taskInDb.setAppId(taskInfo.getAppId());
    boolean cronChanged = false;
    // must be evaluated before setScheduled overwrites the DB copy's flag
    boolean scheduleChanged = (taskInDb.isScheduled() != taskInfo.isScheduled());
    if (!taskInDb.getScheduleCron().equals(taskInfo.getScheduleCron())) {
      taskInDb.setScheduleCron(taskInfo.getScheduleCron());
      cronChanged = true;
    }
    taskInDb.setScheduled(taskInfo.isScheduled());
    boolean resubmitProject = false;
    // toggling the executor binding, or switching the bound executor, both
    // require the Azkaban project to be resubmitted
    if (taskInDb.isBindExecutor() != taskInfo.isBindExecutor()
        || (taskInDb.isBindExecutor()
        && taskInDb.getExecutorId() != taskInfo.getExecutorId())) {
      resubmitProject = true;
    }
    taskInDb.setBindExecutor(taskInfo.isBindExecutor());
    taskInDb.setExecutorId(taskInfo.getExecutorId());
    taskMapper.updateTaskInfo(taskInDb);
    String flow = this.getFlow(taskInDb.getName());
    List<JobInfo> jobInfoList = taskInDb.getJobInfoList();
    for (JobInfo jobInfo : jobInfoList) {
      // find the incoming counterpart of this persisted job
      JobInfo newJob = null;
      for (JobInfo job1 : taskInfo.getJobInfoList()) {
        if (job1.getJobId().equals(jobInfo.getJobId())) {
          newJob = job1;
          break;
        }
      }
      if (newJob == null) {
        // BUG FIX: the incoming payload may omit a job; the original then
        // dereferenced null below. Skip such jobs instead.
        logger.warn("no incoming job matches " + jobInfo.getJobId() + ", skipped");
        continue;
      }
      setYarnAgentParamIfNeeded(newJob);
      PluginPackageV2 pluginPackage =
          packageManager.getPluginByNameVersion(
              -1, jobInfo.getPkgName(), jobInfo.getPkgVersion());
      jobInfo.setNodeStyleInfo(newJob.getNodeStyleInfo());
      // merge only user-settable params into the persisted param map
      Map<String, Object> newParam =
          mergeUpdateParam(
              newJob.getParams().getParams(),
              jobInfo.getParams().getParams(),
              pluginPackage);
      jobInfo.getParams().setParams(newParam);
      try {
        logger.info("job new params:" + JsonUtil.toJson(newParam));
      } catch (IOException e) {
        LogUtil.logStackTrace(logger, e);
      }
      jobMapper.updateJobParams(jobInfo.getJobId(), jobInfo.getParams());
      jobMapper.updateJobNodeStyle(jobInfo.getJobId(), jobInfo.getNodeStyleInfo());
      if (!newJob.getPkgVersion().equals(jobInfo.getPkgVersion())) {
        jobMapper.updateJobPkgInfo(newJob);
        logger.info(taskInfo.getName() + " update pkgVersion " + ",from "
            + jobInfo.getPkgVersion() + " to " + newJob.getPkgVersion());
        resubmitProject = true;
      }

      boolean update = false;
      Map<String, String> jobParams =
          azkabanApi.getFlowJobParams(taskInDb.getName(), flow, jobInfo.getJobName());
      try {
        logger.info("old job params " + JsonUtil.toJson(jobParams));
      } catch (IOException e) {
        LogUtil.logStackTrace(logger, e);
      }
      for (String key : jobParams.keySet()) {
        boolean useSetAble =
            JobTypeParamSchemas.isUserSetAble(
                jobInfo.getJobType(), jobInfo.getPkgLang(), key);
        if (!useSetAble) {
          continue;
        }
        String oldValue = jobParams.get(key);
        String newValue = null;
        if (newParam.get(key) != null) {
          // classpath entries are made absolute by prefixing ${working.dir}
          // to every relative path (added 2018-04-19)
          if (key.equals("classpath")) {
            StringBuffer stringBuffer = new StringBuffer();
            String classpath = String.valueOf(newParam.get(key));
            Iterable<String> paths = Splitter.on(":").trimResults().omitEmptyStrings()
                .split(classpath);
            paths.forEach(path -> {
              if (!path.startsWith("/")) {
                stringBuffer.append("${working.dir}/" + path).append(":");
              } else {
                stringBuffer.append(path).append(":");
              }
            });
            if (stringBuffer.length() > 0) {
              stringBuffer.deleteCharAt(stringBuffer.length() - 1);
            }
            newValue = stringBuffer.toString();
          } else {
            newValue = newParam.get(key).toString();
          }
        }
        if (newValue != null && !newValue.equals(oldValue)) {
          jobParams.put(key, newValue);
          update = true;
        }
      }

      // when a resubmit is pending, the whole project is re-uploaded anyway,
      // so a per-job param push would be redundant
      if (update && !resubmitProject) {
        azkabanApi.setFlowJobParams(
            taskInDb.getName(), flow, newJob.getJobName(), jobParams);
        logger.info("update job params :" + newJob.getJobName());
      } else {
        logger.info("job params not updated :" + newJob.getJobName());
      }

      // NOTE(review): resubmit happens inside the job loop, so a task with
      // several changed jobs may be resubmitted multiple times — confirm
      // whether this should be hoisted after the loop.
      if (resubmitProject) {
        try {
          submitTask(taskInfo.getTaskId());
        } catch (Exception e) {
          logger.error("resubmit error " + taskInfo.getTaskId());
          LogUtil.logStackTrace(logger, e);
          throw new InternalException("resumit project error");
        }
      }

    }

    if (scheduleChanged) {
      logger.info("task schedule changed:" + taskInDb.getName());
      if (taskInDb.isScheduled()) {
        logger.info("schedule task: " + taskInDb.getName());
        this.scheduleTask(taskInDb.getTaskId());
      } else {
        logger.info("unschedule task: " + taskInDb.getName());
        this.unscheduleTask(taskInDb.getTaskId());
      }
    }

    if (cronChanged && !scheduleChanged && taskInDb.isScheduled()) {
      logger.info("reschedule task: " + taskInDb.getName());
      this.unscheduleTask(taskInDb.getTaskId());
      this.scheduleTask(taskInDb.getTaskId());
    }

    if (concurrencyChanged && taskInfo.isScheduled()) {
      this.unscheduleTask(taskInDb.getTaskId());
      this.scheduleTask(taskInDb.getTaskId());
    }
  }

  /**
   * Searches, by name keyword, all tasks the user is authorized to see
   * (their own plus their groups'), without paging.
   *
   * @param userId calling user id
   * @param nameKeyWord keyword to match against task names
   * @return matching tasks
   */
  @Override
  public List<TaskInfo> findTaskByNameKeyWord(Long userId, String nameKeyWord) {
    List<Long> groupIds = this.groupOperation.getUserGroups(userId).stream()
        .map(UserGroup::getGroupId)
        .collect(Collectors.toList());
    return this.taskMapper.findUserAuthorizedTasks(
        userId, null, groupIds, nameKeyWord, 0, Integer.MAX_VALUE, null);
  }

  /**
   * Searches, within one app, all tasks the user is authorized to see
   * (their own plus their groups'), without paging.
   *
   * @param userId calling user id
   * @param appId app filter
   * @param nameKeyWord keyword to match against task names
   * @return matching tasks
   */
  @Override
  public List<TaskInfo> findTaskByApp(Long userId, String appId, String nameKeyWord) {
    List<Long> groupIds = this.groupOperation.getUserGroups(userId).stream()
        .map(UserGroup::getGroupId)
        .collect(Collectors.toList());
    return this.taskMapper.findUserAuthorizedTasks(
        userId, appId, groupIds, nameKeyWord, 0, Integer.MAX_VALUE, null);
  }

  /**
   * Persists a new job execution data record.
   *
   * @param execData the record to insert
   */
  @Override
  public void saveJobExecData(JobExecData execData) {
    jobMapper.saveJobExecData(execData);
  }

  /**
   * Updates an existing job execution data record.
   *
   * @param execData the record to update
   */
  @Override
  public void updateJobExecData(JobExecData execData) {
    jobMapper.updateJobExecData(execData);
  }

  /**
   * Returns the exec-data rows of one execution.
   *
   * @param taskId task id — NOTE(review): currently unused; the lookup is by
   *     execution id only
   * @param execId execution id
   * @return exec data rows of the execution
   */
  @Override
  public List<JobExecData> getJobExecDataOfTask(String taskId, int execId) {
    return jobMapper.getJobDataOfExec(execId);
  }

  /**
   * Returns one job's exec-data row within an execution.
   *
   * @param execId execution id
   * @param jobname job name
   * @return the matching exec data record
   */
  @Override
  public JobExecData getJobExecData(int execId, String jobname) {
    return jobMapper.getJobExecData(execId, jobname);
  }

  /**
   * Lists all active Azkaban executors, enriched with their runtime stats.
   *
   * <p>When an executor comes back without embedded stats, a second call
   * fetches them; a failed fetch is logged and the executor is returned with
   * only its basic properties.
   *
   * @return one {@link FlowExecutor} per active executor
   * @throws IOException when the executor list cannot be fetched
   */
  @Override
  public List<FlowExecutor> getFlowExecutors() throws IOException {
    List<Executor> executors = this.azkabanApi.getAllActiveExecutors();
    // lambda replaces the original anonymous Function class, matching the
    // lambda style used elsewhere in this service
    return executors.stream()
        .map(executor -> {
          FlowExecutor flowExecutor = new FlowExecutor();
          BeanUtils.copyProperties(executor, flowExecutor);
          ExecutorInfo executorInfo = executor.getExecutorInfo();
          if (executorInfo == null) {
            try {
              executorInfo = azkabanApi.getExecutorInfo(executor);
            } catch (IOException e) {
              LogUtil.logStackTrace(logger, e);
            }
          }
          if (executorInfo == null) {
            logger.error("fetch executor info failed:" + executor.getHost());
          } else {
            BeanUtils.copyProperties(executorInfo, flowExecutor);
          }
          return flowExecutor;
        })
        .collect(Collectors.toList());
  }

  /**
   * Looks up the persisted state of a job within a scheduled task.
   *
   * @param taskName task name
   * @param jobName job name
   * @param scheduleId schedule id
   * @return the job state record
   */
  @Override
  public JobState getJobState(String taskName, String jobName, String scheduleId) {
    return jobMapper.getJobState(taskName, jobName, scheduleId);
  }

  /**
   * Updates an existing job state record.
   *
   * @param jobState the record to update
   */
  @Override
  public void updateJobState(JobState jobState) {
    jobMapper.updateJobState(jobState);
  }

  /**
   * Persists a new job state record.
   *
   * @param jobState the record to insert
   */
  @Override
  public void saveJobState(JobState jobState) {
    jobMapper.saveJobState(jobState);
  }

  /**
   * Counts the tasks visible to an owner across the given groups.
   *
   * @param ownerId owner id
   * @param ids group ids to include
   * @param scheduled optional scheduled-state filter
   * @return the task count
   */
  @Override
  public Integer getTaskCount(Long ownerId, List<Long> ids, Integer scheduled) {
    return taskMapper.getGroupTaskCount(ownerId, ids, scheduled);
  }

}
