package com.acmedcare.framework.exchange.center.executer.job;

import com.acmedcare.framework.exchange.center.entity.bean.Task;
import com.acmedcare.framework.exchange.center.entity.bean.TaskDetail;
import com.acmedcare.framework.exchange.center.entity.dto.TaskFailStatus;
import com.acmedcare.framework.exchange.center.executer.ExecutorConfig;
import com.acmedcare.framework.exchange.center.executer.TaskRunnerManager;
import java.util.Arrays;
import java.util.Date;
import java.util.concurrent.atomic.AtomicLong;
import lombok.extern.slf4j.Slf4j;
import org.quartz.Job;
import org.quartz.JobDetail;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

@SuppressWarnings("unused")
@Slf4j
@SuppressWarnings("unused")
@Slf4j
public class SparkExecuteJob extends BaseJob implements Job {

  /** Token separator used when assembling the spark-submit command line. */
  private static final String SPACE = " ";

  /**
   * Quartz entry point: builds a spark-submit shell command for the scheduled
   * {@link Task} and executes it, recording the outcome in a {@link TaskDetail}.
   *
   * <p>If the same task is already running, execution is skipped and the detail
   * is marked with {@link TaskFailStatus#SKIP}. On any failure the detail is
   * marked {@link TaskFailStatus#ERROR} and the exception is rethrown wrapped
   * in a {@link JobExecutionException}. The detail is always published and the
   * downloaded jar cleaned up, whether the run succeeds, skips, or fails.
   *
   * @param jobExecutionContext Quartz context carrying the task in its job data
   *     map under {@link ExecutorConfig#TASKINFO}
   * @throws JobExecutionException if the task info is missing or execution fails
   */
  @Override
  public void execute(JobExecutionContext jobExecutionContext) throws JobExecutionException {

    paths.clear();
    JobDetail detail = jobExecutionContext.getJobDetail();
    Task task = (Task) detail.getJobDataMap().get(ExecutorConfig.TASKINFO);

    if (task == null) {
      throw new JobExecutionException("task info is empty");
    }

    TaskDetail taskDetail = new TaskDetail();
    taskDetail.setExecutorName(ExecutorConfig.getInstance().getExecutorName());
    try {

      taskDetail.setName(task.getName());
      taskDetail.setDescription(task.getDescription());
      taskDetail.setExecuteTime(new Date());
      TaskRunnerManager manager = new TaskRunnerManager();
      // If the task is already running, skip this execution entirely.
      // Boolean.TRUE.equals guards against a null boxed result from isRun.
      boolean running = Boolean.TRUE.equals(manager.isRun(task));

      if (!running) {

        try {
          manager.register(task);
          String shellScript = createSparkCommand(task);
          // Update the task's last-run state before launching the shell.
          task.setLastRunTime(new Date());
          publishTask(task);
          long startTime = System.currentTimeMillis();
          executeShell(shellScript, taskDetail, task.getTimeOut());
          taskDetail.setDuringTime(System.currentTimeMillis() - startTime);
        } finally {
          // Always release the running flag, even if the shell execution threw.
          manager.unRegister(task);
        }
      } else {

        taskDetail.setDuringTime(0L);
        taskDetail.setExecuteLog("任务正在执行，已经跳过执行!");
        taskDetail.setFailStatus(TaskFailStatus.SKIP);
        taskDetail.setStatus(false);
      }

    } catch (Exception e) {

      // Arrays.toString renders the actual frames; the array's default
      // toString() would log useless "[Ljava.lang.StackTraceElement;@..." text.
      taskDetail.setExecuteLog(
          e.getMessage() + System.lineSeparator() + Arrays.toString(e.getStackTrace()));
      taskDetail.setStatus(false);
      taskDetail.setFailStatus(TaskFailStatus.ERROR);
      throw new JobExecutionException("task convert fail", e);
    } finally {
      // Publish the outcome and remove the downloaded jar regardless of result.
      publishDetail(taskDetail);
      clearJar();
    }
  }

  /**
   * Assembles the spark-submit shell command line for the given task:
   * {@code sh <shellPath> --master <uri> --deploy-mode cluster --class <main> <jar>}.
   *
   * @param task the task supplying the main class and executable jar
   * @return the space-separated command string to hand to the shell
   */
  private String createSparkCommand(Task task) {

    StringBuilder builder = new StringBuilder();
    builder
        .append("sh")
        .append(SPACE)
        .append(ExecutorConfig.getInstance().getShellPath())
        .append(SPACE)
        .append("--master ")
        .append(ExecutorConfig.getInstance().getMasterUri())
        .append(SPACE)
        .append("--deploy-mode cluster")
        .append(SPACE)
        .append(String.format("--class %s", task.getExecuteMainClass()))
        .append(SPACE)
        .append(resolveJarPath(task.getExecuteJar()));
    return builder.toString();
  }
}