package cn.ac.iscas.service.quartz;

//import cn.ac.iscas.dao.api.JobMetadataRepository;
import cn.ac.iscas.dao.IJobMetadataDao;
import cn.ac.iscas.domain.job.Job;
import cn.ac.iscas.domain.job.JobSearch;
import cn.ac.iscas.domain.job.JobStatus;
import cn.ac.iscas.domain.quartz.QuartzJobType;
import cn.ac.iscas.domain.response.job.SubmitJobResponse;
import cn.ac.iscas.service.common.JobExecutor;

import com.iscas.datasong.lib.common.DataSongException;
import com.iscas.datasong.lib.common.Status;
import com.iscas.datasong.lib.util.DataSongExceptionUtils;
import com.iscas.datasong.lib.util.DataSongJsonUtils;
import com.iscas.datasong.lib.util.DataSongStringUtils;

import org.json.JSONException;
import org.quartz.*;
import org.quartz.impl.matchers.GroupMatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.messaging.simp.SimpMessagingTemplate;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
import org.springframework.stereotype.Service;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.*;

/**
 * @Author: DataSong
 * @Description: 定时任务执行器服务
 * * :@DisallowConcurrentExecution : 此标记用在实现Job的类上面,意思是不允许并发执行.
 * * :注意org.quartz.threadPool.threadCount线程池中线程的数量至少要多个,否则@DisallowConcurrentExecution不生效
 * * :假如Job的设置时间间隔为3秒,但Job执行时间是5秒,设置@DisallowConcurrentExecution以后程序会等任务执行完毕以后再去执行,否则会在3秒时再启用新的线程执行
 * @Date: Create in 2020/3/3 10:13
 * @Modified By:
 */
@DisallowConcurrentExecution
@Service
public class QuartzJobService implements org.quartz.Job {
	private Logger logger = LoggerFactory.getLogger(QuartzJobService.class);

	@Autowired
	private SchedulerFactoryBean schedulerFactoryBean;

	@Autowired
	JobExecutor jobExecutor;

	@Autowired
	private SimpMessagingTemplate messagingTemplate;

	@Autowired
	IJobMetadataDao jobMetadataDao;

	private static final String StausMoniterCron = "0/5 * * * * ? ";

	/**
	 * 核心方法,Quartz Job真正的执行逻辑.
	 *
	 * @param executorContext executorContext JobExecutionContext中封装有Quartz运行所需要的所有信息
	 * @throws JobExecutionException execute()方法只允许抛出JobExecutionException异常
	 */
	@Override
	public void execute(JobExecutionContext executorContext) {
		try {
			//JobDetail中的JobDataMap是共用的,从getMergedJobDataMap获取的JobDataMap是全新的对象
			JobDataMap map = executorContext.getMergedJobDataMap();
			QuartzJobType jobStatus = (QuartzJobType) map.get("type");
			switch (jobStatus) {
				case Normal:
					///Job jobMetadata = etlJobDao.getJob(map.getString("jobdId"));
					Job jobMetadata = jobMetadataDao.getJob(map.getString("jobdId"));

					execute(jobMetadata);
					break;
				case StatusMoniter:
					JobSearch jobSearch = new JobSearch();
					jobSearch.setStatus(JobStatus.RUNNING.name());
					jobSearch.setPage(1);
					jobSearch.setPageSize(Integer.MAX_VALUE);
					//JobSearchResponse jobSearchResponse = etlJobDao.search(jobSearch);
					List<Job> jobs = jobMetadataDao.select(jobSearch);
					if(!jobs.isEmpty()) {
						List<String> innerIds = new ArrayList<>();
						List<String> ids = new ArrayList<>();
						for(Job job : jobs){
							innerIds.add(job.getInnerId());
							ids.add(job.get_id());
						}

						Map<String, JobStatus> jobStatusMap = jobExecutor.getJobStatus(innerIds);
						if(jobStatusMap!= null){
							List<Job> needUpdateJobs = new ArrayList<>();
							for (Job job : jobs) {
								if (jobStatusMap.containsKey(job.getInnerId()) && jobStatusMap.get(job.getInnerId()) != JobStatus.RUNNING) {
									needUpdateJobs.add(job);
									//成功执行，则启动以其为前置的任务
									if(JobStatus.analyze(job.getStatus()) == JobStatus.SUCCEEDED){
										//如果Job执行成功，修改jobmetadata表中相关任务的status字段为SUCCEEDED
										jobMetadataDao.updateJobStatus(job.get_id(), JobStatus.SUCCEEDED.name());
										//同时通过websocket通知前台，将执行按钮变亮，停止按钮变灰
										messagingTemplate.convertAndSend("/topic/log/" + job.get_id(), "FINISHED");
										//jobMetadataDao.
										for(Job subJob : jobMetadataDao.getPreJobs(job.get_id())){
											execute(subJob);
											logger.info("启动关联子任务!");
										}
									}
								}
							}
							if (needUpdateJobs.size() > 0) {
								jobMetadataDao.save(needUpdateJobs);
								///etlJobDao.save(needUpdateJobs);
							}
							logger.info("get job status ok!");
						}else{
							logger.info("get job status null!");
						}
					}
					break;
			}
		}catch (Exception e){
			e.printStackTrace();
		}
	}

	private boolean execute(Job jobMetadata) throws DataSongException, JSONException {
		SubmitJobResponse response = jobExecutor.execute(jobMetadata);

		jobMetadata.setInnerId(response.getJobId());
		jobMetadata.setExecuteCount(jobMetadata.getExecuteCount() + 1);
		jobMetadata.setExecuteTime(Date.from(ZonedDateTime.now(ZoneId.of("Asia/Shanghai")).toInstant()));

		if(response.getStatus() == Status.OK.getValue()) {
			jobMetadata.setStatus(JobStatus.RUNNING.name());
		}else{
			jobMetadata.setStatus(JobStatus.FAILED.name());
		}
		jobMetadata.setRunInfo(DataSongJsonUtils.toJson(response));

		///etlJobDao.save(jobMetadata);
		int res = jobMetadataDao.save(jobMetadata);
		if (res != 1) {
			return false;
		}
		return true;
	}

	//打印Job执行内容的日志
	private void logProcess(InputStream inputStream, InputStream errorStream) throws IOException {
		String inputLine;
		String errorLine;
		BufferedReader inputReader = new BufferedReader(new InputStreamReader(inputStream));
		BufferedReader errorReader = new BufferedReader(new InputStreamReader(errorStream));
		while ((inputLine = inputReader.readLine()) != null) {
			logger.info(inputLine);
		}
		while ((errorLine = errorReader.readLine()) != null) {
			logger.error(errorLine);
		}
	}

	//从数据库中加载获取到所有Job
	public List<Job> loadJobs() {
		List<Job> list = new ArrayList<>();
		//List<Job> jobList = jobMetadataRepository.findAll();
		List<Job> jobList = jobMetadataDao.selectAll();
		if(jobList != null){
			for(Job job : jobList){
				list.add(job);
			}
		}
		return list;
	}

	//获取JobDataMap.(Job参数对象)
	public JobDataMap getJobDataMap(Job job) {
		JobDataMap map = new JobDataMap();

		map.put("name", job.get_id());
		map.put("group", job.getCreator());
		map.put("cronExpression", job.getCron());
		map.put("JobDescription", job.getDescription());
		map.put("jobdId", job.get_id());
		map.put("type", QuartzJobType.Normal);

		return map;
	}


	//获取JobDetail,JobDetail是任务的定义,而Job是任务的执行逻辑,JobDetail里会引用一个Job Class来定义
	public JobDetail geJobDetail(JobKey jobKey, String description, JobDataMap map) {
		return JobBuilder.newJob(QuartzJobService.class).withIdentity(jobKey).withDescription(description).setJobData(map)
			.storeDurably().build();
	}

	//获取Trigger (Job的触发器,执行规则)
	public Trigger getTrigger(Job job) {
		return TriggerBuilder.newTrigger().withIdentity(job.get_id(), job.getCreator())
			.withSchedule(CronScheduleBuilder.cronSchedule(job.getCron())).build();
	}

	//获取JobKey,包含Name和Group
	public JobKey getJobKey(Job job) {
		return JobKey.jobKey(job.get_id(), job.getCreator());
	}

	public boolean submitJob(Job job) throws DataSongException {
		//只允许一个线程进入操作
		try {
			if (DataSongStringUtils.isNotEmpty(job.getCron())) {
				synchronized (logger) {
					Scheduler scheduler = schedulerFactoryBean.getScheduler();
					JobDataMap map = getJobDataMap(job);
					JobKey jobKey = getJobKey(job);
					JobDetail jobDetail = geJobDetail(jobKey, job.getDescription(), map);

					scheduler.scheduleJob(jobDetail, getTrigger(job));
					job.setStatus(JobStatus.WAITING.name());
				}
			} else {//立即执行
				SubmitJobResponse submitJobResponse = jobExecutor.execute(job);
				job.setInnerId(submitJobResponse.getJobId());
				job.setExecuteCount(job.getExecuteCount()+1);
				job.setExecuteTime(Date.from(ZonedDateTime.now(ZoneId.of("Asia/Shanghai")).toInstant()));
				if(submitJobResponse.getStatus() == Status.OK.getValue()) {
					job.setStatus(JobStatus.RUNNING.name());
				}else{
					job.setStatus(JobStatus.FAILED.name());
				}
			}

			//jobMetadataRepository.save(job);
			jobMetadataDao.save(job);
		}catch (Exception e){
			e.printStackTrace();
			throw new DataSongException(Status.SERVER_ERROR,"submit job error: "+ DataSongExceptionUtils.getExceptionInfo(e));
		}

		return true;
	}

	public boolean stopAll() throws DataSongException {
		synchronized (logger) {
			try {
				Scheduler scheduler = schedulerFactoryBean.getScheduler();
				Set<JobKey> set = scheduler.getJobKeys(GroupMatcher.anyGroup());
				scheduler.pauseJobs(GroupMatcher.anyGroup());                               //暂停所有JOB
				for (JobKey jobKey : set) {                                                 //删除从数据库中注册的所有JOB
					scheduler.unscheduleJob(TriggerKey.triggerKey(jobKey.getName(), jobKey.getGroup()));
					scheduler.deleteJob(jobKey);
				}
			} catch (Exception e) {
				throw new DataSongException(Status.SERVER_ERROR, "submit job error: " + DataSongExceptionUtils.getExceptionInfo(e));
			}
		}

		return true;
	}

	/**
	 * 初始化job状态监控任务
	 * @return
	 * @throws DataSongException
	 */
	public boolean initStatusMoniterJob() throws DataSongException {
		Timer timer = new Timer();
		timer.schedule(new TimerTask() {
			@Override
			public void run() {
				try {
					synchronized (logger) {
						Scheduler scheduler = schedulerFactoryBean.getScheduler();

						JobDataMap map = new JobDataMap();
						map.put("type", QuartzJobType.StatusMoniter);

						JobKey jobKey = JobKey.jobKey("job", QuartzJobType.StatusMoniter.name());
						JobDetail jobDetail = 	JobBuilder.newJob(QuartzJobService.class).withIdentity(jobKey).withDescription(QuartzJobType.StatusMoniter.name()).setJobData(map)
								.storeDurably().build();

						Trigger trigger = TriggerBuilder.newTrigger().withIdentity("job", QuartzJobType.StatusMoniter.name())
								.withSchedule(CronScheduleBuilder.cronSchedule(StausMoniterCron)).build();

						scheduler.scheduleJob(jobDetail, trigger);
					}
				}catch (Exception e){
					e.printStackTrace();
					//throw new DataSongException(Status.SERVER_ERROR,"submit job error: "+ DataSongExceptionUtils.getExceptionInfo(e));
				}
			}
		},60000);

		return true;
	}

	public boolean stop(Job job ) throws DataSongException {
		synchronized (logger) {
			try {
				if (DataSongStringUtils.isNotEmpty(job.getCron())) {
					Scheduler scheduler = schedulerFactoryBean.getScheduler();
					JobKey jobKey = getJobKey(job);
					scheduler.unscheduleJob(TriggerKey.triggerKey(jobKey.getName(), jobKey.getGroup()));
					scheduler.deleteJob(jobKey);
				}
				return jobExecutor.stop(job.getInnerId());
			} catch (Exception e) {
				throw new DataSongException(Status.SERVER_ERROR, "submit job error: " + DataSongExceptionUtils.getExceptionInfo(e));
			}
		}
	}
}
