package com.cqwshzj.framework.service.impl;

/**
 * Job info service implementation.
 *
 * @author Huang Zhijin
 * @version 1.0.0
 * @date 2021/4/7 14:12
 */

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.cqwshzj.common.JobConstant;
import com.cqwshzj.common.column.JobInfoColumn;
import com.cqwshzj.framework.dao.api.JobInfoDao;
import com.cqwshzj.framework.dao.model.JobInfo;
import com.cqwshzj.framework.dao.model.JobStrategy;
import com.cqwshzj.framework.exception.unchecked.JobFrameworkException;
import com.cqwshzj.framework.manager.ElasticJobManager;
import com.cqwshzj.framework.service.JobErrRptService;
import com.cqwshzj.framework.service.JobInfoService;
import com.cqwshzj.framework.service.JobStrategyService;
import com.cqwshzj.framework.service.model.AddJobParam;
import com.cqwshzj.util.CrontabUtil;
import com.cqwshzj.util.ExceptionUtil;
import com.cqwshzj.util.SysUtils;
import com.dangdang.ddframe.job.api.simple.SimpleJob;
import com.dangdang.ddframe.job.exception.JobSystemException;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;

import javax.annotation.PostConstruct;
import java.time.LocalDateTime;
import java.util.*;

/**
 * Service for persisting job-info records and registering/retrying their
 * execution with elastic-job-lite.
 *
 * @author Huang Zhijin
 * @create 2021/4/7
 * @since 1.0.0
 */
@Service
public class JobInfoServiceImpl implements JobInfoService {

	private static final Logger log = LoggerFactory.getLogger(JobInfoServiceImpl.class);

	//----------- local identity & locks -----------
	/** IP of the current machine; stamped onto jobs this instance registers. */
	private static String devIp = SysUtils.getCurDevId();

	// Guards the concurrency-limit check in isConcurrencyControlByJobType.
	private Object lockObj = new Object();
	// Serializes calls into ElasticJobManager.addSimpleJob.
	private Object addJobLockObj = new Object();

	//----------- configuration -----------
	// Max concurrently executing jobs per job type; populated in init().
	private Map<Integer,Integer> jobTypeLimitMap = new HashMap<>();

	//--------------- environment ---------------
	@Autowired
	private Environment env;

	//------------- collaborators -------------
	@Autowired
	private ElasticJobManager jobManager;
	@Autowired
	private JobInfoDao jobInfoDao;
	@Autowired
	private JobErrRptService jobErrRptService;
	@Autowired
	private JobStrategyService jobStrategyService;


	//--------------------
	/**
	 * Loads the per-job-type concurrency limits from configuration
	 * (default 10 per type when the property is absent).
	 */
	@PostConstruct
	private void init(){
		// Sentinel entry: jobs without a type are never concurrency-limited.
		jobTypeLimitMap.put( null, -1);
		jobTypeLimitMap.put( 600, env.getProperty("jobLimit.job600",int.class,10) );
		// Fixed copy-paste bug: type 60 previously read "jobLimit.job600" too,
		// so its own limit property could never take effect.
		jobTypeLimitMap.put( 60, env.getProperty("jobLimit.job60",int.class,10) );
	}

	//------------------







	/**
	 * Dynamically registers a job with elastic-job and persists its JobInfo row.
	 * The row records whether the elastic-job registration succeeded so the
	 * scanner can retry it later.
	 *
	 * @param param fully populated job description; all fields validated below
	 * @return true when the JobInfo row was persisted
	 */
	@Override
	public boolean dynamicAddJob(AddJobParam param) {
		Assert.notNull( param ,"缺少参数");
		Assert.hasText( param.getJobName(), "缺少参数 jobName");
		Assert.notNull( param.getSimpleJobClass(), "缺少参数 simpleJobClass ");
		Assert.hasText( param.getCron() ,"缺少参数 cron");
		Assert.isTrue( param.getShardingTotalCount()>0 ,"shardingTotalCount 切分任务参数必须大于0");
		Assert.hasText( param.getJobId(),"缺少参数 jobId");
		Assert.notNull( param.getJobType(),"缺少参数 jobType");

		boolean addSuccess = tryAddSimpleJob( param.getJobId(), param.getCron(), param.getJobName(), param.getShardingTotalCount(), param.getShardingItemParameters(), instantiateSimpleJob(param.getSimpleJobClass()),10);

		param.setAdd2ElasticJob( addSuccess ? JobConstant.COMMON_YES : JobConstant.COMMON_NO );
		boolean success = this.addJobInfoForScan( param );
		// Fixed inverted condition: the "successfully" message used to be
		// logged only when persisting FAILED.
		if (success) {
			log.debug(" add job {} successfully . AddJobParam is {} ",param.getJobName(), SysUtils.obj2JsonStr(param) );
		} else {
			log.warn(" failed to persist job {} . AddJobParam is {} ",param.getJobName(), SysUtils.obj2JsonStr(param) );
		}
		return success;
	}


	/**
	 * Attempts to register a SimpleJob with elastic-job, retrying up to
	 * {@code repeatTime} times (defaults to 10 when non-positive). On each
	 * failure the job's zk node is removed before the next attempt.
	 *
	 * @return true when one of the attempts succeeded
	 */
	private boolean tryAddSimpleJob(String jobId,
									String cron,
									String jobName,
									int shardingTotalCount,
									String shardingItemParameters,
									SimpleJob simpleJob,
									int repeatTime) {
		if (repeatTime <= 0) {
			repeatTime = 10;
		}
		// Fixed: addSuccess used to start at true and was never reset after a
		// failed attempt, so "fail once, succeed on retry" still returned false.
		boolean addSuccess = false;
		String exceptionStackTrace = null;
		for (int i = 0; i < repeatTime; i++) {
			try {
				this.addSimpleJobAndHandleException( jobName, simpleJob, cron, shardingTotalCount, shardingItemParameters );
				addSuccess = true;
				break;
			}catch (Exception e){
				// Clean up the partially created zk node before retrying.
				jobManager.removeZkNodeByJobName( jobName );
				log.error(" failed to add job into elastic . repeatTime=["+i+"] jobId=["+jobId+"] cron=["+cron+"] simpleJob=["+simpleJob.getClass().getSimpleName()+"] . : "+e.getMessage(),e);
				exceptionStackTrace = prepareExceptionStackTrace( e );
				// Quartz rejects crons entirely in the past with "will never fire".
				// (null-safe: JobSystemException messages can be null)
				if ( e instanceof JobSystemException
						&& e.getMessage() != null
						&& e.getMessage().contains( "will never fire") ) {
					log.error("cron refresh");
				}
			}
		}
		if (!addSuccess) {
			log.error(" add job to elastic-job failed. \n"+ exceptionStackTrace );
			return false;
		}
		return true;
	}


	/**
	 * Creates a new SimpleJob instance via its public no-arg constructor.
	 *
	 * @throws JobFrameworkException wrapping any reflective failure
	 */
	private SimpleJob instantiateSimpleJob( Class<? extends SimpleJob> simpleJobClass ){
		try {
			// getConstructor() with no arguments is the idiomatic equivalent of
			// passing an empty Class array.
			return simpleJobClass.getConstructor().newInstance();
		}catch (Exception e){
			throw new JobFrameworkException(" failed to create SimpleJob instance ",e);
		}
	}


	/**
	 * Registers a SimpleJob with elastic-job-lite under a shared lock,
	 * logging and rethrowing any failure to the caller.
	 */
	private void addSimpleJobAndHandleException(String jobName, SimpleJob simpleJob, String cron, int shardingTotalCount, String shardingItemParameters) {
		Assert.hasText( jobName, "缺少 jobName");
		Assert.notNull( simpleJob, "缺少 simpleJob");
		Assert.hasText( cron, "缺少 cron");
		Assert.isTrue( shardingTotalCount>0, " shardingTotalCount 必须大于0");

		try {
			// Only one thread may register with the elastic-job registry at a time.
			synchronized ( addJobLockObj ){
				jobManager.addSimpleJob( jobName, simpleJob, cron, shardingTotalCount, shardingItemParameters);
			}
		}catch ( Exception e){
			String jobClassName = simpleJob.getClass().getSimpleName();
			log.error(" fail to put SimpleJob instance into elastic-job-lite. jobName=["+jobName+"] simpleJob=["+jobClassName+"] shardingItemParameters=["+shardingItemParameters+"] cron=["+cron+"]. : "+e.getMessage(),e);
			throw e;
		}
	}


	/**
	 * Renders the exception message plus at most the first 8 stack frames,
	 * one per line. Returns "" for a null exception or null stack trace.
	 */
	private String prepareExceptionStackTrace(Exception e) {
		if (e == null) {
			return "";
		}
		StackTraceElement[] frames = e.getStackTrace();
		if (frames == null) {
			return "";
		}
		StringBuilder out = new StringBuilder();
		out.append( e.getMessage() ).append("\n");
		// Cap the dump at 8 frames to keep error-report rows small.
		int limit = Math.min(frames.length, 8);
		for (int i = 0; i < limit; i++) {
			out.append( frames[i].toString() ).append("\n");
		}
		return out.toString();
	}

	//----------------------------------


	/**
	 * Persists a JobInfo row for a job. The row's zk flag / executing device
	 * reflect whether the elastic-job registration already succeeded, so the
	 * scanner ({@link #scanJobInfoAndAdd()}) can pick up unregistered jobs later.
	 *
	 * @param param validated job description
	 * @return true when the insert affected a row
	 */
	@Override
	public boolean addJobInfoForScan(AddJobParam param) {
		Assert.notNull( param , "缺少参数");
		Assert.hasText( param.getJobName() , "缺少参数 jobName");
		Assert.notNull( param.getSimpleJobClass() , "缺少参数 simpleJobClass");
		Assert.hasText( param.getCron() , "缺少参数 cron");
		Assert.isTrue( param.getShardingTotalCount()>0 , "shardingTotalCount 必须大于0");
		Assert.hasText( param.getJobId() ,"缺少 jobId");
		Assert.notNull( param.getJobType() ,"缺少 jobType");

		JobInfo jobInfo = param.toJobInfo();
		// Every new row starts in READY_TO_EXECUTE; only the zk flag and the
		// executing device differ (deduplicated from the original two branches).
		jobInfo.setState( JobConstant.JOB_STATE_READY_TO_EXECUTE );
		if (JobConstant.COMMON_YES.equals( param.getAdd2ElasticJob() )) {
			jobInfo.setIsJobInZk( JobConstant.COMMON_YES );
			jobInfo.setExeDevId( devIp );
		} else {
			jobInfo.setIsJobInZk( JobConstant.COMMON_NO );
		}

		if (param.getParams() == null) {
			param.setParams( new HashMap<>() );
		}
		jobInfo.setJobName( param.getJobName() );
		// May not take effect (the scanner recomputes the cron on registration).
		jobInfo.setCron( param.getCron() );
		jobInfo.setSimpleJobClass( param.getSimpleJobClass().getName() );
		jobInfo.setShardingTotalCount( param.getShardingTotalCount() );
		jobInfo.setShardingItemParameters( param.getShardingItemParameters() );

		jobInfo.setParams( SysUtils.obj2JsonStr( param.getParams() ) );

		JobStrategy jobStrategy = jobStrategyService.getById(jobInfo.getStrtId());
		jobInfo.setJobUpdTime(DateFormatUtils.format( new Date(), JobConstant.JOB_DATETIME) );

		if (jobStrategy != null) {
			int timeOut = 60;
			// NOTE(review): top-level jobs (no parent) use the strategy's time
			// window, children use jobRunTimeout — confirm this is intended and
			// not an inverted isEmpty check.
			if (StringUtils.isEmpty( param.getParentJobId() ) && jobStrategy.getTimeWindow() != null) {
				timeOut = jobStrategy.getTimeWindow();
			}else if( jobStrategy.getJobRunTimeout() != null ){
				timeOut = jobStrategy.getJobRunTimeout();
			}
			jobInfo.setJobExeTimeout( timeOut );
		}
		int rows = jobInfoDao.insert( jobInfo );
		return rows > 0;
	}

	/**
	 * Scans job_info for jobs waiting to be registered with elastic-job,
	 * registers each (with per-type concurrency limiting and an optimistic
	 * row lock to avoid races with other instances) and marks successfully
	 * registered rows READY on this machine.
	 *
	 * @return number of rows updated to the READY state
	 */
	@Override
	public int scanJobInfoAndAdd() {
		QueryWrapper<JobInfo> wrapper = new QueryWrapper<>();
		// Waiting jobs not yet in zk, or waiting jobs whose zk flag is unset.
		// Fixed: the second clause previously compared IS_JOB_IN_ZK against the
		// WAIT_TO_EXECUTE *state* constant; it must filter on STATE instead.
		wrapper.or(i-> i.eq( JobInfoColumn.STATE, JobConstant.JOB_STATE_WAIT_TO_EXECUTE )
				.eq(JobInfoColumn.IS_JOB_IN_ZK, JobConstant.COMMON_NO ) )
				.or( i-> i.eq( JobInfoColumn.STATE, JobConstant.JOB_STATE_WAIT_TO_EXECUTE)
				.isNull( JobInfoColumn.IS_JOB_IN_ZK) );
		wrapper.orderByAsc( JobInfoColumn.JOB_UPD_TIME , JobInfoColumn.JOB_TYPE );

		final List<JobInfo> jobsToAdd = jobInfoDao.selectList(wrapper);
		if (CollectionUtils.isEmpty( jobsToAdd )) {
			return 0;
		}
		// Job types already found at their concurrency limit during this scan.
		Set<Integer> limitJobTypeCache = new HashSet<>();

		int rows = 0;

		for (JobInfo jobInfo : jobsToAdd) {

			// Optimistic lock: bump jobUpdTime; losing the race means another
			// instance picked this job up, so skip it.
			try {
				this.lockAndUpdateUpdTimeById( jobInfo.getId(), jobInfo.getJobUpdTime() );
			}catch (Exception e){
				log.info(" jobInfo( id = "+jobInfo.getId()+") add lock failed. skip reExe current job.");
				continue;
			}

			if (jobInfo.getJobType() != null) {
				boolean control;
				if (!limitJobTypeCache.contains(jobInfo.getJobType())) {
					control = this.isConcurrencyControlByJobType( jobInfo.getJobType().intValue()  );
					if ( control ){
						limitJobTypeCache.add( jobInfo.getJobType() );
					}
				}else {
					control = true;
					log.debug(" jobType( jobType=["+jobInfo.getJobType()+"]) reach max concurrency limit. skip executing current job .");
				}
				if (control) {
					log.info(" jobInfo( id=["+jobInfo.getId()+"] , jobType=["+jobInfo.getJobType()+"]) reach max concurrency limit. skip executing current job .");
					continue;
				}
			}

			// Schedule 8-15 seconds in the future so the one-shot cron can still fire.
			LocalDateTime jobExeTime = LocalDateTime.now().plusSeconds( RandomUtils.nextInt(8, 15) );
			String jobName = jobInfo.getJobName();
			String cron = CrontabUtil.getCrontabByLocalDateTime( jobExeTime );
			Integer shardingTotalCount = jobInfo.getShardingTotalCount() != null ? jobInfo.getShardingTotalCount() : Integer.valueOf(1);
			String shardingItemParameters = StringUtils.isNotEmpty( jobInfo.getShardingItemParameters()) ? jobInfo.getShardingItemParameters() : "" ;
			String simpleJobClassName = jobInfo.getSimpleJobClass();

			boolean cronExist = StringUtils.isNotEmpty( cron );
			boolean jobClassExist = StringUtils.isNotEmpty( simpleJobClassName );
			boolean shardingCountExist = shardingTotalCount.intValue() > 0 ;
			// Fixed: the original test repeated !shardingCountExist and never
			// consulted jobClassExist, letting class-less rows fall through.
			if ( !cronExist || !jobClassExist || !shardingCountExist ){
				jobErrRptService.addJobErrRpt(jobInfo.getId(), jobInfo.getState(), "failed to add job instance into elastic-job", "",new Date());
				continue;
			}

			Class<? extends SimpleJob> simpleJobClass;
			try {
				simpleJobClass = (Class<? extends SimpleJob>) JobInfoServiceImpl.class.getClassLoader().loadClass( simpleJobClassName );
			}catch (Exception e){
				log.error(" load SimpleJob class failed. simpleJobClassName=["+simpleJobClassName+"] . : "+e.getMessage() ,e);
				continue;
			}

			// Fixed: addSuccess used to start at true, so a job whose every
			// retry failed was still marked READY/in-zk below.
			boolean addSuccess = false;
			int repeatTime = 3;
			String exceptionStackTrace = null;
			for (int i = 0; i < repeatTime; i++) {
				try{
					this.addSimpleJobAndHandleException(jobName, instantiateSimpleJob(simpleJobClass) , cron, shardingTotalCount.intValue(), shardingItemParameters );
					addSuccess = true;
					break;
				}catch (Exception e){
					log.error(" add job instance into elastic-job failed. repeatTime=["+i+"]. jobInfo:["+ SysUtils.obj2JsonStr(jobInfo)+"]. exception : "+e.getMessage(),e);
					exceptionStackTrace = prepareExceptionStackTrace(e);
				}
			}

			// All retries failed: record the error and move on.
			if (!addSuccess) {
				jobErrRptService.addJobErrRpt(jobInfo.getId(), jobInfo.getState(), " add job instance into elastic-job failed.", exceptionStackTrace, new Date() );
				continue;
			}

			// Registration succeeded: mark the row ready and owned by this machine.
			JobInfo entity = new JobInfo();
			entity.setIsJobInZk( JobConstant.COMMON_YES );
			entity.setState( JobConstant.JOB_STATE_READY_TO_EXECUTE );
			entity.setJobUpdTime( DateFormatUtils.format(new Date() , JobConstant.JOB_DATETIME ) );
			entity.setCron( cron );
			// Device IP is set only after the job is in elastic-job.
			entity.setExeDevId( devIp );

			UpdateWrapper<JobInfo> wrapper2 = new UpdateWrapper<>();
			wrapper2.eq( JobInfoColumn.ID , jobInfo.getId() );
			rows += jobInfoDao.update(entity, wrapper2 );
		}

		return rows;
	}

	/**
	 * Updates a job row's progress and last-modified timestamp.
	 *
	 * @return number of rows affected
	 */
	@Override
	public int updateJobProgressById(String jobId, Double progress) {
		Assert.hasText( jobId, "缺少参数 jobId ");
		Assert.notNull( progress, "缺少参数 progress ");

		JobInfo update = new JobInfo();
		update.setProgress( progress );
		update.setJobUpdTime( DateFormatUtils.format( new Date(), JobConstant.DATETIME_2 ) );

		UpdateWrapper<JobInfo> byId = new UpdateWrapper<>();
		byId.eq(JobInfoColumn.ID, jobId );

		return jobInfoDao.update(update, byId);
	}

	/**
	 * Locks the job row identified by id + its current jobUpdTime (via the
	 * DAO's select-for-update query), then stamps a fresh jobUpdTime so that
	 * concurrent scanners lose the race on the old timestamp.
	 *
	 * @throws IllegalArgumentException when no matching row exists
	 */
	@Transactional(propagation = Propagation.REQUIRED , rollbackFor = Throwable.class )
	@Override
	public int lockAndUpdateUpdTimeById(String id, String updTime) {
		Assert.hasText( id, "缺少参数 id");
		Assert.hasText( updTime, "缺少参数 updTime");

		JobInfo locked = jobInfoDao.selectOneForUpdateById(id, updTime);
		Assert.notNull( locked,"没有找到 id="+id+" , jobUpdTime="+updTime+" 对应的任务信息");

		JobInfo stamp = new JobInfo();
		stamp.setJobUpdTime( DateFormatUtils.format(new Date(), JobConstant.JOB_DATETIME ) );

		UpdateWrapper<JobInfo> where = new UpdateWrapper<>();
		where.eq( JobInfoColumn.ID, id);
		where.eq( JobInfoColumn.JOB_UPD_TIME , updTime );

		return jobInfoDao.update( stamp, where );
	}

	/**
	 * Reports whether the given job type has reached its configured concurrency
	 * limit on this machine. Types with no limit (or a non-positive one) are
	 * never throttled.
	 */
	@Override
	public boolean isConcurrencyControlByJobType(int jobType) {
		if (jobType == 0) {
			return false;
		}
		Integer limit = jobTypeLimitMap.get(jobType);
		boolean unlimited = limit == null || limit.intValue() <= 0;
		if (unlimited) {
			return false;
		}
		// Count-and-compare must be atomic across threads of this instance.
		synchronized ( lockObj ){
			int exeJobNum = this.countExecutingJobOfCurrentDevIdByJobType( jobType );
			boolean reached = exeJobNum >= limit;
			if (reached) {
				log.info( " jobType( jobType="+jobType+") reach max concurrency limit. current executing count=["+exeJobNum+"] , max limit=["+limit+"]. ");
			}
			return reached;
		}
	}

	/**
	 * Counts jobs of the given type on this machine that are either currently
	 * executing or ready to execute.
	 */
	@Override
	public int countExecutingJobOfCurrentDevIdByJobType(int jobType) {
		Assert.isTrue( jobType >0 ," jobType 参数不合法");

		QueryWrapper<JobInfo> query = new QueryWrapper<>();
		// (type AND this device AND EXECUTING) OR (type AND this device AND READY)
		query.or(clause-> clause.eq( JobInfoColumn.JOB_TYPE, jobType )
				.eq(JobInfoColumn.EXE_DEV_ID, devIp)
				.eq(JobInfoColumn.STATE, JobConstant.JOB_STATE_EXECUTING)
		);
		query.or(clause-> clause.eq( JobInfoColumn.JOB_TYPE, jobType)
			.eq( JobInfoColumn.EXE_DEV_ID, devIp)
			.eq( JobInfoColumn.STATE, JobConstant.JOB_STATE_READY_TO_EXECUTE)
		);

		Integer total = jobInfoDao.selectCount( query );
		return total == null ? 0 : total.intValue() ;
	}

	/**
	 * Records an error report (with the exception's full stack trace) and
	 * marks the job failed, inside one transaction.
	 *
	 * @return total rows affected by both writes
	 */
	@Transactional(propagation = Propagation.REQUIRED, rollbackFor = Throwable.class)
	@Override
	public int addJobErrRptAndSetJobFailed(String jobId, Exception e) {
		String stackTrace = ExceptionUtil.getStackTraceStringOfException(e);

		int affected = jobErrRptService.addJobErrRpt(jobId, JobConstant.JOB_STATE_FAILED, e.getMessage(), stackTrace, new Date());
		affected += this.setJobAsFailedById(jobId);
		return affected;
	}

	/**
	 * Marks a job as failed: removes its zk node, sets the FAILED state,
	 * stamps end/update time and resets progress to 0.
	 *
	 * @throws IllegalArgumentException when jobId is blank or unknown
	 * @return number of rows updated
	 */
	@Override
	public int setJobAsFailedById(String jobId) {
		Assert.hasText(jobId,"缺少参数 jobId");
		final JobInfo jobInfo = jobInfoDao.selectById(jobId);
		// Fixed: fail fast with a clear message instead of an NPE on
		// jobInfo.getJobName() when the id does not exist.
		Assert.notNull( jobInfo, "没有找到 jobId="+jobId+" 对应的任务信息");

		// The job failed, so remove its zookeeper node.
		jobManager.removeZkNodeByJobName( jobInfo.getJobName() );

		String nowStr = DateFormatUtils.format( new Date(), JobConstant.DATETIME_2 );
		JobInfo entity = new JobInfo();
		entity.setState( JobConstant.JOB_STATE_FAILED );
		entity.setJobUpdTime( nowStr );
		entity.setJobEndTime( nowStr );
		entity.setProgress( 0.0 );

		UpdateWrapper<JobInfo> wrapper = new UpdateWrapper<>();
		wrapper.eq( JobInfoColumn.ID, jobId );

		return jobInfoDao.update( entity, wrapper);
	}


	/**
	 * Records an error report with the given message (no stack trace) and
	 * marks the job failed, inside one transaction.
	 *
	 * @return total rows affected by both writes
	 */
	@Transactional(propagation = Propagation.REQUIRED, rollbackFor = Throwable.class)
	@Override
	public int addJobErrRptAndSetJobFailed(String jobId, String message ) {
		int affected = jobErrRptService.addJobErrRpt(jobId, JobConstant.JOB_STATE_FAILED, message, "", new Date());
		affected += this.setJobAsFailedById(jobId);
		return affected;
	}


	/**
	 * Re-executes an existing job with a new cron: removes its zk node,
	 * re-registers it with elastic-job, and resets the row to READY state
	 * with progress 0 and the new cron.
	 *
	 * @return true when registration succeeded and the row was updated
	 */
	@Transactional(propagation = Propagation.REQUIRED, rollbackFor = Throwable.class)
	@Override
	public boolean reExeJobByIdAndCron(String jobId, String cronNew) {
		Assert.hasText( jobId,"缺少参数 jobId");
		Assert.hasText( cronNew,"缺少参数 cronNew");

		JobInfo jobInfo = jobInfoDao.selectById( jobId );
		Assert.notNull( jobInfo, "没有找到 jobId="+jobId+" 对应的任务信息");

		// Fixed inverted ternary: a non-empty jobName was discarded in favor of
		// the jobId, while an empty one flowed on to trip the assert below.
		String jobName = StringUtils.isNotEmpty( jobInfo.getJobName() ) ? jobInfo.getJobName() : jobId;
		String simpleJobClass = jobInfo.getSimpleJobClass();
		Short shardingTotalCount = jobInfo.getShardingTotalCount();
		String shardingItemParameters = jobInfo.getShardingItemParameters();

		Assert.hasText( jobName,"jobInfo 缺少 jobName 的值，jobId="+jobId );
		Assert.hasText( simpleJobClass, "jobInfo 缺少 simpleJobClass 的值，jobId="+jobId );
		Assert.isTrue( shardingTotalCount != null && shardingTotalCount.shortValue()>0, "jobInfo 缺少 shardingTotalCount 的值，jobId="+jobId );

		// Drop any stale registration before re-adding.
		jobManager.removeZkNodeByJobName( jobInfo.getJobName() );

		Class<? extends SimpleJob> simpleJobClazz;
		try {
			simpleJobClazz = (Class<? extends SimpleJob>) JobInfoServiceImpl.class.getClassLoader().loadClass( simpleJobClass );
		}catch (Exception e){
			throw new JobFrameworkException(" load class error. className="+simpleJobClass  ,e);
		}
		// Register the job with elastic-job (up to 10 attempts).
		boolean addJobSuccess = tryAddSimpleJob(jobId, cronNew, jobName, shardingTotalCount, shardingItemParameters, instantiateSimpleJob(simpleJobClazz), 10);
		if (!addJobSuccess){
			// Re-execution failed; leave the row untouched.
			return false;
		}

		String nowStr = DateFormatUtils.format(new Date(), JobConstant.JOB_DATETIME );
		JobInfo entity = new JobInfo();
		entity.setState( JobConstant.JOB_STATE_READY_TO_EXECUTE );
		entity.setJobUpdTime( nowStr );
		entity.setProgress(0.0);
		entity.setIsJobInZk( JobConstant.COMMON_YES );
		entity.setCron( cronNew );

		UpdateWrapper<JobInfo> wrapper = new UpdateWrapper<>();
		wrapper.eq(JobInfoColumn.ID, jobId);

		return jobInfoDao.update( entity, wrapper ) > 0;
	}
}
