package com.itstyle.quartz.job;

import com.alibaba.fastjson.JSONObject;
import com.itstyle.quartz.entity.QuartzEntityBatchDt;
import com.itstyle.quartz.entity.QuartzEntityDt;
import com.itstyle.quartz.interceptor.MyTask;
import com.itstyle.quartz.kettleUtil.DateUtil;
import com.itstyle.quartz.kettleUtil.LogUtil;
import com.itstyle.quartz.kettleUtil.PropertyUtil;
import com.itstyle.quartz.service.dao.IJobService;
import org.quartz.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.annotation.Transactional;

import java.io.*;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.UnknownHostException;
import java.util.Date;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * 实现序列化接口、防止重启应用出现quartz Couldn't retrieve job because a required class was not found 的问题
 * Job 的实例要到该执行它们的时候才会实例化出来。每次 Job 被执行，一个新的 Job 实例会被创建。
 * 其中暗含的意思就是你的 Job 不必担心线程安全性，因为同一时刻仅有一个线程去执行给定 Job 类的实例，甚至是并发执行同一 Job 也是如此。
 * @DisallowConcurrentExecution 保证上一个任务执行完后，再去执行下一个任务，这里的任务是同一个任务
 */
/**
 * Quartz job that runs a batch of KETTLE tables on a thread pool.
 *
 * Implements {@link Serializable} to avoid the Quartz
 * "Couldn't retrieve job because a required class was not found" problem after restarts.
 * Quartz creates a fresh Job instance per execution, so instance state is not shared
 * between firings. {@code @DisallowConcurrentExecution} ensures the previous firing of
 * the SAME job finishes before the next one starts.
 */
@DisallowConcurrentExecution
public class LoadThreadsBatch implements Job, Serializable {

    private static final Logger logger = LoggerFactory.getLogger(LoadThreadsBatch.class);

    private static final long serialVersionUID = 1L;

    // NOTE(review): field injection on a Quartz Job only works if a Spring-aware
    // JobFactory is configured; confirm against the scheduler configuration.
    @Autowired
    public IJobService jobService;

    /** Key of the job being executed; set via the parameterized constructor or setter. */
    public JobKey jobkey;

    /** No-arg constructor required by Quartz. */
    public LoadThreadsBatch() {
    }

    /** Convenience constructor used by {@link #execute} to build a fully-initialized instance. */
    public LoadThreadsBatch(JobKey jobkey, IJobService jobService) {
        this.jobkey = jobkey;
        this.jobService = jobService;
    }

    /**
     * Quartz entry point: reads the target method name from the JobDataMap
     * ("jobMethodName") and invokes it reflectively on a new, fully-initialized instance.
     */
    @Override
    @Transactional
    public void execute(JobExecutionContext context) {
        JobDetail jobDetail = context.getJobDetail();
        JobDataMap dataMap = jobDetail.getJobDataMap();
        JobKey jobkeys = jobDetail.getKey();
        // Name of the method to invoke dynamically, stored with the job definition.
        String methodName = dataMap.getString("jobMethodName");
        try {
            LoadThreadsBatch job = new LoadThreadsBatch(jobkeys, this.jobService);
            Method method = job.getClass().getMethod(methodName);
            method.invoke(job);
        } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
            // FIX: log the local jobkeys — this.jobkey is null on Quartz-created (no-arg)
            // instances, so the old jobkey.toString() itself threw an NPE here.
            // Also pass the exception so the stack trace is preserved.
            logger.error("KETTLE 批量任务执行异常，jobkey={}", jobkeys, e);
        }
    }

    /**
     * Runs the KETTLE batch: loads the job configuration and the batch-table list,
     * splits the tables into groups of {@code threads_group} size, and submits one
     * {@link MyTask} per group to a thread pool. Progress and outcome are written
     * to the job log via {@link LogUtil}.
     */
    public void runKettleBatch() {
        // Default log/status values, overwritten as the run progresses.
        String message = "任务执行中！";
        String status = "正在执行";
        // Host IP is recorded so cluster deployments can tell which node ran the batch.
        String hostIp = "";
        try {
            hostIp = LogUtil.getLocalhostIp();
        } catch (UnknownHostException e) {
            // FIX: was e.printStackTrace(); best-effort — an unknown host only
            // degrades the log message, so we log and continue.
            logger.warn("无法获取本机IP jobkeys={}", jobkey, e);
        }
        logger.info("批量任务开始执行：开始读取配置信息  jobkeys={}", jobkey);

        List<QuartzEntityDt> listJobKettleDetails = jobService.listJobKettle(this.jobkey);
        // Tables per thread group; overridden by the KETTLEDETAILS JSON when configured.
        String threadsGroup = "20";
        QuartzEntityDt quartzEntityDt = null;
        String kettlePatch = null;
        // FIX: null check must come BEFORE size() — the old order NPE'd on a null list.
        if (listJobKettleDetails != null && !listJobKettleDetails.isEmpty()) {
            quartzEntityDt = listJobKettleDetails.get(0);
            kettlePatch = quartzEntityDt.getKETTLEPATCH();
            threadsGroup = JSONObject.parseObject(quartzEntityDt.getKETTLEDETAILS()).getString("threads_group");
        }

        logger.info("批量任务执行中：开始读取数据库批量表信息 jobkeys={}", jobkey);
        List<QuartzEntityBatchDt> listJobKettleBatch = jobService.listJobKettleBatch(this.jobkey);

        // Pool: 100 core / 200 max threads, 60ms keep-alive, bounded queue of 500 tasks.
        logger.info("批量任务执行中：创建线程池 jobkeys={}", jobkey);
        ThreadPoolExecutor executor = new ThreadPoolExecutor(
                100, 200, 60L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue<>(500));

        // Log primary key: random prefix + timestamp, kept unique per run.
        String quartzid = UUID.randomUUID().toString().substring(0, 10)
                + DateUtil.getFormatTimeString(new Date(), "yyyyMMddHHmmss");

        List<List<QuartzEntityBatchDt>> list = null;
        if (listJobKettleBatch != null && !listJobKettleBatch.isEmpty()) {
            list = PropertyUtil.getListStringArray(listJobKettleBatch, Integer.parseInt(threadsGroup));
            message = "执行主机IP" + hostIp + "; 处理表总数:" + listJobKettleBatch.size()
                    + ",每组表数据量:" + threadsGroup + ",分组数:" + list.size() + "jobkeys=" + jobkey;
            logger.info(message);
        } else {
            message = "执行主机IP" + hostIp + "; 批量任务执行中：QRTZ_JOB_KETTLE_BATCH 表中没有配置jobkeys="
                    + jobkey + "的数据，任务即将结束！";
            logger.info(message);
        }
        // Persist the initial log record for this run.
        LogUtil.saveJobKettleLogs(kettlePatch, quartzid, jobkey, jobService, status, message);
        try {
            if (list != null && !list.isEmpty()) {
                for (int i = 0; i < list.size(); i++) {
                    logger.info("批量任务执行中：开始交由具体线程执行分组任务；当前分组为:第{}组 jobkeys={}", i, jobkey);
                    executor.execute(new MyTask(list.get(i), this.jobkey));
                }
            }
            // NOTE(review): shutdown() only stops new submissions — it does NOT wait for
            // the queued MyTask workers, so "执行成功" may be logged before the work is
            // actually done. Consider awaitTermination if that guarantee is required.
            executor.shutdown();
            logger.info("批量任务执行完成 jobkeys={}", jobkey);
            status = "执行成功";
            LogUtil.updateJobKettleLogs(quartzid, jobkey, jobService, status, message);
        } catch (Exception e) {
            // Preserve the stack trace instead of only getMessage().
            logger.error("KETTLE 批量任务执行异常，jobkey={}", jobkey, e);
            status = "执行异常";
            message = "执行主机IP" + hostIp + ";" + e.getMessage();
            LogUtil.updateJobKettleLogs(quartzid, jobkey, jobService, status, message);
        }
    }

    public JobKey getJobkey() {
        return jobkey;
    }

    public void setJobkey(JobKey jobkey) {
        this.jobkey = jobkey;
    }
}