package com.feidee.data.report.service.crontask;

import com.feidee.data.report.constant.Constant;
import com.feidee.data.report.dao.IHiveDao;
import com.feidee.data.report.dao.IImpalaDao;
import com.feidee.data.report.dao.IMysqlDao;
import com.feidee.data.report.dao.IReportMetadataOpr;
import com.feidee.data.report.dao.IReportAndSourceStatusDao;
import com.feidee.data.report.dao.ISparkDao;
import com.feidee.data.report.model.ReportConfiguration;
import com.feidee.data.report.util.MonitorThread;
import com.feidee.data.report.util.QueryThread;
import com.feidee.data.report.util.quartz.BaseQuartzJob;
import com.feidee.data.report.util.threadutil.ThreadPoolUtil;
import org.apache.log4j.Logger;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.PriorityBlockingQueue;

public class CronReportJob extends BaseQuartzJob {

    // Quartz creates a new job instance for every trigger fire, so the logger
    // must be static to avoid a fresh Logger lookup per execution.
    private static final Logger logger = Logger.getLogger(CronReportJob.class);

    private IHiveDao hiveDao;                                   // Hive query handle
    private IImpalaDao impalaDao;                               // Impala query handle
    private IMysqlDao mysqlDao;                                 // MySQL query handle
    private ISparkDao sparkDao;                                 // Spark query handle
    private String frequency;                                   // schedule frequency key for the reports this job runs
    private IReportMetadataOpr reportMetadataOpr;               // report configuration store handle
    private IReportAndSourceStatusDao reportAndSourceStatusDao; // report execution status handle

    public CronReportJob() {
        super();
    }

    public CronReportJob( String cronExpressStr ) {
        super(cronExpressStr);
    }

    /**
     * Scheduled job entry point. Resolves the DAO collaborators and the
     * frequency from the Quartz {@link JobDataMap}, loads the priority queue
     * of report SQL batches for that frequency, then submits
     * {@code Constant.THREAD_NUM} {@link QueryThread} workers plus one
     * {@link MonitorThread} to the shared thread pool.
     *
     * @param context Quartz execution context carrying the JobDataMap with
     *                keys "hivedao", "impaladao", "mysqldao", "sparkDao",
     *                "frequency", "metahandler" and "runstatushandler"
     */
    @Override
    public void doJob( JobExecutionContext context ) {
        // Pull the collaborators out of the job's data map (wired in by the
        // scheduler setup code elsewhere in the project).
        JobDataMap jobDataMap = context.getJobDetail().getJobDataMap();
        this.hiveDao = (IHiveDao) jobDataMap.get("hivedao");
        this.impalaDao = (IImpalaDao) jobDataMap.get("impaladao");
        this.mysqlDao = (IMysqlDao) jobDataMap.get("mysqldao");
        this.sparkDao = (ISparkDao) jobDataMap.get("sparkDao");
        this.frequency = (String) jobDataMap.get("frequency");
        this.reportMetadataOpr = (IReportMetadataOpr) jobDataMap.get("metahandler");
        this.reportAndSourceStatusDao = (IReportAndSourceStatusDao) jobDataMap.get("runstatushandler");

        // Holds reports whose dependency check failed; they are re-queued for
        // execution once the reports they depend on have finished successfully.
        // Concurrent map because the QueryThread workers share it.
        Map<String, ArrayList<ReportConfiguration>> dependencyMap = new ConcurrentHashMap<String, ArrayList<ReportConfiguration>>();

        // Fetch the reports due for this frequency from the configuration
        // store and submit them to the thread pool.
        logger.info("报表定时任务 " + frequency + " : " + context.getJobDetail().getKey()
                + " 执行,执行周期: " + frequency + " ,工具包 hiveDao:"
                + hiveDao + " ,impalaDao:" + impalaDao + " ,mysqlDao:" + mysqlDao + " ,reportMetadataHandler:"
                + reportMetadataOpr);
        PriorityBlockingQueue<ArrayList<ReportConfiguration>> hqlSqlQueue = reportMetadataOpr
                .getFrequencyHqlSqlList(frequency);
        logger.info("报表定时任务 " + frequency + " 提交 QUEUE SIZE : " + hqlSqlQueue.size() + " ,并行度:"
                + Constant.THREAD_NUM + " ,Start to submit, Query Thread start......");

        // Presized: exactly THREAD_NUM workers are created, all draining the
        // same shared queue under one group id.
        List<Callable<Integer>> threadList = new ArrayList<Callable<Integer>>(Constant.THREAD_NUM);
        String groupName = ThreadPoolUtil.getUuid();
        for (int i = 0; i < Constant.THREAD_NUM; i++) {
            threadList.add(new QueryThread(hqlSqlQueue, dependencyMap, hiveDao, impalaDao, mysqlDao, sparkDao, "0", reportAndSourceStatusDao, groupName));
        }
        // Submit the report worker threads.
        ThreadPoolUtil.submitReportExecTask(groupName, threadList);
        // Submit the monitoring thread that watches this group's progress.
        ThreadPoolUtil.submitReportMonitorTask(new MonitorThread(groupName, hqlSqlQueue, Constant.THREAD_NUM));
    }

    /**
     * Diagnostic summary of all injected collaborators. Now includes
     * {@code sparkDao} and {@code reportAndSourceStatusDao}, which the
     * previous version omitted despite being populated like the other fields.
     */
    @Override
    public String toString() {
        return "CronReportJob [hiveDao=" + hiveDao + ", impalaDao=" + impalaDao + ", mysqlDao=" + mysqlDao
                + ", sparkDao=" + sparkDao + ", frequency=" + frequency
                + ", reportMetadataOpr=" + reportMetadataOpr
                + ", reportAndSourceStatusDao=" + reportAndSourceStatusDao + "]";
    }

}
