package com.gopay.scheduler.shedulerx;

import com.gopay.common.constants.scheduler.SchedulerConstants;
import com.gopay.common.util.JsonUtils;
import com.gopay.scheduler.shedulerx.rdb.JobConfigRepository;
import com.gopay.scheduler.shedulerx.util.RedisOperator;
import com.gopay.scheduler.shedulerx.util.ThreadPoolUtil;
import com.gopay.scheduler.util.DateUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.sql.Timestamp;
import java.util.*;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;


/**
 * 系统调度配置加载
 * @author 19371
 */

public class SchedulerConfigLoad {

    private final static Logger LOGGER = LoggerFactory.getLogger(SchedulerConfigLoad.class);

    private static SchedulerConfigLoad schedulerConfigLoad = new SchedulerConfigLoad();

    private static volatile boolean init = true;

    private final ReentrantLock lock = new ReentrantLock();

    private final ScheduledThreadPoolExecutor simpleHeartBeat = new ScheduledThreadPoolExecutor(1, new ThreadFactory() {
        int index = 0;
        @Override
        public Thread newThread(Runnable runnable) {
            ++this.index;
            return new Thread(runnable, "simple-heart-thread-" + this.index); }
    });
    
    private SchedulerConfigLoad() {
    }

    public static SchedulerConfigLoad getSingle(){
        return schedulerConfigLoad;
    }

    public boolean init() throws Exception {
        initDB();
        return true;
    }

    /**
     * 初始化mysql
     */
    private void initDB() throws Exception {
        lock.lock();
        try {
            if(init) {
                //判断是否需要insert jobConfig
                SchedulerOperateFactory operateFactory = SchedulerOperateFactory.getFactory();
                Map<String, String> initJobData = JobConfigRepository.getSingle().selectNeed2InitConfig(operateFactory.getJobCache(),operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerOperateFactory.getFactory().getRunEnv());
                if(initJobData != null) {
                    HashMap<String,String> groupMapping = operateFactory.getGroupMapping();
                    HashMap<String,String> jobMapping = operateFactory.getJobMapping();
                    Set<Map.Entry<String, String>> entrySet = initJobData.entrySet();
                        for (Map.Entry entry : entrySet) {
                            String jobName = entry.getKey().toString();
                            JobConfigRepository.getSingle().insertJobConfig(jobName, entry.getValue().toString(), jobMapping.get(jobName),groupMapping.get(jobName),
                                    operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerOperateFactory.getFactory().getRunEnv(),operateFactory.getRegionMapping().get(jobName),
                                    operateFactory.getJobDescription().get(jobName));
                        }
                }
                //写入事件
                List<JobConfig> jobConfigs = JobConfigRepository.getSingle().selectJobConfig(operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerOperateFactory.getFactory().getRunEnv());
                HashMap<String, String> eventTraceMap = new HashMap<>(jobConfigs.size());
                HashMap<String, String> alarmLevelMap = new HashMap<>(jobConfigs.size());
                HashMap<String, Integer> alarmCountMap = new HashMap<>(jobConfigs.size());
                for (JobConfig jobConfig : jobConfigs) {
                    eventTraceMap.put(jobConfig.getJobClass(), jobConfig.getEventControl());
                    alarmLevelMap.put(jobConfig.getJobClass(),jobConfig.getAlarm());
                    alarmCountMap.put(jobConfig.getJobName(),0);
                }
                SchedulerOperateFactory.getFactory().
                        builder().
                        eventSet(eventTraceMap).
                        alarmLevelMapping(alarmLevelMap).
                        alarmCountMapping(alarmCountMap);
                //初始化redis
                initRedis();
                //初始化完毕
                heartBeat();
                init = false;
            }
        }finally {
            lock.unlock();
        }
    }

    /**
     * 初始化redis
     */
    private void initRedis() throws Exception {
        SchedulerOperateFactory operateFactory = SchedulerOperateFactory.getFactory();
        //写入Redis key:switchType, perfix:jobName, value:{state:0;update_time:2018-05-23 00:00:00}
        String jobConfigKey = operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerOperateFactory.getFactory().getRunEnv()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerConstants.SCHEDULER_MARK_CONFIG;
        String jobOperateCache = operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+operateFactory.getRunEnv()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerConstants.OPERATE;
        String urlMappingKey = operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerOperateFactory.getFactory().getRunEnv()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerConstants.SCHEDULER_MARK_RPC;
        String simpleHeartKey = operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+operateFactory.getRunEnv()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerConstants.SIMPLE_HEART;
        Map<String, String> hgetAll = RedisOperator.getAllJobConfig(jobConfigKey);
        if(hgetAll != null && !hgetAll.isEmpty()) {
            //写入mysql
            write2DBHook(hgetAll);
            RedisOperator.delJobOperateCache(jobOperateCache);
        }
        RedisOperator.delJobConfig(jobConfigKey);
        Set<Map.Entry<String, String>> entrySet = operateFactory.getJobCache().entrySet();
        Map<String,Object> jsonMap = new HashMap<>(2);
        jsonMap.put(SchedulerConstants.REDIS_STATE_MARK,SchedulerConstants.JOB_WATING_STATE);
        jsonMap.put(SchedulerConstants.REDIS_UPDATE_TIME_MARK,DateUtils.format(new Date(),DateUtils.yyyy_MM_dd_HH_mm_ss));
        for (Map.Entry entry : entrySet) {
            RedisOperator.setJobConfig(jobConfigKey, entry.getKey().toString(),JsonUtils.toJson(jsonMap));
        }
        HashMap<String, String> urlMapping = operateFactory.getUrlMapping();
        Set<Map.Entry<String, String>> urlEntries = urlMapping.entrySet();
        for (Map.Entry<String, String> entry : urlEntries){
            RedisOperator.setUrlMapping(urlMappingKey,entry.getKey(),entry.getValue(),60*3);
            RedisOperator.setSimpleHeartBeat(simpleHeartKey,
                        entry.getKey(),
                        SchedulerConstants.REDIS_HEALTH,10);
        }
    }

    private void write2DBHook(Map<String, String> hgetAll) throws IOException {
        Set<Map.Entry<String, String>> entrySet = hgetAll.entrySet();
        Timestamp createTime = new Timestamp(System.currentTimeMillis());
        for (Map.Entry<String,String> entry : entrySet){
            String job_name = entry.getKey();
            String value = entry.getValue();
            //存入宕机记录表
            Map<String, String> valueMap = JsonUtils.toMap(value);
            SchedulerOperateFactory operateFactory = SchedulerOperateFactory.getFactory();
            if(!SchedulerConstants.JOB_WATING_STATE.equals(valueMap.get(SchedulerConstants.REDIS_STATE_MARK))){
                JobConfigRepository.getSingle().insertCrash(job_name, valueMap,createTime,operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+operateFactory.getRunEnv(),operateFactory.getRegionMapping().get(job_name),
                        operateFactory.getJobDescription().get(job_name));
            }
        }
    }

    private void heartBeat(){
        simpleHeartBeat.scheduleAtFixedRate(new SimpleHeartTask(),5,5,TimeUnit.SECONDS);
    }

    /**
     * just a simple mark for console
     * can't judge the app's health
     * if u wanna to comprehensive detection, please use the heart-beat-check
     */
    class SimpleHeartTask implements Runnable{
        @Override
        public void run() {
            LOGGER.info("-----keeping the simple-heart-check-------");
            SchedulerOperateFactory operateFactory = SchedulerOperateFactory.getFactory();
            String urlMappingKey = operateFactory.getAppName()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerOperateFactory.getFactory().getRunEnv()+SchedulerConstants.SCHEDULER_HORIZONTAL_LINE+SchedulerConstants.SCHEDULER_MARK_RPC;
            String simpleHeartKey = operateFactory.getAppName() + SchedulerConstants.SCHEDULER_HORIZONTAL_LINE + operateFactory.getRunEnv() + SchedulerConstants.SCHEDULER_HORIZONTAL_LINE + SchedulerConstants.SIMPLE_HEART;
            HashMap<String, String> urlMapping = operateFactory.getUrlMapping();
            Set<Map.Entry<String, String>> urlEntries = urlMapping.entrySet();
            for (Map.Entry<String, String> entry : urlEntries){
                try {
                    RedisOperator.setUrlMapping(urlMappingKey,entry.getKey(),entry.getValue(),60*3);
                    RedisOperator.setSimpleHeartBeat(simpleHeartKey,
                            entry.getKey(),
                            SchedulerConstants.REDIS_HEALTH,10);
                } catch (Exception e) {
                    exceptionLog(e,"-----SchedulerX simple-heart-check error-----");
                }
            }
        }
    }

    public void shutdown(){
        String poolName = String.format("from class {%s} , and poolName is {%s}",this.getClass().getName(),"SIMPLE-HEART 线程池");
        ThreadPoolUtil.shutdownGracefully(poolName,simpleHeartBeat,TimeUnit.MILLISECONDS,10);
    }

    private void exceptionLog(Exception e,String description){
        LOGGER.error(String.format("the scheduler occur error ,the description is {%s}, e={%s}",description, e.getMessage()));
    }
}
