package org.sean.framework.schedule;

import org.quartz.DisallowConcurrentExecution;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.sean.framework.canary.feign.CanaryExecutorPool;
import org.sean.framework.canary.feign.CanaryInfoHolder;
import org.sean.framework.context.SpringApplicationContext;
import org.sean.framework.logging.Logger;
import org.sean.framework.redis.RedisService;
import org.sean.framework.schedule.client.ScheduleClientImpl;
import org.sean.framework.util.DigestUtil;
import org.sean.framework.util.GSONUtil;
import org.sean.framework.util.StringUtil;
import org.springframework.scheduling.quartz.QuartzJobBean;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

/**
 * Quartz job handler: deserializes the scheduled job definition from the merged
 * job data map, takes a short-lived Redis key as a cross-node deduplication
 * lock, propagates trace/canary headers, and dispatches the job through
 * {@link ScheduleClientImpl}. Local (in-JVM) concurrent firings are already
 * suppressed by {@link DisallowConcurrentExecution}; the Redis key guards
 * against other nodes.
 *
 * @author sean.xie
 */
@DisallowConcurrentExecution
public class ScheduleJobRunnable extends QuartzJobBean {
    // One shared logger per class; no per-instance state needed.
    private static final Logger logger = Logger.newInstance(ScheduleJobRunnable.class);

    @SuppressWarnings("unchecked")
    @Override
    protected void executeInternal(JobExecutionContext context) {
        CanaryExecutorPool.getInstance().runAsync(() -> {
            RedisService redisService = SpringApplicationContext.getBean(RedisService.class);
            if (redisService == null) {
                // Redis unavailable: cannot deduplicate, skip this firing.
                return null;
            }
            String key = null;
            Integer jobId = null;
            // FIX: the previous version deleted the Redis key unconditionally in
            // finally. When this run bailed out early because hasKey(key) was true,
            // that delete released a lock owned by ANOTHER execution, defeating the
            // deduplication. Only delete the key if THIS execution created it.
            boolean lockAcquired = false;
            try {
                JobDataMap dataMap = context.getMergedJobDataMap();
                ScheduleJob job = GSONUtil.json2Obj((String) dataMap.get(ScheduleUtils.JOB_PARAM_KEY), ScheduleJob.class);
                if (job == null) {
                    return null;
                }
                jobId = job.getId();
                key = "job_timeout_" + job.getId();
                // Key already present: another execution holds the lock — skip.
                if (redisService.hasKey(key)) {
                    return null;
                }
                Map<String, String> headers = (Map<String, String>) dataMap.get(ScheduleUtils.JOB_HEADER_KEY);
                if (headers == null) {
                    headers = new HashMap<>();
                }
                String traceId = headers.get(Logger.KEY_TRACE);
                String spanId = headers.get(Logger.KEY_SPAN);
                if (StringUtil.isEmpty(traceId)) {
                    // No upstream trace context: start a fresh trace (span == trace).
                    traceId = DigestUtil.md5Hex16(UUID.randomUUID().toString());
                    spanId = traceId;
                    headers.put(Logger.KEY_TRACE, traceId);
                    headers.put(Logger.KEY_SPAN, spanId);
                }
                // Install logging / canary (gray-release) context for this thread.
                CanaryInfoHolder.handle(headers, traceId, spanId);

                // Execute the task: acquire the lock (with TTL as a safety net
                // in case this process dies before the finally runs), then send.
                logger.debug("任务启动，任务ID：{}", jobId);
                redisService.putLong(key, 1L, ScheduleClientImpl.TIMEOUT);
                lockAcquired = true;
                ScheduleClientImpl client = SpringApplicationContext.getBean(ScheduleClientImpl.class);
                if (client != null) {
                    client.send(job, headers);
                }
                return null;
            } catch (Exception e) {
                logger.error("任务执行失败，任务ID:" + jobId, e);
                return null;
            } finally {
                // Always clear the thread-bound canary/trace headers.
                CanaryInfoHolder.cleanHeaders();
                // Release the lock only if we acquired it; key may still be null
                // when parsing failed before it was assigned.
                if (lockAcquired && key != null) {
                    redisService.delete(key);
                }
            }
        });
    }

}
