package com.ds.lens.data.scheduler;

import com.alibaba.fastjson.JSONObject;
import com.ds.lens.data.common.constant.AlarmEventTypeEnum;
import com.ds.lens.data.common.integration.apm.dto.MetadataUpdateEvent;
import com.ds.lens.data.kafka.KafkaProducer;
import com.ds.lens.data.util.DateTimeUtil;
import lombok.extern.slf4j.Slf4j;
import net.javacrumbs.shedlock.core.SchedulerLock;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;

import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

/**
 * Description:
 * 调度器
 *
 * @author WeiShaoying
 * @date 2019-06-20
 */
@Component
@Slf4j
public class Scheduler {

    /**
     * 25分钟
     */
    private static final long INIT_DATA_LOCK_TIME = 25 * 60 * 1000L;
    /**
     * 2小时
     */
    private static final long INIT_ENDPOINT_LOCK_TIME = 2 * 60 * 60 * 1000L;
    /**
     * 10秒
     */
    private static final long KAFKA_HEART_BEAT_LOCK_TIME = 10 * 1000L;
    private static final String FORMAT = "yyyy-MM-dd HH:mm:ss";

    @Value("#{'${lens.scheduler.initEndpoints.exclude.apps}'.split(',')}")
    private List<String> excludedApps;

    @Value("${lens.kafka.metadata.event.topic}")
    private String jobKafkaTopic;

    @Autowired
    KafkaProducer kafkaProducer;

    private ExecutorService service = new ThreadPoolExecutor(3, 3, 40, TimeUnit.SECONDS, new LinkedBlockingQueue(3), new NamedThreadFactory("Scheduler"));
//    private ExecutorService service1 = new ThreadPoolExecutor(3,3,40, TimeUnit.SECONDS,new LinkedBlockingQueue(3),new NamedThreadFactory("Scheduler"));
//    private ExecutorService service = new ThreadPoolExecutor(3,3,40, TimeUnit.SECONDS,new LinkedBlockingQueue(3),new NamedThreadFactory("Scheduler"));

    /**
     * 定时同步APP tier instance 数据到告警系统
     * 每半小时执行一次
     */
    @Scheduled(cron = "${lens.scheduler.initData.cron}")
    @SchedulerLock(name = "initData-schedulerLock", lockAtMostFor = INIT_DATA_LOCK_TIME, lockAtLeastFor = INIT_DATA_LOCK_TIME)
    private void initData() {
        service.submit(() -> {
            log.info("initData定时任务执行开始：{}", DateTimeUtil.convertDateToString(new Date(), FORMAT));
            log.info("initData定时任务执行结束：{}", DateTimeUtil.convertDateToString(new Date(), FORMAT));
        });
    }


    /**
     * 定时同步常用endpoint 数据到告警系统
     * 默认为每天9点开始执行一次
     */
    @Scheduled(cron = "${lens.scheduler.initEndpoints.cron}")
    @SchedulerLock(name = "initEndpoints-schedulerLock", lockAtMostFor = INIT_ENDPOINT_LOCK_TIME, lockAtLeastFor = INIT_ENDPOINT_LOCK_TIME)
    private void initEndpoints() {
        service.submit(() -> {
            log.info("initEndpoints定时任务执行开始：{}", DateTimeUtil.convertDateToString(new Date(), FORMAT));
            log.info("initEndpoints定时任务执行结束：{}", DateTimeUtil.convertDateToString(new Date(), FORMAT));
        });
    }


    /**
     * Heart beat for the flink jobs
     * Default every 30s
     */
    @Scheduled(cron = "${lens.scheduler.jobHeartBeat.cron}")
    @SchedulerLock(name = "jobHeartBeat-schedulerLock", lockAtMostFor = KAFKA_HEART_BEAT_LOCK_TIME, lockAtLeastFor = KAFKA_HEART_BEAT_LOCK_TIME)
    private void jobHeartBeat() {
        service.submit(() -> {
            log.debug("jobHeartBeat start time：{}", DateTimeUtil.convertDateToString(new Date(), FORMAT));
            MetadataUpdateEvent metadata = new MetadataUpdateEvent();
            metadata.setEventType(AlarmEventTypeEnum.HEART_BEAT_EVENT);
            kafkaProducer.sendMessage(jobKafkaTopic, AlarmEventTypeEnum.HEART_BEAT_EVENT.name(), JSONObject.toJSONString(metadata));
            log.debug("jobHeartBeat end time：{}", DateTimeUtil.convertDateToString(new Date(), FORMAT));
        });
    }

}
