package com.project.poetry.common.job;

import com.project.poetry.common.constant.ConstantValue;
import com.project.poetry.service.entity.po.job.JobPO;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.springframework.lang.NonNull;
import org.springframework.scheduling.quartz.QuartzJobBean;

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * @author mengxj
 */
public class ScheduleJob extends QuartzJobBean {

    /**
     * Shared worker pool for all scheduled job executions.
     *
     * <p>Must be {@code static final}: Quartz creates a NEW {@code ScheduleJob}
     * instance for every trigger firing, so an instance field would build (and
     * leak) a fresh pool — including its never-timing-out core thread — on each
     * execution. A keepAliveTime of 0 only affects threads above the core size,
     * so leaked core threads would live for the lifetime of the JVM.
     */
    private static final ThreadPoolExecutor POOL = new ThreadPoolExecutor(
            1,
            50,
            0,
            TimeUnit.MILLISECONDS,
            new ArrayBlockingQueue<>(10),
            new JobThreadFactory("schedule-task_"),
            // Degrade gracefully when the queue (10) and max threads (50) are
            // saturated: run in the submitting Quartz worker thread instead of
            // throwing RejectedExecutionException and losing the task.
            new ThreadPoolExecutor.CallerRunsPolicy());

    /**
     * Names pool threads {@code "schedule-task_<n>"} and normalizes the daemon
     * flag and priority inherited from the creating thread.
     */
    private static class JobThreadFactory implements ThreadFactory {

        /** Monotonic suffix so every thread gets a unique, traceable name. */
        private final AtomicInteger num = new AtomicInteger();

        /** Thread-name prefix supplied at construction. */
        private final String name;

        JobThreadFactory(String name) {
            this.name = name;
        }

        @Override
        public Thread newThread(@NonNull Runnable r) {
            Thread t = new Thread(r, name + num.getAndIncrement());
            // Keep pool threads non-daemon so in-flight jobs can finish
            // before the JVM exits.
            if (t.isDaemon()) {
                t.setDaemon(false);
            }
            if (t.getPriority() != Thread.NORM_PRIORITY) {
                t.setPriority(Thread.NORM_PRIORITY);
            }
            return t;
        }
    }

    /**
     * Entry point invoked by the Quartz scheduler on each trigger firing:
     * extracts the job definition from the merged data map and hands it to
     * the shared pool for asynchronous execution.
     *
     * @param jobExecutionContext execution context supplied by Quartz; its
     *        merged data map is expected to contain a {@link JobPO} under
     *        {@code ConstantValue.JOB_PARAM_KEY}
     * @throws JobExecutionException declared by the Quartz contract; not
     *         thrown directly here
     */
    @Override
    protected void executeInternal(JobExecutionContext jobExecutionContext) throws JobExecutionException {
        JobDataMap jobDataMap = jobExecutionContext.getMergedJobDataMap();
        JobPO jobEntity = (JobPO) jobDataMap.get(ConstantValue.JOB_PARAM_KEY);
        ScheduleTask task = new ScheduleTask(jobEntity);
        POOL.submit(task);
    }
}
