package com.sam.thread;

import com.alibaba.fastjson.JSON;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.sam.bean.JobTaskBuilder;
import com.sam.bean.JobsConfig;
import com.sam.listener.DataExtractScheduleEvent;
import com.sam.listener.JobWorkEvent;
import com.sam.util.PrintUtil;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationListener;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Component;

import javax.annotation.PostConstruct;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReentrantReadWriteLock;

@Component
@Slf4j
public class ScheduleHandler implements Runnable, ApplicationListener<JobWorkEvent> {

    /** Monotonically increasing batch id, bumped once per scheduling round. */
    private final AtomicInteger dataExtractTotalCount = new AtomicInteger(0);
    /**
     * Per-job status of the CURRENT batch, keyed by job name (plus "batchId").
     * Values: 0 = scheduled, 1 = success, -1 = failed — inferred from the checks
     * below; confirm against AbstractJobRunner. Guarded by {@link #lock} on the
     * event path; cleared at the end of each run.
     */
    private final Map<String, Integer> jobRunStatusMap = Maps.newHashMap();
    /**
     * Last-known status per job, exposed via {@link #status()}. Unlike
     * jobRunStatusMap it is never cleared, so entries survive across batches.
     */
    private final Map<String, Integer> queryMap = Maps.newHashMap();
    /** Reverse dependency index: job name -> jobs that depend on it. */
    private final Map<String, List<String>> jobDependentMapping = Maps.newHashMap();
    /** All job names (low-level jobs plus dependent jobs); sizes the latch. */
    private List<String> jobNames = Lists.newArrayList();
    private CountDownLatch countDownLatch;
    private ThreadPoolTaskExecutor threadPoolTaskExecutor;
    /** All AbstractJobRunner beans in the context, keyed by bean name. */
    private Map<String, AbstractJobRunner> jobRunnerMap;
    /** Serializes status updates and dependency checks across worker-event threads. */
    private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
    /**
     * Scheduler state: 0 = never run, 2 = running, 1 = finished OK, -1 = failed.
     * volatile: written by the scheduler thread and event threads, read by
     * status() callers.
     */
    private volatile Integer isRunning = 0;


    @Autowired
    private ApplicationContext context;
    @Autowired
    private JobTaskBuilder jobTaskBuilder;
    @Autowired
    private JobsConfig jobsConfig;

    public ThreadPoolTaskExecutor getThreadPoolTaskExecutor() {
        return threadPoolTaskExecutor;
    }

    /** Builds the worker pool, the dependency index, and collects all job runner beans. */
    @PostConstruct
    public void init() {
        threadPoolTaskExecutor = jobTaskBuilder.buildJobTaskWorkerThreadPool();
        buildJobDependentMapping();
        buildJobNamesList();
        jobRunnerMap = context.getBeansOfType(AbstractJobRunner.class);
    }

    /** Shuts down the current worker pool and replaces it with a fresh one. */
    public void reStart() {
        stopJobRunner();
        threadPoolTaskExecutor = jobTaskBuilder.buildJobTaskWorkerThreadPool();
    }

    /**
     * Returns the last-known status of every job plus the overall scheduler
     * state under the "status" key. Note: returns the internal mutable map.
     */
    public Map<String, Integer> status() {
        queryMap.putAll(jobRunStatusMap);
        queryMap.put("status", isRunning);
        return queryMap;
    }

    /** Initiates an orderly shutdown of the worker pool. */
    public void stopJobRunner() {
        threadPoolTaskExecutor.shutdown();
    }

    /**
     * Runs one scheduling batch: resets per-job status, submits all low-level
     * (root) jobs, then blocks until every job has counted the latch down.
     * Dependent jobs are started from {@link #onApplicationEvent} as their
     * prerequisites complete.
     */
    @Override
    public void run() {
        try {
            isRunning = 2;
            log.info(String.format("第%d次调度任务--开始", dataExtractTotalCount.incrementAndGet()));
            jobRunStatusMap.put("batchId", dataExtractTotalCount.get());
            jobNames.forEach(name -> jobRunStatusMap.put(name, 0));
            // NOTE(review): set on the scheduler thread; pool worker threads will not
            // see this ThreadLocal value — confirm how JobRunnerStatus is consumed.
            JobRunnerStatus.threadLocal.set(jobRunStatusMap);

            // One latch permit per job; failed jobs' unreachable dependents are
            // counted down in onApplicationEvent.
            countDownLatch = new CountDownLatch(jobNames.size());
            for (String jobName : jobsConfig.getLowLevelJobs()) {
                startJob(jobName);
            }
            countDownLatch.await();
            // Only report success if no failure event already flipped the state
            // to -1 while we were waiting (previously -1 was overwritten with 1).
            if (isRunning == 2) {
                isRunning = 1;
            }
        } catch (InterruptedException e) {
            isRunning = -1;
            Thread.currentThread().interrupt(); // preserve interrupt status
            log.error("调度任务线程池拒绝异常", e);
        } catch (Exception e) {
            isRunning = -1;
            log.error("调度任务线程池拒绝异常", e);
        } finally {
            log.info(String.format("第%d次调度任务--完成", dataExtractTotalCount.get()));
            context.publishEvent(new DataExtractScheduleEvent(dataExtractTotalCount.get(), isRunning));
            jobRunStatusMap.clear();
        }

    }

    /**
     * Submits the runner whose job name matches {@code jobName} (trimmed) to the
     * worker pool, wiring the current latch and application context into every
     * runner first (matching the original behavior).
     */
    private void startJob(String jobName) {
        String target = jobName.trim();
        jobRunnerMap.values().forEach(jobRunner -> {
            jobRunner.setCountDownLatch(countDownLatch);
            jobRunner.setApplicationContext(context);
            if (jobRunner.getJobName().equals(target)) {
                threadPoolTaskExecutor.submit(jobRunner);
            }
        });
    }

    /**
     * Handles a job-completion event: records the job's status, and either
     * drains latch permits for all transitive dependents of a failed job, or
     * starts every dependent whose prerequisites have all succeeded.
     */
    @Override
    public void onApplicationEvent(JobWorkEvent event) {
        log.info(JSON.toJSONString(event.getSource()));
        // Lock BEFORE try: the original locked inside the try, so any exception
        // thrown before lock() made the finally-unlock fail with
        // IllegalMonitorStateException, masking the real error.
        lock.writeLock().lock();
        try {
            @SuppressWarnings("unchecked")
            Map<String, ?> eventMap = (Map<String, ?>) event.getSource();
            String jobName = eventMap.get("jobName").toString();
            // Parse once; the original compared the raw value to Integer -1,
            // which never matched when the event carried the status as a String.
            int status = Integer.parseInt(eventMap.get("status").toString());
            queryMap.put(jobName, status);
            jobRunStatusMap.put(jobName, status);

            if (status == -1) {
                // The failed job's transitive dependents will never run, so
                // release their latch permits to let run() finish.
                Set<String> childSet = Sets.newHashSet();
                getChildForRecursion(jobName, jobDependentMapping, childSet);
                for (int i = 0; i < childSet.size(); i++) {
                    countDownLatch.countDown();
                }
                // Caught by the Throwable handler below: marks the batch failed.
                throw new RuntimeException(jobName + "执行失败");
            }

            Map<String, List<String>> jobDependentMap = jobsConfig.getDependentJobs();
            List<String> dependents = jobDependentMapping.get(jobName);
            if (dependents == null) {
                return; // leaf job: nothing depends on it
            }
            Map<String, Integer> checkResultMap = Maps.newHashMap();
            for (String dependent : dependents) {
                // A dependent may start only once ALL of its prerequisites are 1.
                boolean ready = true;
                for (String prerequisite : jobDependentMap.get(dependent)) {
                    Integer prerequisiteStatus = jobRunStatusMap.get(prerequisite);
                    checkResultMap.put(prerequisite, prerequisiteStatus);
                    if (prerequisiteStatus == null || prerequisiteStatus != 1) {
                        ready = false;
                        break;
                    }
                }
                if (ready) {
                    log.info("启动job:{},检查依赖job情况:{}", dependent, JSON.toJSONString(checkResultMap));
                    startJob(dependent);
                }
            }
        } catch (Throwable t) {
            isRunning = -1;
            log.error("抽数任务执行失败", t);
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Inverts the config's "job -> prerequisites" map into
     * "prerequisite -> jobs that depend on it".
     */
    private void buildJobDependentMapping() {
        Map<String, List<String>> jobDependentMap = jobsConfig.getDependentJobs();
        jobDependentMap.forEach((job, prerequisites) -> {
            for (String prerequisite : prerequisites) {
                List<String> dependents =
                        jobDependentMapping.computeIfAbsent(prerequisite, k -> Lists.newArrayList());
                if (!dependents.contains(job)) {
                    dependents.add(job);
                }
            }
        });
    }

    /** Collects the distinct names of all jobs: low-level roots plus dependent jobs. */
    private void buildJobNamesList() {
        Set<String> allJobNames = Sets.newHashSet();
        allJobNames.addAll(jobsConfig.getLowLevelJobs());
        allJobNames.addAll(jobsConfig.getDependentJobs().keySet());
        jobNames = Lists.newArrayList(allJobNames);
    }

    /**
     * Collects into {@code childSet} every job transitively reachable from
     * {@code parentName} through the reverse dependency index, i.e. every job
     * that directly or indirectly depends on it.
     */
    private void getChildForRecursion(String parentName, Map<String, List<String>> jobDependentMapping, Set<String> childSet) {
        List<String> dependents = jobDependentMapping.get(parentName);
        if (dependents == null) return;
        for (String dependent : dependents) {
            childSet.add(dependent);
            getChildForRecursion(dependent, jobDependentMapping, childSet);
        }
    }
}
