package com.analyst.service.impl;


import com.analyst.dto.UpdateResult;
import com.analyst.entity.JobStatus;
import com.analyst.entity.Job;
import com.analyst.dto.ui.EdgeUI;
import com.analyst.dto.ui.NodeUI;
import com.analyst.service.IDependService;
import com.analyst.service.IEtlService;
import com.analyst.service.ITimeService;
import com.analyst.utils.DateUtil;
import com.github.benmanes.caffeine.cache.Cache;
import org.apache.poi.hssf.usermodel.HSSFCell;
import org.apache.poi.hssf.usermodel.HSSFRow;
import org.apache.poi.hssf.usermodel.HSSFSheet;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import javax.annotation.Resource;

import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.Function;
import java.util.stream.Collectors;

@Service
public class TimeService implements ITimeService {

    @Resource
    private IDependService dependService;
    @Resource
    private IEtlService etlService;
    @Resource
    private ITimeService timeService;

    @Autowired
    private Cache<String, Map<String, Job>> jobCache;

    @Override
    public UpdateResult updateTime(Job job, Date newBeginDate) throws Exception {

        // Sample data: hand-wired fixture graph (result is currently unused below — TODO confirm intent).
        List<Job> sqlJobs = initJobs();
        // Alternative: load the graph from the etl/dependency tables.
//        List<SqlJob> sqlJobs = initJobsByDataBase();
        Job endJob = getEndJob(job);
        long oldEndTime = endJob.getEndTime().getTime();
        // NOTE(review): both update strategies below are commented out, so nothing mutates
        // the graph between the two end-time reads — the reported improvement is always 0.
//        serialUpdate(job, job.getBeginTime().getTime() - newBeginDate.getTime());
//        return new UpdateResult(getKeyRoute(job),updateTime);

//        parallelUpdate(job,job.getBeginTime().getTime() - newBeginDate.getTime());
        endJob = getEndJob(job);
        long newEndTime = endJob.getEndTime().getTime();
        // Key route plus the end-node improvement, converted from ms to seconds.
        return new UpdateResult(getKeyRoute(job), (newEndTime - oldEndTime) / 1000);
    }

    // Serial (depth-first, recursive) update starting from a single job.
    /**
     * Serial (recursive) update: shifts {@code job} earlier by {@code improveTime}
     * milliseconds and propagates the change depth-first through its successors.
     *
     * @param job         node whose begin/end times are shifted
     * @param improveTime amount to move earlier, in ms (a negative value moves later)
     */
    public void serialUpdate(Job job, long improveTime) {
        List<Job> nextList = job.getNextList();
        // Shift this node's window earlier by improveTime.
        job.setBeginTime(new Date(job.getBeginTime().getTime() - improveTime));
        job.setEndTime(new Date(job.getEndTime().getTime() - improveTime));
        // Terminal node reached: nothing further to propagate.
        if (nextList.size() == 0) {
            return;
        }
        // For each successor, re-rank this job among its dependencies and recompute
        // how much the successor itself can move.
        for (Job next : nextList) {

            // Sibling set: everything the successor depends on (priority queue;
            // presumably ordered so peek() yields the latest end time — TODO confirm Job's comparator).
            PriorityQueue<Job> dependencies = next.getDependencies();

            // PriorityQueue does not re-sort on mutation: remove and re-offer so the
            // ordering reflects this job's new times.
            dependencies.remove(job);
            dependencies.offer(job);

            Job peek = dependencies.peek();

            // Successor's new begin = latest dependency end + its fixed pending time.
            long newBeginTime = peek.getEndTime().getTime() + next.getPendingTime();
            improveTime = next.getBeginTime().getTime() - newBeginTime;
            if (improveTime == 0)
                continue;
            // Recurse into the successor with its own improvement.
            serialUpdate(next, improveTime);
        }


    }

    // Concurrent update, v1: thread-per-job propagation driven by Job.start() and a latch.
    /**
     * Concurrent update, version 1: flags the whole downstream subtree as
     * ON_UPDATE, stores the improvement on the job, starts the job's own update
     * thread (Job is startable — presumably extends Thread; TODO confirm) and
     * blocks until the terminal node counts the latch down.
     *
     * @param job         node whose update kicks off the propagation
     * @param improveTime improvement in milliseconds applied to {@code job}
     */
    public void parallelUpdateV1(Job job, long improveTime) {
        // 0. Mark this job and every successor as being updated.
        setStatus(job);
        // 1. Wait until all required predecessors have been updated.
        // 2. Update this node.
        // 3. Spawn an update thread per successor.
        job.setImproveTime(improveTime);
        // One-shot latch signalled when the terminal node finishes updating.
        CountDownLatch latch = new CountDownLatch(1);
        job.setLatch(latch);
        job.start();
        try {
            // Block the caller until the end node reports completion.
            latch.await();
            System.out.println("末节点更新结束");
        } catch (InterruptedException e) {
            // NOTE(review): interruption is swallowed; consider re-interrupting
            // via Thread.currentThread().interrupt().
            e.printStackTrace();
        }

        // Earlier polling-based wait, kept for reference:
//        SqlJob endJob = job;
//        while(endJob.getNextList().size()!=0)
//            endJob=endJob.getNextList().get(0);
//        while (true){
//            if (endJob.getStatus()== JobStatus.DONE) {
//                return;
//            }
//            try {
//                Thread.sleep(1000);
//            } catch (InterruptedException e) {
//                e.printStackTrace();
//            }
//        }


    }

    // Concurrent update, v2: level-by-level recomputation of all affected jobs.
    /**
     * Concurrent update, version 2: shifts each job in {@code updateList} by the
     * matching entry of {@code improveTime} (ms), then recomputes begin/end times
     * for every affected downstream job, one topological level at a time.
     *
     * @param updateList  jobs whose end times change; same length as improveTime
     * @param improveTime per-job improvement in ms (positive = earlier)
     */
    public void parallelUpdate(List<Job> updateList, List<Long> improveTime) {
        // BUG FIX: `map` is a mutable static shared with setLevel() and was never
        // cleared, so jobs collected by a previous invocation leaked into this
        // run's levelList and were re-updated. Clear it up front.
        // NOTE(review): concurrent callers still race on the shared static map —
        // making it a local/parameter would be the full fix.
        map.clear();

        // 1. Assign a topological level to every job reachable from updateList
        //    (setLevel also records each visited job in the static map).
        updateList.forEach(job -> setLevel(job, 0));

        // 2. Bucket every collected job by its level.
        ArrayList<ArrayList<Job>> levelList = new ArrayList<>();
        map.values().forEach(
                job -> {
                    int level = job.getLevel();
                    while (levelList.size() <= level) {
                        levelList.add(new ArrayList<>());
                    }
                    levelList.get(level).add(job);
                }
        );

        // 3. Apply the explicit improvement to each requested job first.
        for (int i = 0; i < updateList.size(); i++) {
            updateJob(updateList.get(i), improveTime.get(i));
        }

        // 4. Re-derive times level by level. Level 0 holds only root update jobs,
        //    which were already shifted above, so start at level 1.
        for (int i = 1; i < levelList.size(); i++) {
            for (Job job : levelList.get(i)) {
                updateJob(job);
            }
        }
    }

    /**
     * Shifts one node from the update list and re-ranks it inside each
     * successor's dependency queue.
     *
     * @param updateJob   the node from the update list to shift
     * @param improveTime the improvement (ms) associated with this node
     */
    // Moves the node earlier by `improveTime` ms, then refreshes its position in
    // every successor's dependency queue so later peeks see the new end time.
    private void updateJob(Job updateJob, Long improveTime) {
        long delta = improveTime;
        updateJob.setEndTime(new Date(updateJob.getEndTime().getTime() - delta));
        updateJob.setBeginTime(new Date(updateJob.getBeginTime().getTime() - delta));
        // A PriorityQueue does not re-sort on element mutation: remove and
        // re-offer this node in each successor's dependency queue.
        for (Job successor : updateJob.getNextList()) {
            PriorityQueue<Job> siblings = successor.getDependencies();
            siblings.remove(updateJob);
            siblings.offer(updateJob);
        }
    }


    // Recomputes a node's begin/end from its (already updated) dependencies:
    // new begin = latest dependency end + the node's fixed pending time.
    private void updateJob(Job sqlJob) {
        PriorityQueue<Job> deps = sqlJob.getDependencies();
        Job latest = deps.peek();
        // No predecessors: this node's begin time is not derived from anything
        // that changed, so there is nothing to recompute.
        if (latest == null) {
            return;
        }
        // The job's own duration is preserved.
        long period = sqlJob.getEndTime().getTime() - sqlJob.getBeginTime().getTime();
        long newBeginTime = latest.getEndTime().getTime() + sqlJob.getPendingTime();
        sqlJob.setBeginTime(new Date(newBeginTime));
        sqlJob.setEndTime(new Date(newBeginTime + period));

        List<Job> nextList = sqlJob.getNextList();
        if (nextList == null) return;
        // Re-rank this node inside each successor's dependency queue.
        for (Job next : nextList) {
            next.getDependencies().remove(sqlJob);
            next.getDependencies().offer(sqlJob);
        }
    }

    /** Recursively flags this job and every downstream job as ON_UPDATE. */
    public void setStatus(Job job) {
        job.setStatus(JobStatus.ON_UPDATE);
        for (Job successor : job.getNextList()) {
            setStatus(successor);
        }
    }

    // Shared scratch space: collects every job visited by setLevel() so that
    // parallelUpdate() can bucket them by level afterwards.
    private static Map<String, Job> map = new HashMap<>();


    /**
     * Assigns a topological level to {@code job} (keeping the larger of its
     * current level and {@code n}) and recurses into its successors with
     * {@code n + 1}; every visited job is also recorded in the static map.
     *
     * @param job node to level
     * @param n   candidate level for this node (0 for roots)
     */
    public void setLevel(Job job, int n) {
        // Keep the deepest level ever assigned to this node.
        if (n > job.getLevel()) {
            job.setLevel(n);
        }
        // Record the visit so parallelUpdate can bucket this node later.
        map.put(job.getId(), job);
        // Successors sit one level deeper.
        for (Job successor : job.getNextList()) {
            setLevel(successor, n + 1);
        }
    }


    // Builds a fixed, hand-wired job graph (test fixture).
    /**
     * Builds a small hand-wired fixture graph of six jobs (ids "0"–"5") with
     * fixed begin/end times, usable instead of the database-backed loader.
     *
     * NOTE(review): job0 is created and wired as a dependency of job5 but is NOT
     * included in the returned list — confirm whether that is intentional.
     *
     * @return jobs 1–5 (job0 is reachable only through job5's dependencies)
     * @throws Exception if a timestamp fails to parse
     */
    public static List<Job> initJobs() throws Exception {
        Job job1 = new Job("1", DateUtil.parseTime("2023-07-12 01:00:10"), DateUtil.parseTime("2023-07-12 01:00:15"));

        Job job2 = new Job("2", DateUtil.parseTime("2023-07-12 01:00:15"), DateUtil.parseTime("2023-07-12 01:00:20"));

        Job job3 = new Job("3", DateUtil.parseTime("2023-07-12 01:00:20"), DateUtil.parseTime("2023-07-12 01:00:25"));

        Job job4 = new Job("4", DateUtil.parseTime("2023-07-12 01:00:30"), DateUtil.parseTime("2023-07-12 01:00:35"));

        Job job5 = new Job("5", DateUtil.parseTime("2023-07-12 01:00:20"), DateUtil.parseTime("2023-07-12 01:00:24"));
        Job job0 = new Job("0", DateUtil.parseTime("2023-07-12 01:00:10"), DateUtil.parseTime("2023-07-12 01:00:14"));
        List<Job> jobs = new ArrayList<>(Arrays.asList(job1, job2, job3, job4, job5));

        // Wiring: nextList = successors, dependencies = predecessors.
        // job0 -> job5
        job0.getNextList().add(job5);
        job5.getDependencies().offer(job0);
        // job1 -> job2, job1 -> job5
        job1.getNextList().add(job2);
        job1.getNextList().add(job5);


        // job2 -> job3
        job2.getNextList().add(job3);
//        job2.getNextList().add(job4);
//
        job2.getDependencies().offer(job1);
        job5.getDependencies().offer(job1);

        // job3 -> job4, job5 -> job4
        job3.getNextList().add(job4);
        job5.getNextList().add(job4);

        job3.getDependencies().offer(job2);


        // job4 depends on job3 and job5
//        job4.setPendingTime(5000);
        job4.getDependencies().offer(job3);
        job4.getDependencies().offer(job5);


        // Pending times could be derived here:
//        calPendingTime(job1,false);

        return jobs;
    }

    /**
     * Used by the parallelUpdateByConcreteTime endpoint: returns the cached job
     * map when present, otherwise loads it from the database and caches it.
     *
     * @return job id -> Job map cached under the "databaseJobs" key
     */
    @Override
    public Map<String, Job> initJobsIntoMap() {
        // Caffeine's get(key, fn) loads atomically, closing the check-then-act
        // race the old getIfPresent/put sequence had: two concurrent callers
        // could both hit the database and overwrite each other's entry.
        return jobCache.get("databaseJobs", key ->
                dependService.getJobList().stream()
                        .collect(Collectors.toMap(Job::getId, Function.identity())));
    }

    /**
     * Used by the getGraphData endpoint: always reloads the job graph from the
     * database and overwrites the cached entry, so every query click effectively
     * clears the cache.
     *
     * @return freshly loaded job id -> Job map
     */
    @Override
    public Map<String, Job> initJobsIntoMapFromDatabase() {
        // Reload from the database unconditionally (no cache read), then
        // overwrite the cached entry — a forced refresh of "databaseJobs".
        Map<String, Job> freshJobs = dependService.getJobList()
                .stream()
                .collect(Collectors.toMap(Job::getId, Function.identity()));
        jobCache.put("databaseJobs", freshJobs);
        return freshJobs;
    }

    /** Thin pass-through to the dependency service; no caching involved. */
    @Override
    public List<Job> initJobsByDataBase() {
        return dependService.getJobList();
    }

    /**
     * Finds the key route ending at the given node's terminal job and highlights
     * every node on it (sets highLight = true).
     *
     * @param job any node; the walk starts from its terminal node
     * @return key-route nodes ordered from start to end
     */
    @Override
    public List<Job> getKeyRoute(Job job) {
        // Walk forward to the terminal node, then follow the "latest dependency"
        // chain backwards, highlighting every node on the way.
        Job current = getEndJob(job);
        current.setHighLight(true);
        ArrayList<Job> route = new ArrayList<>();
        route.add(current);
        for (Job blocker = current.getDependencies().peek();
             blocker != null;
             blocker = blocker.getDependencies().peek()) {
            blocker.setHighLight(true);
            route.add(blocker);
        }
        // Collected end -> start; flip so the route reads start -> end.
        Collections.reverse(route);
        return route;
    }

    @Override
    public void resetUpdateStyle(List<String> updateJobIds, Job endJob, Map<String, Job> jobMap) {
        if (jobMap == null) {
            return;
        }
        // Clear the "update" flag on every job that is NOT in the updated set.
        // (endJob is currently unused by this method.)
        for (Job cachedJob : jobMap.values()) {
            boolean isUpdated = updateJobIds.contains(cachedJob.getId());
            if (!isUpdated) {
                cachedJob.setUpdate(false);
            }
        }
        // Persist the mutated map back into the cache.
        jobCache.put("databaseJobs", jobMap);
    }

    /** Indexes the upstream closure of {@code jobName} by job id. */
    @Override
    public Map<String, Job> getJobsOnRoute(String jobName, Map<String, Job> sqlJobs) {
        return getJobs(jobName, sqlJobs).stream()
                .collect(Collectors.toMap(Job::getId, Function.identity()));
    }

    // Collects jobName's node plus everything it (transitively) depends on,
    // in DFS pre-order.
    public List<Job> getJobs(String jobName, Map<String, Job> jobMap) {
        List<Job> collected = new ArrayList<>();
        dfs(jobMap.get(jobName), collected, new HashSet<>());
        return collected;
    }

    // Pre-order DFS over the dependency graph; `visited` guards against
    // revisiting shared (diamond) dependencies.
    public void dfs(Job job, List<Job> jobs, Set<Job> visited) {
        if (job == null) {
            return;
        }
        // Set.add returns false when the node was already seen.
        if (!visited.add(job)) {
            return;
        }
        jobs.add(job);
        job.getDependencies().forEach(dep -> dfs(dep, jobs, visited));
    }

    /**
     * Builds the UI graph (nodes and edges) for everything upstream of the
     * queried node, without filtering out nodes that precede the update nodes.
     *
     * @param job queried node
     * @return two-element list: [set of NodeUI, set of EdgeUI]
     */
    @Override
    public List<Set> getRoute(Job job, Map<String, Job> sqlJobs) {
        String jobId = job.getId();
        Map<String, Job> cachedJobs = jobCache.getIfPresent(jobId);

        // Highlight the key route ending at the queried node (side effect of getKeyRoute).
        List<Job> keyRouteList = new ArrayList<>();
        if (sqlJobs.containsKey(jobId)) {
            keyRouteList = getKeyRoute(sqlJobs.get(jobId));
        }

        if (cachedJobs != null) {
            // Turn highlighting off for every cached job that is not on the key route.
            for (Job cachedJob : cachedJobs.values()) {
                if (!keyRouteList.contains(cachedJob)) {
                    cachedJob.setHighLight(false);
                }
            }
            // Write the mutated map back under the queried job's id.
            jobCache.put(jobId, cachedJobs);
        }

        // Build the UI graph: one node per job (label = id + time window) and
        // one edge per dependency relation.
        Set<NodeUI> nodes = new HashSet<>();
        Set<EdgeUI> edges = new HashSet<>();
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss");
        for (Job tempJob : sqlJobs.values()) {
            String tempId = tempJob.getId() + "\n" + dateFormat.format(tempJob.getBeginTime()) + "~" + dateFormat.format(tempJob.getEndTime());
            nodes.add(new NodeUI(tempId, tempJob.getPendingTime() / 1000));
            for (Job dependJob : tempJob.getDependencies()) {
                String dependId = dependJob.getId() + "\n" + dateFormat.format(dependJob.getBeginTime()) + "~" + dateFormat.format(dependJob.getEndTime());
                edges.add(new EdgeUI(dependId, tempId));
            }
        }

        List<Set> result = new ArrayList<>();
        result.add(nodes);
        result.add(edges);
        return result;
    }

    /**
     * Console flow: resolves the route nodes from update-job ids and query-job
     * ids, excluding everything upstream of the update nodes.
     *
     * @param updateJobIds ids of jobs being updated
     * @param queryJobIds  ids of queried jobs
     * @return route nodes in start-to-end order. NOTE(review): begin/end times in
     *         the result may be stale — updates are not written back to the
     *         database or cache, and the key route is taken from a fresh job list.
     */
    public List<Job> getKeyRoute(List<String> updateJobIds, List<String> queryJobIds, Map<String, Job> sqlJobs) {
        ArrayList<Job> queryJobList = new ArrayList<>();
        Map<String, List<Job>> keyRouteList = new HashMap<>();
        for (String queryJobId : queryJobIds) {
            if (sqlJobs.containsKey(queryJobId)) {
                queryJobList.add(sqlJobs.get(queryJobId));
                // NOTE(review): slow, and keyRouteList is never read afterwards —
                // getKeyRoute is kept here only for its side effect of highlighting
                // the key route; confirm before removing.
                keyRouteList.put(queryJobId, getKeyRoute(sqlJobs.get(queryJobId)));
            }
        }
        Stack<Job> jobStack = new Stack<>();
        ArrayList<Job> jobs = new ArrayList<>();
        Set<Job> jobSet = new HashSet<>();
        // Seed the stack with every resolved query node.
        for (Job job : queryJobList) {
            jobStack.push(job);
        }
        while (!jobStack.isEmpty()) {
            Job tempJob = jobStack.pop();
            jobs.add(tempJob);
            // Stop expanding at an update node that has no update nodes further
            // upstream — everything before it is unaffected by the update.
            if (updateJobIds.contains(tempJob.getId()) && !hasFrontUpdateJobs(updateJobIds, tempJob)) {
                jobSet.add(tempJob);
                continue;
            }
            PriorityQueue<Job> dependencies = tempJob.getDependencies();
            jobSet.add(tempJob);
            // Push unvisited dependencies for further expansion.
            for (Job job : dependencies) {
                if (jobSet.contains(job)) {
                    continue;
                }
                jobStack.push(job);
            }
        }
        // Collected end -> start; reverse into start -> end order.
        Collections.reverse(jobs);
        return jobs;
    }

    /**
     * Checks whether any (direct or transitive) predecessor of {@code tempJob}
     * is one of the update jobs.
     *
     * @param tempJob node whose upstream path is inspected
     * @return true when an update job appears upstream of tempJob, false otherwise
     */
    /**
     * Returns whether any node upstream of {@code tempJob} — a direct or
     * transitive dependency — is one of the update jobs.
     *
     * @param updateJobIds ids of the jobs being updated
     * @param tempJob      node whose upstream path is inspected
     * @return true when an update job exists upstream of {@code tempJob}
     */
    public static Boolean hasFrontUpdateJobs(List<String> updateJobIds, Job tempJob) {
        for (Job dependency : tempJob.getDependencies()) {
            if (updateJobIds.contains(dependency.getId())) {
                return true;
            }
            // BUG FIX: the recursive result used to be discarded, so only direct
            // dependencies were ever checked; now the transitive path is honoured,
            // matching the documented contract.
            if (hasFrontUpdateJobs(updateJobIds, dependency)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public void calPendingTime(Job job, Map<String, Job> map, boolean isEnd) {
        // No-op when there is no job map to cache against.
        if (map == null) {
            return;
        }
        calPendingTime(job, isEnd, map);
        // Cache the (mutated) map under the entry job's id.
        jobCache.put(job.getId(), map);
    }

    // Recursively computes pending times, walking upstream from the end node.
    /**
     * Recursively computes pending time for {@code job} and its upstream
     * dependencies: pendingTime = node's begin time minus the latest end time
     * among its dependencies (per the dependency queue's peek).
     *
     * @param job   starting node; when {@code isEnd} is false the walk first
     *              advances to the terminal node via the first-successor chain
     * @param isEnd whether {@code job} is already the terminal node
     * @param map   carried through the recursion but not read here — TODO
     *              confirm whether it was meant to be updated
     */
    public void calPendingTime(Job job, boolean isEnd, Map<String, Job> map) {
        // Walk forward to the terminal node unless the caller already handed it over.
        while (!isEnd && job.getNextList().size() != 0) {
            job = job.getNextList().get(0);
        }
        isEnd = true;
        // 1. Compute this node's pending time from its latest-ending dependency.
        if (job.getDependencies().size() != 0) {
            PriorityQueue<Job> jobDependencies = job.getDependencies();
            Date beginTime = job.getBeginTime();
            Date dependencyEndTime = jobDependencies.peek().getEndTime();
            long pendingtime = beginTime.getTime() - dependencyEndTime.getTime();
            job.setPendingTime(pendingtime);
            // Recurse upstream; isEnd is now true so the forward walk is skipped.
            // NOTE(review): shared (diamond) dependencies are re-visited — there is
            // no visited-set guard here, unlike calPendingTimeMultRoot.
            for (Job jobDependency : jobDependencies) {
                calPendingTime(jobDependency, isEnd, map);
            }
        }
    }

    /**
     * Iterative pending-time computation supporting multiple terminal nodes:
     * starts from every end node and walks upstream with an explicit stack,
     * setting pendingTime = beginTime - latest dependency end for each node.
     */
    @Override
    public void calPendingTimeMultRoot(List<Job> jobs) {
        Stack<Job> pending = new Stack<>();
        // Seed with every terminal node. (Finding them scans the whole list.)
        pending.addAll(getMultEndJobList(jobs));
        Set<Job> computed = new HashSet<>();

        while (!pending.isEmpty()) {
            Job current = pending.pop();
            PriorityQueue<Job> dependencies = current.getDependencies();
            // Source nodes have no dependencies and therefore no pending time.
            if (dependencies.isEmpty()) {
                continue;
            }
            // Gap between this node's start and the latest end among its dependencies.
            long gap = current.getBeginTime().getTime()
                    - dependencies.peek().getEndTime().getTime();
            current.setPendingTime(gap);
            computed.add(current);
            // Queue up any dependency whose pending time is not computed yet.
            for (Job dependency : dependencies) {
                if (!computed.contains(dependency)) {
                    pending.push(dependency);
                }
            }
        }
    }

    // Terminal nodes = jobs with no successors.
    private static List<Job> getMultEndJobList(List<Job> jobs) {
        return jobs.stream()
                .filter(job -> job.getNextList().isEmpty())
                .collect(Collectors.toList());
    }

    /** Follows the first-successor chain from {@code job} to the terminal node. */
    @Override
    public Job getEndJob(Job job) {
        Job current = job;
        while (!current.getNextList().isEmpty()) {
            current = current.getNextList().get(0);
        }
        return current;
    }

    /**
     * Console-driven variant: applies concrete new end times to the update jobs,
     * propagates the change, exports the affected route to Excel, and returns
     * each query job's resulting end time.
     *
     * @param updateJobIds   ids of jobs whose end times change
     * @param newEndTimeList new end times (currently only the first entry is used)
     * @param queryJobIds    ids of jobs whose resulting end times are reported
     * @return query job id -> formatted new end time
     */
    @Override
    public Map<String, String> parallelUpdateByConcreteTime(List<String> updateJobIds, List<Date> newEndTimeList, List<String> queryJobIds) {
        List<Job> jobList = initJobsByDataBase();
        // Derive pending times before shifting anything.
        calPendingTimeMultRoot(jobList);
        Map<String, Job> sqlJobs = jobList.stream().collect(Collectors.toMap(Job::getId, Function.identity()));

        // BUG FIX: resolve each update job and its improvement together so the
        // two lists stay index-aligned. The old code built them in separate
        // loops: a missing id filtered the job list but not the improvement
        // list, pairing jobs with the wrong times (and NPE-ing on the missing id).
        List<Job> updateJobList = new ArrayList<>();
        List<Long> improveTimeList = new ArrayList<>();
        for (String jobId : updateJobIds) {
            Job updateJob = sqlJobs.get(jobId);
            if (updateJob != null) {
                // Only the first new end time is applied to every update job;
                // switch to a per-id time here if multi-time mode is enabled
                // (see AnalysisApplication.isMulTime).
                improveTimeList.add(updateJob.getEndTime().getTime() - newEndTimeList.get(0).getTime());
                updateJobList.add(updateJob);
            }
        }

        ArrayList<Job> queryJobList = new ArrayList<>();
        for (String queryJobId : queryJobIds) {
            if (sqlJobs.containsKey(queryJobId)) {
                queryJobList.add(sqlJobs.get(queryJobId));
            }
        }

        parallelUpdate(updateJobList, improveTimeList);

        // Report (and export) the route feeding the query jobs after the update.
        List<Job> keyRouteAfterUpdate = getKeyRoute(updateJobIds, queryJobIds, sqlJobs);
        System.out.println("更新后:");
        for (Job job : keyRouteAfterUpdate) {
            System.out.println(job);
        }
        writeJobsToExcel(keyRouteAfterUpdate);

        // BUG FIX: iterate the resolved query jobs, not the raw id list — the
        // old index pairing threw IndexOutOfBounds (or mismatched entries) when
        // some query ids were absent from sqlJobs.
        Map<String, String> queryResultMap = new HashMap<>();
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        for (Job queryJob : queryJobList) {
            queryResultMap.put(queryJob.getId(), dateFormat.format(queryJob.getEndTime()));
        }
        return queryResultMap;
    }

    /**
     * Exports the updated route to an Excel (.xls) sheet: one row per job with
     * id, tx date, begin/end times, dependency ids and pending time in seconds.
     *
     * @param keyRouteAfterUpdate jobs to export, in route order
     */
    public void writeJobsToExcel(List<Job> keyRouteAfterUpdate) {
        // One workbook per export; a single sheet named "updatedJobs".
        HSSFWorkbook workbook = new HSSFWorkbook();
        HSSFSheet sheet = workbook.createSheet("updatedJobs");

        // Header row (row 0).
        HSSFRow header = sheet.createRow(0);
        String[] titles = {"jobid", "txdate", "begintime", "endtime", "dependencies", "pendingtime(s)"};
        for (int c = 0; c < titles.length; c++) {
            header.createCell(c).setCellValue(titles[c]);
        }

        // BUG FIX: the pattern used "YYYY" (week-based year), which renders the
        // wrong year for dates near a year boundary; "yyyy" is the calendar year.
        // Also hoisted out of the row loop — one formatter instance suffices.
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");

        // Data rows.
        for (int i = 0; i < keyRouteAfterUpdate.size(); i++) {
            HSSFRow row = sheet.createRow(i + 1);
            Job tempJob = keyRouteAfterUpdate.get(i);

            // Comma-separated (trailing comma preserved) list of dependency ids.
            StringBuilder dependencyJobs = new StringBuilder();
            for (Job dependency : tempJob.getDependencies()) {
                dependencyJobs.append(dependency.getId()).append(",");
            }

            row.createCell(0).setCellValue(tempJob.getId());
            // txdate is the date part of the END time, so it stays stable.
            String endtime = dateFormat.format(tempJob.getEndTime());
            row.createCell(1).setCellValue(endtime.substring(0, endtime.indexOf(" ")));
            row.createCell(2).setCellValue(dateFormat.format(tempJob.getBeginTime()));
            row.createCell(3).setCellValue(dateFormat.format(tempJob.getEndTime()));
            row.createCell(4).setCellValue(dependencyJobs.toString());
            row.createCell(5).setCellValue(tempJob.getPendingTime() / 1000);
        }

        // try-with-resources closes the stream even when write() throws
        // (the old code leaked the FileOutputStream on failure).
        try (FileOutputStream fos = new FileOutputStream("D:\\Users\\W\\Desktop\\updatedJobs.xls")) {
            workbook.write(fos);
            System.out.println("写入成功");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    /**
     * Front-end entry point: applies the new end times, caches the mutated job
     * map under the terminal job's id, and returns the end job's improvement as
     * a formatted interval string (empty string when inputs cannot be resolved).
     */
    @Override
    public String parallelUpdateByConcreteTime(List<String> updateJobIds, List<Date> newEndTimeList, Map<String, Job> map) {
        // BUG FIX: the old code dereferenced `map` (to locate the end job)
        // BEFORE its null check, so the guard could never fire; validate first.
        if (map == null || updateJobIds.isEmpty() || map.get(updateJobIds.get(0)) == null) {
            return "";
        }
        Job endJob = getEndJob(map.get(updateJobIds.get(0)));
        String improveTimeForEndJob = parallelUpdateByConcreteTime(updateJobIds, newEndTimeList, endJob, map);
        jobCache.put(endJob.getId(), map);
        return improveTimeForEndJob;
    }

    /**
     * Front-end variant: applies a per-job new end time to each update job,
     * propagates the change, and returns the end job's improvement as a
     * formatted interval string.
     *
     * @param updateJobIds   ids of jobs whose end times change
     * @param newEndTimeList new end time for each update job, index-aligned
     * @param endJob         terminal job used to measure the overall improvement
     * @param sqlJobs        job id -> Job lookup
     * @return formatted improvement of the end job
     */
    public String parallelUpdateByConcreteTime(List<String> updateJobIds, List<Date> newEndTimeList, Job endJob, Map<String, Job> sqlJobs) {
        Date oldEndTime = endJob.getEndTime();

        // BUG FIX: resolve each update job and its improvement in ONE pass so
        // the two lists stay index-aligned. The old code filtered missing ids
        // out of the job list but not the improvement list, which could pair a
        // job with another id's time (and NPE-ed on the missing id). Each job
        // pairs with the new end time at its own index.
        List<Job> updateJobList = new ArrayList<>();
        List<Long> improveTimeList = new ArrayList<>();
        for (int i = 0; i < updateJobIds.size(); i++) {
            Job updateJob = sqlJobs.get(updateJobIds.get(i));
            if (updateJob != null) {
                updateJobList.add(updateJob);
                improveTimeList.add(updateJob.getEndTime().getTime() - newEndTimeList.get(i).getTime());
            }
        }
        parallelUpdate(updateJobList, improveTimeList);

        // The end job's shift is the overall improvement.
        Date newEndTime = endJob.getEndTime();
        String improveTimeForEndJob = DateUtil.getIntervalTimeString(newEndTime, oldEndTime);
        System.out.println("improveTimeForEndJob:" + improveTimeForEndJob);
        return improveTimeForEndJob;
    }

    /**
     * Returns the top five jobs on {@code jobName}'s dependency route (ranked by
     * Job's natural ordering) plus each job's formatted duration.
     *
     * @param jobName id of the queried job
     * @return [list of job ids, list of duration strings], or null when the route lookup fails
     */
    @Override
    public List<List<String>> getJobRankList(String jobName) {
        Map<String, Job> jobMap = timeService.initJobsIntoMap();
        Map<String, Job> jobsOnRoute = getJobsOnRoute(jobName, jobMap);
        if (jobsOnRoute == null) return null;
        // Sort and truncate once — the old code ran the identical sorted/limit
        // pipeline twice over the same values.
        List<Job> topJobs = jobsOnRoute.values().stream().sorted().limit(5).collect(Collectors.toList());
        List<String> jobs = topJobs.stream().map(Job::getId).collect(Collectors.toList());
        List<String> date = topJobs.stream()
                .map(job -> DateUtil.getIntervalTimeString(job.getBeginTime(), job.getEndTime()))
                .collect(Collectors.toList());
        List<List<String>> res = new ArrayList<>();
        res.add(jobs);
        res.add(date);
        return res;
    }


    /** Replaces the cached job map stored under the shared "databaseJobs" key. */
    @Override
    public void setCache(Map<String, Job> jobMap) {
        jobCache.put("databaseJobs", jobMap);
    }
}
