package com.hub.realtime.resource.service.impl;

import cn.hutool.core.date.DateUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import com.hub.realtime.common.core.domain.model.ClusterInfo;
import com.hub.realtime.common.core.domain.model.LoginUser;
import com.hub.realtime.common.enums.DateType;
import com.hub.realtime.common.enums.JobDeployState;
import com.hub.realtime.common.enums.JobRunState;
import com.hub.realtime.common.exception.UtilException;
import com.hub.realtime.common.model.*;
import com.hub.realtime.common.model.catalog.FlinkCatalog;
import com.hub.realtime.common.utils.*;
import com.hub.realtime.common.utils.hadoop.HadoopUtil;
import com.hub.realtime.common.utils.hadoop.HdfsUtil;
import com.hub.realtime.flink.submit.base.FlinkYarnSubmitBase;
import com.hub.realtime.flink.submit.client.SubmitClient;
import com.hub.realtime.flink.submit.impl.FlinkYarnApplicationSubmit;
import com.hub.realtime.flink.submit.model.YarnRunEnv;
import com.hub.realtime.flinkshims.core.model.SqlError;
import com.hub.realtime.framework.service.ServiceBase;
import com.hub.realtime.resource.domain.*;
import com.hub.realtime.resource.dto.input.*;
import com.hub.realtime.resource.dto.output.UploadResult;
import com.hub.realtime.resource.model.FlinkConfParam;
import com.hub.realtime.resource.service.*;
import com.hub.realtime.system.service.ISysConfigService;
import com.streamxhub.streamx.common.util.DeflaterUtils;
import com.streamxhub.streamx.common.util.HdfsUtils;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.time.DateFormatUtils;
import org.apache.flink.api.common.JobID;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.TaskManagerOptions;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.yarn.YarnClusterClientFactory;
import org.apache.flink.yarn.YarnClusterDescriptor;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.eclipse.aether.graph.Exclusion;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.hub.realtime.resource.mapper.ResFlinkJobMapper;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
import java.util.concurrent.*;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static com.hub.realtime.common.constant.Constants.LOGBACKDEFAULT;
import static com.hub.realtime.common.constant.HdfsConstants.*;

/**
 * 作业管理中心Service业务层处理
 *
 * @author wupeng
 * @date 2022-01-18
 */
@Service
@Slf4j
public class ResFlinkJobServiceImpl extends ServiceBase<ResFlinkJobMapper, ResFlinkJob> implements IResFlinkJobService {

    // 按Flink版本缓存隔离类加载器, 避免每次sql校验重复构建 (仅在 getFlinkShimsClassLoader 中读写)
    private final Map<String, URLClassLoader> shimsClassLoaderCache = new ConcurrentHashMap<>();


    // 应用信息服务: 提供flink版本、flink home、集群信息等查询
    @Autowired
    IResApplicationService applicationService;

    // 系统参数服务: 读取 sys:temp:dir / sys:mvn:rep 等配置
    @Autowired
    private ISysConfigService configService;

    // 作业配置(ResFlinkConf)服务
    @Autowired
    IResFlinkConfService flinkConfService;

    // Jar包管理服务: 维护jar的"使用中"状态
    @Autowired
    IResJarManagementService jarManagementService;


    // 集群服务: 提供服务部署目录(appHome)等
    @Autowired
    IResClusterService clusterService;


    // 保存点记录服务
    @Autowired
    IResSavepointService savepointService;

    // catalog服务 — NOTE(review): 本段可见代码未使用, 可能在文件其余部分被引用
    @Autowired
    IResCatalogService catalogService;

    // 作业错误日志服务
    @Autowired
    IResJobLogService jobLogService;

    /**
     * 分页查询作业列表, 支持作业名(模糊)、作业类型、作业状态过滤, 并为每条记录补充所属应用名称.
     *
     * @param param 查询条件
     * @return 分页结果
     */
    @Override
    public Page<ResFlinkJob> list(FlinkJobQueryParam param) {
        LambdaQueryWrapper<ResFlinkJob> wrapper = createCommonWrapper(ResFlinkJob::getUserId, ResFlinkJob::getCreateTime);
        wrapper.orderByDesc(ResFlinkJob::getCreateTime);
        if (StringUtils.isNotEmpty(param.getJobName())) {
            wrapper.like(ResFlinkJob::getJobName, param.getJobName());
        }
        if (StringUtils.isNotEmpty(param.getJobType())) {
            // BUG FIX: 原实现此处误用 getJobName 等值过滤, 导致作业类型过滤从不生效
            wrapper.eq(ResFlinkJob::getJobType, param.getJobType());
        }
        if (StringUtils.isNotEmpty(param.getJobState())) {
            wrapper.eq(ResFlinkJob::getJobState, param.getJobState());
        }
        Page<ResFlinkJob> page = getPageList(param, wrapper);
        page.getRecords().forEach(l -> {
            // assumes 作业引用的应用一定存在; app为null时会NPE — TODO confirm
            ResApplication app = applicationService.getById(l.getApplicationId());
            l.setApplicationName(app.getName());
        });
        return page;
    }

    /**
     * 保存作业信息(新增或更新), 事务内完成.
     * 作业名全局唯一: 已存在同名记录且不属于当前正在编辑的作业时拒绝保存.
     *
     * @param flinkJob 前端提交的作业配置(flinkJobId为空表示新增)
     * @return 影响行数
     * @throws UtilException 作业名重复时抛出
     */
    @Transactional
    @Override
    public int saveInfo(ResFlinkConf flinkJob) {
        LambdaQueryWrapper<ResFlinkJob> queryWrapper = new LambdaQueryWrapper<>();
        queryWrapper.eq(ResFlinkJob::getJobName, flinkJob.getJobName());
        ResFlinkJob existing = baseMapper.selectOne(queryWrapper);
        // 同名记录存在, 且不是当前编辑的这条(新增时flinkJobId为null必然冲突)
        if (existing != null
                && (flinkJob.getFlinkJobId() == null || !flinkJob.getFlinkJobId().equals(existing.getId()))) {
            throw new UtilException("作业名重复！");
        }
        // 原实现else分支里新增/更新两条路径代码完全相同, 统一收敛到doInsertAndUpdate
        return doInsertAndUpdate(flinkJob);
    }


    /**
     * 新增或更新作业(ResFlinkJob)及其配置(ResFlinkConf).
     * flinkJobId为空时走新增; 否则走更新, 并对比新旧依赖, 删除HDFS上不再被引用的手动上传文件.
     * 保存后重算应用与jar的"使用中"状态.
     *
     * @param flinkConf 前端提交的作业配置
     * @return 配置表insert/update影响的行数
     */
    private int doInsertAndUpdate(ResFlinkConf flinkConf) {

        int res;
        LoginUser loginUser = getLoginUser();
        ResFlinkJob flinkJob;
        if (flinkConf.getFlinkJobId() == null) {
            flinkJob = new ResFlinkJob();
        } else {
            flinkJob = baseMapper.selectById(flinkConf.getFlinkJobId());
        }

        // sql作业的文本以base64编码入库
        if (flinkConf.getJobType().equals("sql")) {
            flinkConf.setSqlText(SqlCodeUtil.base64Encode(flinkConf.getSqlText()));
        }
        flinkJob.setApplicationId(flinkConf.getApplicationId());
        flinkJob.setJobName(flinkConf.getJobName());
        flinkJob.setJobType(flinkConf.getJobType());
        flinkJob.setDeployModel(flinkConf.getDeployModel());
        flinkJob.setFlinkVersion(applicationService.getCurrentFlinkVersion(flinkConf.getApplicationId()));
        flinkJob.setJobState(JobRunState.ADDED.get());
        // 配置有变更, 依赖需要重新部署
        flinkJob.setDeployState(JobDeployState.NEED_DEPLOY.get());
        flinkJob.setRemark(flinkConf.getRemark());
        flinkConf.setVersion(1L);
        ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(flinkConf.getApplicationId());
        ResFlinkConf oldConf = flinkConfService.getBaseMapper().selectById(flinkConf.getId());
        if (flinkConf.getFlinkJobId() == null) {
            // 新增作业 + 配置
            flinkJob.setUserId(loginUser.getUserId());
            flinkJob.setUid(UUID.randomUUID().toString());
            flinkJob.setCreateTime(DateUtils.getNowDate());
            flinkJob.setCreateBy(loginUser.getUsername());
            baseMapper.insert(flinkJob);
            flinkConf.setCreateBy(loginUser.getUsername());
            flinkConf.setCreateTime(DateUtils.getNowDate());
            flinkConf.setFlinkJobId(flinkJob.getId());
            res = flinkConfService.getBaseMapper().insert(flinkConf);
        } else {
            // 更新: 找出旧配置里已上传HDFS、且新配置不再引用的依赖文件并删除
            List<String> oldPaths = new ArrayList<>();
            if (StringUtils.isNotEmpty(oldConf.getDependencies())) {
                // BUG FIX: 旧路径必须从oldConf解析; 原实现误传flinkConf(新配置), 旧文件永远不会被清理
                oldPaths = extractUploadPaths(getDeps(oldConf.getDependencies()));
            }
            try {
                if (StringUtils.isEmpty(flinkConf.getDependencies())) {
                    // 新配置无依赖: 旧上传文件全部删除
                    if (!oldPaths.isEmpty()) {
                        deleTmp(oldPaths, clusterInfo);
                    }
                } else {
                    List<String> currentUploads = extractUploadPaths(getDeps(flinkConf.getDependencies()));
                    if (!oldPaths.isEmpty()) {
                        if (currentUploads.isEmpty()) {
                            deleTmp(oldPaths, clusterInfo);
                        } else {
                            // 只删除新配置中不再出现的旧路径
                            List<String> needDelPaths = oldPaths.stream()
                                    .filter(old -> !currentUploads.contains(old))
                                    .collect(Collectors.toList());
                            if (!needDelPaths.isEmpty()) {
                                deleTmp(needDelPaths, clusterInfo);
                            }
                        }
                    }
                }
            } catch (Exception ex) {
                // 清理旧依赖是尽力而为, 失败不阻断保存, 但不能再静默吞掉
                log.warn("清理HDFS旧依赖失败: {}", ex.getMessage(), ex);
            }

            flinkJob.setUpdateBy(loginUser.getUsername());
            flinkJob.setUpdateTime(DateUtils.getNowDate());
            baseMapper.updateById(flinkJob);
            flinkConf.setUpdateBy(loginUser.getUsername());
            flinkConf.setUpdateTime(DateUtils.getNowDate());
            res = flinkConfService.getBaseMapper().updateById(flinkConf);
        }
        updateAppStateAndJarState();
        return res;
    }

    /** 提取依赖解析结果中手动上传(非pom)依赖的HDFS路径. */
    private List<String> extractUploadPaths(List<DependencyResult> deps) {
        return deps.stream()
                .filter(d -> !"pom".equals(d.getType()))
                .map(d -> ((UploadDep) d.getData()).getPath())
                .collect(Collectors.toList());
    }


    /** 批量删除HDFS上的依赖文件. */
    private void deleTmp(List<String> paths, ClusterInfo clusterInfo) {
        for (String path : paths) {
            HdfsUtil.delete(clusterInfo, path);
        }
    }

    /**
     * 重新计算应用与Jar包的"使用中"状态:
     * 没有任何作业时全部置为未使用; 否则按作业引用的应用id与配置引用的jar id分别标记在用/不在用.
     */
    private void updateAppStateAndJarState() {

        List<ResFlinkJob> jobs = baseMapper.selectList(new LambdaQueryWrapper<ResFlinkJob>());
        List<ResFlinkConf> confs = flinkConfService.getBaseMapper().selectList(new LambdaQueryWrapper<ResFlinkConf>());
        List<ResApplication> apps = applicationService.getBaseMapper().selectList(new LambdaQueryWrapper<ResApplication>());
        List<ResJarManagement> jars = jarManagementService.getBaseMapper().selectList(new LambdaQueryWrapper<ResJarManagement>());

        if (jobs == null || jobs.isEmpty()) {
            // 没有作业: 所有应用与jar都不再被使用
            for (ResApplication app : apps) {
                app.setInUse(false);
                applicationService.updateById(app);
            }
            if (jars != null) {
                for (ResJarManagement jar : jars) {
                    jar.setInUse(false);
                    jarManagementService.getBaseMapper().updateById(jar);
                }
            }
            return;
        }

        List<Long> appIds = jobs.stream()
                .map(ResFlinkJob::getApplicationId)
                .collect(Collectors.toList());
        List<Long> jarIds = confs.stream()
                .map(ResFlinkConf::getFlinkJarId)
                .filter(id -> id != null && id > 0)
                .collect(Collectors.toList());
        updateApps(appIds, true);
        updateApps(appIds, false);
        updateJar(jarIds, true);
        updateJar(jarIds, false);
    }


    /**
     * 更新应用的"使用中"标记: isUse=true时标记id集合内的应用为在用, false时标记集合外的应用为不在用.
     *
     * @param appIds 被作业引用的应用id集合
     * @param isUse  目标标记值, 同时决定in/notIn过滤方向
     */
    private void updateApps(List<Long> appIds, boolean isUse) {
        LambdaQueryWrapper<ResApplication> wrapper = new LambdaQueryWrapper<>();
        if (isUse) {
            wrapper.in(ResApplication::getId, appIds);
        } else {
            wrapper.notIn(ResApplication::getId, appIds);
        }
        List<ResApplication> targets = applicationService.getBaseMapper().selectList(wrapper);
        if (targets == null || targets.isEmpty()) {
            return;
        }
        for (ResApplication app : targets) {
            app.setInUse(isUse);
            applicationService.getBaseMapper().updateById(app);
        }
    }

    /**
     * 更新Jar包的"使用中"标记.
     * jarList为空时, 全部jar置为未使用; 否则isUse=true标记集合内的jar, false标记集合外的jar.
     *
     * @param jarList 被配置引用的jar id集合
     * @param isUse   目标标记值(jarList非空时生效)
     */
    private void updateJar(List<Long> jarList, boolean isUse) {
        LambdaQueryWrapper<ResJarManagement> wrapper = new LambdaQueryWrapper<>();
        boolean listEmpty = jarList.isEmpty();
        if (!listEmpty) {
            if (isUse) {
                wrapper.in(ResJarManagement::getId, jarList);
            } else {
                wrapper.notIn(ResJarManagement::getId, jarList);
            }
        }
        List<ResJarManagement> targets = jarManagementService.getBaseMapper().selectList(wrapper);
        if (targets == null || targets.isEmpty()) {
            return;
        }
        // 空列表时语义是"全部未使用", 否则按isUse标记
        boolean flag = !listEmpty && isUse;
        for (ResJarManagement jar : targets) {
            jar.setInUse(flag);
            jarManagementService.getBaseMapper().updateById(jar);
        }
    }


    /**
     * 批量删除作业: 一并清理HDFS上的第三方依赖文件与作业部署目录, 并删除关联的配置记录.
     *
     * @param ids 作业id数组
     * @return 固定返回1(沿用历史约定)
     * @throws UtilException 任一删除步骤失败时抛出
     */
    @Override
    public int deleteInfos(Long[] ids) {
        try {
            List<Long> idList = Arrays.asList(ids);
            LambdaQueryWrapper<ResFlinkJob> resFlinkJobLambdaQueryWrapper = new LambdaQueryWrapper<>();
            resFlinkJobLambdaQueryWrapper.in(ResFlinkJob::getId, idList);
            List<ResFlinkJob> resFlinkJobs = baseMapper.selectList(resFlinkJobLambdaQueryWrapper);
            for (ResFlinkJob resFlinkJob : resFlinkJobs) {
                Long applicationId = resFlinkJob.getApplicationId();
                ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(applicationId);
                LambdaQueryWrapper<ResFlinkConf> resFlinkConfLambdaQueryWrapper = new LambdaQueryWrapper<>();
                resFlinkConfLambdaQueryWrapper.eq(ResFlinkConf::getFlinkJobId, resFlinkJob.getId());
                ResFlinkConf flinkConf = flinkConfService.getBaseMapper().selectOne(resFlinkConfLambdaQueryWrapper);
                // 删除第三方依赖(仅flink类型作业; 配置可能已不存在, 需判空避免NPE)
                if (flinkConf != null && resFlinkJob.getJobType().equals("flink")
                        && StringUtils.isNotEmpty(flinkConf.getDependencies())) {
                    getDeps(flinkConf.getDependencies()).stream()
                            .filter(dep -> !"pom".equals(dep.getType()))
                            .map(dep -> (UploadDep) dep.getData())
                            .forEach(jar -> HdfsUtil.delete(clusterInfo, jar.getPath()));
                }
                // 删除作业在HDFS上的部署目录
                String hdfsUploadPath = getHdfsUploadPath(resFlinkJob);
                HdfsUtil.delete(clusterInfo, hdfsUploadPath);
                if (flinkConf != null) {
                    flinkConfService.getBaseMapper().deleteById(flinkConf.getId());
                }
                baseMapper.deleteById(resFlinkJob.getId());
            }
            updateAppStateAndJarState();
        } catch (Exception ex) {
            log.error("删除作业失败", ex);
            throw new UtilException(ex);
        }
        return 1;
    }

    /**
     * 接收前端上传的依赖文件: 先写入本地临时目录, 再上传到HDFS临时目录, 返回各文件的HDFS路径.
     * 单个文件失败只记录日志, 不影响其余文件.
     *
     * @param file          上传的文件数组
     * @param applicationId 所属应用id(用于定位目标集群)
     * @return 每个成功上传文件的结果(含HDFS全路径)
     */
    @Override
    public List<UploadResult> createUploadFile(MultipartFile[] file, Long applicationId) {
        ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(applicationId);
        String tmpPath = configService.selectConfigByKey("sys:temp:dir").concat("/upload")
                .concat("/")
                .concat(UUID.randomUUID().toString());
        File localFile = new File(tmpPath);
        localFile.mkdirs();
        List<UploadResult> results = new ArrayList<>();
        if (file != null && file.length > 0) {
            for (MultipartFile f : file) {
                String name = f.getOriginalFilename();
                String savePath = tmpPath.concat("/").concat(name);
                File saveLocalFile = new File(savePath);
                try {
                    FileUtils.writeByteArrayToFile(saveLocalFile, f.getBytes());
                    // NOTE(review): 返回值被忽略, 疑似仅作存在性校验 — 确认streamx该方法是否有副作用
                    com.streamxhub.streamx.common.util.FileUtils.exists(savePath);
                    // 每个文件独立的HDFS临时目录, 避免同名覆盖
                    String hdfsTmpPath = UPLOAD_DEP_TMP_PATH.concat("/").concat(UUID.randomUUID().toString());
                    if (!HdfsUtil.exists(clusterInfo, hdfsTmpPath)) {
                        HdfsUtil.mkdirs(clusterInfo, hdfsTmpPath);
                    }
                    HdfsUtil.upload(clusterInfo, saveLocalFile.getAbsolutePath(), hdfsTmpPath, true, true);

                    UploadResult uploadResult = new UploadResult();
                    uploadResult.setApplicationId(applicationId);
                    uploadResult.setUpload(false);
                    uploadResult.setPath(HdfsUtil.getDefaultFS(clusterInfo).concat(hdfsTmpPath.concat("/").concat(name)));
                    uploadResult.setName(name);
                    results.add(uploadResult);
                } catch (IOException e) {
                    // 原实现printStackTrace丢到stdout且日志缺堆栈; 统一用参数化日志带异常
                    log.error("上传出错：{}", e.getMessage(), e);
                }
            }
        }
        return results;
    }

    /**
     * 异步删除HDFS临时目录中的依赖文件(尽力而为, 单个失败不影响其余).
     *
     * @param params 待删除的依赖(应用id + HDFS路径)
     */
    @Override
    public void deleteHdfsTmp(List<DepParam> params) {
        executorService.execute(() -> {
            for (DepParam p : params) {
                try {
                    ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(p.getApplicationId());
                    HdfsUtil.delete(clusterInfo, p.getPath());
                } catch (Exception ex) {
                    // 尽力而为: 失败仅记录日志(原实现静默吞掉), 继续处理其余文件
                    log.warn("删除HDFS临时文件失败: {}", p.getPath(), ex);
                }
            }
        });
    }

    /**
     * 部署作业依赖(异步): pom依赖从maven仓库下载后上传HDFS, 手动上传依赖从临时目录复制到作业目录;
     * 无依赖时清空作业目录. 过程状态通过 deployState 字段反馈(DEPLOYING → DONE/FAILED).
     *
     * @param jobId 作业id
     */
    @Override
    public void deployejob(Long jobId) {
        ResFlinkJob resFlinkJob = baseMapper.selectById(jobId);
        resFlinkJob.setDeployState(JobDeployState.DEPLOYING.get());
        baseMapper.updateById(resFlinkJob);
        executorService.execute(() -> {
            try {
                Long applicationId = resFlinkJob.getApplicationId();
                ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(applicationId);
                LambdaQueryWrapper<ResFlinkConf> flinkConfLambdaQueryWrapper = new LambdaQueryWrapper<>();
                flinkConfLambdaQueryWrapper.eq(ResFlinkConf::getFlinkJobId, jobId);
                ResFlinkConf flinkConf = flinkConfService.getBaseMapper().selectOne(flinkConfLambdaQueryWrapper);
                String hdfsPath = getHdfsUploadPath(resFlinkJob);
                if (org.apache.commons.lang3.StringUtils.isNotEmpty(flinkConf.getDependencies())) {
                    // 重建作业目录, 保证只包含本次的依赖
                    if (HdfsUtil.exists(clusterInfo, hdfsPath)) {
                        HdfsUtil.delete(clusterInfo, hdfsPath);
                    }
                    HdfsUtil.mkdirs(clusterInfo, hdfsPath);
                    log.info(String.format("%s 作业开始上传依赖 %s", resFlinkJob.getJobName(), flinkConf.getDependencies()));
                    List<DependencyResult> dependencyResults = getDeps(flinkConf.getDependencies());
                    for (DependencyResult dep : dependencyResults) {
                        if (dep.getType().equals("pom")) {
                            deployPomDep((MvnDep) dep.getData(), clusterInfo, hdfsPath);
                        } else {
                            // 手动上传的依赖: 从HDFS临时目录复制到作业目录
                            UploadDep uploadJar = (UploadDep) dep.getData();
                            if (HdfsUtil.exists(clusterInfo, uploadJar.getPath())) {
                                HdfsUtil.copyHdfs(clusterInfo, uploadJar.getPath(), hdfsPath, false, true);
                            }
                        }
                    }
                    log.info(String.format("%s 作业作业依赖上传完成 %s", resFlinkJob.getJobName(), flinkConf.getDependencies()));
                } else {
                    // 无依赖: 清空作业目录(目录可能本就不存在, 失败只记录)
                    try {
                        HdfsUtil.delete(clusterInfo, hdfsPath);
                    } catch (Exception ex) {
                        log.warn("清空作业依赖目录失败: {}", hdfsPath, ex);
                    }
                }
                ResFlinkJob job = baseMapper.selectById(jobId);
                job.setDeployState(JobDeployState.DONE.get());
                baseMapper.updateById(job);
            } catch (Exception ex) {
                log.error("依赖部署失败, jobId={}", jobId, ex);
                resFlinkJob.setDeployState(JobDeployState.FAILED.get());
                baseMapper.updateById(resFlinkJob);
                // BUG FIX: ex.getCause()可能为null, 原实现在catch里再抛NPE会吞掉真实错误
                String msg = ex.getCause() != null ? ex.getCause().getMessage() : ex.getMessage();
                insertErrorLog(resFlinkJob, msg, "deploy");
            }

        });
    }

    /** 下载单个pom依赖(含传递依赖, 应用默认与用户自定义排除项)并上传到作业的HDFS目录. */
    private void deployPomDep(MvnDep pom, ClusterInfo clusterInfo, String hdfsPath) {
        String mavenRepository = configService.selectConfigByKey("sys:mvn:rep");
        String localTmpPath = configService.selectConfigByKey("sys:temp:dir")
                .concat("/").concat(UUID.randomUUID().toString());
        File localPathFile = new File(localTmpPath);
        localPathFile.mkdirs();
        MavenParams mavenParams = new MavenParams();
        if (org.apache.commons.lang3.StringUtils.isNotEmpty(mavenRepository)) {
            mavenParams.setRepository(mavenRepository);
        }
        mavenParams.setGroupId(pom.getGroupId());
        mavenParams.setArtifactId(pom.getArtifactId());
        mavenParams.setVersion(pom.getVersion());
        Set<Exclusion> exclusions = mavenParams.getExclusions();
        if (exclusions.isEmpty()) {
            exclusions = new HashSet<>();
            mavenParams.setExclusions(exclusions);
        }
        // 用户自定义排除项
        if (pom.getExclusions() != null && !pom.getExclusions().isEmpty()) {
            pom.getExclusions().forEach(e ->
                    mavenParams.getExclusions().add(new Exclusion(e.getGroupId(), e.getArtifactId(), "", "jar")));
        }
        // 默认排除: flink shading占位包、jsr305, 以及与集群冲突的日志实现
        mavenParams.getExclusions().add(new Exclusion("org.apache.flink", "force-shading", "", "jar"));
        mavenParams.getExclusions().add(new Exclusion("com.google.code.findbugs", "jsr305", "", "jar"));
        mavenParams.getExclusions().add(new Exclusion("org.slf4j", "*", "", "jar"));
        mavenParams.getExclusions().add(new Exclusion("org.apache.logging.log4j", "*", "", "jar"));
        mavenParams.setTarget(localTmpPath);
        try {
            List<File> files = DownloadMavenJar.downLoadMavenJar(mavenParams);
            for (File f : files) {
                if (f.exists()) {
                    HdfsUtil.upload(clusterInfo, f.getAbsolutePath(), hdfsPath, true, true);
                }
            }
        } catch (Exception ex) {
            // 与原实现一致: 单个pom下载失败不中断整体部署, 但保留完整堆栈
            log.error("maven依赖下载失败：{}", ex.getMessage(), ex);
        }
    }

    /**
     * 启动任务: 置为STARTING、清空上次运行留下的yarn/flink标识后, 异步提交执行.
     *
     * @param param 启动参数(含作业id)
     * @throws UtilException 启动前置步骤失败时抛出
     */
    @Override
    public void startJob(JobStartAndStopParam param) {
        ResFlinkJob resFlinkJob = baseMapper.selectById(param.getJobId());
        try {
            resFlinkJob.setJobState(JobRunState.STARTING.get());
            // 以空格占位清空上一次运行的标识字段
            resFlinkJob.setYarnId(" ");
            resFlinkJob.setFlinkJobId(" ");
            resFlinkJob.setFlinkWebUrl(" ");
            resFlinkJob.setFlinkMonitor(" ");
            resFlinkJob.setLastSubmitTime(DateUtil.date());
            baseMapper.updateById(resFlinkJob);
            executorService.execute(() -> doStart(resFlinkJob, param));
        } catch (Exception ex) {
            log.error("启动失败：{}", ex.getMessage(), ex);
            resFlinkJob.setJobState(JobRunState.FAILED.get());
            baseMapper.updateById(resFlinkJob);
            // BUG FIX: ex.getCause()可能为null, 原实现会在catch里抛NPE吞掉真实错误
            String msg = ex.getCause() != null ? ex.getCause().getMessage() : ex.getMessage();
            insertErrorLog(resFlinkJob, msg, "start");
            throw new UtilException("启动失败:" + ex.getMessage());
        }
    }

    /**
     * 同步作业运行状态(当前仅支持yarn部署模式, 其余模式直接跳过).
     */
    @Override
    public synchronized void syncJobState(ResFlinkJob flinkJob) {
        if (!flinkJob.getDeployModel().contains("yarn")) {
            return;
        }
        getJobStateFromYarn(flinkJob);
    }

    /**
     * 定时保存点任务(异步): 对所有RUNNING且yarn模式、已记录启动时间的作业触发savepoint;
     * 随后清理7天前的历史保存点(数据库记录 + HDFS文件).
     */
    @Override
    public void triggerSavePointOnTimer() {
        executorService.execute(() -> {
            LambdaQueryWrapper<ResFlinkJob> flinkJobLambdaQueryWrapper = new LambdaQueryWrapper<>();
            flinkJobLambdaQueryWrapper.eq(ResFlinkJob::getJobState, JobRunState.RUNNING.get());
            List<ResFlinkJob> resFlinkJobs = baseMapper.selectList(flinkJobLambdaQueryWrapper);
            if (resFlinkJobs != null && !resFlinkJobs.isEmpty()) {
                resFlinkJobs.forEach(job -> {
                    // 仅处理yarn模式且已启动的作业
                    // (原实现此处还构建了一个按作业查savepoint的wrapper, 但从未使用, 已作为死代码移除)
                    if (job.getDeployModel().contains("yarn") && job.getStartTime() != null) {
                        ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(job.getApplicationId());
                        executeSavePoint(clusterInfo, job);
                    }
                });
            }

            try {
                // 清除七天前的保存点
                LambdaQueryWrapper<ResSavepoint> savepointLambdaQueryWrapper = new LambdaQueryWrapper<>();
                savepointLambdaQueryWrapper.lt(ResSavepoint::getCreateTime, DateUtils.getPastDate(7, DateType.DAY));
                List<ResSavepoint> resSavepoints = savepointService.getBaseMapper().selectList(savepointLambdaQueryWrapper);
                if (resSavepoints != null && !resSavepoints.isEmpty()) {
                    resSavepoints.forEach(save -> {
                        String path = save.getPath();
                        savepointService.getBaseMapper().deleteById(save);
                        ResFlinkJob resFlinkJob = baseMapper.selectById(save.getJobId());
                        ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(resFlinkJob.getApplicationId());
                        HdfsUtil.delete(clusterInfo, path);
                    });
                }
            } catch (Exception ex) {
                // 清理失败不影响下次定时执行, 但不再静默吞掉
                log.warn("清理历史保存点失败: {}", ex.getMessage(), ex);
            }
        });
    }

    /**
     * 停止作业(当前仅支持yarn部署模式): 先置为CANCELLING并记录取消时间, 再异步执行停止.
     */
    @Override
    public void stopJob(StopTriggerSavePoint param) {
        LambdaQueryWrapper<ResFlinkJob> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(ResFlinkJob::getId, param.getJobId());
        ResFlinkJob job = baseMapper.selectOne(wrapper);
        if (!job.getDeployModel().contains("yarn")) {
            insertErrorLog(job, "该模式暂时不支持停止", "stop");
            throw new UtilException("该模式暂时不支持停止");
        }
        job.setJobState(JobRunState.CANCELLING.get());
        job.setLastCancelTime(DateUtil.date());
        baseMapper.updateById(job);
        executorService.execute(() -> doStop(job, param));
    }

    /**
     * 校验Flink SQL语法: 通过版本隔离的类加载器反射调用 FlinkSqlValidator,
     * 以兼容不同flink版本中CalciteParser的包路径差异.
     *
     * @param sqlQueryParam sql文本及其所属应用
     * @return 校验错误信息; 校验通过返回null
     */
    @Override
    public SqlError verifySql(SqlQueryParam sqlQueryParam) {
        String currentFlinkVersion = applicationService.getCurrentFlinkVersion(sqlQueryParam.getApplicationId());
        try {
            // flink 1.13起CalciteParser由calcite包移动到parse包
            String plannerClass;
            if (currentFlinkVersion.equals("1.12.2")) {
                plannerClass = "org.apache.flink.table.planner.calcite.CalciteParser";
            } else {
                plannerClass = "org.apache.flink.table.planner.parse.CalciteParser";
            }
            ClassLoader loader = getFlinkShimsClassLoader(applicationService.getFlinkHome(sqlQueryParam.getApplicationId()), currentFlinkVersion);
            Class<?> clazz = loader.loadClass("com.hub.realtime.flinkshims.core.FlinkSqlValidator");
            Method method = clazz.getDeclaredMethod("verifySql", String.class, String.class, ClassLoader.class);
            method.setAccessible(true);
            Object invoke = method.invoke(null, sqlQueryParam.getSqlText(), plannerClass, loader);
            if (invoke == null) {
                return null;
            }
            // 跨类加载器的对象不能直接强转, 通过json序列化转换为当前加载器的SqlError
            return JSON.parseObject(JSON.toJSONString(invoke), SqlError.class);
        } catch (Exception ex) {
            log.error("verify error:".concat(ExceptionUtils.stringifyException(ex)));
            // BUG FIX: 无cause的异常(如ClassNotFoundException)会使 ex.getCause().getMessage() 抛NPE
            String msg = ex.getCause() != null ? ex.getCause().getMessage() : ex.getMessage();
            SqlError sqlError = new SqlError();
            return sqlError.fromString(msg);
        }
    }

    /**
     * 查询指定作业的错误日志记录.
     *
     * @param jobId 作业id
     * @return 对应的日志记录, 不存在时返回null
     */
    @Override
    public ResJobLog getErrorLogByJobId(Long jobId) {
        LambdaQueryWrapper<ResJobLog> wrapper = new LambdaQueryWrapper<>();
        wrapper.eq(ResJobLog::getJobId, jobId);
        return jobLogService.getBaseMapper().selectOne(wrapper);
    }


    /**
     * 获取(并按flink版本缓存)用于sql校验的隔离类加载器:
     * 加载服务shims目录下的非shims版本jar + 指定flink发行版lib目录下的全部jar.
     *
     * @param flinkHome    flink安装目录
     * @param flinkVersion flink版本号, 作为缓存key
     * @return 对应版本的URLClassLoader
     */
    @SneakyThrows
    private synchronized ClassLoader getFlinkShimsClassLoader(String flinkHome, String flinkVersion) {
        // TODO: 根据用户选择的Flink版本选择对应的版本实现.
        String appHome = clusterService.getAppHome();
        if (!shimsClassLoaderCache.containsKey(flinkVersion)) {
            // NOTE(review): 过滤条件排除了所有 flink-shims-1.1x-*.jar, 即只加载shims目录下的其他jar;
            // 若本意是按flinkVersion挑选对应的shims包, 需确认此过滤方向
            String shimsRegex = "flink-shims-(1.12|1.13|1.14)-(.*).jar";
            File shimsDir = new File(appHome.concat("/shims"));
            File[] libJars = shimsDir.listFiles(pathname -> !pathname.getName().matches(shimsRegex));
            if (libJars == null) {
                // 原实现用assert(生产环境默认关闭), 改为显式报错
                throw new UtilException("shims目录不存在或不可读: " + shimsDir.getAbsolutePath());
            }
            List<URL> libList = new ArrayList<>(libJars.length);
            for (File jar : libJars) {
                libList.add(jar.toURI().toURL());
            }
            File[] flinkLibJars = new File(flinkHome.concat("/lib")).listFiles();
            // flink lib目录缺失时跳过(原实现会NPE); URL转换异常不再静默吞掉, 由@SneakyThrows上抛
            if (flinkLibJars != null) {
                for (File f : flinkLibJars) {
                    libList.add(f.toURI().toURL());
                }
            }
            shimsClassLoaderCache.put(flinkVersion, new URLClassLoader(libList.toArray(new URL[0])));
        }

        return shimsClassLoaderCache.get(flinkVersion);

    }


    /**
     * Stop a running job on YARN — optionally triggering a savepoint first — then
     * persist the CANCELED state; if the stop call returned a real savepoint path,
     * record it in the savepoint table.
     *
     * @param job            the job to stop (must carry yarnId / flinkJobId)
     * @param savePointParam savepoint options (whether to save, target path, drain)
     */
    private void doStop(ResFlinkJob job, StopTriggerSavePoint savePointParam) {
        try {
            Long applicationId = job.getApplicationId();
            FlinkStopRequest stopRequest = new FlinkStopRequest();
            String flinkHome = applicationService.getFlinkHome(applicationId);
            stopRequest.setFlinkHome(flinkHome);
            stopRequest.setYid(job.getYarnId());
            stopRequest.setJid(job.getFlinkJobId());
            stopRequest.setClusterInfo(applicationService.getClusterInfoByAppId(applicationId));
            stopRequest.setIsSavePoint(savePointParam.getIsSavePoint());
            stopRequest.setSavePointPath(savePointParam.getSavePointPath());
            stopRequest.setIsDrain(savePointParam.getIsDrain());

            String savepointDir = SubmitClient.runStop(stopRequest);
            // Mark the job cancelled locally regardless of whether a savepoint was taken.
            job.setStartTime(null);
            job.setUpdateTime(DateUtil.date());
            job.setJobState(JobRunState.CANCELED.get());
            baseMapper.updateById(job);
            // A bare "Acknowledge" response is not a savepoint path — skip recording it.
            if (org.apache.commons.lang3.StringUtils.isNotEmpty(savepointDir) && !"Acknowledge".equalsIgnoreCase(savepointDir.trim())) {
                ResSavepoint tbSavepoint = new ResSavepoint();
                tbSavepoint.setJobId(job.getId());
                tbSavepoint.setPath(savepointDir);
                tbSavepoint.setCreateTime(DateUtil.date());
                tbSavepoint.setCreateBy(" ");
                tbSavepoint.setJobName(job.getJobName());
                savepointService.getBaseMapper().insert(tbSavepoint);
            }
        } catch (Exception ex) {
            // Log the full stack trace: getMessage() alone may be null and drops context.
            log.error("stop job failed, jobId=" + job.getId(), ex);
        }
    }


    /**
     * Trigger a savepoint for a running YARN-hosted job and, when the returned
     * result is a real path (not a bare "Acknowledge"), persist it.
     *
     * <p>Fixes: the retrieved {@link ClusterClient} is now closed in a finally
     * block (previously leaked), and failures are logged with their stack trace
     * instead of {@code printStackTrace()}.
     *
     * @param clusterInfo cluster the job runs on (used to resolve the default FS)
     * @param job         the job whose savepoint is triggered
     */
    private void executeSavePoint(ClusterInfo clusterInfo, ResFlinkJob job) {
        String flinkHome = applicationService.getFlinkHome(job.getApplicationId());
        FlinkYarnSubmitBase flinkYarnSubmitBase = new FlinkYarnApplicationSubmit();
        Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration(flinkHome + "/conf");
        flinkConfiguration.set(YarnConfigOptions.APPLICATION_ID, job.getYarnId());
        YarnRunEnv yarnRunEnv = flinkYarnSubmitBase.createyarnClusterDescriptor(clusterInfo, flinkConfiguration, null);
        YarnClusterClientFactory yarnClusterClientFactory = new YarnClusterClientFactory();
        ApplicationId applicationId = yarnClusterClientFactory.getClusterId(flinkConfiguration);
        if (applicationId == null) return;
        YarnClusterDescriptor clusterDescriptor = yarnRunEnv.getYarnClusterDescriptor();
        ClusterClient clusterClient = null;
        try {
            log.info("开始触发保存点,YID:" + job.getYarnId() + " ,JID:" + job.getFlinkJobId());
            clusterClient = clusterDescriptor.retrieve(applicationId).getClusterClient();
            JobID jobID = flinkYarnSubmitBase.parseJobId(job.getFlinkJobId());
            CompletableFuture completableFuture = clusterClient.triggerSavepoint(jobID, HdfsUtil.getDefaultFS(clusterInfo).concat(APP_SAVEPOINTS));
            Object res = completableFuture.get();
            if (res != null && org.apache.commons.lang3.StringUtils.isNotEmpty(res.toString()) && !"Acknowledge".equalsIgnoreCase(res.toString())) {
                ResSavepoint tbSavepoint = new ResSavepoint();
                tbSavepoint.setJobId(job.getId());
                tbSavepoint.setPath(res.toString());
                tbSavepoint.setCreateTime(DateUtil.date());
                tbSavepoint.setCreateBy(" ");
                tbSavepoint.setJobName(job.getJobName());
                savepointService.getBaseMapper().insert(tbSavepoint);
                log.info("保存点触发成功：" + res.toString());
            }
        } catch (Exception ex) {
            // Keep the stack trace in the log instead of printStackTrace().
            log.error("保存点触发失败,YID:" + job.getYarnId() + " ,JID:" + job.getFlinkJobId() + " ,失败原因：" + ex.getMessage(), ex);
        } finally {
            // Close the retrieved client to release its REST/network resources.
            if (clusterClient != null) {
                try {
                    clusterClient.close();
                } catch (Exception closeEx) {
                    log.warn("close ClusterClient failed: " + closeEx.getMessage());
                }
            }
        }
    }

    /**
     * Synchronize a job's run state from the cluster and persist it: first ask the
     * Flink REST endpoint for the job list; if it reports nothing, fall back to the
     * YARN application state and mark the job FINISHED when the application has
     * terminated (FAILED/FINISHED/KILLED).
     *
     * <p>Fix: the yarn-id guard is now null-safe — the previous
     * {@code job.getYarnId().trim()} threw NPE when the yarn id was null. Errors
     * are logged with their stack traces.
     *
     * @param job the job to refresh; no-op when null or without a yarn id
     */
    private synchronized void getJobStateFromYarn(ResFlinkJob job) {
        try {
            // isBlank is null-safe and also covers the whitespace-only case.
            if (job == null || org.apache.commons.lang3.StringUtils.isBlank(job.getYarnId())) return;
            Long appId = job.getApplicationId();
            String flinkHome = applicationService.getFlinkHome(appId);
            ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(appId);
            Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration(flinkHome + "/conf");
            flinkConfiguration.set(YarnConfigOptions.APPLICATION_ID, job.getYarnId());
            YarnClusterClientFactory yarnClusterClientFactory = new YarnClusterClientFactory();
            ApplicationId applicationId = yarnClusterClientFactory.getClusterId(flinkConfiguration);
            if (applicationId == null) return;
            try {
                FlinkRestModel flinkRestModel = JobUtils.getJobInfo(clusterInfo, applicationId);
                if (flinkRestModel != null && flinkRestModel.getJobs() != null && flinkRestModel.getJobs().size() > 0) {
                    // REST endpoint answered: take state/jid/start time from the first job.
                    JobInfo jobInfo = flinkRestModel.getJobs().get(0);
                    job.setJobState(JobRunState.valueOf(jobInfo.getState()).get());
                    job.setFlinkJobId(jobInfo.getJid());
                    if (jobInfo.getStartTime() != null && jobInfo.getStartTime() != -1) {
                        job.setStartTime(DateUtil.parse(DateFormatUtils.format(jobInfo.getStartTime(), "yyyy-MM-dd HH:mm:ss")));
                        job.setLunchTime(DateUtils.getDatePoor(DateUtil.date(), job.getStartTime()));
                    }
                } else {
                    // REST endpoint reported no jobs: consult the YARN application state.
                    YarnAdapter yarnAdapter = new YarnAdapter();
                    YarnApplicationState applicationState = yarnAdapter.getApplicationState(clusterInfo.getHadoopConfDir(), applicationId);
                    if (applicationState.equals(YarnApplicationState.FAILED)
                            || applicationState.equals(YarnApplicationState.FINISHED)
                            || applicationState.equals(YarnApplicationState.KILLED)) {
                        job.setJobState(JobRunState.FINISHED.get());
                        job.setLunchTime(" ");
                    }
                }
                job.setUpdateTime(DateUtil.date());
                baseMapper.updateById(job);
            } catch (Exception e) {
                log.error("同步状态出错：" + e.getMessage(), e);
            }
        } catch (Exception ex) {
            log.error("同步状态出错：" + ex.getMessage(), ex);
        }
    }


    /**
     * Build a {@link FlinkRunRequest} from the job's stored configuration and submit
     * it to the cluster, then persist the resulting YARN/Flink identifiers, web URL
     * and run state. On submission failure the job is marked FAILED and an error log
     * record is written.
     *
     * <p>Fixes: removed a duplicated {@code setMainClass} call; replaced the runtime
     * {@code assert} on the sqlclients directory (a no-op without {@code -ea}) with an
     * explicit exception; guarded {@code ex.getCause()} against null in the catch
     * block; log failures with stack traces instead of {@code printStackTrace()}.
     *
     * @param job   the job to start
     * @param param start options (e.g. restore-from-savepoint settings)
     */
    private void doStart(ResFlinkJob job, JobStartAndStopParam param) {
        Long appId = job.getApplicationId();
        ResApplication application = applicationService.getById(appId);
        ResCluster cluster = clusterService.getBaseMapper().selectById(application.getClusterId());
        String flinkVersion = applicationService.getCurrentFlinkVersion(appId);
        String flinkHome = applicationService.getFlinkHome(appId);
        ClusterInfo clusterInfo = applicationService.getClusterInfoByAppId(appId);
        LambdaQueryWrapper<ResFlinkConf> flinkConfLambdaQueryWrapper = new LambdaQueryWrapper<>();
        flinkConfLambdaQueryWrapper.eq(ResFlinkConf::getFlinkJobId, job.getId());
        ResFlinkConf flinkConf = flinkConfService.getBaseMapper().selectOne(flinkConfLambdaQueryWrapper);

        try {
            // Assemble the submit request from the stored configuration.
            FlinkRunRequest flinkRunRequest = new FlinkRunRequest();
            flinkRunRequest.setFlinkVersion(flinkVersion);
            flinkRunRequest.setAppUid(application.getUid());
            flinkRunRequest.setJobUid(job.getUid());
            flinkRunRequest.setClusterInfo(clusterInfo);
            flinkRunRequest.setFlinkHome(flinkHome);
            flinkRunRequest.setExecuteMode(job.getDeployModel());
            flinkRunRequest.setTableModel(flinkConf.getFlinkType());
            if (org.apache.commons.lang3.StringUtils.isNotEmpty(flinkConf.getLogText())) {
                flinkRunRequest.setLogText(flinkConf.getLogText());
            } else {
                flinkRunRequest.setLogText(LOGBACKDEFAULT);
            }
            flinkRunRequest.setJobType(job.getJobType());
            if (flinkRunRequest.getJobType().equals("flink")) {
                // Plain jar job: main class and jar path come straight from the config.
                flinkRunRequest.setMainClass(flinkConf.getMainClass());
                flinkRunRequest.setJarFile(flinkConf.getJarPath());
            } else {
                // SQL job: decode (and, for yarn-application, compress) the SQL text.
                String sql;
                if (flinkRunRequest.getExecuteMode().equals("yarn-application")) {
                    sql = DeflaterUtils.zipString(SqlCodeUtil.base64Decode(flinkConf.getSqlText()));
                } else {
                    sql = SqlCodeUtil.base64Decode(flinkConf.getSqlText());
                }

                if (flinkConf.getUseCatalog()) {
                    FlinkCatalog catalog = new FlinkCatalog();
                    ResCatalog resCatalog = catalogService.getBaseMapper().selectById(flinkConf.getCatalogId());
                    catalog.setCatalogName(resCatalog.getCatalogName());
                    catalog.setDatabaseName(flinkConf.getDatabaseName());
                    catalog.setHiveConfDir(resCatalog.getHiveConfDir());
                    flinkRunRequest.setFlinkCatalog(catalog);
                }

                // e.g. "1.13" out of "1.13.x" — used to pick the matching sqlclient jar.
                String flinkMainVersion = flinkVersion.substring(0, 4);

                File localPlugins = new File(clusterService.getAppHome().concat("/sqlclients"));
                if (!localPlugins.exists()) {
                    // Explicit check: an assert here is silently skipped without -ea.
                    throw new IllegalStateException("[FlinkServer] sqlclients directory not found: " + localPlugins);
                }
                String clientRex = "sqlclient-".concat(flinkMainVersion).concat(".*\\.jar");

                List<String> jars = Arrays.stream(Objects.requireNonNull(localPlugins.list())).filter(x -> x.matches(clientRex)).collect(Collectors.toList());
                if (jars.isEmpty()) {
                    throw new IllegalArgumentException("[FlinkServer] can no found sql-client jar in " + localPlugins);
                }
                if (jars.size() > 1) {
                    throw new IllegalArgumentException("[FlinkServer] found multiple sqlclient jar in " + localPlugins);
                }
                String flinkUserJar;
                if (job.getDeployModel().equals("yarn-application")) {
                    // The sqlclient jar is staged on HDFS for yarn-application mode.
                    String pluginPath = HdfsUtil.getDefaultFS(clusterInfo).concat(APP_SQLCLIENTS).concat("/flink").concat(flinkMainVersion);
                    flinkUserJar = String.format("%s/%s", pluginPath, jars.get(0));
                } else {
                    throw new UnsupportedOperationException("Unsupported..." + job.getDeployModel());
                }
                flinkRunRequest.setMainClass("com.hub.realtime.sqlclient.SqlClient");
                flinkRunRequest.setJarFile(flinkUserJar);
                flinkRunRequest.setSql(sql);
            }
            flinkRunRequest.setAppName(job.getJobName());

            // Basic resource configuration (parallelism, JM/TM memory, slots).
            Map<String, String> options = new HashMap<>();
            flinkRunRequest.setParallelism(flinkConf.getParallelism().intValue());
            flinkRunRequest.setResolveOrder(flinkConf.getResolveOrder());
            options.put(JobManagerOptions.TOTAL_PROCESS_MEMORY.key(), flinkConf.getJobManagerMem().intValue() + "m");
            options.put(TaskManagerOptions.TOTAL_PROCESS_MEMORY.key(), flinkConf.getTaskManagerMem().intValue() + "m");
            options.put(TaskManagerOptions.NUM_TASK_SLOTS.key(), flinkConf.getSlotNum().intValue() + "");

            // Advanced Flink configuration entries.
            makeOtherConf(flinkConf.getFlinkAdvConf(), options);

            // JVM configuration entries.
            makeOtherConf(flinkConf.getJvmConf(), options);

            // Program arguments.
            if (StringUtils.isNotEmpty(flinkConf.getArgsConf())) {
                Map<String, String> args = makeOtherConf(flinkConf.getArgsConf(), null);
                if (args != null && !args.isEmpty()) {
                    flinkRunRequest.setArgs(args);
                }
            }
            flinkRunRequest.setOptions(options);
            log.info("设置的配置参数：" + JSON.toJSONString(options));
            // Optionally restore from a savepoint.
            if (param.getStartFromSavePoint() != null) {
                SavePointConf savePointConf = new SavePointConf();
                savePointConf.setSavePointDir(param.getStartFromSavePoint().getSavePointDir());
                savePointConf.setAllowNonRestoredState(param.getStartFromSavePoint().isAllowNonRestoredState());
                flinkRunRequest.setSavePointConf(savePointConf);
            }
            // YARN-specific resource settings.
            if (job.getDeployModel().contains("yarn")) {
                YarnConf yarnConf = new YarnConf();
                if (StringUtils.isNotEmpty(flinkConf.getYarnApplicationType())) {
                    yarnConf.setYarnapplicationType(flinkConf.getYarnApplicationType());
                }
                if (StringUtils.isNotEmpty(flinkConf.getYarnApplicationName())) {
                    yarnConf.setYarnName(flinkConf.getYarnApplicationName());
                }
                if (StringUtils.isNotEmpty(flinkConf.getYarnQueueName())) {
                    yarnConf.setYarnqueue(flinkConf.getYarnQueueName());
                }

                if (flinkConf.getYarnJobManagerMem() != null && flinkConf.getYarnJobManagerMem() > 0) {
                    yarnConf.setJmMemory(flinkConf.getYarnJobManagerMem() + "m");
                }

                if (flinkConf.getYarnTaskManagerMem() != null && flinkConf.getYarnTaskManagerMem() > 0) {
                    yarnConf.setTmMemory(flinkConf.getYarnTaskManagerMem() + "m");
                }
                if (flinkConf.getYarnSlotNum() != null && flinkConf.getYarnSlotNum() > 0) {
                    yarnConf.setYarnslots(flinkConf.getYarnSlotNum().intValue());
                }

                Map<String, String> yarnOtherOptions = new HashMap<>();
                if (StringUtils.isNotEmpty(flinkConf.getYarnAdvConf())) {
                    Map<String, String> yarnMap = makeOtherConf(flinkConf.getYarnAdvConf(), null);
                    if (yarnMap != null && !yarnMap.isEmpty()) {
                        yarnOtherOptions.putAll(yarnMap);
                    }
                }
                if (yarnOtherOptions.size() > 0) {
                    yarnConf.setOptions(yarnOtherOptions);
                }
                flinkRunRequest.setYarnConf(yarnConf);
            }
            FlinkSubmitResponse flinkSubmitResponse = SubmitClient.runSubmit(flinkRunRequest);
            if (flinkSubmitResponse.getIsSubmitError()) {
                // Submission failed: persist FAILED state and record the error log.
                job.setYarnId(flinkSubmitResponse.getAppId());
                job.setJobState(JobRunState.FAILED.get());
                baseMapper.updateById(job);
                insertErrorLog(job, flinkSubmitResponse.getMsg(), "start");
            } else {
                job.setYarnId(flinkSubmitResponse.getAppId());
                job.setFlinkJobId(flinkSubmitResponse.getJobId());
                String yarnUrl = org.apache.commons.lang3.StringUtils.isNotEmpty(cluster.getMapUrl()) ? cluster.getMapUrl() : HadoopUtil.getRMWebAppURL(clusterInfo, true);
                String webUrl = yarnUrl + "/proxy/" + flinkSubmitResponse.getAppId() + "/#/overview";
                job.setFlinkWebUrl(webUrl);

                // Remove the logback.xml generated for this submission.
                String localLogPath = flinkHome.concat("/conf/logback.xml");
                File logFile = new File(localLogPath);
                if (logFile.exists()) {
                    logFile.delete();
                }

                if (StringUtils.isNotEmpty(flinkSubmitResponse.getState())) {
                    job.setJobState(JobRunState.valueOf(flinkSubmitResponse.getState()).get());
                }
                baseMapper.updateById(job);
                syncJobState(job);
                // Successful start: clear any stale error log for this job.
                LambdaQueryWrapper<ResJobLog> queryWrapper = new LambdaQueryWrapper<>();
                queryWrapper.eq(ResJobLog::getJobId, job.getId());
                ResJobLog resJobLog = jobLogService.getBaseMapper().selectOne(queryWrapper);
                if (resJobLog != null) {
                    jobLogService.getBaseMapper().deleteById(resJobLog.getId());
                }
            }
        } catch (Exception ex) {
            // Best-effort cleanup of the generated logback.xml even on failure.
            String localLogPath = flinkHome.concat("/conf/logback.xml");
            File logFile = new File(localLogPath);
            if (logFile.exists()) {
                logFile.delete();
            }
            log.error("启动失败：" + ex.getMessage(), ex);
            job.setJobState(JobRunState.FAILED.get());
            baseMapper.updateById(job);
            // getCause() can be null when the exception was thrown directly here;
            // fall back to the exception itself to avoid an NPE while error-logging.
            Throwable root = ex.getCause() != null ? ex.getCause() : ex;
            insertErrorLog(job, String.valueOf(root.getMessage()), "start");
        }
    }

    /**
     * Parse a JSON array of {key, value} configuration entries into a map.
     *
     * <p>Fixes: dropped the redundant defensive copy of the list returned by
     * {@code JSONArray.parseArray}, and a null parse result no longer NPEs.
     *
     * @param jsonConf JSON text of the form [{"key":"...","value":"..."}]; may be empty
     * @param otherMap optional target map that also receives the parsed entries; may be null
     * @return the parsed entries, or null when jsonConf is empty or yields no entries
     *         (preserved null contract — existing callers null-check the result)
     */
    private Map<String, String> makeOtherConf(String jsonConf, Map<String, String> otherMap) {
        if (StringUtils.isEmpty(jsonConf)) {
            return null;
        }
        List<FlinkConfParam> list = JSONArray.parseArray(jsonConf, FlinkConfParam.class);
        if (list == null || list.isEmpty()) {
            return null;
        }
        Map<String, String> map = new HashMap<>(list.size());
        for (FlinkConfParam param : list) {
            map.put(param.getKey(), param.getValue());
        }
        if (otherMap != null) {
            otherMap.putAll(map);
        }
        return map;
    }


    /**
     * Resolve the HDFS upload directory for a job's dependencies.
     * Layout: {@code <UPLOAD_DEP_PATH>/<application uid>/<job uid>}.
     *
     * @param job the job whose upload path is requested
     * @return the HDFS directory path for this job's uploads
     */
    private String getHdfsUploadPath(ResFlinkJob job) {
        ResApplication application = applicationService.getById(job.getApplicationId());
        return String.format("%s/%s/%s", UPLOAD_DEP_PATH, application.getUid(), job.getUid());
    }

    /**
     * 解析依赖
     *
     * @param depJson
     * @return
     */
    /**
     * Parse the dependency JSON into a list of {@link DependencyResult}, reading
     * each entry's {@code data} payload as a Maven coordinate (type "pom") or an
     * uploaded jar (any other type).
     *
     * @param depJson JSON array text describing dependencies; may be null/empty
     * @return parsed dependencies; empty list when depJson holds no entries
     */
    private List<DependencyResult> getDeps(String depJson) {
        List<DependencyResult> results = new ArrayList<>();
        JSONArray entries = JSONObject.parseArray(depJson);
        if (entries == null || entries.isEmpty()) {
            return results;
        }
        for (Object entry : entries) {
            JSONObject item = (JSONObject) entry;
            DependencyResult result = new DependencyResult();
            result.setName(item.getString("name"));
            String type = item.getString("type");
            result.setType(type);
            String dataJson = item.getString("data");
            if (type.equals("pom")) {
                result.setData(JSONObject.parseObject(dataJson, MvnDep.class));
            } else {
                result.setData(JSONObject.parseObject(dataJson, UploadDep.class));
            }
            results.add(result);
        }
        return results;
    }


    /**
     * 插入异常日志
     *
     * @param job
     */
    /**
     * Write (insert or update) the error-log record for a job.
     *
     * @param job  the job the failure belongs to
     * @param log  failure detail text appended to the log body
     * @param type "deploy" or "start"; any other value is treated as a stop failure
     */
    private void insertErrorLog(ResFlinkJob job, String log, String type) {
        String header;
        if ("deploy".equals(type)) {
            header = "=====作业发布失败=====\n";
        } else if ("start".equals(type)) {
            header = "=====作业启动失败=====\n";
        } else {
            header = "=====作业停止失败=====\n";
        }
        StringBuilder content = new StringBuilder(header);
        content.append("作业名称：".concat(job.getJobName()).concat("\n"));
        content.append("失败原因如下：\n");
        content.append(log.concat("\n"));
        content.append("=============失败日期：")
               .append(DateUtil.date())
               .append("=========\n");

        // One log record per job: update in place when it already exists.
        LambdaQueryWrapper<ResJobLog> queryWrapper = new LambdaQueryWrapper<>();
        queryWrapper.eq(ResJobLog::getJobId, job.getId());
        ResJobLog record = jobLogService.getBaseMapper().selectOne(queryWrapper);
        if (record != null) {
            record.setLogContent(content.toString());
            record.setUpdateBy(" ");
            record.setUpdateTime(DateUtil.date());
            jobLogService.getBaseMapper().updateById(record);
        } else {
            record = new ResJobLog();
            record.setJobId(job.getId());
            record.setLogContent(content.toString());
            record.setCreateBy(" ");
            record.setCreateTime(DateUtil.date());
            jobLogService.getBaseMapper().insert(record);
        }
    }

}
