package cn.getech.data.development.service.impl;


import cn.getech.data.development.config.properties.*;
import cn.getech.data.development.constant.*;
import cn.getech.data.development.dto.*;
import cn.getech.data.development.entity.*;
import cn.getech.data.development.mapper.*;
import cn.getech.data.development.model.vo.CopyJobNodeInfoVO;
import cn.getech.data.development.service.*;
import cn.getech.data.development.utils.*;
import cn.getech.data.development.utils.oozie.JobConfUtil;
import cn.getech.data.development.utils.oozie.JobUtil;
import cn.getech.data.development.utils.sap.SapCheckUtils;
import cn.getech.data.development.utils.shell.ExecuteShellUtil;
import cn.getech.data.development.utils.shell.ShellUtil;
import cn.getech.data.intelligence.common.exception.RRException;
import cn.getech.data.intelligence.common.utils.FileUtils;
import cn.getech.data.intelligence.common.utils.ParseDateTime;
import cn.getech.data.intelligence.common.utils.PojoUtils;
import cn.getech.data.intelligence.common.utils.RegexUtils;
import cn.getech.system.center.entity.SysUserEntity;
import cn.getech.system.center.mapper.SysUserMapper;
import cn.getech.system.center.utils.ShiroUtils;
import cn.hutool.core.bean.BeanUtil;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.date.DateUtil;
import cn.hutool.core.map.MapUtil;
import cn.hutool.core.thread.ThreadUtil;
import cn.hutool.core.util.ObjectUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.jcraft.jsch.JSchException;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

/**
 * <p>
 * 任务节点信息表 服务实现类
 * </p>
 *
 * @author zenith
 * @since 2019-07-08
 */
@Slf4j
@Service
public class JobNodeInfoServiceImpl extends ServiceImpl<JobNodeInfoMapper, JobNodeInfo> implements JobNodeInfoService {

    // --- MyBatis-Plus mappers for the job / node / history tables ---
    @Autowired
    private JobNodeInfoMapper jobNodeInfoMapper;
    // Self-injected service reference so calls go through the Spring proxy
    // (keeps @Transactional/caching semantics on internal calls).
    @Autowired
    private JobNodeInfoService jobNodeInfoService;

    @Autowired
    private JobNodeConfMapper jobNodeConfMapper;

    @Autowired
    private JobInfoMapper jobInfoMapper;

    // --- External-system configuration properties (BDP, Hive, Impala, …) ---
    @Autowired
    private BdpJobConfig bdpJobConfig;

    @Autowired
    private HiveConfig hiveConfig;
    @Autowired
    private ImplaConfig implaConfig;

    // --- Collaborating domain services ---
    @Autowired
    private ConfConnectService confConnectService;

    @Autowired
    private TableInfoService tableInfoService;

    @Autowired
    private TableRelationService tableRelationService;

    @Autowired
    private JobRunHistoryMapper jobRunHistoryMapper;

    @Autowired
    private JobNodeRunHistoryMapper jobNodeRunHistoryMapper;
    @Autowired
    private TableFieldInfoMapper tableFieldInfoMapper;
    @Autowired
    private DataPermissionParamMapper dataPermissionParamMapper;
    @Autowired
    private DataDevelopmentConfig dataDevelopmentConfig;
    @Autowired
    private TableRelationMapper tableRelationMapper;

    @Autowired
    private HdfsUserUtil hdfsUserUtil;

    @Autowired
    private SparkConfig sparkConfig;

    @Autowired
    private OozieConfig oozieConfig;

    @Autowired
    private JobNodeConfService jobNodeConfService;

    @Autowired
    private JobLinkMapper jobLinkMapper;

    @Autowired
    private JobNodeLayConfigMapper jobNodeLayConfigMapper;
    @Autowired
    private WorkMenuJobRelMapper workMenuJobRelMapper;
    @Autowired
    private SysUserMapper sysUserMapper;
    @Autowired
    private WorkFlowDepMapper workFlowDepMapper;

    @Autowired
    private ConnectFieldInfoMapper connectFieldInfoMapper;

    @Autowired
    private IWorkMenuService iWorkMenuService;
    @Autowired
    private ProcInfoMapper procInfoMapper;
    @Autowired
    private RealtimeQueueMapper realtimeQueueMapper;
    @Autowired
    private BussessConfigService bussessConfigService;
    @Autowired
    private RealtimeResourceService realtimeResourceService;

    @Autowired
    private IDataAssetsModelService dataAssetsModelService;
    @Autowired
    private SecurityUtil securityUtil;

    /**
     * Loads the single job node identified by its node key within a process.
     *
     * @param procId  id of the owning process
     * @param nodeKey key of the node inside that process
     * @return the matching {@link JobNodeInfo}, or {@code null} when none exists
     */
    @Override
    public JobNodeInfo getByNodeKeyAndProc(Integer procId, Integer nodeKey) {
        QueryWrapper<JobNodeInfo> wrapper = new QueryWrapper<>();
        wrapper.eq("node_key", nodeKey);
        wrapper.eq("proc_id", procId);
        return this.baseMapper.selectOne(wrapper);
    }


    /**
     * Validates that the configuration attached to a node is complete for its type.
     *
     * @param jobNodeConfig aggregated node configuration to validate
     * @param typeId        node type: 0 start, 1 data ingestion, 2 SQL compute,
     *                      3 program execution, 4 data push, 5 end
     * @throws Exception (in practice {@link RRException}) when a required
     *                   configuration entry is missing or empty
     */
    @Override
    public void checkJobConfigInfo(JobNodeConfig jobNodeConfig, Integer typeId) throws Exception {
        List<JobNodeConf> jobNodeConfs = JobNodeConfig.jobNodeConfigTolist(jobNodeConfig);
        // Previous code wrapped this call in a catch-and-rethrow block, which is a
        // no-op; the exception propagates unchanged either way.
        checkJobConf(jobNodeConfs, typeId);
    }

    /**
     * Validates that every configuration entry required by the given node type
     * is present and non-empty, throwing an {@link RRException} with a
     * type-specific message on the first missing entry.
     *
     * @param jobNodeConfs configuration entries of the node (key/value pairs)
     * @param typeId       node type: 0 start, 1 data ingestion, 2 SQL compute,
     *                     3 program execution, 4 data push, 6 shell
     * @throws Exception when {@code typeId} is null or a required entry is missing
     */
    private void checkJobConf(List<JobNodeConf> jobNodeConfs, Integer typeId) throws Exception {
        if (null == typeId) {
            throw new RRException(DataDevelopmentBizExceptionEnum.PARAM_AND_ERROR.getMessage());
        }
        switch (typeId) {
            case 0: {
                if (null == jobNodeConfs || jobNodeConfs.isEmpty()) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.JOB_TYPE_0_ERROR.getMessage());
                }
                String scheduleType = firstConfValue(jobNodeConfs, "schedule_type");
                if (StringUtils.isEmpty(scheduleType)) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.JOB_TYPE_0_TYPE_CONFIG_ERROR.getMessage());
                }
                // A cron-style schedule (type "1") additionally requires an expression.
                if (scheduleType.equals("1") && StringUtils.isEmpty(firstConfValue(jobNodeConfs, "expression"))) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.JOB_TYPE_0_TYPE_CONFIG_ERROR.getMessage());
                }
                break;
            }
            case 1: {
                if (null == jobNodeConfs || jobNodeConfs.isEmpty()) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.JOB_TYPE_0_ERROR.getMessage());
                }
                String connectType = firstConfValue(jobNodeConfs, DataInto.input_connect_type.getKey());
                if (StringUtils.isEmpty(connectType)) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.JOB_TYPE_1_input_connect_type_ERROR.getMessage());
                }
                requireConf(jobNodeConfs, DataInto.input_connect_id.getKey(),
                        DataDevelopmentBizExceptionEnum.JOB_TYPE_1_input_connect_id_ERROR.getMessage());

                // SAP / FTP / SFTP / API / WebService / ElasticSearch sources do not
                // require the input_input_content entry.
                boolean contentExempt = ObjectUtil.equal(connectType, ConnectTypeEnum.Sap.getCode().toString())
                        || ObjectUtil.equal(connectType, ConnectTypeEnum.FTP.getCode().toString())
                        || ObjectUtil.equal(connectType, ConnectTypeEnum.SFTP.getCode().toString())
                        || ObjectUtil.equal(connectType, ConnectTypeEnum.JieKou.getCode().toString())
                        || ObjectUtil.equal(connectType, ConnectTypeEnum.WebService.getCode().toString())
                        || ObjectUtil.equal(connectType, ConnectTypeEnum.ElasticSearch.getCode().toString());
                if (!contentExempt) {
                    requireConf(jobNodeConfs, DataInto.input_input_content.getKey(),
                            DataDevelopmentBizExceptionEnum.JOB_TYPE_1_input_input_content_ERROR.getMessage());
                }

                // Target database is always required.
                requireConf(jobNodeConfs, DataInto.output_db_name.getKey(),
                        DataDevelopmentBizExceptionEnum.JOB_TYPE_1_output_db_name_ERROR.getMessage());

                // SAP and WebService sources do not require a fixed target table name.
                if (!ObjectUtil.equal(connectType, ConnectTypeEnum.Sap.getCode().toString())
                        && !ObjectUtil.equal(connectType, ConnectTypeEnum.WebService.getCode().toString())) {
                    requireConf(jobNodeConfs, DataInto.output_table_name.getKey(),
                            DataDevelopmentBizExceptionEnum.JOB_TYPE_1_output_table_name_ERROR.getMessage());
                }

                requireConf(jobNodeConfs, DataInto.output_write_model.getKey(),
                        DataDevelopmentBizExceptionEnum.JOB_TYPE_1_output_write_model_ERROR.getMessage());

                // hight_resource / hight_thread checks were retired earlier; only the
                // file-count entry is still mandatory.
                requireConf(jobNodeConfs, DataInto.hight_file_num.getKey(),
                        DataDevelopmentBizExceptionEnum.JOB_TYPE_1_hight_file_num_ERROR.getMessage());
                break;
            }
            case 2: {
                if (null == jobNodeConfs || jobNodeConfs.isEmpty()) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.JOB_TYPE_2_ERROR.getMessage());
                }
                // Hive-side SQL syntax checking was retired; only presence is verified.
                requireConf(jobNodeConfs, "sql_statment",
                        DataDevelopmentBizExceptionEnum.JOB_TYPE_2_sql_statment_ERROR.getMessage());
                break;
            }
            case 3: {
                // Program-execution nodes carry no validated configuration.
                break;
            }
            case 4: {
                if (null == jobNodeConfs || jobNodeConfs.isEmpty()) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.JOB_TYPE_4_ERROR.getMessage());
                }
                requireConf(jobNodeConfs, DataPush.input_input_content.getKey(), "请填写任务类型是数据推送的输入内容");
                requireConf(jobNodeConfs, DataPush.output_connect_id.getKey(), "请填写任务类型是数据推送的输出数据源id");
                requireConf(jobNodeConfs, DataPush.output_connect_type.getKey(), "请填写任务类型是数据推送的输出数据源");
                // output_pre_statment is optional: the original code checked it but
                // took no action in either branch, so no validation is performed.
                requireConf(jobNodeConfs, DataPush.output_write_model.getKey(),
                        DataDevelopmentBizExceptionEnum.JOB_TYPE_4_output_write_model_ERROR.getMessage());
                break;
            }
            case 6: {
                // NOTE(review): unlike the other cases, a null list here results in a
                // NullPointerException instead of an RRException (behavior preserved
                // from the original implementation).
                requireConf(jobNodeConfs, "sql_statment", "请填写shell语句！");
                break;
            }
            default:
                break;
        }
    }

    /**
     * Returns the value of the first configuration entry whose key equals
     * {@code key}, or {@code null} when no such entry exists.
     */
    private static String firstConfValue(List<JobNodeConf> confs, String key) {
        return confs.stream()
                .filter(conf -> Objects.equals(conf.getKey(), key))
                .map(JobNodeConf::getValue)
                .findFirst()
                .orElse(null);
    }

    /**
     * Throws an {@link RRException} carrying {@code message} when the entry
     * {@code key} is absent or its value is null/empty.
     */
    private static void requireConf(List<JobNodeConf> confs, String key, String message) {
        if (StringUtils.isEmpty(firstConfValue(confs, key))) {
            throw new RRException(message);
        }
    }

    /**
     * Validates the full flow configuration of a job before it is enabled:
     * exactly one start node and one end node must exist, the start node must
     * have outgoing links, the link graph must be cycle-free, and every node on
     * a path reaching the end node must have its own configuration validated.
     *
     * @param jobInfo carrier of the job id; the entity is reloaded from the DB
     * @throws Exception (in practice {@link RRException}) describing the first
     *                   validation failure encountered
     */
    @Override
    public void checkJobConfigData(JobInfo jobInfo) throws Exception {
        if (jobInfo == null || jobInfo.getId() == null) {
            throw new RRException(DataDevelopmentBizExceptionEnum.PARAM_ERROR.getMessage());
        }
        // Reload the authoritative record; only the id of the argument is trusted.
        // NOTE(review): selectById may return null for a stale id — a null check
        // before getEnable() would avoid an NPE; confirm callers guarantee existence.
        jobInfo = jobInfoMapper.selectById(jobInfo.getId());

        // An already-enabled job must not be validated/enabled again.
        if (jobInfo.getEnable()) {
            throw new RRException(DataDevelopmentBizExceptionEnum.JOBINFO_ENABLE.getMessage());
        }

        List<JobNodeInfo> jobNodeInfos = jobNodeInfoMapper.selectList(new QueryWrapper<JobNodeInfo>().eq("job_id", jobInfo.getId()));
        if (null == jobNodeInfos || jobNodeInfos.isEmpty()) {
            throw new RRException(DataDevelopmentBizExceptionEnum.JOB_NODE_START_NO_CONF.getMessage());
        }

        // Exactly one configured start node (typeId == 0) is required.
        List<JobNodeInfo> nodeInfos = jobNodeInfos.stream().filter(jobNodeInfo -> Objects.equals(jobNodeInfo.getTypeId(), 0)).collect(Collectors.toList());
        if (nodeInfos.size() != 1) {
            throw new RRException(DataDevelopmentBizExceptionEnum.JOB_NODE_START_NO_CONF.getMessage());
        }

        // Exactly one configured end node (typeId == 5) is required.
        List<JobNodeInfo> endNodeInfos = jobNodeInfos.stream().filter(jobNodeInfo -> Objects.equals(jobNodeInfo.getTypeId(), 5)).collect(Collectors.toList());
        if (endNodeInfos.size() != 1) {
            throw new RRException(DataDevelopmentBizExceptionEnum.JOB_NODE_END_NO_CONF.getMessage());
        }

        // The job's "param" column holds the serialized flow diagram (GoJS-style
        // JSON with nodeDataArray/linkDataArray).
        if (StringUtils.isEmpty(jobInfo.getParam())) {
            throw new RRException(DataDevelopmentBizExceptionEnum.JOB_NODE_NO_CONF_EXIST.getMessage());
        }
        JSONObject jsonObject = JSONObject.parseObject(jobInfo.getParam());
        JSONArray nodeDataArray = jsonObject.getJSONArray("nodeDataArray");
        JSONArray linkDataArray = jsonObject.getJSONArray("linkDataArray");

        List<JSONObject> linkJsonObjects = linkDataArray.toJavaList(JSONObject.class);
        List<JSONObject> nodeJsonObjects = nodeDataArray.toJavaList(JSONObject.class);

        // Expand the diagram into all node-key paths starting from the start node.
        List<List<String>> linkList = getLinkDataList(linkJsonObjects, nodeJsonObjects, nodeInfos.get(0).getNodeKey());

        // The start node must be wired into the diagram at all.
        if (null == linkList || linkList.isEmpty()) {
            throw new RRException(DataDevelopmentBizExceptionEnum.JOB_NODE_NULL.getMessage());
        }
        // Paths of length > 1 mean the start node has at least one outgoing link.
        List<List<String>> beginList = linkList.stream().filter(dataList -> dataList.size() > 1).collect(Collectors.toList());
        if (beginList.isEmpty()) {
            throw new RRException(DataDevelopmentBizExceptionEnum.START_NODE_WITHOUT_NEXT_NODE.getMessage());
        }

        // Reject circular references among the links.
        checkLinkToLink(linkList, linkJsonObjects, nodeJsonObjects);

        // Refresh enable/disable state for nodes not on an end-reaching path.
        disabledNodeListAndJenkins(jobNodeInfos, nodeJsonObjects, linkList, jobInfo.getId(), endNodeInfos.get(0).getNodeKey());

        // Validate node configuration only along paths that terminate at the end node.
        List<List<String>> havaEndNodeList = linkList.stream().filter(dataList -> Objects.equals(dataList.get(dataList.size() - 1), endNodeInfos.get(0).getNodeKey())).collect(Collectors.toList());
        if (havaEndNodeList.isEmpty()) {
            throw new RRException(DataDevelopmentBizExceptionEnum.CONNECT_END_NODE_ERROR.getMessage());
        }
        Set<String> nodeKeyList = new HashSet<>();
        for (List<String> strings : havaEndNodeList) {
            nodeKeyList.addAll(strings);
        }
        for (String s : nodeKeyList) {
            List<JobNodeInfo> jobNodeInfos1 = jobNodeInfos.stream().filter(jobNodeInfo -> Objects.equals(s, jobNodeInfo.getNodeKey())).collect(Collectors.toList());
            if (jobNodeInfos1.isEmpty()) {
                // A diagram node with no DB record has never been configured; use its
                // display text (if available) in the error message.
                List<JSONObject> keys = nodeJsonObjects.stream().filter(nodeInfo -> Objects.equals(nodeInfo.getString("key"), s)).collect(Collectors.toList());
                String errorMsg = "";
                if (!keys.isEmpty()) {
                    errorMsg = keys.get(0).getString("text");
                }
                throw new RRException(String.format(DataDevelopmentBizExceptionEnum.A_JOB_NODE_NO_CONF_EXIST.getMessage(), errorMsg));
            }
            // Validate the node's stored configuration entries for its type.
            List<JobNodeConf> jobNodeConf = jobNodeConfMapper.selectList(new QueryWrapper<JobNodeConf>().eq("job_node_id", jobNodeInfos1.get(0).getId()));
            checkJobConf(jobNodeConf, jobNodeInfos1.get(0).getTypeId());
        }
    }

    /**
     * Creates or updates the persisted record for a single flow-diagram node.
     * New nodes are created disabled and inherit the owning job's name; for
     * existing nodes only the audit fields and the name are refreshed.
     *
     * @param param node data carrying at least nodeKey and jobId
     */
    @Override
    public void saveOrUpdateJobNode(JobNodeInfo param) {
        // Silently ignore incomplete requests: node key and owning job id are mandatory.
        if (param == null || StringUtils.isEmpty(param.getNodeKey()) || param.getJobId() == null) {
            return;
        }
        // The node must reference an existing job.
        JobInfo owningJob = jobInfoMapper.selectById(param.getJobId());
        if (owningJob == null) {
            return;
        }
        // Look up an existing record for this (node_key, job_id) pair.
        JobNodeInfo existing = jobNodeInfoService.getOne(new QueryWrapper<JobNodeInfo>()
                .eq("node_key", param.getNodeKey())
                .eq("job_id", param.getJobId()));
        if (existing != null) {
            // Update path: refresh modifier, timestamp, and display name only.
            existing.setModPer(ShiroUtils.getUserId().intValue());
            existing.setModTime(DateUtil.date());
            existing.setName(param.getName());
            jobNodeInfoService.updateById(existing);
            return;
        }
        // Create path: stamp creator/time, start disabled, inherit the job's name.
        param.setCreatePer(ShiroUtils.getUserId().intValue());
        param.setCreateTime(DateUtil.date());
        param.setEnable(false);
        param.setName(owningJob.getName());
        jobNodeInfoService.save(param);
    }

    /**
     * Persists a new run-history record for a job execution, capturing the flow
     * graph (edges + nodes) that was actually launched.
     *
     * @param jobLinks      edges of the executed flow graph
     * @param jobNodeInfos  nodes of the executed flow graph
     * @param confParams    serialized configuration parameters for this run
     * @param jobId         id of the job being run
     * @param oozieJobId    workflow id assigned by Oozie
     * @param type          re-run type flag (stored as reRunType)
     * @param jobNodeInfoId node id a re-run starts from (stored as reRunNodeId)
     * @param runType       how the run was triggered
     * @param cronStartTime data-window start for scheduled runs
     * @param cronEndTime   data-window end for scheduled runs
     * @param cronQueueName queue the run is submitted to
     */
    @Override
    public void initJobHistory(List<JobLinkDto> jobLinks, List<JobNodeInfoDto> jobNodeInfos, String confParams, Integer jobId, String oozieJobId, Integer type, Integer jobNodeInfoId, Integer runType, String cronStartTime, String cronEndTime, String cronQueueName) {
        // NOTE(review): the version-number ("num") lookup was commented out and num
        // is deliberately written as null here — confirm downstream consumers and
        // the DB column tolerate a null num (contrast with
        // initJobHistoryAndJobNodeHistory, which computes max(num)+1).
        JobRunHistory jobRunHistory = new JobRunHistory();
        jobRunHistory.setNum(null);
        jobRunHistory.setOozieJobId(oozieJobId);
        jobRunHistory.setParams(confParams);
        jobRunHistory.setState(LastRunState.RUNNING.getCode());
        jobRunHistory.setBeginTime(DateUtil.parseDateTime(DateUtil.now()));
        jobRunHistory.setJobInfoId(jobId);
        // Snapshot the executed graph (edges + nodes) as JSON on the history row.
        JobLinkAndJobNodeDto jobLinkAndJobNodeDto = new JobLinkAndJobNodeDto();
        jobLinkAndJobNodeDto.setEdges(jobLinks);
        jobLinkAndJobNodeDto.setNodes(jobNodeInfos);
        jobRunHistory.setNodeParams(JSONObject.toJSONString(jobLinkAndJobNodeDto));
        jobRunHistory.setOozieStatus("running");
        jobRunHistory.setReRunType(type);
        jobRunHistory.setReRunNodeId(jobNodeInfoId);
        jobRunHistory.setRunType(runType);
        // Data window boundaries and target queue for scheduled runs.
        jobRunHistory.setCronEndTime(cronEndTime);
        jobRunHistory.setCronStartTime(cronStartTime);
        jobRunHistory.setQueueName(cronQueueName);
        jobRunHistoryMapper.insert(jobRunHistory);
    }

    /**
     * Initializes run history for a job AND a node-level history record for its
     * start node. The run's version number ("num") is 1 for a first execution,
     * otherwise the highest existing num for this job plus one.
     *
     * @param param      carrier of the job id; the entity is reloaded from the DB
     * @param confParams serialized configuration parameters for this run
     * @throws RRException when the job id is missing, the job does not exist,
     *                     it has no nodes, or it lacks exactly one start node
     */
    @Override
    public void initJobHistoryAndJobNodeHistory(JobInfo param, String confParams) {
        // Reload the job; only the id of the argument is trusted.
        if (null == param.getId()) {
            throw new RRException(String.format(DataDevelopmentBizExceptionEnum.PARAM_FAIL_ERROR.getMessage(), "任务"));
        }
        JobInfo jobInfo = jobInfoMapper.selectById(param.getId());
        if (null == jobInfo) {
            throw new RRException(String.format(DataDevelopmentBizExceptionEnum.PARAM_NO_EXIT_ERROR.getMessage(), "任务"));
        }
        // Locate the job's start node (typeId == 0); exactly one must exist.
        List<JobNodeInfo> jobNodeInfos = jobNodeInfoMapper.selectList(new QueryWrapper<JobNodeInfo>().eq("job_id", jobInfo.getId()));
        if (null == jobNodeInfos || jobNodeInfos.isEmpty()) {
            throw new RRException(String.format(DataDevelopmentBizExceptionEnum.PARAM_NO_EXIT_ERROR.getMessage(), "任务下未配置执行的节点信息！"));
        }
        List<JobNodeInfo> begainList = jobNodeInfos.stream().filter(jobNodeInfo -> Objects.equals(jobNodeInfo.getTypeId(), 0)).collect(Collectors.toList());
        if (null == begainList || begainList.size() != 1) {
            throw new RRException(String.format(DataDevelopmentBizExceptionEnum.PARAM_NO_EXIT_ERROR.getMessage(), "任务下的开始节点信息！"));
        }
        // Determine the next version number from the most recent history row.
        List<JobRunHistory> jobRunHistoryList = jobRunHistoryMapper.selectList(new QueryWrapper<JobRunHistory>().eq("job_info_id", jobInfo.getId()).orderByDesc("num"));
        Long num = 1L;
        if (null != jobRunHistoryList && !jobRunHistoryList.isEmpty()) {
            num = jobRunHistoryList.get(0).getNum() + 1;
        }
        // Insert the job-level history row: version 1 on first execution,
        // otherwise the current maximum version plus one.
        JobRunHistory jobRunHistory = new JobRunHistory();
        jobRunHistory.setNum(num);
        jobRunHistory.setParams(confParams);
        jobRunHistory.setState(LastRunState.RUNNING.getCode());
        jobRunHistory.setBeginTime(DateUtil.parseDateTime(DateUtil.now()));
        jobRunHistory.setJobInfoId(jobInfo.getId());
        jobRunHistory.setNodeParams(jobInfo.getParam());
        jobRunHistory.setOozieStatus("running");
        jobRunHistoryMapper.insert(jobRunHistory);

        // Insert the initial node-level history row for the start node, sharing
        // the same version number and RUNNING state.
        JobNodeRunHistory jobNodeRunHistory = new JobNodeRunHistory();
        jobNodeRunHistory.setJobInfoId(jobInfo.getId());
        jobNodeRunHistory.setNodeKey(begainList.get(0).getNodeKey());
        jobNodeRunHistory.setTypeId(begainList.get(0).getTypeId());
        jobNodeRunHistory.setName(begainList.get(0).getName());
        jobNodeRunHistory.setJobNodeInfoId(begainList.get(0).getId());
        jobNodeRunHistory.setParams(confParams);
        jobNodeRunHistory.setState(LastRunState.RUNNING.getCode());
        jobNodeRunHistory.setNum(num);
        jobNodeRunHistory.setTime(DateUtil.parseDateTime(DateUtil.now()));
        jobNodeRunHistory.setOozieStatus("running");
        jobNodeRunHistoryMapper.insert(jobNodeRunHistory);

    }

    /**
     * Generates the per-node shell script (debug preamble + optional start-check
     * callback + node payload shell) and writes it to HDFS at {@code dst}, then
     * appends upstream/downstream table-count shells where dependencies require it.
     *
     * @param param       node whose script is generated; no-op when null or without a type
     * @param dst         HDFS destination path for the generated script
     * @param oozieConfig oozie config forwarded to the shell builders
     */
    @Override
    public void uploadShell(JobNodeInfo param, String dst, OozieConfig oozieConfig) {
        // Nothing to generate without a node or a node type.
        if (null == param || null == param.getTypeId()) {
            return;
        }

        JobConfUtil jobConfUtil = new JobConfUtil(bdpJobConfig);
        // Debug preamble prepended to every script: prints the executing user and
        // a comma-separated list of the user names found in /etc/shadow.
        String whoamiShell=" whoami\n ";
        String printUserShell=" lineNum=`cat /etc/shadow | wc -l`;\n" +
                "#echo $lineNum;\n" +
                "i=0;\n" +
                "while [ \"$i\" -lt $(($lineNum+0)) ];\n" +
                "do \n" +
                "  i=`expr $i + 1`;\n" +
                "  a=`sed -n \"$i\"p /etc/shadow`;\n" +
                "  if [[ $i -eq $(($lineNum+0)) ]];\n" +
                "  then\n" +
                "    echo -n ${a%%:*};\n" +
                "  else\n" +
                "    echo -n ${a%%:*}\",\";\n" +
                "  fi\n" +
                " \n" +
                " done\n" +
                " echo \n ";
        String shell = pingShellData(param, jobConfUtil, oozieConfig);
        // Node types 1/2/4/7 additionally get the node-start-check REST callback
        // inserted before the payload shell.
        if(param.getTypeId() == 1 || param.getTypeId() == 2 || param.getTypeId() == 4 || param.getTypeId() == 7){
            shell = "#!/bin/sh\nsource /etc/profile\n" +
                    "export LANG=zh_CN.UTF-8\n"  + whoamiShell+ printUserShell+pingStartSyncShell(param.getId()) + shell;
        }else{
            shell = "#!/bin/sh\nsource /etc/profile\n" +
                    "export LANG=zh_CN.UTF-8\n"   + whoamiShell+ printUserShell+shell;
        }

        HdfsUtil hdfsUtil;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
        } catch (Exception e) {
            // Bug fix: the original fell through with a null hdfsUtil and threw an
            // uncaught NPE on the first use below; abort with a log instead.
            log.error("创建HdfsUtil异常！error:{}", e.getMessage(), e);
            return;
        }

        try {
            try {
                hdfsUtil.writeFile(shell.getBytes("utf-8"), dst);
            } catch (UnsupportedEncodingException e) {
                log.error("上传shell文件异常！error:{}",e.getMessage());
            }

            // Append the upstream table-count shells required by this node's own
            // dependency configs.
            appSelectCountData(param.getJobNodeLayConfigDtoList(), hdfsUtil, oozieConfig);

            // If downstream nodes declare dependency conditions on this node,
            // append the corresponding count shells as well.
            List<JobNodeLayConfigDto> jobNodeLayConfigDtos = jobNodeLayConfigMapper.selectDtoListByUpJobNodeId(param.getId(), param.getJobId());
            if (CollectionUtil.isNotEmpty(jobNodeLayConfigDtos)) {
                appSelectCountData(jobNodeLayConfigDtos, hdfsUtil, oozieConfig);
            }
        } finally {
            // Bug fix: release the HDFS handle even when an append step throws;
            // the original only closed on the straight-line path.
            hdfsUtil.close();
        }
    }

    /***
     * Builds the shell fragment that calls the node-start-check REST endpoint
     * before a node runs: if the endpoint does not answer 'success' the script
     * exits with code 10, aborting the node.
     * @param jobNodeId  节点id (node id injected into the callback URL)
     * @return  the generated shell fragment
     */
    private String pingStartSyncShell(Integer jobNodeId) {
        String curlLine = "resultsCheack=`curl --connect-timeout "
                + dataDevelopmentConfig.getCurlconnectTime()
                + " --max-time "
                + dataDevelopmentConfig.getCurlMaxTime()
                + " -X GET "
                + oozieConfig.getRest_url()
                + "/nodeStartCheck?nodeId=${jobNodeId}`"
                + "\n";
        return "jobNodeId=" + jobNodeId + "\n"
                + curlLine
                + "echo ${resultsCheack}\n"
                + "if [[ ! -n \"${resultsCheack}\" ]]; then\n\t"
                + "echo 'resultsCheack is null'\n"
                + "else\n\t"
                + "if [[ ${resultsCheack} == 'success' ]]; then\n\t\t"
                + "echo 'success'\n\t"
                + "else\n\t\t"
                + "exit 10\n\t"
                + "fi\n"
                + "fi\n";
    }

    /**
     * For every dependency config that declares an output table, (idempotently)
     * appends a "count the output table" shell fragment to the upstream node's
     * script on HDFS: the current script is read, any previously appended copy of
     * the fragment is stripped, and the fragment is re-appended.
     *
     * @param jobNodeLayConfigDtoList dependency configs to process (may be null/empty)
     * @param hdfsUtil                shared HDFS handle OWNED BY THE CALLER — this
     *                                method must not close it
     * @param oozieConfig             oozie config used to build the count shell
     */
    private void appSelectCountData(List<JobNodeLayConfigDto> jobNodeLayConfigDtoList, HdfsUtil hdfsUtil, OozieConfig oozieConfig) {
        if (CollectionUtil.isEmpty(jobNodeLayConfigDtoList)) {
            return;
        }
        for (JobNodeLayConfigDto jobNodeLayConfigDto : jobNodeLayConfigDtoList) {
            Integer tableId = jobNodeLayConfigDto.getOutputTableId();
            if (null == tableId) {
                continue;
            }
            JobNodeInfo jobNodeInfo = jobNodeInfoMapper.selectById(jobNodeLayConfigDto.getUpNodeId());
            if (null == jobNodeInfo) {
                continue;
            }
            TableInfo tableInfo = tableInfoService.getById(tableId);
            if (null == tableInfo) {
                continue;
            }
            JobInfo jobInfo = jobInfoMapper.selectById(jobNodeInfo.getJobId());
            if (null == jobInfo) {
                continue;
            }
            // Path of the upstream node's existing script, e.g.
            // workflow-app/<type>/<procId>/<jobId>/<nodeName>_<nodeId>.sh
            String nodeName = JobType.ObjOf(jobNodeInfo.getTypeId()).getEnName();
            String olderDst = "workflow-app/" + WorkFlowType.WORKFLOW.getCode() + "/" + jobInfo.getProcId() + "/" + jobInfo.getId() + "/" + nodeName + "_" + jobNodeInfo.getId() + ".sh";
            String s = hdfsUtil.readFile2(olderDst);
            String uptableName = tableInfo.getDbName() + "." + tableInfo.getTableName();
            String shell = appendTableCountShell(uptableName, oozieConfig);
            if (StringUtils.isEmpty(s)) {
                s = "";
            }
            // Strip a previously appended copy so repeated calls stay idempotent.
            if (s.contains(shell)) {
                s = s.replace(shell, "");
            }
            s = s + shell;
            try {
                hdfsUtil.writeFile(s.getBytes("utf-8"), olderDst);
            } catch (UnsupportedEncodingException e) {
                // Bug fix: was e.printStackTrace(); log through SLF4J instead.
                log.error("追加shell文件异常！error:{}", e.getMessage());
            }
            // Bug fix: the original called hdfsUtil.dfs.close() here, inside the
            // loop, which invalidated the shared handle for later iterations and
            // for the caller (uploadShell invokes this twice and then closes the
            // handle itself). Lifecycle now belongs solely to the caller.
        }
    }

    /**
     * Writes the start/end marker shell for a workflow to HDFS. START nodes poll
     * the dependency-oozie-id REST endpoint and abort (exit 10) when the id is
     * missing or '-1'; END nodes just log and sleep briefly.
     *
     * @param typeId      node type (JobType.START / JobType.END codes handled)
     * @param dst         HDFS destination path
     * @param oozieConfig provides the REST base url for the START check
     * @param menuDepId   dependency menu id injected into the callback URL
     */
    @Override
    public void uploadJobShell(Integer typeId, String dst, OozieConfig oozieConfig,Integer menuDepId) {
        String shell = "";
        // Consistency/bug fix: compare with Objects.equals like the rest of this
        // file (see cheackIsDrop) instead of ==, which is unsafe for boxed
        // Integer codes and NPEs when typeId is null.
        if(Objects.equals(typeId, JobType.START.getCode())){
            shell = "echo '开始执行' \n" +
                    "sleep 8 \n" +
                    "depOOzieId=`curl --connect-timeout "+dataDevelopmentConfig.getCurlconnectTime()
                    +" --max-time "+dataDevelopmentConfig.getCurlMaxTime()+" -X GET "+oozieConfig.getRest_url()+"/getJobLayOozieId/"+menuDepId+"`\necho ${depOOzieId} \n" +
                    "if [[ ! -n \"${depOOzieId}\" ]]; then\n\t" +
                    "exit 10\n" +
                    "fi\n" +
                    "if [[ ${depOOzieId} == '-1' ]]; then\n\t" +
                    "exit 10\n" +
                    "fi\n" +
                    "echo \"depOOzieId:$depOOzieId\"";
        }
        if(Objects.equals(typeId, JobType.END.getCode())){
            shell = "echo '结束执行' \nsleep 1";
        }
        shell = "#!/bin/sh\n" + shell;
        HdfsUtil hdfsUtil = null;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
            hdfsUtil.writeFile(shell.getBytes(),dst);
        } catch (Exception e) {
            // Bug fix: was e.printStackTrace(); log with the exception attached.
            log.error("上传作业shell文件异常！error:{}", e.getMessage(), e);
        } finally {
            // Bug fix: close in finally instead of duplicating close() in the
            // catch block and on the success path.
            if (null != hdfsUtil) {
                hdfsUtil.close();
            }
        }
    }

    /**
     * Generates the workflow-dependency shell via {@code pingJobLayShellData} and
     * uploads it to HDFS at {@code dst}.
     *
     * @param procId      process id forwarded to the shell builder
     * @param jobId       job id forwarded to the shell builder
     * @param dst         HDFS destination path
     * @param oozieConfig oozie config forwarded to the shell builder
     */
    @Override
    public void uploadJobLayShell(Integer procId,Integer jobId,String dst, OozieConfig oozieConfig) {
        JobConfUtil jobConfUtil = new JobConfUtil(bdpJobConfig);
        String shell = pingJobLayShellData(procId,jobId,jobConfUtil, oozieConfig);
        shell = "#!/bin/sh\n" + shell;
        HdfsUtil hdfsUtil = null;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
            hdfsUtil.writeFile(shell.getBytes(),dst);
        } catch (Exception e) {
            // Bug fix: was e.printStackTrace(); log with the exception attached.
            log.error("上传作业流依赖shell文件异常！error:{}", e.getMessage(), e);
        } finally {
            // Bug fix: single close in finally instead of duplicated close calls
            // on the success and failure paths.
            if (null != hdfsUtil) {
                hdfsUtil.close();
            }
        }
    }

    /**
     * Loads the node DTOs for the given node ids; straight delegation to the mapper.
     *
     * @param jobNodeIds node ids to load
     * @return matching node DTOs (contents depend on the mapper query)
     */
    @Override
    public List<JobNodeInfoDto> selectListByIds(Set<Integer> jobNodeIds) {
        return jobNodeInfoMapper.selectListByIds(jobNodeIds);
    }

    /**
     * Resolves the output tables (id + table name) a node exposes for routing
     * configuration, by node type:
     * typeId 1 (data sync) — derived from the node's output db/table config, with
     * a special case for SAP sources whose output tables are stored differently;
     * typeId 2 (SQL) — derived from the "jobOutPutTableIds" config; typeId 4
     * (table output) — routing not supported.
     *
     * @param jobNodeId node id whose outputs are resolved
     * @return id/name pairs of the output tables, or null when none apply
     * @throws RRException when the node, its output config, or a referenced
     *                     warehouse table does not exist
     */
    @Override
    public List<IdAndNameDto> selectNodeLayConfig(Integer jobNodeId) {
        // Load the node first; everything below branches on its type.
        JobNodeInfo jobNodeInfo = jobNodeInfoMapper.selectById(jobNodeId);
        if(null == jobNodeInfo){
            throw new RRException("当前节点不存在！");
        }
        List<IdAndNameDto> list = new ArrayList<>();
        if(Objects.equals(1,jobNodeInfo.getTypeId())){
            // Data sync node.
            // Look up the node's input data-source type: SAP sources store their
            // output tables under a different config key.
            JobNodeConf inputType = jobNodeConfService.getOne(new QueryWrapper<JobNodeConf>().eq("`key`", "input_connect_type").eq("job_type", 0).eq("job_node_id", jobNodeId));
            if(null != inputType){
                if(ConnectTypeEnum.Sap.getCode().toString().equals(inputType.getValue())){
                    List<JobNodeConf> list1 = jobNodeConfService.list(new QueryWrapper<JobNodeConf>().eq("`key`", "sap_output_table_name").eq("job_type", 0).eq("job_node_id", jobNodeId));
                    // SAP connection type: output tables are stored as bracketed,
                    // comma-separated "db.table" strings, e.g. "[db.t1, db.t2]".
                    if(CollectionUtil.isNotEmpty(list1)){
                        JobNodeConfigDto jobNodeConfigDto = jobNodeConfService.getJobNodeConf(list1);
                        List<String> sap_output_table_name_older = jobNodeConfigDto.getSap_output_table_name();
                        if(null != sap_output_table_name_older && sap_output_table_name_older.size() != 0){
                            for (String sapName : sap_output_table_name_older) {
                                // Strip the surrounding brackets, then split the
                                // "db.table" entries.
                                String sapNameSub = sapName.substring(1, sapName.length() - 1);
                                String[] sapArray = sapNameSub.split(",");
                                List<String> sapList = Arrays.asList(sapArray);
                                for (String s : sapList) {
                                    if (!s.trim().equals("")) {
                                        String[] split = s.trim().split("\\.");
                                        if(null != split && split.length >= 2){
                                            TableInfo tableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("db_name", split[0]).eq("table_name", split[1]));
                                            if(null == tableInfo){
                                                throw new RRException("节点对应的表不存在！");
                                            }
                                            IdAndNameDto idAndNameDto = new IdAndNameDto();
                                            idAndNameDto.setId(tableInfo.getId());
                                            idAndNameDto.setName(tableInfo.getTableName());
                                            list.add(idAndNameDto);
                                        }
                                    }
                                }
                                // NOTE(review): this returns inside the outer loop,
                                // so only the FIRST sap_output_table_name entry is
                                // ever processed — confirm this is intentional.
                                return list;
                            }
                        }
                    }
                }else{
                    JobNodeConf dbName = jobNodeConfService.getOne(new QueryWrapper<JobNodeConf>().eq("`key`", "output_db_name").eq("job_type", 0).eq("job_node_id",jobNodeId));
                    JobNodeConf tableName = jobNodeConfService.getOne(new QueryWrapper<JobNodeConf>().eq("`key`", "output_table_name").eq("job_type", 0).eq("job_node_id",jobNodeId));
                    if(null == dbName || StringUtils.isEmpty(dbName.getValue())){
                        throw new RRException("节点未配置输出！");
                    }
                    if(null == tableName || StringUtils.isEmpty(tableName.getValue())){
                        throw new RRException("节点未配置输出！");
                    }
                    // Look up the configured output warehouse table.
                    TableInfo tableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("db_name", dbName.getValue()).eq("table_name", tableName.getValue()));
                    if(null == tableInfo){
                        throw new RRException("节点对应的表不存在！");
                    }
                    IdAndNameDto idAndNameDto = new IdAndNameDto();
                    idAndNameDto.setId(tableInfo.getId());
                    idAndNameDto.setName(tableInfo.getTableName());
                    list.add(idAndNameDto);
                    return list;
                }
            }

        }
        if(Objects.equals(2,jobNodeInfo.getTypeId())){
            // SQL computation node: output table ids stored as a JSON array.
            JobNodeConf jobOutPutTableIds = jobNodeConfService.getOne(new QueryWrapper<JobNodeConf>().eq("`key`", "jobOutPutTableIds").eq("job_type", 0).eq("job_node_id",jobNodeId));
            if(null == jobOutPutTableIds || StringUtils.isEmpty(jobOutPutTableIds.getValue())){
                return null;
            }
            JSONArray jsonArray = JSONArray.parseArray(jobOutPutTableIds.getValue());
            if(null == jsonArray || jsonArray.size() == 0){
                return null;
            }
            for (Object id : jsonArray) {
                // NOTE(review): tableInfo is not null-checked here, unlike the
                // branches above — a stale id would NPE. Confirm ids are reliable.
                TableInfo tableInfo = tableInfoService.getById(Integer.valueOf(id.toString().trim()));
                IdAndNameDto idAndNameDto = new IdAndNameDto();
                idAndNameDto.setId(tableInfo.getId());
                idAndNameDto.setName(tableInfo.getTableName());
                list.add(idAndNameDto);
            }
            return list;
        }

        if(Objects.equals(4,jobNodeInfo.getTypeId())){
            // Table-output nodes cannot be configured with routing.
        }
        return null;
    }

    /**
     * Validates that an OUTPUT or SQL node's statement does not drop protected
     * hive tables. Nodes of any other type — or missing nodes — are ignored.
     *
     * @param jobNodeId node to validate
     */
    @Override
    public void cheackIsDrop(Integer jobNodeId) {
        JobNodeInfo node = jobNodeInfoService.getById(jobNodeId);
        if (null == node) {
            return;
        }
        // Only OUTPUT and SQL nodes carry SQL text worth checking.
        boolean checkable = Objects.equals(node.getTypeId(), JobType.OUTPUT.getCode())
                || Objects.equals(node.getTypeId(), JobType.SQL.getCode());
        if (!checkable) {
            return;
        }
        List<JobNodeConf> confs = jobNodeConfMapper.selectList(
                new QueryWrapper<JobNodeConf>()
                        .eq("job_node_id", jobNodeId)
                        .in("`key`", "sql_statment", "input_input_content"));
        cheackDropHiveTables(JobNodeConfig.listToJobNodeConfig(confs), node.getTypeId());
    }

    /**
     * Batch-reassigns the given nodes to a new assignee; straight delegation to
     * the mapper.
     *
     * @param ids           ids of the nodes to update
     * @param assigneePerId id of the person the nodes are assigned to
     */
    @Override
    public void updateBatch(List<Long> ids, Integer assigneePerId) {
        jobNodeInfoMapper.updateBatch(ids,assigneePerId);
    }

    /**
     * Substitutes runtime parameters into {@code sql} for the given node.
     * Parameters are gathered from three levels of "nodeParam" configs — the node
     * itself, its workflow, and the workflow's dependency — all merged into one
     * map before substitution. On any failure the ORIGINAL sql is returned
     * unchanged (the method never throws).
     *
     * @param sql       text to substitute into (returned as-is when empty)
     * @param jobNodeId node whose parameter configs are resolved
     * @return sql with parameters replaced, or the original sql on error
     */
    @Override
    public String listRunSysparams(String sql,String jobNodeId) {
        if(StringUtils.isEmpty(sql) || StringUtils.isEmpty(jobNodeId)){
            return sql;
        }
        try {
            Integer jobid = null;
            Long depid = null;
            // Accumulates every resolved parameter across all config levels.
            // NOTE(review): merge order is node -> workflow -> workflow
            // dependency; which level wins on key collision depends on
            // ananlyNodeParam — confirm before relying on precedence.
            Map<String,String> allMaps = new HashMap<>();
            // Fetch the node-level parameter config.
            JobNodeConf jobNodeConf = jobNodeConfMapper.selectOne(new QueryWrapper<JobNodeConf>()
                    .eq("job_node_id", jobNodeId)
                    .eq("job_type", 0)
                    .eq("`key`", "nodeParam"));
            JobNodeInfo jobNodeInfo = jobNodeInfoMapper.selectById(jobNodeId);
            if(null == jobNodeInfo){
                return sql;
            }
            jobid = jobNodeInfo.getJobId();
            // Parse the node-level parameters ("null" string means unset).
            if(null != jobNodeConf && StringUtils.isNotEmpty(jobNodeConf.getValue()) && !"null".equals(jobNodeConf.getValue())){
                ananlyNodeParam(allMaps,jobNodeConf);
            }

            // Fetch the workflow-level parameter config via the job->menu relation.
            List<WorkMenuJobRel> workMenuJobRels = workMenuJobRelMapper.selectList(new QueryWrapper<WorkMenuJobRel>()
                    .eq("work_flow_id", jobNodeInfo.getJobId()).eq("work_type", 1));
            if(CollectionUtil.isNotEmpty(workMenuJobRels)){
                JobNodeConf workflowConf = jobNodeConfMapper.selectOne(new QueryWrapper<JobNodeConf>()
                        .eq("job_node_id", workMenuJobRels.get(0).getWorkMenuId())
                        .eq("job_type", 1)
                        .eq("`key`", "nodeParam"));
                if(null != workflowConf && StringUtils.isNotEmpty(workflowConf.getValue()) && !"null".equals(workflowConf.getValue())){
                    ananlyNodeParam(allMaps,workflowConf);
                }

                // Fetch the workflow-dependency-level parameter config.
                List<WorkMenuJobRel> workMenuJobRels2 = workMenuJobRelMapper.selectList(new QueryWrapper<WorkMenuJobRel>()
                        .eq("work_flow_id", workMenuJobRels.get(0).getWorkFlowId()).eq("work_type", 2));
                if(CollectionUtil.isNotEmpty(workMenuJobRels2)){
                    JobNodeConf workflowLayConf = jobNodeConfMapper.selectOne(new QueryWrapper<JobNodeConf>()
                            .eq("job_node_id", workMenuJobRels2.get(0).getWorkMenuId())
                            .eq("job_type", 1)
                            .eq("`key`", "nodeParam"));
                    if(null != workflowLayConf && StringUtils.isNotEmpty(workflowLayConf.getValue()) && !"null".equals(workflowLayConf.getValue())){
                        ananlyNodeParam(allMaps,workflowLayConf);
                    }
                    // Resolve the workflow dependency id (used during substitution).
                    List<WorkFlowDep> workFlowDeps = workFlowDepMapper.selectList(new QueryWrapper<WorkFlowDep>().eq("work_menu_id", workMenuJobRels2.get(0).getWorkMenuId()));
                    if(CollectionUtil.isNotEmpty(workFlowDeps)){
                        depid = workFlowDeps.get(0).getId();
                    }
                }
            }

            // Parse the sql and replace all runtime parameters.
            return repalceAllSh(sql,allMaps,depid,jobid,jobNodeId);

        }catch (Exception e){
            log.error("解析运行参数异常！error:{}",e.getMessage());
        }
        return sql;
    }

    /**
     * Executes a node's shell script on its configured remote host and returns
     * the captured output. Flow: read the script from HDFS, substitute runtime
     * parameters, SSH to the node's host, recreate a per-node temp directory,
     * upload the script, run it synchronously, and join the output lines.
     * Failures never throw — each stage returns a string prefixed with
     * "[error-oozi]" describing what went wrong (callers appear to parse this
     * marker).
     *
     * @param jobNodeId node whose script is executed
     * @return the remote command output, an "[error-oozi] ..." message, or ""
     */
    @Override
    public String runShellNodes(String jobNodeId) {

        JobNodeInfo jobNodeInfo = jobNodeInfoMapper.getJobNodeInfo(jobNodeId);
        if(null == jobNodeInfo){
            return "[error-oozi] jobNodeInfo is not exit!";
        }
        // Download the node's shell script from HDFS first.
        HdfsUtil hdfsUtil = null;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
        } catch (Exception e) {
            log.error("[error] get HdfsUtil fail!error:{}",e.getMessage());
            return "[error-oozi] get HdfsUtil fail!";
        }
        if(null == hdfsUtil){
            return "[error-oozi] get HdfsUtil fail!";
        }
        // Script path convention: <jobconfig>/<typeId>/nodeShell_<nodeId>.sh
        String pathStr = String.format("%s/%s/%s%s.sh", bdpJobConfig.getJobconfig(), jobNodeInfo.getTypeId(), "nodeShell_", jobNodeInfo.getId());
        String shellConnect = null;
        try {
            shellConnect = hdfsUtil.readFile2(pathStr);
        }catch (Exception e){
            log.error("[error] get HdfsUtil shell fail!error:{}",e.getMessage());
            return "[error-oozi] get HdfsUtil shell fail!";
        }finally {
            if(null != hdfsUtil){
                hdfsUtil.close();
            }
        }
        if(StringUtils.isEmpty(shellConnect)){
            return "";
        }
        // Substitute runtime parameters into the script.
        String replaceShell = listRunSysparams(shellConnect, jobNodeId);
        if(StringUtils.isEmpty(replaceShell)){
            return replaceShell;
        }

        // Wrapping the shell with begin/end log markers is currently disabled.
        //replaceShell = warpShell(replaceShell);

        // SSH to the node's configured host; all four credentials are required.
        if(StringUtils.isEmpty(jobNodeInfo.getHostUrl())){
            return "[error-oozi] hostUrl is empty!";
        }
        if(null == jobNodeInfo.getPort()){
            return "[error-oozi] port is null!";
        }
        if(StringUtils.isEmpty(jobNodeInfo.getUsername())){
            return "[error-oozi] username is empty!";
        }
        if(StringUtils.isEmpty(jobNodeInfo.getPassword())){
            return "[error-oozi] password is empty!";
        }
        ExecuteShellUtil instance = ExecuteShellUtil.getInstance();
        try {
            instance.init(jobNodeInfo.getHostUrl(),jobNodeInfo.getPort(),jobNodeInfo.getUsername(),jobNodeInfo.getPassword());
        } catch (JSchException e) {
            log.error("[error] connect remote server fail!error:{}",e.getMessage());
            // Close the connection before bailing out.
            closeSsh(instance);
            return "[error-oozi] connect remote server fail!";
        }
        // Per-node working directory on the remote host; wiped and recreated on
        // every run. The sleeps pace the remote commands — presumably to let the
        // previous command settle before the next (TODO confirm necessity).
        String tmpPaths = "/tmp/"+jobNodeInfo.getUsername() + "/"+jobNodeInfo.getId();

        String deletPathCmd = "rm -rf " +tmpPaths+  " ";
        try {
            instance.execCmd(deletPathCmd);
        }catch (Exception e){
            // Best effort: a failed cleanup is logged but does not abort the run.
            log.error("[error] rm remote server file fail!error:{}",e.getMessage());
        }
        ThreadUtil.sleep(1000);

        String addPathCmd = "mkdir -p " +tmpPaths+  " ";
        try {
            instance.execCmd(addPathCmd);
        }catch (Exception e){
            log.error("[error] mkdir remote server file fail!error:{}",e.getMessage());
            // Close the connection before bailing out.
            closeSsh(instance);
            return "[error-oozi] mkdir remote server file fail!";
        }
        ThreadUtil.sleep(1000);
        String logName = "log_" + jobNodeInfo.getId() + ".log";
        // Upload the substituted script to the remote working directory.
        String fileName = "nodeShell_" + jobNodeInfo.getId() + ".sh";
        String filePath = tmpPaths +  "/" + fileName;
        try {
            instance.uploadFile(replaceShell,filePath);
        } catch (Exception e) {
            log.error("[error] upload shell to remote server fail!error:{}",e.getMessage());
            // Close the connection before bailing out.
            closeSsh(instance);
            return "[error-oozi] upload shell to remote server fail!";
        }
        ThreadUtil.sleep(1000);

        // On script failure the `|| echo` emits a timestamped [error-oozi] line
        // so the failure is visible in the captured output.
        String cmd = "cd "+tmpPaths+" &&  sh  "+filePath+" || echo \"`date +'%Y-%m-%d %H:%M:%S'` [error-oozi] running shell on remote host failed\"";

        // Run synchronously, collecting every output line into resultList.
        List<String> resultList = new ArrayList<>();
        try {
            // Callback that just buffers lines; IamDone() == false means the
            // command is never cut short by the callback.
            ExecuteCommandACallable<String> buffer = new ExecuteCommandACallable<String>() {
                @Override
                public boolean IamDone() {
                    return false;
                }

                @Override
                public ExecuteCommandACallable<String> appendBuffer(String content) {
                    resultList.add(content);
                    return null;
                }

                @Override
                public String endBuffer() {
                    return null;
                }
            };
            // Blocking call: returns when the remote command completes.
           ShellUtil.executeCommandWithAuth(cmd, jobNodeInfo.getHostUrl(),
                   jobNodeInfo.getPort(), jobNodeInfo.getUsername(), jobNodeInfo.getPassword(), buffer);
        } catch (Exception e) {
            log.error("执行shell文件异常！error:{}",e.getMessage());
            resultList.add("[error-oozi] running shell on remote host failed!" + e.getMessage());
        } finally {
            // Always release the SSH session.
            closeSsh(instance);
        }
        if(CollectionUtil.isNotEmpty(resultList)){
            StringBuilder sb = new StringBuilder();
            for (String s : resultList) {
                sb.append(s).append("\n");
            }
            return sb.toString();
        }
        return "";
    }

    /**
     * Loads a node via the mapper's custom query; straight delegation.
     *
     * @param jobNodeId node id
     * @return the node, or whatever the mapper returns when absent
     */
    @Override
    public JobNodeInfo getDataById(Integer jobNodeId) {
        return jobNodeInfoMapper.getDataById(jobNodeId);
    }

    /**
     * Reads an uploaded shell file line by line and delegates to
     * {@link #listRunSysparams(String, String)} for parameter substitution.
     *
     * @param file      uploaded shell file
     * @param jobNodeId node whose parameters are substituted
     * @return the substituted content, or null when the file yields no text
     */
    @Override
    public String listRunSysparams(MultipartFile file, String jobNodeId) {
        File changeFile = null;
        StringBuilder stringBuilder = new StringBuilder();
        try {
            changeFile = FileUtils.multipartFileToFile(file);
            // Bug fix: try-with-resources closes the reader on every path; the
            // original only closed it explicitly on success plus a finally.
            // NOTE(review): FileReader uses the platform default charset —
            // confirm uploaded shell files always match it.
            try (BufferedReader bufferedReader = new BufferedReader(new FileReader(changeFile))) {
                String temp;
                while ((temp = bufferedReader.readLine()) != null) {
                    stringBuilder.append(temp).append("\n");
                }
            }
        } catch (Exception e) {
            // Bug fix: dropped the redundant e.printStackTrace() alongside the log.
            log.error("转换文件异常！error:{}", e.getMessage(), e);
        } finally {
            // Bug fix: remove the temp file produced by multipartFileToFile;
            // the original leaked one file per call.
            if (null != changeFile && !changeFile.delete()) {
                log.warn("临时文件删除失败！path:{}", changeFile.getAbsolutePath());
            }
        }
        if (StringUtils.isNotEmpty(stringBuilder.toString())) {
            return listRunSysparams(stringBuilder.toString(), jobNodeId);
        }
        return null;
    }

    /**
     * Copies a node into another workflow menu: after extensive validation, the
     * pre-created copy node is re-pointed at the target workflow, its menu
     * relation is (re)created, and its configuration is saved. Runs in one
     * transaction — any failure rolls everything back.
     *
     * @param copyJobNodeInfoVO source node id, target menu, new name, and the
     *                          pre-created copy node to attach
     * @throws RRException on any validation failure (missing entities, wrong
     *                     menu type, duplicate name)
     * @throws Exception   propagated from saveJobNodeConfig
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void copySaveJobNodeConfig(CopyJobNodeInfoVO copyJobNodeInfoVO) throws Exception {
        // Validate the workflow menu and request parameters first.
        if(copyJobNodeInfoVO == null){
            throw new RRException(DataDevelopmentBizExceptionEnum.PARAM_ERROR.getMessage());
        }
        if(copyJobNodeInfoVO.getId() == null){
            throw new RRException("复制节点id为空！");
        }
        JobNodeInfo jobNodeInfo = jobNodeInfoMapper.selectById(copyJobNodeInfoVO.getId());
        if (null == jobNodeInfo) {
            throw new RRException("待复制的节点任务不存在!");
        }
        // Resolve the source node's existing menu relation (work_type 1).
        WorkMenuJobRel workMenuJobRelOlder = workMenuJobRelMapper.
                selectOne(new QueryWrapper<WorkMenuJobRel>().eq("work_type", 1).eq("work_flow_id", jobNodeInfo.getJobId()));
        if (null == workMenuJobRelOlder) {
            throw new RRException("待任务和默认作业流菜单关联不存在!");
        }

        if(copyJobNodeInfoVO.getWorkFlowId() == null){
            throw new RRException("菜单目录id为空！");
        }
        WorkMenu workMenu = iWorkMenuService.getById(copyJobNodeInfoVO.getWorkFlowId());
        if (null == workMenu) {
            throw new RRException("默认作业流不存在!");
        }
        if (WorkFlowMenuType.WORKFLOW_FIELD.getCode() != workMenu.getWorkType()) {
            throw new RRException("上级目录不是默认作业流类型!");
        }
        // The new node name is mandatory.
        if (StringUtils.isEmpty(copyJobNodeInfoVO.getName())) {
            throw new RRException("任务节点的名字必须传递!");
        }
        // Ensure the name is unique within the target workflow menu.
        WorkMenuJobRel workMenuJobRel = workMenuJobRelMapper.selectOne(new QueryWrapper<WorkMenuJobRel>().eq("work_type", 1).eq("work_menu_id", copyJobNodeInfoVO.getWorkFlowId()));
        if (null == workMenuJobRel) {
            throw new RRException("任务和默认作业流菜单关联不存在!");
        }
        JobNodeInfo jobNodeInfo1 = workMenuJobRelMapper.selectJobNodeInfoByMenuIdAndName(copyJobNodeInfoVO.getWorkFlowId().intValue(),copyJobNodeInfoVO.getName(),copyJobNodeInfoVO.getProcId());
        if (null != jobNodeInfo1) {
            throw new RRException("同一作业流菜单下的任务名称不能有重复!");
        }
        // The pre-created copy node must exist in the request.
        if(copyJobNodeInfoVO.getJobNodeInfo() == null){
            throw new RRException("节点配置为空!");
        }
        if(copyJobNodeInfoVO.getJobNodeInfo().getId() == null){
            throw new RRException("节点配置为空!");
        }
        // Persist the node->menu relation first.
        JobNodeInfo newJobNodeInfo = jobNodeInfoMapper.selectById(copyJobNodeInfoVO.getJobNodeInfo().getId());
        if(newJobNodeInfo == null){
            throw new RRException("数据错误，请重试!");
        }
        // Re-point the copy at the target workflow, mark it as a real (non
        // virtual) copy, and rename it.
        newJobNodeInfo.setJobId(workMenuJobRel.getWorkFlowId().intValue());
        newJobNodeInfo.setIsVirCopy(0);
        newJobNodeInfo.setName(copyJobNodeInfoVO.getName());
        jobNodeInfoMapper.updateById(newJobNodeInfo);

        // Replace any stale work_type-0 relation, then insert the new one
        // cloned from the target menu's relation.
        workMenuJobRelMapper.delete(new QueryWrapper<WorkMenuJobRel>().eq("work_type",0).eq("work_menu_id",workMenuJobRelOlder.getWorkMenuId()).eq("work_flow_id",newJobNodeInfo.getId()));
        WorkMenuJobRel workMenuJobRel1 = new WorkMenuJobRel();
        BeanUtils.copyProperties(workMenuJobRel, workMenuJobRel1);
        workMenuJobRel1.setId(null);
        workMenuJobRel1.setWorkFlowId(Long.valueOf(newJobNodeInfo.getId()));
        workMenuJobRel1.setWorkType(0);
        workMenuJobRelMapper.insert(workMenuJobRel1);

        // Finally persist the node's configuration under the new name.
        JobNodeInfo jobNodeInfo2 = copyJobNodeInfoVO.getJobNodeInfo();
        jobNodeInfo2.setName(copyJobNodeInfoVO.getName());
        saveJobNodeConfig(jobNodeInfo2);

    }

    /**
     * Returns the node's concurrency count via the mapper; straight delegation.
     *
     * @param jobNodeId node id
     * @return concurrency count as reported by the mapper
     */
    @Override
    public Long concurrentNum(String jobNodeId) {
        return jobNodeInfoMapper.getConcurrentNum(jobNodeId);
    }

    /**
     * Resolves the FTP file path(s) configured on an input node.
     * Errors are reported as "get ftp file error:..." entries in the returned list
     * rather than thrown, so callers can surface the reason directly.
     */
    @Override
    public List<String> filePaths(String jobNodeId){
        List<String> tmpLists = new ArrayList<>();
        if(StringUtils.isEmpty(jobNodeId)){
            tmpLists.add("get ftp file error:jobNodeId is null!");
            return tmpLists;
        }
        // Load the node-level configuration entries (job_type 0 = node config).
        List<JobNodeConf> jobNodeConfs = jobNodeConfMapper.selectList(new QueryWrapper<JobNodeConf>()
                .eq("job_type", 0)
                .eq("job_node_id", jobNodeId));
        if(CollectionUtil.isEmpty(jobNodeConfs)){
            tmpLists.add("get ftp file error:jobNodeConfs is null!");
            return tmpLists;
        }
        JobNodeConf connect = findNodeConf(jobNodeConfs, "input_connect_id");
        JobNodeConf ftpFileLocation = findNodeConf(jobNodeConfs, "ftpFileLocation");
        JobNodeConf ftpFileType = findNodeConf(jobNodeConfs, "ftpFileType");
        JobNodeConf ftpFileMatchType = findNodeConf(jobNodeConfs, "ftpFileMatchType");
        JobNodeConf ftpFileName = findNodeConf(jobNodeConfs, "ftpFileName");
        // Fixed: the old messages all said "jobNodeConfs is null" regardless of cause.
        if(connect == null || StringUtils.isEmpty(connect.getValue())){
            tmpLists.add("get ftp file error:input_connect_id is null!");
            return tmpLists;
        }
        ConfConnect confConnect = confConnectService.getById(connect.getValue());
        if(confConnect == null){
            tmpLists.add("get ftp file error:connect config is null!");
            return tmpLists;
        }
        if(ftpFileLocation == null){
            tmpLists.add("get ftp file error:ftpFileLocation is null!");
            return tmpLists;
        }
        List<String> lists = new ArrayList<>();
        log.info("开始解析ftp文件！jobNodeId:{}", jobNodeId);
        // Guard against a missing match-type entry (previously an NPE on getValue()).
        String matchType = ftpFileMatchType == null ? null : ftpFileMatchType.getValue();
        if(Objects.equals("2", matchType) || Objects.equals("3", matchType)){
            // 2 = regex match, 3 = dynamic-time match: both require a file-name rule.
            if(ftpFileName == null || StringUtils.isEmpty(ftpFileName.getValue())){
                tmpLists.add("get ftp file error:ftpFileName is null!");
                return tmpLists;
            }
            String port = confConnect.getPort();
            // Try the windows-style FTP first; on failure fall back to the linux client.
            WindowsFTP myFtp = new WindowsFTP();
            try {
                myFtp.connect(confConnect.getHost(), Integer.valueOf(port), confConnect.getUsername(), securityUtil.decrypt(confConnect.getPassword()));
                myFtp.initFileList(tmpLists, ftpFileLocation.getValue());
            }catch (Exception e){
                log.error("window ftp get file error:{}", e.getMessage());
                FTPListAllFiles f = new FTPListAllFiles();
                try {
                    f.login(confConnect.getHost(), Integer.valueOf(port), confConnect.getUsername(), securityUtil.decrypt(confConnect.getPassword()));
                    f.lists(tmpLists, ftpFileLocation.getValue());
                }catch (Exception e2){
                    // Fixed: report the final (linux) failure, not the windows one
                    // that was already logged above.
                    log.error("linux ftp get file error:{}", e2.getMessage(), e2);
                    tmpLists.add("get ftp file error:" + e2.getMessage());
                    return tmpLists;
                }finally {
                    f.disConnection();
                }
            }finally {
                myFtp.disconnect();
            }

            log.info("连接ftp数据源获取到的ftp文件名！jobNodeId:{},ftpList:{},ftpFileLocation:{}", jobNodeId, tmpLists, ftpFileLocation.getValue());
            if(CollectionUtil.isNotEmpty(tmpLists)){
                // Drop ".ok" marker files first.
                List<String> allFiles = tmpLists.stream().filter(o -> !o.endsWith(".ok")).collect(Collectors.toList());
                if(CollectionUtil.isNotEmpty(allFiles)){
                    // File suffix filter; default is ".json".
                    // Fixed: old condition used && and dereferenced a null ftpFileType.
                    String type = (ftpFileType == null || StringUtils.isEmpty(ftpFileType.getValue())) ? ".json" : "." + ftpFileType.getValue();
                    List<String> tmpAllLists = allFiles.stream().filter(o -> o.endsWith(type)).collect(Collectors.toList());
                    if(CollectionUtil.isNotEmpty(tmpAllLists)){
                        String regexOrTime = ftpFileName.getValue();
                        if(Objects.equals("2", matchType)){
                            // Regex rule: keep files whose full name matches the pattern.
                            Pattern p = Pattern.compile(regexOrTime);
                            for (String allFile : tmpAllLists) {
                                if (p.matcher(allFile).matches()) {
                                    lists.add(ftpFileLocation.getValue() + allFile);
                                }
                            }
                        }else{
                            // Dynamic-time rule: expand the expression, keep files containing it.
                            String fileNameTmp = listRunSysparams(regexOrTime, jobNodeId);
                            for (String s : tmpAllLists) {
                                if (s.contains(fileNameTmp)) {
                                    lists.add(ftpFileLocation.getValue() + s);
                                }
                            }
                        }
                    }
                }
            }
        }else{
            // Fixed file name: location (+ optional name) with the expected suffix appended.
            String data = ftpFileLocation.getValue();
            String fileName = ftpFileName == null ? null : ftpFileName.getValue();
            if(StringUtils.isNotEmpty(data)){
                if(StringUtils.isNotEmpty(fileName)){
                    data = data + fileName;
                }
                // Fixed: same &&/null-dereference bug as above.
                String type = (ftpFileType == null || StringUtils.isEmpty(ftpFileType.getValue())) ? ".json" : "." + ftpFileType.getValue();
                lists.add(data.endsWith(type) ? data : data + type);
            }else{
                lists.add("get ftp file error:ftpFileLocation is null!");
            }
        }
        return lists;
    }

    /** Finds the node configuration entry with the given key, or null when absent. */
    private JobNodeConf findNodeConf(List<JobNodeConf> confs, String key) {
        return confs.stream().filter(o -> Objects.equals(o.getKey(), key)).findAny().orElse(null);
    }

    private void closeSsh(ExecuteShellUtil instance) {
        // Best-effort close of the remote ssh session; failures are logged, never thrown.
        if (instance == null) {
            return;
        }
        try {
            instance.close();
        } catch (Exception ignored) {
            log.error("关闭远程服务器ssh连接异常!");
        }
    }

    private String warpShell(String replaceShell) {
        // Surround the user shell with a start echo and a trailing marker line so the
        // runner can detect completion via SHELL_FLAGE_END in the captured output.
        StringBuilder sb = new StringBuilder();
        sb.append("echo \"start.\"\n");
        sb.append(replaceShell).append("\n");
        sb.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] ")
          .append(DataSyncTaskConstant.SHELL_FLAGE_END)
          .append("\"");
        return sb.toString();
    }

    /**
     * Substitutes every supported placeholder in the given sql/shell text:
     * user-defined ${key} params, the built-in time placeholders, the dynamic
     * data-window placeholders, resource-file references and date expressions.
     */
    private String repalceAllSh(String sql, Map<String, String> sysParams, Long depid, Integer jobid, String taskid) {
        // User-defined ${key} parameters first; values may themselves be built-in tokens.
        if (MapUtil.isNotEmpty(sysParams)) {
            for (Map.Entry<String, String> entry : sysParams.entrySet()) {
                String placeholder = "${" + entry.getKey() + "}";
                if (sql.contains(placeholder)) {
                    sql = sql.replace(placeholder, wapper(entry.getValue(), depid, jobid, taskid));
                }
            }
        }

        // Built-in time placeholders: cycle time = now, biz date = yesterday.
        Date now = new Date();
        SimpleDateFormat cycFmt = new SimpleDateFormat("yyyyMMddHHmmss");
        SimpleDateFormat bizFmt = new SimpleDateFormat("yyyyMMdd");
        long yesterday = now.getTime() - 24 * 60 * 60 * 1000L;
        sql = sql.replace("${bdp.system.cyctime}", cycFmt.format(now));
        sql = sql.replace("${bdp.system.bizdate}", bizFmt.format(new Date(yesterday)));

        // Dynamic data-window placeholders come from the latest run-history record.
        if (sql.contains("${bdp.data.start.time}") || sql.contains("${bdp.data.end.time}")) {
            JobRunHistory jobRunHistory = jobRunHistoryMapper.selctListByJobIdOne(jobid);
            if (null == jobRunHistory) {
                log.error("替换数据动态参数异常！未查询到对应的配置数据。jobid：" + jobid);
            } else {
                sql = sql.replace("${bdp.data.start.time}", jobRunHistory.getCronStartTime());
                sql = sql.replace("${bdp.data.end.time}", jobRunHistory.getCronEndTime());
            }
        }

        // Resource-file references, then ${yyyy-?} / ${yyyy} date expressions.
        sql = repaceAllRescores(sql);
        return ParseDateTime.replaceDateTime(sql, DateUtil.now());
    }

    /**
     * Replaces every ${bdp.hdfs.file.<id>} token with the download URL of that resource.
     * Fixed: the dots in the pattern are now escaped (the old pattern treated '.' as
     * "any character"), and the prefix length is derived from the literal instead of
     * the magic constant 16.
     */
    private String repaceAllRescores(String sql) {
        final String prefix = "${bdp.hdfs.file.";
        String reg = "\\$\\{bdp\\.hdfs\\.file\\.[0-9]*\\}";
        Matcher matcher = Pattern.compile(reg).matcher(sql);
        while (matcher.find()) {
            String token = matcher.group();
            // Strip the prefix and the trailing '}' to obtain the resource id.
            String resId = token.substring(prefix.length(), token.length() - 1);
            RealtimeResourceEntity realtimeResourceEntity = realtimeResourceService.getById(resId);
            if (null != realtimeResourceEntity) {
                sql = sql.replace(token, dataDevelopmentConfig.getResPath()
                        + "/development/realtimeresource/download?resourceId="
                        + realtimeResourceEntity.getResourceId());
            }
        }
        return sql;
    }

    /**
     * Translates a built-in parameter token into its concrete value; anything else is
     * treated as a (possibly dynamic-date) expression and expanded.
     */
    private String wapper(String value, Long depid, Integer jobid, String taskid) {
        switch (value) {
            case "$cyctime":
            case "${bdp.system.cyctime}":
                // Cycle time: now, formatted to the second.
                return new SimpleDateFormat("yyyyMMddHHmmss").format(new Date());
            case "$gmtdate":
                return new SimpleDateFormat("yyyyMMdd").format(new Date());
            case "$bizdate":
            case "${bdp.system.bizdate}": {
                // Business date: yesterday.
                long yesterday = new Date().getTime() - 24 * 60 * 60 * 1000L;
                return new SimpleDateFormat("yyyyMMdd").format(new Date(yesterday));
            }
            case "$jobid":
                return jobid.toString();
            case "$taskid":
                return taskid;
            case "$depid":
                return null == depid ? "" : depid.toString();
            default:
                // Expand ${yyyy-?} or ${yyyy} style date expressions.
                return ParseDateTime.replaceDateTime(value, DateUtil.now());
        }
    }

    /**
     * Flattens the workflow-level conf value (a JSON array of flat objects) into the
     * given sysParams map. Fixed: a null JSON value previously caused an NPE via
     * {@code innerMap.get(s).toString()}; null values are now skipped.
     */
    private void ananlySysParam(Map<String, String> sysParams, JobNodeConf workflowConf) {
        if (null == workflowConf) {
            return;
        }
        JSONArray jsonArray = JSONArray.parseArray(workflowConf.getValue());
        if (CollectionUtil.isEmpty(jsonArray)) {
            return;
        }
        for (Object o : jsonArray) {
            if (null == o) {
                continue;
            }
            JSONObject jsonObject1 = JSONObject.parseObject(o.toString());
            if (null == jsonObject1) {
                continue;
            }
            Map<String, Object> innerMap = jsonObject1.getInnerMap();
            for (Map.Entry<String, Object> entry : innerMap.entrySet()) {
                Object v = entry.getValue();
                if (v != null) {
                    sysParams.put(entry.getKey(), v.toString());
                }
            }
        }
    }

    /**
     * Parses the node parameter list (a JSON array of {key,value} objects) into the
     * given sysParams map.
     */
    private void ananlyNodeParam(Map<String, String> sysParams, JobNodeConf jobNodeConf) {
        if (null == jobNodeConf) {
            return;
        }
        JSONArray jsonArray = JSONArray.parseArray(jobNodeConf.getValue());
        if (CollectionUtil.isEmpty(jsonArray)) {
            return;
        }
        for (Object element : jsonArray) {
            if (null == element) {
                continue;
            }
            JSONObject entry = JSONObject.parseObject(element.toString());
            if (null != entry) {
                sysParams.put(entry.getString("key"), entry.getString("value"));
            }
        }
    }

    /**
     * Builds the wrapper shell that launches a dependent workflow job and blocks until
     * it finishes. Any failure path exits with code 10 so the surrounding oozie action
     * is marked failed.
     *
     * @param procId      project id passed to the status endpoint
     * @param jobId       id of the workflow job to launch
     * @param jobConfUtil not referenced in this method -- presumably kept for signature
     *                    compatibility; TODO confirm with callers
     * @param oozieConfig supplies the REST base url of the job service
     * @return the generated shell script text
     */
    private String pingJobLayShellData(Integer procId,Integer jobId,JobConfUtil jobConfUtil, OozieConfig oozieConfig) {
        // Invoke the target job, then sleep/poll until it completes; only then let the
        // calling flow proceed.
        // First fetch the helper jars from HDFS when they are missing in the working dir
        // (exit 10 if the download fails).
        StringBuilder paramCmd = new StringBuilder(String.format("if [ ! -f 'data-development-job-1.0.jar' ];then\n" +
                "\thdfs dfs -get %s/data-development-job-1.0.jar .  || exit 10\n" +
                "fi\n" +
                "if [ ! -f 'joda-time-2.8.1.jar' ];then\n" +
                "\thdfs dfs -get %s/joda-time-2.8.1.jar .  || exit 10\n" +
                "fi\n", bdpJobConfig.getJoblib(), bdpJobConfig.getJoblib()));

        // Launch the job: retry the POST up to 1000 times (1s apart) until an oozieId
        // comes back; '-1' aborts. Then poll getJobStatus until the status leaves
        // 'running'; 'killed'/'failed'/'-1' abort with exit 10.
        paramCmd.append("jobId=").append(jobId).append("\n")
                .append("procId=").append(procId).append("\n")
                .append("flag1=").append(true).append("\n")
                // $1 is the oozie id of the dependency passed in by the caller script.
                .append("depOOzieId=$1").append("\n")
                .append("while ${flag1}\ndo\n\tfor((i=1;i<=1000;i++));\n\tdo\n\tsleep 1\n\t")
                .append("oozieId=`curl --connect-timeout 10 -X POST ").append(oozieConfig.getRest_url()).append("/syncRunWorkJob/${jobId}/${depOOzieId}`\n\t")
                .append("echo ${oozieId}\n\t")
                .append("if [[ ! -n \"${oozieId}\" ]]; then\n\t\t")
                .append("echo 'oozieId is null'\n\telse\n\t\tflag1=false\n\t\ti=1001\n\tfi\n\tdone\n\tflag1=false\ndone\nif [[ ${oozieId} == '-1' ]]; then\n\texit 10\nfi\n")
                .append("runningStatus='running'\nflag=true\nwhile ${flag}\ndo\n\tsleep 1\n\t")
                .append("result=`curl --connect-timeout ").append(dataDevelopmentConfig.getCurlconnectTime()).append(" --max-time ").append(dataDevelopmentConfig.getCurlMaxTime()).append(" -X GET ").append(oozieConfig.getRest_url()).append("/getJobStatus/${oozieId}/${procId}/${jobId}`\n\t")
                .append("echo ${result}\n\t")
                .append("if [[ ${result} == '-1' ]]; then\n\t\texit 10\n\tfi\n\tif [[ ${runningStatus} != ${result} ]]; then\n\t\tif [[ ${result} == 'killed' || ${result} == 'failed' ]]; then\n\t\t\texit 10\n\t\tfi\n\t\tflag=false\n\tfi\ndone");
        return paramCmd.toString();
    }

    /**
     * Deletes the given path from HDFS.
     * Fixed: a failed HdfsUtil construction previously left {@code hdfsUtil} null and
     * the following {@code delete()} threw an NPE (the null check came too late to
     * help); we now fail fast with a meaningful error, and always close the handle.
     *
     * @param dst HDFS path to delete
     * @throws IOException propagated from the HDFS delete/close calls
     */
    @Override
    public void deleteShell(String dst) throws IOException {
        HdfsUtil hdfsUtil;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
        } catch (Exception e) {
            log.error("创建HdfsUtil失败!", e);
            throw new RRException("无法连接HDFS,删除shell失败!");
        }
        try {
            hdfsUtil.delete(dst);
        } finally {
            hdfsUtil.close();
        }
    }

    /**
     * Fills unset spark resource options with the configured defaults and validates
     * user-supplied values (each must be a positive integer). The four copy-pasted
     * blocks of the original are deduplicated into one helper.
     *
     * @param jobNodeConfig configuration to normalize (mutated and returned)
     * @return the same instance with resource fields normalized
     */
    @Override
    public JobNodeConfig checkResourceConfig(JobNodeConfig jobNodeConfig) {
        jobNodeConfig.setResource_ec(defaultOrValidateResource(jobNodeConfig.getResource_ec(), sparkConfig.getExecutorCores()));
        jobNodeConfig.setResource_em(defaultOrValidateResource(jobNodeConfig.getResource_em(), sparkConfig.getExecutorMemory()));
        jobNodeConfig.setResource_dc(defaultOrValidateResource(jobNodeConfig.getResource_dc(), sparkConfig.getDriverCores()));
        jobNodeConfig.setResource_dm(defaultOrValidateResource(jobNodeConfig.getResource_dm(), sparkConfig.getDriverMemory()));
        return jobNodeConfig;
    }

    /** Returns the default when the value is blank; otherwise validates it is a positive integer. */
    private String defaultOrValidateResource(String value, String defaultValue) {
        if (null == value || "".equals(value)) {
            return defaultValue;
        }
        if (!RegexUtils.checkInt(value)) {
            throw new RRException("请输入正整数");
        }
        return value;
    }

    /**
     * Validates and persists a job node together with its configuration, uploads the
     * generated shell script to HDFS and, when dependency conditions were supplied,
     * regenerates the workflow routing XML. The whole operation is transactional and
     * rolls back on any exception.
     *
     * @param param node info carrying id, procId and the nested JobNodeConfig
     * @throws Exception when validation fails or any persistence/upload step errors
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void saveJobNodeConfig(JobNodeInfo param) throws Exception {
        if(null == param){
            throw new RRException("输入参数不存在！");
        }
        if(null == param.getId()){
            throw new RRException("节点数据不存在！");
        }
        JobNodeInfo jobNodeInfo = jobNodeInfoService.getById(param.getId());
        if (jobNodeInfo==null){
            throw new RRException("该任务节点不存在");
        }
        // Type and job id always come from the stored node, never from the request.
        param.setTypeId(jobNodeInfo.getTypeId());
        param.setJobId(jobNodeInfo.getJobId());
        if(null == param.getProcId()){
            throw new RRException("工程id不存在！");
        }
        if(null == param.getTypeId()){
            throw new RRException("任务类型必须得传！");
        }

        // Permission check: current user must be allowed to modify this project.
        iWorkMenuService.cheackProcId(param.getProcId(),ShiroUtils.getUserId(), true,jobNodeInfo.getCreatePer(),"admin",param.getIsImport());


        // The node must currently be in a state that allows modification.
        cheackJobNodeStatus(param.getId());

        // sql syntax (validation currently disabled, see commented-out block below)
        JobNodeConfig jobNodeConfig = param.getJobNodeConfig();
//        if (StringUtils.isNotEmpty(jobNodeConfig.getSql_statment())) {
//            String res = SqlParseUtil.verifySql(jobNodeConfig.getSql_statment(), JdbcConstants.HIVE);
//            if (StringUtils.isNotEmpty(res)) {
//                throw new RRException("sql不合法:"+res);
//            }
//        }
//        cheackExpress(jobNodeConfig);
        cheackRunParams(jobNodeConfig);
        // Clear output-table ids on dependency entries that must not carry them.
        clearOthersData(param.getJobNodeLayConfigDtoList());

        // Validate the node configuration; SAP sources do not need input_input_content.
        checkJobConfigInfo(jobNodeConfig, param.getTypeId());

        // Validate spark resource settings for input/output nodes; blanks get defaults.
        if (param.getTypeId().equals(JobType.INPUT.getCode())||param.getTypeId().equals(JobType.OUTPUT.getCode())){
            jobNodeConfig = checkResourceConfig(jobNodeConfig);
        }
        if (param.getTypeId().equals(JobType.ALGORITHM.getCode())) {
            // Algorithm tasks: check the mapped result fields for duplicates.
//            dataAssetsModelService.pdTableExistsXfieldAndPrimaryKey(jobNodeConfig.getInputPreTableId(), jobNodeConfig.getModelId());
            List<Map<String, Object>> pre_result_infos = jobNodeConfig.getPreResultInfos();
//            String filedName="";
//            for(Map<String,String> filedMap:pre_result_infos){
//                filedName = new ArrayList<>(filedMap.values()).get(0);
//               break;
//            }
//            dataAssetsModelService.checkIsPrimaryKey(jobNodeConfig.getOutputPreTableId(), filedName);
            dataAssetsModelService.checkSameFiled(pre_result_infos);
//            dataAssetsModelService.checkMappperResultFiledNum(jobNodeConfig.getOutputPreTableId(),pre_result_infos);

        }
        // Validate the SAP expression configuration.
        net.sf.json.JSONObject jsonObject = new net.sf.json.JSONObject();
        jsonObject =cheackSapConfig(jobNodeConfig,jsonObject);

        JobNodeConfigDto jobNodeConfigDto = new JobNodeConfigDto();
        BeanUtil.copyProperties(jobNodeConfig,jobNodeConfigDto);
        /* SAP / WebService field-conversion configuration. */
        if( null != jobNodeConfig.getInput_connect_type() &&
                (ConnectTypeEnum.Sap.getCode().equals(jobNodeConfig.getInput_connect_type())
                        ||ConnectTypeEnum.WebService.getCode().equals(jobNodeConfig.getInput_connect_type()))
                && jsonObject != null){
            if(ConnectTypeEnum.WebService.getCode().equals(jobNodeConfig.getInput_connect_type()) && Objects.equals(jobNodeConfig.getOlderRequestType(),3)){
                jobNodeConfigDto.setReqType(jobNodeConfig.getReqType());
                jobNodeConfigDto.setWebserviceText(jobNodeConfig.getWebserviceText());
                jobNodeConfigDto.setResType(jobNodeConfig.getResType());
                jobNodeConfigDto.setAnaTableNameUrl(jobNodeConfig.getAnaTableNameUrl());
            }else{
                jobNodeConfigDto.setSapConnectFieldInfos(jsonObject.toString());
            }
            param.setJobNodeConfig(jobNodeConfig);
        }
        /* ES configuration validation. */
        if(param.getTypeId().equals(JobType.INPUT.getCode()) && null != jobNodeConfig.getInput_connect_type() &&
                (ConnectTypeEnum.ElasticSearch.getCode().equals(jobNodeConfig.getInput_connect_type()))){
            cheackEsConf(jobNodeConfig.getEsSqlCondition());
        }
        /* FTP configuration validation. */
        if(param.getTypeId().equals(JobType.INPUT.getCode()) && null != jobNodeConfig.getInput_connect_type() &&
                (ConnectTypeEnum.FTP.getCode().equals(jobNodeConfig.getInput_connect_type()))){
            cheackFtpMatchType(jobNodeConfig);
        }
        /* API (JieKou) field-conversion configuration. */
        if( null != jobNodeConfig.getInput_connect_type() &&
                (ConnectTypeEnum.JieKou.getCode().equals(jobNodeConfig.getInput_connect_type()))){
            jsonObject = net.sf.json.JSONObject.fromObject(jobNodeConfig.getSapConnectFieldInfo());
            jobNodeConfigDto.setSapConnectFieldInfos(jsonObject.toString());
            param.setJobNodeConfig(jobNodeConfig);
        }
        /* Algorithm-development field-conversion configuration. */
        if( null != jobNodeConfig.getPreResultInfos() &&
                Objects.equals(param.getTypeId(), JobType.ALGORITHM.getCode())){
            jobNodeConfigDto.setPreResultInfos(JSON.toJSONString(jobNodeConfig.getPreResultInfos()));
            param.setJobNodeConfig(jobNodeConfig);
        }

        // Verify that drop statements (if any) only target tables allowed to be dropped.
        cheackDropHiveTables(param.getJobNodeConfig(), param.getTypeId());

        // If the current node's output table is referenced by downstream dependency
        // conditions, that output table setting must not be changed.
        cheackCurrentJobNodeCondition(param);

        // Save or update the dependency configuration --- does not affect the actual
        // node connections for now (configuration only).
        //changeJobNodeLinkAndConfig(param.getJobNodeLayConfigDtoList(),nodeInfo);
        List<JobNodeLayConfig> olderJobNodeLayConfigs = jobNodeLayConfigMapper.selectList(new QueryWrapper<JobNodeLayConfig>()
                .eq("current_node_id", jobNodeInfo.getId())
                .eq("job_id", jobNodeInfo.getJobId()));
        if(JobType.SQL.getCode() == param.getTypeId()
                || JobType.OUTPUT.getCode() == param.getTypeId()
        || JobType.SHELL.getCode() == param.getTypeId()){
            if(CollectionUtil.isNotEmpty(olderJobNodeLayConfigs)){
                saveNodeConfig(param.getJobNodeLayConfigDtoList(),jobNodeInfo.getId(),jobNodeInfo.getJobId(),jobNodeInfo.getName());
            }
        }

        // Save or update the job node record itself.
        jobNodeInfoService.saveOrUpdateJobNodeNew(param);
        JobNodeInfo nodeInfo = jobNodeInfoService.getById(param.getId());
        if(!Objects.equals(param.getTypeId(), JobType.END.getCode())){
            jobNodeConfigDto.setJobNodeId(nodeInfo.getId());
            jobNodeConfig.setJobNodeId(nodeInfo.getId());
            jobNodeConfig.setJobType(0);
            jobNodeConfigDto.setJobType(0);
            if(param.getTypeId().equals( JobType.INPUT.getCode())){
                 /* API nodes: persist the field-conversion configuration. */
                if(null != param.getJobNodeConfig().getInput_connect_id()
                        && (ConnectTypeEnum.JieKou.getCode().equals(param.getJobNodeConfig().getInput_connect_type()))){
                    /* Remove the previous sapConnectFieldInfos entry before re-adding it. */
                    jobNodeConfService.remove(new QueryWrapper<JobNodeConf>().eq("job_node_id", param.getId()).eq("job_type",0).eq("`key`","sapConnectFieldInfos"));
                    jobNodeConfService.upsertKVByJobNodeId(JobNodeConfig.jobNodeConfigDtoTolist(jobNodeConfigDto));
                }
                if(null != param.getJobNodeConfig().getInput_connect_id()
                        && !ConnectTypeEnum.Sap.getCode().equals(param.getJobNodeConfig().getInput_connect_type())
                        && !ConnectTypeEnum.WebService.getCode().equals(param.getJobNodeConfig().getInput_connect_type())){
                    //jobNodeConfService.remove(new QueryWrapper<JobNodeConf>().eq("job_node_id", param.getId()).eq("job_type",0));
                    jobNodeConfService.upsertKVByJobNodeId(JobNodeConfig.jobNodeConfigTolist(jobNodeConfig));
                }else if(null != param.getJobNodeConfig().getInput_connect_id()
                        && (ConnectTypeEnum.Sap.getCode().equals(param.getJobNodeConfig().getInput_connect_type())
                        ||ConnectTypeEnum.WebService.getCode().equals(param.getJobNodeConfig().getInput_connect_type()))){
                    jobNodeConfService.upsertKVByJobNodeId(JobNodeConfig.jobNodeConfigDtoTolist(jobNodeConfigDto));
                }
            }else if(Objects.equals(param.getTypeId(), JobType.ALGORITHM.getCode())){
                // Algorithm tasks additionally bind the model-task relation.
                BdpModelTask bdpModelTask = new BdpModelTask();
                bdpModelTask.setModelId(jobNodeConfig.getModelId());
                bdpModelTask.setTaskId(jobNodeConfig.getJobNodeId());
                jobNodeConfService.saveOrUpdateModelTask(bdpModelTask);
                jobNodeConfService.upsertKVByJobNodeId(JobNodeConfig.jobNodeConfigDtoTolist(jobNodeConfigDto));
            }else {
                jobNodeConfService.upsertKVByJobNodeId(JobNodeConfig.jobNodeConfigTolist(jobNodeConfig));
            }
        }

        // Load the job (workflow) that owns this node.
        JobInfo jobInfo = jobInfoMapper.selectById(nodeInfo.getJobId());
        if(null == jobInfo){
            throw new RRException("job工作流不存在！jobId:"+nodeInfo.getJobId());
        }
        List<WorkMenuJobRel> workMenuJobRels = workMenuJobRelMapper.selectList(new QueryWrapper<WorkMenuJobRel>().eq("work_flow_id", jobInfo.getId()).eq("work_type", 1));
        if(CollectionUtil.isNotEmpty(workMenuJobRels)){
            // When the job is attached to a work menu, merge menu-level system params in.
            if(null != param.getJobNodeConfig()){
                inintSysParam(workMenuJobRels.get(0).getWorkMenuId().intValue(),param);
            }
        }

        // Persist the generated shell to HDFS.
        Integer jobId = nodeInfo.getJobId();
        Integer nodeId = nodeInfo.getId();
        String nodeName = JobType.ObjOf(param.getTypeId()).getEnName();
        String dst =  "workflow-app/" + WorkFlowType.WORKFLOW.getCode() + "/"+jobInfo.getProcId() + "/" + jobId.toString() + "/" + nodeName +"_"+  nodeId + ".sh";
        param.setJobId(jobId);
        jobNodeInfoService.uploadShell(param,dst, oozieConfig);

        // Regenerate the workFlow's virtual routing nodes when dependency configs changed.
        if(CollectionUtil.isNotEmpty(param.getJobNodeLayConfigDtoList())){
            for (JobNodeLayConfigDto jobNodeLayConfigDto : param.getJobNodeLayConfigDtoList()) {
                List<JobNodeLayConfig> filterOne = olderJobNodeLayConfigs.stream().filter(olderJobNodeLayConfig -> Objects.equals(olderJobNodeLayConfig.getId(), jobNodeLayConfigDto.getId())).collect(Collectors.toList());
                if(CollectionUtil.isEmpty(filterOne)){
                    throw new RRException("配置的依赖条件有误!");
                }
            }
            // Something changed: rebuild workFlow.xml for the owning job.
            iWorkMenuService.updateRouteWorkFlowXml(jobNodeInfo.getJobId());
        }
    }

    /**
     * Validates the FTP input settings: a file location is always required, and the
     * file-name rule is mandatory for match types 2 (regex) and 3 (dynamic time).
     */
    private void cheackFtpMatchType(JobNodeConfig jobNodeConfig) {
        if (jobNodeConfig == null) {
            throw new RRException("ftp参数配置为空！");
        }
        if (StringUtils.isEmpty(jobNodeConfig.getFtpFileLocation())) {
            throw new RRException("ftp文件路径未配置！");
        }
        boolean needsNameRule = Objects.equals(jobNodeConfig.getFtpFileMatchType(), 2)
                || Objects.equals(jobNodeConfig.getFtpFileMatchType(), 3);
        if (needsNameRule && StringUtils.isEmpty(jobNodeConfig.getFtpFileName())) {
            throw new RRException("ftp文件名规则未配置！");
        }
    }

    /** Ensures the ES search condition, when present, parses as JSON (tabs tolerated). */
    private void cheackEsConf(String input_input_content) {
        if (StringUtils.isEmpty(input_input_content)) {
            return;
        }
        String normalized = input_input_content.replace("\t", " ");
        try {
            JSONObject.parseObject(normalized);
        } catch (Exception e) {
            log.error("json解析异常！error:{}", e.getMessage());
            throw new RRException("es的检索条件必须是json格式");
        }
    }

    /** Blanks the output-table id on every dependency entry whose configType is 0. */
    private void clearOthersData(List<JobNodeLayConfigDto> jobNodeLayConfigDtoList) {
        if (CollectionUtil.isEmpty(jobNodeLayConfigDtoList)) {
            return;
        }
        jobNodeLayConfigDtoList.stream()
                .filter(dto -> Objects.equals(0, dto.getConfigType()))
                .forEach(dto -> dto.setOutputTableId(null));
    }

    /** Rejects configurations where two run parameters share the same key. */
    private void cheackRunParams(JobNodeConfig jobNodeConfig) {
        if (null == jobNodeConfig) {
            return;
        }
        List<JSONObject> nodeParam = jobNodeConfig.getNodeParam();
        if (CollectionUtil.isEmpty(nodeParam)) {
            return;
        }
        Set<String> keys = new HashSet<>();
        for (JSONObject jsonObject : nodeParam) {
            keys.add(jsonObject.getString("key"));
        }
        // Fewer distinct keys than entries means at least one duplicate.
        if (keys.size() != nodeParam.size()) {
            throw new RRException("存在重名的运行参数,请修改!");
        }
    }

    /**
     * Copies the menu-level "sysParam" configuration (job_type 1) onto the node's
     * JobNodeConfig so later placeholder substitution can use it.
     */
    private void inintSysParam(Integer nodeId, JobNodeInfo param) {
        List<JobNodeConf> jobNodeConfs = jobNodeConfMapper.selectList(new QueryWrapper<JobNodeConf>()
                .eq("job_type", 1)
                .eq("job_node_id", nodeId)
                .eq("`key`", "sysParam"));
        if (CollectionUtil.isEmpty(jobNodeConfs)) {
            return;
        }
        for (JobNodeConf conf : jobNodeConfs) {
            // Defensive re-check of the key, then parse the stored JSON array.
            if (!"sysParam".equals(conf.getKey()) || null == conf.getValue()) {
                continue;
            }
            JSONArray jsonArray = JSONArray.parseArray(conf.getValue());
            if (CollectionUtil.isEmpty(jsonArray)) {
                continue;
            }
            List<Map<String, Object>> llDatas = new ArrayList<>();
            for (Object o : jsonArray) {
                if (null != o) {
                    llDatas.add(JSONObject.parseObject(o.toString()).getInnerMap());
                }
            }
            if (null != param.getJobNodeConfig()) {
                param.getJobNodeConfig().setSysParam(llDatas);
            }
        }
    }

    /***
     * Checks whether the node's sql contains drop statements, and whether the tables
     * being dropped are registered output/warehouse tables.
     * @param jobNodeConfig node configuration holding the sql text
     * @param typeId node type; only SQL and OUTPUT nodes are inspected
     * @return ids of the warehouse tables the sql would drop
     */
    private Set<Integer> cheackDropHiveTables(JobNodeConfig jobNodeConfig, Integer typeId) {
        Set<Integer> tableIds = new HashSet<>();
        String querySql = "";
        if (Objects.equals(JobType.SQL.getCode(), typeId)) {
            // SQL compute node: the statement itself.
            if (null == jobNodeConfig || StringUtils.isEmpty(jobNodeConfig.getSql_statment())) {
                throw new RRException("sql语句不存在!");
            }
            querySql = jobNodeConfig.getSql_statment();
        }
        if (Objects.equals(JobType.OUTPUT.getCode(), typeId)) {
            // Table-output node: the sql lives in the input content field.
            if (null == jobNodeConfig || StringUtils.isEmpty(jobNodeConfig.getInput_input_content())) {
                throw new RRException("sql语句不存在!");
            }
            querySql = jobNodeConfig.getInput_input_content();
        }
        Set<String> dropTableList = hiveSqlParse(querySql);
        if (CollectionUtil.isNotEmpty(dropTableList)) {
            // Compare against every table any task writes to.
            Set<Integer> allSetList = listAllOutPutTables();
            cheackTables(dropTableList, tableIds, allSetList);
        }
        return tableIds;
    }

    /**
     * Collects the ids of every warehouse table currently referenced as an
     * output by any job node, gathered from three sources: SAP output table
     * names, warehouse-table settings, and explicit output-table id lists.
     *
     * @return set of referenced table ids (never null, possibly empty)
     */
    private Set<Integer> listAllOutPutTables() {
        Set<Integer> allSetList = new HashSet<>();
        // 1) SAP output warehouse tables; values are bracketed list strings,
        //    the ne() filters skip the empty nested-list encodings
        List<JobNodeConf> list1 = jobNodeConfService.list(new QueryWrapper<JobNodeConf>().eq("`key`", "sap_output_table_name").ne("`value`","[]").ne("`value`","[[]]").ne("`value`","[[[]]]"));
        if(CollectionUtil.isNotEmpty(list1)){
            JobNodeConfigDto jobNodeConfigDto = jobNodeConfService.getJobNodeConf(list1);
            List<String> sap_output_table_name_older = jobNodeConfigDto.getSap_output_table_name();
            if(null != sap_output_table_name_older && sap_output_table_name_older.size() != 0){
                for (String sapName : sap_output_table_name_older) {
                    if(!Objects.equals("[]",sapName) && StringUtils.isNotEmpty(sapName)){
                        // strip the surrounding brackets, then split the
                        // comma-separated "db.table" entries
                        String sapNameSub = sapName.substring(1, sapName.length() - 1);
                        String[] sapArray = sapNameSub.split(",");
                        List<String> sapList = Arrays.asList(sapArray);
                        for (String s : sapList) {
                            if (!s.trim().equals("")) {
                                String[] split = s.trim().split("\\.");
                                if(null != split && split.length >= 2){
                                    TableInfo tableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("db_name", split[0]).eq("table_name", split[1]));
                                    if(null != tableInfo){
                                        allSetList.add(tableInfo.getId());
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        // 2) warehouse tables referenced via db-name/table-name settings
        List<TableInfo> list2 = jobNodeConfMapper.selectByDbNameAndTableName();
        if(CollectionUtil.isNotEmpty(list2)){
            for (TableInfo tableInfo : list2) {
                // re-resolve by name to obtain the canonical table id
                TableInfo tableInfo1 = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("db_name", tableInfo.getDbName()).eq("table_name", tableInfo.getTableName()));
                if(null != tableInfo1){
                    allSetList.add(tableInfo1.getId());
                }
            }
        }
        // 3) explicitly configured output-table id lists (JSON arrays of ids)
        List<JobNodeConf> jobOutPutTableIds = jobNodeConfService.list(new QueryWrapper<JobNodeConf>().eq("`key`", "jobOutPutTableIds").ne("`value`","[]"));
        if(CollectionUtil.isNotEmpty(jobOutPutTableIds)){
            for (JobNodeConf jobOutPutTable : jobOutPutTableIds) {
                if(null != jobOutPutTable && StringUtils.isNotEmpty(jobOutPutTable.getValue())){
                    JSONArray jsonArray = JSONArray.parseArray(jobOutPutTable.getValue());
                    if(CollectionUtil.isNotEmpty(jsonArray)){
                        for (Object id : jsonArray) {
                            // skip stray nested-empty-list entries
                            if(null != id && !Objects.equals("[]",id.toString().trim())){
                                allSetList.add(Integer.valueOf(id.toString().trim()));
                            }
                        }
                    }
                }
            }
        }
        return allSetList;
    }

    /**
     * Validates each dropped table name: it must be qualified as db.table,
     * must exist in the warehouse, and must not be referenced as an output
     * by any job node. Ids of valid tables are accumulated into tableIds.
     */
    private void cheackTables(Set<String> dropTableList,Set<Integer> tableIds,Set<Integer> allSetList) {
        if (CollectionUtil.isEmpty(dropTableList)) {
            return;
        }
        for (String fullName : dropTableList) {
            String[] parts = fullName.split("\\.");
            if (parts.length < 2) {
                throw new RRException("sql语句中操作的表，请使用库名.表名!");
            }
            TableInfo tableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("db_name", parts[0]).eq("table_name", parts[1]));
            if (null == tableInfo) {
                throw new RRException("sql语句中操作的仓库表不存在!" + parts[0] + "." + parts[1]);
            }
            // a table still referenced by a job output must not be dropped
            if (allSetList.contains(tableInfo.getId())) {
                throw new RRException(DataDevelopmentBizExceptionEnum.TABLE_DEPEND_PROC.getMessage());
            }
            tableIds.add(tableInfo.getId());
        }
    }


    /**
     * Parses a (possibly multi-statement) Hive SQL string and collects the
     * names of the tables it drops.
     *
     * @param input_input_content semicolon-separated Hive SQL text
     * @return set of dropped table names (never null, possibly empty)
     * @throws RRException when a statement contains the malformed
     *         "droptable" (no space) syntax that the rewriter cannot repair
     */
    private Set<String> hiveSqlParse(String input_input_content) {
        Set<String> dropTableList = new HashSet<>();
        if (StringUtils.isNotEmpty(input_input_content)) {
            // statements are separated by semicolons
            String[] statements = input_input_content.split(";");
            for (String s : statements) {
                HiveLineageInfo lep = new HiveLineageInfo();
                try {
                    lep.getLineageInfo(s);
                    if (CollectionUtil.isNotEmpty(lep.getDropTableList())) {
                        dropTableList.addAll(lep.getDropTableList());
                    }
                } catch (Exception | Error e) {
                    // Hive's parser throws Errors as well as Exceptions on bad SQL
                    log.error("解析sql语句异常！error:{}", e.getMessage());
                    // fall back: detect the malformed "droptable" (missing space) syntax
                    if (StringUtils.isNotEmpty(s) && s.trim().toLowerCase().contains("droptable")) {
                        // FIX: replacement is now case-insensitive ("(?i)") to match
                        // the lower-cased contains() guard above; previously a
                        // mixed-case "DropTable" passed the guard but was never
                        // masked, so the error below was skipped.
                        String masked = s.replaceAll("(?i)droptable", "aaaaaaaaa");
                        String rewritten = null;
                        try {
                            rewritten = lep.changeHiveSql(masked);
                        } catch (Exception e1) {
                            log.error("解析sql语句异常！error:{}", e1.getMessage());
                        }
                        // if the rewriter re-introduced "droptable", the SQL is unfixable
                        if (StringUtils.isNotEmpty(rewritten)
                                && rewritten.trim().toLowerCase().contains("droptable")) {
                            throw new RRException("sql语句中含有drop table的错误语法!");
                        }
                    }
                }
            }
        }
        return dropTableList;
    }

    /**
     * Re-validates every downstream dependency that routes on the output
     * table of the node being saved.
     */
    private void cheackCurrentJobNodeCondition(JobNodeInfo param) {
        if (param == null) {
            return;
        }
        // downstream dependency rows listing this node as their upstream
        List<JobNodeLayConfigDto> downstream = jobNodeLayConfigMapper.selectDtoListByUpJobNodeId(param.getId(), param.getJobId());
        if (CollectionUtil.isEmpty(downstream)) {
            return;
        }
        for (JobNodeLayConfigDto dto : downstream) {
            Integer outputTableId = dto.getOutputTableId();
            if (outputTableId != null) {
                cheackOuptJobNodeIsConfig(outputTableId, param.getId(), param.getJobNodeConfig());
            }
        }
    }

    /**
     * Verifies that a node's output-table configuration was not changed while
     * a downstream node still routes on that output table.
     *
     * @param outputTableId table id the downstream dependency is bound to
     * @param jobNodeId     id of the node whose config is being saved
     * @param jobNodeConfig the new configuration being saved
     * @throws RRException when the bound output table would no longer be produced
     */
    private void cheackOuptJobNodeIsConfig(Integer outputTableId, Integer jobNodeId, JobNodeConfig jobNodeConfig) {
        JobNodeInfo jobNodeInfo = jobNodeInfoMapper.selectById(jobNodeId);
        TableInfo tableInfo = tableInfoService.getById(outputTableId);
        if (null == tableInfo) {
            throw new RRException("下游配置的依赖条件对应的输出表不存在！");
        }
        if (null == jobNodeInfo) {
            return;
        }
        if (Objects.equals(1, jobNodeInfo.getTypeId()) && null != jobNodeConfig) {
            // data-sync node
            if (ConnectTypeEnum.Sap.getCode().toString().equals(jobNodeConfig.getInput_input_type())) {
                // SAP source: outputs are configured as "db.table" strings
                List<Integer> listOutTableIds = new ArrayList<>();
                List<String> sapOutputNames = jobNodeConfig.getSap_output_table_name();
                if (CollectionUtil.isNotEmpty(sapOutputNames)) {
                    for (String s : sapOutputNames) {
                        String[] split = s.split("\\.");
                        if (split.length >= 2) {
                            TableInfo one = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("db_name", split[0]).eq("table_name", split[1]));
                            if (null != one) {
                                listOutTableIds.add(one.getId());
                            }
                        }
                    }
                }
                if (!listOutTableIds.contains(outputTableId)) {
                    throw new RRException("输出表设置不能修改，其下游有配置对应的输出表："+tableInfo.getTableName()+"！");
                }
            } else {
                // non-SAP source: resolve the single configured output table
                TableInfo newTableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("db_name", jobNodeConfig.getOutput_db_name()).eq("table_name", jobNodeConfig.getOutput_table_name()));
                if (null == newTableInfo || !Objects.equals(outputTableId, newTableInfo.getId())) {
                    throw new RRException("输出表设置不能修改，其下游有配置对应的输出表："+tableInfo.getTableName()+"！");
                }
            }
        }

        if (Objects.equals(2, jobNodeInfo.getTypeId())) {
            // SQL-computation node
            // FIX: guard against a null jobNodeConfig — the original dereferenced
            // it unconditionally here although the typeId==1 branch null-checked
            // it; a null config now fails the dependency check cleanly instead
            // of throwing an NPE.
            if (null == jobNodeConfig) {
                throw new RRException("输出表设置不能修改，其下游有配置对应的输出表："+tableInfo.getTableName()+"！");
            }
            List<String> listOutTableIds = new ArrayList<>();
            List<String> jobOutPutTableIds1 = jobNodeConfig.getJobOutPutTableIds();
            if (CollectionUtil.isNotEmpty(jobOutPutTableIds1)) {
                for (String s : jobOutPutTableIds1) {
                    listOutTableIds.add(s.trim());
                }
            }
            if (!listOutTableIds.contains(outputTableId.toString())) {
                throw new RRException("输出表设置不能修改，其下游有配置对应的输出表："+tableInfo.getTableName()+"！");
            }
        }
    }


    /**
     * Checks whether a node's configuration may currently be modified.
     * Rules:
     *  1) if a workflow-dependency referencing this node's job is enabled, deny;
     *  2) if the job itself is running, deny;
     *  3) a node not placed on any canvas (no x/y position) is always editable.
     *
     * @param jobNodeId id of the node to check
     * @throws RRException when modification is not allowed
     */
    private void cheackJobNodeStatus(Integer jobNodeId) {
        JobNodeInfo jobNodeInfo = jobNodeInfoMapper.selectById(jobNodeId);
        // FIX: the original tested the jobNodeId parameter for null here instead
        // of the fetched row, so a missing node NPE'd on getPosY() below.
        if (null == jobNodeInfo) {
            throw new RRException("节点不存在！");
        }
        if (null == jobNodeInfo.getPosY() && null == jobNodeInfo.getPosX()) {
            // node is not placed on any workflow canvas -> free to modify
            return;
        }
        JobInfo jobInfo = jobInfoMapper.selectById(jobNodeInfo.getJobId());
        if (null == jobInfo) {
            throw new RRException("节点对应的任务不存在！");
        }
        if (null != jobInfo.getLastRunState() && LastRunState.RUNNING.getCode() == jobInfo.getLastRunState()) {
            throw new RRException("节点对应的任务正在运行中,请稍后重试！");
        }
        // workflow-dependency relation (work_type 2) referencing this job
        WorkMenuJobRel workMenuJobRel = workMenuJobRelMapper.selectOne(new QueryWrapper<WorkMenuJobRel>().eq("work_type", 2).eq("work_flow_id", jobInfo.getId()));
        if (null != workMenuJobRel) {
            WorkFlowDep workFlowDep = workFlowDepMapper.selectOne(new QueryWrapper<WorkFlowDep>().eq("work_menu_id", workMenuJobRel.getWorkMenuId()));
            if (null != workFlowDep && Objects.equals(workFlowDep.getJobLayStatus(), 1)) {
                throw new RRException("节点对应的作业流对应的作业流依赖已启用,不能修改节点配置信息！");
            }
        }
    }

    /***
     * Validates and persists the dependency (routing) configuration of a node.
     * Rejects configurations that oozie cannot express: multi-parent routing,
     * failure routes with downstream nodes, mixed failure/other routes into
     * the same node, and multiple failure routes from the same parents.
     *
     * @param jobNodeLayConfigDtoList dependency rows submitted by the client
     * @param jobNodeId               id of the node being configured
     * @param jobId                   id of the job the node belongs to
     * @param name                    display name of the node (used in messages)
     */
    private void saveNodeConfig(List<JobNodeLayConfigDto> jobNodeLayConfigDtoList, Integer jobNodeId, Integer jobId, String name) {
        List<JobNodeLayConfig> jobNodeLayConfigs = jobNodeLayConfigMapper.selectList(new QueryWrapper<JobNodeLayConfig>()
                .eq("current_node_id", jobNodeId)
                .eq("job_id", jobId));
        Set<Integer> ids = new HashSet<>();
        if(CollectionUtil.isNotEmpty(jobNodeLayConfigs)){
            jobNodeLayConfigs.forEach(jobNodeLayConfig -> {
                ids.add(jobNodeLayConfig.getId());
            });
            // the submitted rows must exactly match the stored rows by id
            if(CollectionUtil.isEmpty(jobNodeLayConfigDtoList)){
                throw new RRException("依赖配置参数未传递，请重新打开该任务，再进行保存操作！");
            }
            jobNodeLayConfigDtoList.forEach(jobNodeLayConfigDto -> {
                if(!ids.contains(jobNodeLayConfigDto.getId())){
                    throw new RRException("依赖配置参数不对，请重新打开该任务，再进行保存操作！");
                }
            });
            // special case: multiple parents routing into this node via an output
            // table condition is not supported (oozie switch/case limitation)
            if(jobNodeLayConfigDtoList.size() >= 2){
                for (JobNodeLayConfigDto jobNodeLayConfigDto : jobNodeLayConfigDtoList) {
                    if(null != jobNodeLayConfigDto.getOutputTableId()){
                        throw new RRException("多父节点路由,暂时不支持！");
                    }
                }
            }
            // failure routes (STATUS route with condition value "1")
            long count = jobNodeLayConfigDtoList.stream().filter(o -> (Objects.equals(o.getConfigType(), RouteTypeEnum.STATUS.getId()) && Objects.equals(o.getConditionValue(), "1"))).count();
            if(count > 0){
                // a failure-routed node must have no downstream nodes of its own
                List<JobNodeLayConfig> jobNodeLayConfigs11 = jobNodeLayConfigMapper.selectList(new QueryWrapper<JobNodeLayConfig>()
                        .eq("up_node_id", jobNodeId));
                if(CollectionUtil.isNotEmpty(jobNodeLayConfigs11)){
                    Set<Integer> ids1 = jobNodeLayConfigs11.stream().map(JobNodeLayConfig::getCurrentNodeId).collect(Collectors.toSet());
                    if(CollectionUtil.isNotEmpty(ids1)){
                        List<JobNodeInfo> jobNodeInfos = jobNodeInfoMapper.selectList(new QueryWrapper<JobNodeInfo>().in("id", ids1));
                        if(CollectionUtil.isNotEmpty(jobNodeInfos)){
                            StringBuilder stringBuilder = new StringBuilder();
                            stringBuilder.append("失败路由节点，暂时不支持有下游依赖节点");
                            for (JobNodeInfo jobNodeInfo : jobNodeInfos) {
                                stringBuilder.append(",").append(jobNodeInfo.getName());
                            }
                            throw new RRException(stringBuilder.toString());
                        }
                    }
                }

                // besides failure routes, no other route type (condition/success)
                // may target this node at the same time (not supported)
                Set<JobNodeLayConfigDto> otherErrList = new HashSet<>();
                if(CollectionUtil.isNotEmpty(jobNodeLayConfigDtoList)){
                    jobNodeLayConfigDtoList.forEach(o -> {
                        if(!Objects.equals(o.getConfigType(), RouteTypeEnum.STATUS.getId())
                                || (Objects.equals(o.getConfigType(), RouteTypeEnum.STATUS.getId()) && !Objects.equals(o.getConditionValue(), "1"))){
                            otherErrList.add(o);
                        }
                    });
                }
                if(CollectionUtil.isNotEmpty(otherErrList)){
                    throw new RRException("有配置失败路由,暂时不支持失败路由和其它路由方式(条件、成功)路由到当前节点：【"+name+"】,请改变连线或者修改路由条件判断！");
                }

                // each set of parents may have at most one failure route in total
                Set<Integer> upNodes = jobNodeLayConfigs.stream().map(JobNodeLayConfig::getUpNodeId).collect(Collectors.toSet());
                List<JobNodeLayConfig> jobNodeLayConfigs1 = jobNodeLayConfigMapper.selectList(new QueryWrapper<JobNodeLayConfig>()
                        .in("up_node_id", upNodes));
                if(CollectionUtil.isNotEmpty(jobNodeLayConfigs1)){
                    Set<Integer> errorNodes = jobNodeLayConfigs1.stream().filter(o -> (Objects.equals(o.getConfigType(), RouteTypeEnum.STATUS.getId()) && Objects.equals(o.getConditionValue(), "1")))
                            .map(JobNodeLayConfig :: getCurrentNodeId).collect(Collectors.toSet());
                    if(CollectionUtil.isNotEmpty(errorNodes)){
                        // the current node itself is allowed to hold the failure route
                        errorNodes.remove(jobNodeId);
                        if(CollectionUtil.isNotEmpty(errorNodes)){
                            List<JobNodeInfo> jobNodeInfos = jobNodeInfoMapper.selectList(new QueryWrapper<JobNodeInfo>().in("id", errorNodes));
                            if(CollectionUtil.isNotEmpty(jobNodeInfos)){
                                StringBuilder stringBuilder = new StringBuilder();
                                stringBuilder.append("当前节点的父节点存在多个失败的路由条件，暂时不支持");
                                for (JobNodeInfo jobNodeInfo : jobNodeInfos) {
                                    stringBuilder.append(",").append(jobNodeInfo.getName());
                                }
                                throw new RRException(stringBuilder.toString());
                            }
                        }
                    }
                }
            }
        }else {
            // nothing stored for this node, so nothing may be submitted either
            if(CollectionUtil.isNotEmpty(jobNodeLayConfigDtoList)){
                throw new RRException("依赖配置参数不对，请重新打开该任务，再进行保存操作！");
            }
        }
        if(CollectionUtil.isNotEmpty(jobNodeLayConfigDtoList)){
            jobNodeLayConfigDtoList.forEach(o -> {
                o.setConditionNamme(RouteTypeEnum.ObjOf(o.getConfigType()).getNameDes());
            });
            // persist the validated configuration
            jobNodeLayConfigMapper.batchUpdate(jobNodeLayConfigDtoList);
        }

    }

    /***
     * Replaces the stored dependency configuration of a node: removes the old
     * dependency rows and canvas links, then inserts the submitted rows.
     *
     * @param jobNodeLayConfigDtoList new dependency rows (may be empty)
     * @param nodeInfo                the node the dependencies belong to
     */
    private void changeJobNodeLinkAndConfig(List<JobNodeLayConfigDto> jobNodeLayConfigDtoList, JobNodeInfo nodeInfo) {
        // drop the existing dependency rows for this node
        jobNodeLayConfigMapper.delete(new QueryWrapper<JobNodeLayConfig>()
                .eq("current_node_id", nodeInfo.getId())
                .eq("job_id", nodeInfo.getJobId()));
        // the node's menu relation is needed to locate its canvas links
        List<WorkMenuJobRel> rels = workMenuJobRelMapper.selectList(new QueryWrapper<WorkMenuJobRel>().eq("work_flow_id", nodeInfo.getId()).eq("work_type", 0));
        if (CollectionUtil.isEmpty(rels)) {
            throw new RRException("节点对应的菜单不存在！id:" + nodeInfo.getId());
        }
        jobLinkMapper.delete(new QueryWrapper<JobLink>().eq("menu_id", rels.get(0).getWorkMenuId())
                .eq("linke_type", 0).eq("dst_node_id", nodeInfo.getId()));
        if (CollectionUtil.isEmpty(jobNodeLayConfigDtoList)) {
            return;
        }
        // persist the new rows (canvas may not yet show a node for them)
        List<JobNodeLayConfig> rows = PojoUtils.listConvert(JobNodeLayConfig.class, jobNodeLayConfigDtoList);
        for (JobNodeLayConfig row : rows) {
            // duty user defaults to the node's creator
            row.setDutyUser(nodeInfo.getCreatePer());
        }
        jobNodeLayConfigMapper.batchInsert(rows);
    }

    /**
     * Renames an existing job node, stamping the modifier and modification time.
     * No-op when the parameter or its id is missing.
     */
    @Override
    public void saveOrUpdateJobNodeNew(JobNodeInfo param) {
        if (param == null || param.getId() == null) {
            return;
        }
        JobNodeInfo existing = jobNodeInfoService.getById(param.getId());
        if (existing == null) {
            throw new RRException("任务节点不存在！");
        }
        // only the name is updatable through this entry point
        existing.setModPer(ShiroUtils.getUserId().intValue());
        existing.setModTime(DateUtil.date());
        existing.setName(param.getName());
        jobNodeInfoService.updateById(existing);
    }

    /**
     * Loads the dependency configuration rows of a node and enriches each row
     * with the duty user's display name.
     */
    @Override
    public List<JobNodeLayConfigDto> selectJobNodeLayList(JobNodeInfo jobNodeInfo) {
        List<JobNodeLayConfigDto> configs = jobNodeLayConfigMapper.selectDtoListByJobNodeId(jobNodeInfo.getId(), jobNodeInfo.getJobId());
        if (CollectionUtil.isEmpty(configs)) {
            return configs;
        }
        for (JobNodeLayConfigDto dto : configs) {
            SysUserEntity user = sysUserMapper.selectById(dto.getDutyUser());
            if (user != null) {
                dto.setDutyUserName(user.getUsername());
            }
        }
        return configs;
    }

    /**
     * Loads the given nodes with their configuration and routing conditions.
     * Returns null when no ids are given or nothing is found (callers rely on
     * the null contract).
     */
    @Override
    public List<JobNodeInfoDto> listAllNodeList(Set<Integer> nodeIds) {
        if (CollectionUtil.isEmpty(nodeIds)) {
            return null;
        }
        List<JobNodeInfoDto> nodes = jobNodeInfoMapper.selectListByIds1(nodeIds);
        if (CollectionUtil.isEmpty(nodes)) {
            return null;
        }
        for (JobNodeInfoDto dto : nodes) {
            // node-level configuration rows (job_type 0)
            List<JobNodeConf> confs = jobNodeConfMapper.selectList(new QueryWrapper<JobNodeConf>().eq("job_type", 0).eq("job_node_id", dto.getId()));
            // workflow display name: "<english type name>_<node id>"
            dto.setWorkFlowNodeName(JobType.ObjOf(dto.getTypeId()).getEnName() + "_" + dto.getId());
            if (CollectionUtil.isNotEmpty(confs)) {
                dto.setJobNodeConfig(JobNodeConfig.listToJobNodeConfig(confs));
            }
            // attach the node's routing conditions
            dto.setJobNodeLayConfigDtoList(jobNodeLayConfigMapper.selectDtoListByJobNodeId(dto.getId(), dto.getJobId()));
        }
        return nodes;
    }



    /**
     * Validates SAP / WebService input configuration and converts the
     * connection fields into a JSON object for downstream use.
     *
     * @param jobNodeConfig node configuration being validated
     * @param jsonObject    JSON object passed in by the caller; returned as-is
     *                      for WebService raw (POST-raw) requests
     * @return connection-field JSON for SAP/WebService sources, the passed-in
     *         object for raw requests, or null when no input connect type is set
     * @throws RRException on invalid configuration
     */
    private net.sf.json.JSONObject cheackSapConfig(JobNodeConfig jobNodeConfig,net.sf.json.JSONObject jsonObject)throws Exception {
        if (null == jobNodeConfig.getInput_connect_type()) {
            return null;
        }
        SapConnectField sapConnectField = jobNodeConfig.getSapConnectFieldInfo();
        if (ConnectTypeEnum.Sap.getCode().equals(jobNodeConfig.getInput_connect_type())
                || ConnectTypeEnum.WebService.getCode().equals(jobNodeConfig.getInput_connect_type())) {
            // WebService POST-raw request (request type 3): only the raw-xml
            // fields need validating; the SAP field checks below do not apply.
            // FIX: the original repeated the same Objects.equals(...,3) check in
            // a nested if that was always true; the duplicate has been removed.
            if (ConnectTypeEnum.WebService.getCode().equals(jobNodeConfig.getInput_connect_type())
                    && Objects.equals(jobNodeConfig.getOlderRequestType(), 3)) {
                if (StringUtils.isEmpty(jobNodeConfig.getWebserviceText())) {
                    throw new RRException("webservice raw类型的请求xml必须填写！");
                }
                if (StringUtils.isEmpty(jobNodeConfig.getAnaTableNameUrl())) {
                    throw new RRException("webservice raw类型的请求xml解析表名必须填写！");
                }
                if (null == jobNodeConfig.getReqType()) {
                    throw new RRException("webservice raw类型的请求格式必须填写！");
                }
                if (null == jobNodeConfig.getResType()) {
                    throw new RRException("webservice raw类型的返回格式必须填写！");
                }
                return jsonObject;
            }
            if (sapConnectField == null) {
                throw new RRException("sap的数据源输入参数不能为空");
            }
            // output tables are "db.table" strings; split into db + table names
            List<String> sap_output_table_names = jobNodeConfig.getSap_output_table_name();
            String dbname = null;
            List<String> tableNames = new ArrayList<>();
            if (null != sap_output_table_names) {
                if (!CollectionUtil.isEmpty(sap_output_table_names)) {
                    // NOTE(review): assumes every entry contains a '.'; an
                    // unqualified name would break the substring calls below —
                    // confirm upstream validation guarantees this.
                    dbname = sap_output_table_names.get(0).substring(0, sap_output_table_names.get(0).indexOf("."));
                }
                for (int i = 0; i < sap_output_table_names.size(); i++) {
                    tableNames.add(sap_output_table_names.get(i).substring(sap_output_table_names.get(i).indexOf(".") + 1));
                }
            }
            // verify the selected output tables have consistent partitioning
            if (null != dbname && tableNames.size() != 0) {
                checkSap(tableNames, dbname);
            }
        }
        if (ConnectTypeEnum.Sap.getCode().equals(jobNodeConfig.getInput_connect_type())) {
            // validate the SAP connection parameters, then serialize them
            jobNodeConfig.setSapConnectFieldInfo(new SapCheckUtils().checkSapParams(sapConnectField));
            return net.sf.json.JSONObject.fromObject(sapConnectField);
        }
        if (ConnectTypeEnum.WebService.getCode().equals(jobNodeConfig.getInput_connect_type())) {
            List<ConnectFieldInfo> connectFieldInfos = connectFieldInfoMapper.selectList(new QueryWrapper<ConnectFieldInfo>()
                    .eq("connect_id", jobNodeConfig.getInput_connect_id()));
            // validate the WebService parameters, then serialize them
            jobNodeConfig.setSapConnectFieldInfo(new SapCheckUtils().checkWebServiceParams(jobNodeConfig, connectFieldInfos));
            return net.sf.json.JSONObject.fromObject(jobNodeConfig.getSapConnectFieldInfo());
        }
        return null;
    }

    /**
     * Validates that the selected SAP output tables are partitioned
     * consistently: either none has partitions, or all have the same number
     * of partitions with identical partition-field names.
     *
     * @param tableNames unqualified table names of the selected outputs
     * @param dbName     database the tables belong to
     * @throws RRException when partitioning is inconsistent or a table is missing
     */
    private void checkSap(List<String> tableNames,String dbName){
        List<Integer> tableInfoIds = tableInfoService.getIdsByNames(tableNames,dbName);
        List<TableFieldInfo> tableFieldInfos = new ArrayList<>();
        // partition field names of the first partitioned table seen so far
        Set<String> partitionNames = new HashSet<>();String tableNameWithNoPartition = "";
        // false once an unpartitioned table has been encountered
        boolean hasPartition = true;
        int partitonNum = 0;
        for (Integer t : tableInfoIds
        ) {
            Set<String> presentPartitionNames = new HashSet<>();
            tableFieldInfos = tableFieldInfoMapper.selectList(new QueryWrapper<TableFieldInfo>().eq("is_partition", 1).eq("table_id", t));

            if(null != tableFieldInfos && tableFieldInfos.size() != 0){
                // this table is partitioned; an earlier table without partitions
                // makes the selection inconsistent
                if(!hasPartition){
                    throw new RRException(String.format("选中的输出表%s未设置分区",tableNameWithNoPartition));
                }
                if(partitonNum == 0){
                    partitonNum = tableFieldInfos.size();
                }else if(partitonNum != tableFieldInfos.size()){
                    throw new RRException("选中的输出表中分区个数不一致");
                }
                tableFieldInfos.forEach(tableFieldInfo -> presentPartitionNames.add(tableFieldInfo.getFieldName()));
                if(partitionNames.size() == 0){
                    // first partitioned table: remember its partition names as reference
                    tableFieldInfos.forEach(tableFieldInfo -> partitionNames.add(tableFieldInfo.getFieldName()));
                }else if(!checkSetSame(partitionNames,presentPartitionNames)){
                    throw new RRException("选中的输出表的分区名称不同");
                }
            }else{
                // this table has no partitions; any earlier partitioned table
                // makes the selection inconsistent
                TableInfo tableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("id",t));
                if(null == tableInfo){
                    throw new RRException("后台错误,未查询到数仓表格数据");
                }
                if(partitionNames.size() != 0){
                    throw new RRException(String.format("选中的输出表%s未设置分区",tableInfo.getTableName()));
                }
                hasPartition = false;
                tableNameWithNoPartition = tableInfo.getTableName();
            }
        }
    }

    /**
     * Checks whether two sets contain exactly the same elements.
     * FIX: the original mutated set1 via addAll(set2) as a hidden side effect
     * — the caller passes its live partitionNames set, which was silently
     * polluted with set2's elements. This version mutates neither argument;
     * the returned truth value is identical to the original's.
     *
     * @param set1 first set (not modified)
     * @param set2 second set (not modified)
     * @return true when both sets hold exactly the same elements
     */
    private Boolean checkSetSame(Set<String> set1, Set<String> set2){
        // equal size plus full containment <=> identical membership
        return set1.size() == set2.size() && set1.containsAll(set2);
    }

    /**
     * Validates the schedule expression of a node configuration: when a
     * "minute" value is present it must be an integer in the range 0-59.
     */
    private void cheackExpress(JobNodeConfig jobNodeConfig) {
        if (jobNodeConfig == null) {
            return;
        }
        String expression = jobNodeConfig.getExpression();
        if (expression == null || "null".equals(expression)) {
            return;
        }
        JSONObject minuteJson = JSONObject.parseObject(expression);
        Object minuteValue = minuteJson.get("minute");
        if (minuteValue == null || "".equals(minuteValue)) {
            return;
        }
        String minute = minuteValue.toString();
        if (!RegexUtils.checkInt(minute)) {
            throw new RRException("请输入0-59之间的数字");
        }
        int m = Integer.parseInt(minute);
        if (m > 59 || m < 0) {
            throw new RRException("请输入0-59之间的数字");
        }
    }

    /**
     * Resolves the storage format of the configured output table.
     *
     * @param jobNodeConfig configuration holding the output db/table names
     * @return the matched table's storage format
     * @throws RRException when no such table exists in the warehouse
     */
    public String getOutPutTableInfoFormat( JobNodeConfig jobNodeConfig ){
        String dbName = jobNodeConfig.getOutput_db_name();
        String tableName = jobNodeConfig.getOutput_table_name();
        // null name parts are skipped in the lookup rather than matched as null
        TableInfo tableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>()
                .eq(dbName != null, "db_name", dbName)
                .eq(tableName != null, "table_name", tableName));
        if (null == tableInfo) {
            throw new RRException(dbName + "数据库中没有" + tableName + "表");
        }
        return tableInfo.getFormat();
    }

    /**
     * @Description 拼接sheel脚本
     * @Author hujz
     * @Date 2019/8/14 19:57
     * @Param
     * @Return
     * @Exception
     */
    private String pingShellData(JobNodeInfo param, JobConfUtil jobConfUtil, OozieConfig oozieConfig) {
        String shell = "";

        if (null == param) {
            return shell;
        }
        if (null == param.getTypeId()) {
            return shell;
        }
        //查询到这个节点对应的工程的队列名称
        String queueName = getQueueName(param.getJobId());

        JobNodeConfig jobNodeConfig = param.getJobNodeConfig();
        StringBuilder paramCmd = new StringBuilder(String.format("if [ ! -f 'data-development-job-1.0.jar' ];then\n" +
                "\thdfs dfs -get %s/data-development-job-1.0.jar .  || exit 10\n" +
                "fi\n" +
                "if [ ! -f 'joda-time-2.8.1.jar' ];then\n" +
                "\thdfs dfs -get %s/joda-time-2.8.1.jar .  || exit 10\n" +
                "fi\n", bdpJobConfig.getJoblib(), bdpJobConfig.getJoblib()));
        switch (param.getTypeId()) {
            //由于机器的性能问题，导致结束的接口，在开始的接口之前执行。（坑）
            //此坑是因为调用开始的接口后加了个&符号导致，已解决
            //开始
            case 0: {
                shell = "echo '开始执行' \nsleep 1";
                break;
            }
            //结束
            case 5: {
                shell = "echo '结束执行' \nsleep 1";
                break;
            }
            //数据接入
            case 1: {
                //查询连接和相关的字段
                ConfConnect conf = confConnectService.getById(jobNodeConfig.getInput_connect_id());
                if(conf!=null) {
                    conf.setConnectFieldInfos(connectFieldInfoMapper.selectList(new QueryWrapper<ConnectFieldInfo>().eq("connect_id", conf.getId())));
                }
                ConnectTypeEnum confConnectType = ConnectTypeEnum.ObjOf(conf.getTypeId());
                //新版本
                JobNodeInfo jobNodeInfo = jobNodeInfoMapper.selectById(param.getId());
                jobNodeConfig.setJobNodeId(jobNodeInfo.getId());
                //处理分区字段类型是string、date datetime的分区
                if(null!=jobNodeConfig.getOutput_data_partition()&&!"".equals(jobNodeConfig.getOutput_data_partition())){
                    String outPutDbName = jobNodeConfig.getOutput_db_name();
                    String outPutTableName = jobNodeConfig.getOutput_table_name();
                    TableInfo outPutTable;
                    outPutTable = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("table_name",outPutTableName).eq("db_name",outPutDbName));
                    if (null == outPutTable){
                        if (CollectionUtil.isNotEmpty(jobNodeConfig.getSap_output_table_name())){
                            outPutTable = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("table_name",jobNodeConfig.getSap_output_table_name().get(0)).eq("db_name",outPutDbName));
                        }
                    }
                    if (null!= outPutTable){
                        String output_data_partition = jobNodeConfig.getOutput_data_partition();
                        if(StringUtils.isNotEmpty(output_data_partition)){
                            String[] split = output_data_partition.split(",");
                            if(null != split && split.length > 0){
                                StringBuffer newOutput_data_partition = new StringBuffer();
                                for (String s : split) {
                                    String[] split1 = s.split("=");
                                    if(null == split1 || split1.length <2){
                                        throw new RRException("分区字段格式不正确！");
                                    }
                                    TableFieldInfo outputDataPartitionFiled = tableFieldInfoMapper.selectOne(new QueryWrapper<TableFieldInfo>().eq("table_id",outPutTable.getId()).eq("field_name",split1[0]));
                                    if(null == outputDataPartitionFiled){
                                        throw new RRException("分区字段：【"+split1[0]+"】不存在！");
                                    }
                                    if (TableDataType.isStringOrDate(outputDataPartitionFiled.getFieldType().toUpperCase())){
                                        String s1 = split1[0] + "=" + "\"" + split1[1] + "\"";
                                        newOutput_data_partition.append(s1).append(",");
                                    }
                                }
                                if(StringUtils.isNotEmpty(newOutput_data_partition.toString())){
                                    jobNodeConfig.setOutput_data_partition(newOutput_data_partition.toString().substring(0,newOutput_data_partition.toString().length() -1));
                                }
                            }
                        }
                    }
                }

                //查询到对应的仓库表对应的字段是否加密---暂时不需要这个是否加密的东西---暂时用下面的只使用表id
                List<Integer> restTableIds = listAllTableIds(confConnectType,param.getJobNodeConfig());
                String tableIds = "";
                for (Integer restTableId : restTableIds) {
                    tableIds = tableIds + restTableId + ",";
                }
                if(StringUtils.isNotEmpty(tableIds)){
                    tableIds = tableIds.substring(0,tableIds.length()-1);
                }
                //如果是sap数据源，用spark原生实现
                if (confConnectType.getType().getCode() == ConTypeType.Sap.getCode()) {
                    //查询到节点的id
                    String rangerUserName = dataDevelopmentConfig.getUserPreffix() + ShiroUtils.getUserId();
                    shell = jobConfUtil.genSapOutPutShell(jobNodeConfig, param, conf, bdpJobConfig,tableIds,paramCmd,rangerUserName,dataDevelopmentConfig,queueName,ShiroUtils.getUserId());
                    break;
                }
                //如果是JDBC数据源，用spark原生实现
                else if (confConnectType.getType().getCode() == ConTypeType.JDBC.getCode()) {
                    String jdbcVersion = connectFieldInfoMapper.getJDBCVersionById(conf.getId());
                    conf.setJdbcVersion(jdbcVersion);
                    //解析到对应的输出表
                    String outPutTableInfoFormat = getOutPutTableInfoFormat(jobNodeConfig);
                    shell = jobConfUtil.getJDBCInPutShell(jobNodeConfig, conf, bdpJobConfig, outPutTableInfoFormat, paramCmd,tableIds,dataDevelopmentConfig,queueName,param.getId(),ShiroUtils.getUserId());
                    break;
                }
                //如果是JDBC数据源，oracle集群版本
                else if (confConnectType.getType().getCode() == ConTypeType.OracleNTS.getCode()) {
                    //解析到对应的输出表
                    String outPutTableInfoFormat = getOutPutTableInfoFormat(jobNodeConfig);
                    shell = jobConfUtil.getJDBCOracleNtsInPutShell(jobNodeConfig, conf, bdpJobConfig, outPutTableInfoFormat, paramCmd,tableIds,dataDevelopmentConfig,queueName,param.getId(),ShiroUtils.getUserId());
                    break;
                }
                //es数据源，用spark原生实现
                else if (confConnectType.getType().getCode() == ConTypeType.ES.getCode()){
                    //解析到对应的输出表
                    String outPutTableInfoFormat = getOutPutTableInfoFormat(jobNodeConfig);
                    shell = jobConfUtil.getEsShell(jobNodeConfig, conf, bdpJobConfig, outPutTableInfoFormat,paramCmd,tableIds,dataDevelopmentConfig,queueName,ShiroUtils.getUserId());
                    break;
                }
                //如果是FTP数据源，用spark原生实现
                else if (confConnectType.getType().getCode() == ConTypeType.FTP.getCode()) {
                    //查询到节点的id
                    String outPutTableInfoFormat = getOutPutTableInfoFormat(jobNodeConfig);
                    shell = jobConfUtil.getFTPInPutShell(jobNodeConfig, conf, bdpJobConfig, outPutTableInfoFormat, paramCmd,tableIds,dataDevelopmentConfig,queueName,ShiroUtils.getUserId());
                    break;
                }
                //如果是SFTP数据源，用spark原生实现
                else if (confConnectType.getType().getCode() == ConTypeType.SFTP.getCode()) {
                    //查询到节点的id
                    String outPutTableInfoFormat = getOutPutTableInfoFormat(jobNodeConfig);
                    shell = jobConfUtil.getSFTPInPutShell(jobNodeConfig, conf, bdpJobConfig, outPutTableInfoFormat, paramCmd,tableIds,dataDevelopmentConfig,queueName,ShiroUtils.getUserId());
                    break;
                }
                //如果是接口数据源
                else if (confConnectType.getType().getCode() == ConTypeType.JieKou.getCode()) {
                    String outPutTableInfoFormat = getOutPutTableInfoFormat(jobNodeConfig);
                    shell = jobConfUtil.getJieKouInPutShell(jobNodeConfig, conf, bdpJobConfig, outPutTableInfoFormat, paramCmd,tableIds,dataDevelopmentConfig,queueName,ShiroUtils.getUserId());
                    break;
                }
                //webService数据源，用spark原生实现
                else if (confConnectType.getType().getCode() == ConTypeType.WebService.getCode()){
                    shell = jobConfUtil.getWebServiceInPutShell(jobNodeConfig, conf, bdpJobConfig, paramCmd,tableIds,dataDevelopmentConfig,queueName,ShiroUtils.getUserId());
                    break;
                }
            }
            //SQL
            case 2: {
                try {
                    //解析表依赖关系
                    HiveLineageInfo lep = new HiveLineageInfo();
                    Set<String> TableNames = new HashSet<>();
                    try {
                        for (String query : jobNodeConfig.getSql_statment().split(";")
                        ) {
                            //过掉set语法
                            if (skipSetSql(query)) {
                                continue;
                            }

                            try {
                                lep.getLineageInfo(query);
                            }catch (Exception e){
                                log.error("解析血缘异常，error:{}",e.getMessage());
                            } catch (Error e){
                                log.error("解析血缘异常，error:{}",e.getMessage());
                            }

                            Set<Integer> inTableIds = new HashSet<>();
                            Set<Integer> outTableIds = new HashSet<>();
                            for (String inTable : lep.getInputTableList()
                            ) {
                                if (inTable.split("\\.").length > 1) {
                                    String inDbName = inTable.split("\\.")[0];
                                    String intTabName = inTable.split("\\.")[1];
                                    TableInfo tableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("table_name", intTabName).eq("db_name", inDbName));
                                    if (tableInfo != null) {
                                        inTableIds.add(tableInfo.getId());
                                    }
                                    TableNames.add(intTabName);
                                }
                            }
                            for (String outTable : lep.getOutputTableList()
                            ) {
                                if (outTable.split("\\.").length > 1) {
                                    String outDbName = outTable.split("\\.")[0];
                                    String outTabName = outTable.split("\\.")[1];
                                    TableInfo tableInfo = tableInfoService.getOne(new QueryWrapper<TableInfo>().eq("table_name", outTabName).eq("db_name", outDbName));
                                    if (tableInfo != null) {
                                        outTableIds.add(tableInfo.getId());
                                    }
                                    TableNames.add(outTabName);
                                }
                            }

                            //插入数据库
                            List<TableRelation> tableRelations = new ArrayList<>();
                            for (Integer inTableId : inTableIds
                            ) {
                                for (Integer outTableId : outTableIds
                                ) {
                                    //数据库中不存在表关系才插入
                                    TableRelation tableRelation1 = new TableRelation();
                                    tableRelation1.setCid(inTableId);
                                    tableRelation1.setPid(outTableId);
                                    tableRelationMapper.insertIfNotExists(tableRelation1);

//                                    //只有当输出表（cid）对应的输入表不包含此次保存的输入表时才进行保存操作，保证同一个任务多个sql计算节点之间不重复保存
//                                    List<TableRelation> cids = tableRelationService.list(new QueryWrapper<TableRelation>().eq("cid", outTableId));
//                                    TableRelation tableRelation1 = new TableRelation();
//                                    if (CollectionUtil.isEmpty(cids)) {
//                                        tableRelation1.setPid(inTableId);
//                                        tableRelation1.setCid(outTableId);
//                                        tableRelations.add(tableRelation1);
//                                    }
//                                    for (TableRelation tableRelation : cids
//                                    ) {
//                                        if (!inTableId.equals(tableRelation.getPid())) {
//                                            tableRelation1.setPid(inTableId);
//                                            tableRelation1.setCid(outTableId);
//                                            tableRelations.add(tableRelation1);
//                                        }
//                                    }

                                }
                            }

//                            if (tableRelations.size() > 0) {
//                                tableRelationService.saveOrUpdateBatch(tableRelations);
//                            }
                        }
                    } catch (Exception e) {
                        log.error("血缘关系解析错误：err:{}", e.getMessage());
                    }catch (ExceptionInInitializerError e){
                        log.error("血缘关系解析错误：err:{}", e.getMessage());
                    }

                    //是否有drop表
                    Set<Integer> dropHiveTables = cheackDropHiveTables(param.getJobNodeConfig(), param.getTypeId());

                    shell = jobConfUtil.genSQLShell(paramCmd,jobNodeConfig, bdpJobConfig,dataDevelopmentConfig,ShiroUtils.getUserId(),hiveConfig,dropHiveTables,oozieConfig,queueName,implaConfig);

//                    shell = String.format("%s\necho \"\"\"%s\"\"\" > script.hql \n%s -f script.hql"
//                            , paramCmd.toString()
//                            , jobConfUtil.replace_date_to_normal(jobNodeConfig.getSql_statment(), paramsMap.keySet())
//                            , beelineCmd);

                } catch (Exception e) {
                    throw new RRException("sql计算的语法错误");
                }
                break;
            }
            //程序执行
            case 3: {
                Map<String, String> paramsMap = jobConfUtil.extraParams(jobNodeConfig.getBase_proc_main_args(), jobNodeConfig.getBase_proc_main_in(), jobNodeConfig.getBase_proc_main_out());
                for (String p : paramsMap.values()
                ) {
                    paramCmd.append(p);
                }
                try {
                    shell = jobConfUtil.gentProcExeShell(jobNodeConfig, oozieConfig, bdpJobConfig, paramsMap.keySet());
                } catch (Exception e) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.SERVER_ERROR.getMessage());
                }
                break;
            }
            //数据推送
            case 4: {
                ConfConnect conf = confConnectService.getById(jobNodeConfig.getOutput_connect_id());
                if(conf != null){
                    String jdbcVersion = connectFieldInfoMapper.getJDBCVersionById(conf.getId());
                    conf.setJdbcVersion(jdbcVersion);
                }
                ConnectTypeEnum confConnectType = ConnectTypeEnum.ObjOf(jobNodeConfig.getOutput_connect_type());
                //是否有drop表
                Set<Integer> dropHiveTables = cheackDropHiveTables(param.getJobNodeConfig(), param.getTypeId());
                shell = jobConfUtil.genOutPutShell(jobNodeConfig, conf, confConnectType, bdpJobConfig,ShiroUtils.getUserId(),dataDevelopmentConfig,paramCmd,dropHiveTables,oozieConfig,queueName,param.getId());
                break;
            }
            //shell脚本
            case 6: {
                //查询到配置的服务器资源数据
                BussessConfigEntity bussessConfigEntity = bussessConfigService.getOne(new QueryWrapper<BussessConfigEntity>().eq("bussess_type", 1).eq("bussess_id", param.getId()));
                if(null == bussessConfigEntity){
                    throw new RRException("shell脚本未配置服务器资源信息！");
                }
                shell = jobConfUtil.genShellNode(jobNodeConfig, bdpJobConfig,ShiroUtils.getUserId(),dataDevelopmentConfig,paramCmd,oozieConfig,queueName,bussessConfigEntity);
                break;
            }
            //算法开发;
            case 7:{
                shell = jobConfUtil.genAlgorithmShell(jobNodeConfig,paramCmd,queueName);
                break;
            }
            default:
                break;
        }

        return shell;
    }

    /**
     * Resolves the YARN queue name configured for the given job.
     * Walks job -> process -> realtime queue; any missing link in that chain
     * falls back to the "default" queue.
     *
     * @param jobId primary key of the job
     * @return the configured queue name, or "default" when not resolvable
     */
    private String getQueueName(Integer jobId) {
        JobInfo job = jobInfoMapper.selectById(jobId);
        if (job == null) {
            return "default";
        }
        ProcInfo proc = procInfoMapper.selectById(job.getProcId());
        if (proc == null || proc.getQueueId() == null) {
            return "default";
        }
        RealtimeQueueEntity queue = realtimeQueueMapper.selectById(proc.getQueueId());
        return queue == null ? "default" : queue.getQueueName();
    }

    /**
     * Builds a shell snippet that fetches the row count of the given table via
     * the Oozie REST endpoint and echoes it as {@code virCount:<n>}.
     *
     * @param tableName   fully qualified table name ("db.table")
     * @param oozieConfig supplies the REST base url
     * @return the shell fragment to append
     */
    private String appendTableCountShell(String tableName, OozieConfig oozieConfig) {
        StringBuilder snippet = new StringBuilder();
        snippet.append("\nvirCount=`curl -X GET ")
                .append(oozieConfig.getRest_url())
                .append("/tableCount/")
                .append(tableName)
                .append("`\necho \"virCount:$virCount\"");
        return snippet.toString();
    }

    /**
     * Collects the ids of the warehouse tables a collection node writes to.
     * For SAP/WebService sources the table names come from the (optionally
     * db-qualified) {@code sap_output_table_name} list; for the JDBC-like
     * sources the single configured output table is used.
     *
     * @param confConnectType connector type of the node's data source
     * @param jobNodeConfig   node configuration; null yields an empty list
     * @return ids of the matching tables, never null
     */
    private List<Integer> listAllTableIds(ConnectTypeEnum confConnectType, JobNodeConfig jobNodeConfig) {
        List<Integer> tableIds = new ArrayList<>();
        if (jobNodeConfig == null) {
            return tableIds;
        }
        int typeCode = confConnectType.getType().getCode();
        String dbName = jobNodeConfig.getOutput_db_name();
        Set<String> tableNames = new HashSet<>();
        if (typeCode == ConTypeType.Sap.getCode() || typeCode == ConTypeType.WebService.getCode()) {
            // May be empty or hold several "db.table" entries; an empty list means
            // all tables are loaded into hive. Strip the db prefix from each name
            // and, when no db was configured explicitly, take it from the prefix.
            for (String qualifiedName : jobNodeConfig.getSap_output_table_name()) {
                tableNames.add(qualifiedName.substring(qualifiedName.indexOf(".") + 1));
                if (StringUtils.isEmpty(dbName)) {
                    dbName = qualifiedName.substring(0, qualifiedName.indexOf("."));
                }
            }
        } else if (typeCode == ConTypeType.JDBC.getCode()
                || typeCode == ConTypeType.FTP.getCode()
                || typeCode == ConTypeType.ES.getCode()
                || typeCode == ConTypeType.JieKou.getCode()
                || typeCode == ConTypeType.Kudu.getCode()
                || typeCode == ConTypeType.OracleNTS.getCode()) {
            tableNames.add(jobNodeConfig.getOutput_table_name());
        }
        if (CollectionUtil.isEmpty(tableNames)) {
            return tableIds;
        }
        // Look up the table rows by db name plus the collected table names.
        List<TableInfo> tables = tableInfoService.selectListByDbNameAndTableName(dbName, tableNames);
        if (CollectionUtil.isNotEmpty(tables)) {
            for (TableInfo table : tables) {
                tableIds.add(table.getId());
            }
        }
        return tableIds;
    }

    /**
     * Builds, for every output table of a collection node, the table's field
     * list with a normalized encryption flag on each field.
     *
     * @param confConnectType connector type of the node's data source
     * @param jobNodeConfig   node configuration
     * @return one TableAndFieldDto per table that has fields (empty list when
     *         no table names could be resolved); null for a null config
     */
    private List<TableAndFieldDto> getAllTableFieldList(ConnectTypeEnum confConnectType, JobNodeConfig jobNodeConfig) {
        if (null == jobNodeConfig) {
            return null;
        }
        List<TableAndFieldDto> resTList = new ArrayList<>();
        Set<String> tableNameSet = new HashSet<>();
        String olderDbName = jobNodeConfig.getOutput_db_name();
        if (confConnectType.getType().getCode() == ConTypeType.Sap.getCode()) {
            // SAP: optional list of "db.table" names; an empty list means all tables
            // are loaded into hive. Strip the db prefix; when no db name was
            // configured explicitly, take it from the first qualified name.
            for (String outTableName : jobNodeConfig.getSap_output_table_name()
            ) {
                tableNameSet.add(outTableName.substring(outTableName.indexOf(".") + 1));
                if (StringUtils.isEmpty(olderDbName)) {
                    olderDbName = outTableName.substring(0, outTableName.indexOf("."));
                }
            }
        } else if (confConnectType.getType().getCode() == ConTypeType.JDBC.getCode() || confConnectType.getType().getCode() == ConTypeType.FTP.getCode()) {
            tableNameSet.add(jobNodeConfig.getOutput_table_name());
        }
        if (CollectionUtil.isEmpty(tableNameSet)) {
            return resTList;
        }
        List<TableFieldInfo> tableFieldInfos = tableFieldInfoMapper.selectAllTavleFieldInfoByDbNameAndTableName(olderDbName, tableNameSet);
        if (CollectionUtil.isNotEmpty(tableFieldInfos)) {
            // Group fields by their owning table so each table gets one DTO.
            Map<String, List<TableFieldInfo>> resMap = tableFieldInfos.stream().collect(Collectors.groupingBy(TableFieldInfo::getTableName));
            if (CollectionUtil.isNotEmpty(resMap)) {
                for (Map.Entry<String, List<TableFieldInfo>> entry : resMap.entrySet()) {
                    TableAndFieldDto tableAndFieldDto = new TableAndFieldDto();
                    List<TableFieldDto> reTableFiledList = new ArrayList<>();
                    if (CollectionUtil.isEmpty(entry.getValue())) {
                        continue;
                    }
                    // Normalize each field's encryption-style id:
                    //  - no id configured -> NOEN (not encrypted)
                    //  - id configured and differs from the "NOEN" permission
                    //    parameter -> EN (encrypted)
                    //  - otherwise -> NOEN
                    for (TableFieldInfo tableFieldInfo : entry.getValue()) {
                        TableFieldDto tableFieldDto = new TableFieldDto();
                        BeanUtils.copyProperties(tableFieldInfo, tableFieldDto);
                        if (null == tableFieldInfo.getEncyStyleId()) {
                            tableFieldDto.setEncyStyleId(PressionEnParamType.NOEN.getId());
                        } else {
                            // NOTE(review): this permission-parameter lookup runs once per
                            // field — looks hoistable out of the loop; confirm before changing.
                            List<DataPermissionParam> allDataPermissionParamList = dataPermissionParamMapper.selectListByTypeAndIds(null, PressionParamType.ENSTYPE.getId());
                            if (CollectionUtil.isNotEmpty(allDataPermissionParamList)) {
                                List<DataPermissionParam> collect = allDataPermissionParamList.stream().filter(dataPermissionParam -> Objects.equals(dataPermissionParam.getPermissName(), PressionEnParamType.NOEN.getName())).collect(Collectors.toList());
                                if (CollectionUtil.isNotEmpty(collect)) {
                                    if (!Objects.equals(tableFieldInfo.getEncyStyleId(), collect.get(0).getId())) {
                                        tableFieldDto.setEncyStyleId(PressionEnParamType.EN.getId());
                                    }
                                    reTableFiledList.add(tableFieldDto);
                                    continue;
                                }
                            }
                            // No "NOEN" reference parameter found -> treat as not encrypted.
                            tableFieldDto.setEncyStyleId(PressionEnParamType.NOEN.getId());
                        }
                        reTableFiledList.add(tableFieldDto);
                    }
                    if (CollectionUtil.isNotEmpty(reTableFiledList)) {
                        tableAndFieldDto.setDbName(olderDbName);
                        tableAndFieldDto.setTableName(entry.getKey());
                        tableAndFieldDto.setAllFieldList(reTableFiledList);
                        resTList.add(tableAndFieldDto);
                    }
                }
            }
        }
        return resTList;
    }

    /***
     * Decides whether a single SQL statement should be excluded from lineage
     * parsing. Blank statements and Hive "SET key=value" option statements
     * carry no lineage and are skipped.
     *
     * @param query one SQL statement (already split on ';'), may be null
     * @return true when the statement should be skipped
     */
    private Boolean skipSetSql(String query) {
        if (query == null || query.trim().isEmpty()) {
            return true;
        }
        String trimmed = query.trim();
        // Only skip statements that actually START with the SET keyword.
        // The previous contains("SET") check wrongly skipped any statement whose
        // text merely contained the substring (e.g. "... LIMIT 10 OFFSET 5" or a
        // table named dataset_x), silently dropping their lineage.
        return trimmed.regionMatches(true, 0, "set", 0, 3)
                && (trimmed.length() == 3 || !Character.isJavaIdentifierPart(trimmed.charAt(3)));
    }

    /**
     * @Description Disables nodes that are not part of any complete start-to-end
     *              path and re-enables nodes that are, persisting the change via
     *              disabledOrabledJenkins (db state plus the jenkins side).
     *              Disabled: nodes with no links at all, and nodes that appear
     *              only on paths that never reach the end node. Enabled: every
     *              node on a path that terminates at the end node.
     * @Author hujz
     * @Date 2019/8/14 16:14
     * @Param jobNodeInfos: job node records  nodeJsonObjects: all graph nodes  linkList: all expanded link paths  id: jobId   endNodeKey: key of the end node
     * @Return
     * @Exception
     */
    private void disabledNodeListAndJenkins(List<JobNodeInfo> jobNodeInfos, List<JSONObject> nodeJsonObjects, List<List<String>> linkList, Integer id, String endNodeKey) {

        if (null == jobNodeInfos || jobNodeInfos.isEmpty() || null == nodeJsonObjects || nodeJsonObjects.isEmpty()
                || null == linkList || linkList.isEmpty() || null == id || StringUtils.isEmpty(endNodeKey)) {
            return;
        }
        // Split the paths into those that terminate at the end node and those that don't.
        List<List<String>> endKeyLinkDataList = linkList.stream().filter(datalist -> Objects.equals(datalist.get(datalist.size() - 1), endNodeKey)).collect(Collectors.toList());
        List<List<String>> noHaveEndKeyDataList = linkList.stream().filter(datalist -> !Objects.equals(datalist.get(datalist.size() - 1), endNodeKey)).collect(Collectors.toList());
        Set<String> needDisabledNodeList = new HashSet<>();
        Set<String> nodeKeyList = new HashSet<>();
        Set<String> allNodeKeyList = new HashSet<>();
        for (List<String> strings : linkList) {
            nodeKeyList.addAll(strings);
        }
        for (JSONObject jsonObject : nodeJsonObjects) {
            allNodeKeyList.add(jsonObject.getString("key"));
        }
        // Keys of nodes that appear in the graph but on no link at all -> disable.
        allNodeKeyList.forEach(keydata -> {
            if (!nodeKeyList.contains(keydata)) {
                needDisabledNodeList.add(keydata);
            }
        });

        // Keys on paths that reach the end node.
        Set<String> endAndKeys = new HashSet<>();
        if (!endKeyLinkDataList.isEmpty()) {
            for (List<String> strings : endKeyLinkDataList) {
                endAndKeys.addAll(strings);
            }
        }

        // Keys on paths that never reach the end node.
        Set<String> noEndKeys = new HashSet<>();
        if (!noHaveEndKeyDataList.isEmpty()) {
            for (List<String> strings : noHaveEndKeyDataList) {
                noEndKeys.addAll(strings);
            }
        }

        // A node is disabled only when it occurs exclusively on dead-end paths;
        // when NO path reaches the end node, every linked node is disabled.
        if (!endAndKeys.isEmpty()) {
            if (!noEndKeys.isEmpty()) {
                noEndKeys.forEach(dataKey -> {
                    if (!endAndKeys.contains(dataKey)) {
                        needDisabledNodeList.add(dataKey);
                    }
                });
            }
        } else {
            needDisabledNodeList.addAll(noEndKeys);
        }

        // Disable the nodes that must be disabled (db + jenkins side).
        if (!needDisabledNodeList.isEmpty()) {
            disabledOrabledJenkins(needDisabledNodeList, jobNodeInfos, id, false);
        }

        // Enable the nodes that must be enabled (db + jenkins side).
        if (!endAndKeys.isEmpty()) {
            disabledOrabledJenkins(endAndKeys, jobNodeInfos, id, true);
        }
    }


    /**
     * @Description Enables or disables the given job nodes, persisting the new
     *              state to the job_node_info table. Nodes already in the
     *              target state are left untouched.
     * @Author hujz
     * @Date 2019/8/14 16:55
     * @Param needDisabledNodeList: node keys to toggle   id: jobId  jobNodeInfos: job node records  flage: true = enable, false = disable
     * @Return
     * @Exception
     */
    @Override
    public void disabledOrabledJenkins(Set<String> needDisabledNodeList, List<JobNodeInfo> jobNodeInfos, Integer id, Boolean flage) {
        if (null == needDisabledNodeList || needDisabledNodeList.isEmpty() || null == jobNodeInfos || jobNodeInfos.isEmpty()) {
            return;
        }
        // Select only nodes whose current state differs from the target state.
        List<JobNodeInfo> toUpdate = new ArrayList<>();
        for (JobNodeInfo node : jobNodeInfos) {
            if (needDisabledNodeList.contains(node.getNodeKey()) && !flage.equals(node.getEnable())) {
                toUpdate.add(node);
            }
        }

        // Persist the new state on a copy so the caller's objects stay untouched.
        for (JobNodeInfo node : toUpdate) {
            JobNodeInfo updated = new JobNodeInfo();
            BeanUtils.copyProperties(node, updated);
            updated.setEnable(flage);
            updated.setModPer(ShiroUtils.getUserId().intValue());
            updated.setModTime(DateUtil.date());
            jobNodeInfoMapper.updateById(updated);
        }
    }

    @Override
    public List<JobNodeInfo> selectJobNodeInfoByJobId(Integer jobId) {
        // Returns the job's nodes ordered by their position in the flow graph:
        // start node first, then each depth layer of intermediate nodes, end
        // node last. Returns null whenever the job graph is not in a usable
        // state (missing job, no nodes, not exactly one start/end node, no
        // stored layout, or no expandable link data).
        // NOTE(review): the returned JobNodeInfo objects carry only jobId and
        // nodeKey — callers appear to resolve the full rows themselves; confirm.

        JobInfo jobInfo = jobInfoMapper.selectById(jobId);
        if (null == jobInfo) {
            return null;
        }
        List<JobNodeInfo> jobNodeInfos = jobNodeInfoMapper.selectList(new QueryWrapper<JobNodeInfo>().eq("job_id", jobInfo.getId()));
        if (null == jobNodeInfos || jobNodeInfos.isEmpty()) {
            return null;
        }

        // Exactly one start node (typeId == 0) must be configured.
        List<JobNodeInfo> nodeInfos = jobNodeInfos.stream().filter(jobNodeInfo -> Objects.equals(jobNodeInfo.getTypeId(), 0)).collect(Collectors.toList());
        if (nodeInfos.size() != 1) {
            return null;
        }

        // Exactly one end node (typeId == 5) must be configured.
        List<JobNodeInfo> endNodeInfos = jobNodeInfos.stream().filter(jobNodeInfo -> Objects.equals(jobNodeInfo.getTypeId(), 5)).collect(Collectors.toList());
        if (endNodeInfos.size() != 1) {
            return null;
        }

        if (StringUtils.isEmpty(jobInfo.getParam())) {
            return null;
        }
        // The graph layout is stored as JSON with "nodeDataArray"/"linkDataArray".
        JSONObject jsonObject = JSONObject.parseObject(jobInfo.getParam());
        JSONArray nodeDataArray = jsonObject.getJSONArray("nodeDataArray");
        JSONArray linkDataArray = jsonObject.getJSONArray("linkDataArray");

        List<JSONObject> linkJsonObjects = linkDataArray.toJavaList(JSONObject.class);
        List<JSONObject> nodeJsonObjects = nodeDataArray.toJavaList(JSONObject.class);

        // Expand the raw links into full start-to-end paths of node keys.
        List<List<String>> linkList = getLinkDataList(linkJsonObjects, nodeJsonObjects, nodeInfos.get(0).getNodeKey());
        if (CollectionUtil.isEmpty(linkList)) {
            return null;
        }
        // Determine the start/end keys and the longest path length.
        List<JobNodeInfo> restList = new ArrayList<>();
        int maxSize = 2;
        String endNodeKey = "";
        String startNodeKey = "";
        for (List<String> strings : linkList) {
            if (CollectionUtil.isNotEmpty(strings)) {
                if (StringUtils.isEmpty(endNodeKey)) {
                    endNodeKey = strings.get(strings.size() - 1);
                }
                if (StringUtils.isEmpty(startNodeKey)) {
                    startNodeKey = strings.get(0);
                }
                if (strings.size() > maxSize) {
                    maxSize = strings.size();
                }
            }
        }
        // Node keys per depth level, excluding the start and end nodes.
        List<Set<String>> allNodeKeyList = new ArrayList<>();
        // Group the task nodes by their level (index i within each path).
        for (int i = 1; i < maxSize - 1; i++) {
            Set<String> allSet = new HashSet<>();
            for (List<String> strings : linkList) {
                if (CollectionUtil.isNotEmpty(strings)) {
                    if (strings.size() > i) {
                        if (!Objects.equals(strings.get(i), endNodeKey)) {
                            allSet.add(strings.get(i));
                        }
                    }
                }
            }
            allNodeKeyList.add(allSet);
        }

        JobNodeInfo jobNodeInfoStartNode = new JobNodeInfo();
        jobNodeInfoStartNode.setJobId(jobId);
        jobNodeInfoStartNode.setNodeKey(startNodeKey);
        restList.add(jobNodeInfoStartNode);

        // NOTE(review): a node reachable at several depths appears in several
        // level sets and is emitted once per set — duplicates are possible
        // here; confirm this is intended before relying on uniqueness.
        if (CollectionUtil.isNotEmpty(allNodeKeyList)) {
            for (Set<String> strings : allNodeKeyList) {
                if (CollectionUtil.isNotEmpty(strings)) {
                    for (String string : strings) {
                        JobNodeInfo jobNodeInfoNode = new JobNodeInfo();
                        jobNodeInfoNode.setJobId(jobId);
                        jobNodeInfoNode.setNodeKey(string);
                        restList.add(jobNodeInfoNode);
                    }
                }
            }
        }

        JobNodeInfo jobNodeInfoEndNode = new JobNodeInfo();
        jobNodeInfoEndNode.setJobId(jobId);
        jobNodeInfoEndNode.setNodeKey(endNodeKey);
        restList.add(jobNodeInfoEndNode);


        return restList;
    }

    /**
     * Removes the HDFS-side configuration file of a job node. Only SQL nodes
     * ({@code JobType.SQL}) keep a generated conf file on HDFS; other node
     * types are a no-op, as is a null argument.
     *
     * @param jobNodeInfo the node whose HDFS artifacts should be removed
     * @throws RRException when the HDFS delete fails
     */
    @Override
    public void deleteHdfsInfo(JobNodeInfo jobNodeInfo) {
        try {
            if (null == jobNodeInfo) {
                return;
            }
            if (Objects.equals(jobNodeInfo.getTypeId(), JobType.SQL.getCode())) {
                JobConfUtil jobConfUtil = new JobConfUtil(bdpJobConfig);
                jobConfUtil.deleteSQLJobConf(jobNodeInfo, bdpJobConfig);
            }
        } catch (Exception e) {
            // Log the original exception before rethrowing: RRException is built
            // from a message only, so without this log the root cause is lost.
            log.error("Failed to delete job conf on HDFS", e);
            throw new RRException(DataDevelopmentBizExceptionEnum.UPLOAD_JOB_CONF_ERROR.getMessage());
        }
    }

    /**
     * Deletes the generated shell files of a job (and optionally a single
     * node) from HDFS. Failures are logged and deliberately swallowed: a
     * missing shell file must not block the surrounding delete flow. The HDFS
     * handle is always released in the finally block.
     *
     * @param jobInfo     the job whose shell files should be removed
     * @param jobNodeInfo optional node to narrow the delete to
     */
    @Override
    public void deleteHdfsShellFile(JobInfo jobInfo, JobNodeInfo jobNodeInfo) {

        HdfsUtil hdfsUtil = null;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
            JobUtil jobUtil = new JobUtil(hdfsUtil);
            jobUtil.deleteJob(ShiroUtils.getUserId(), jobInfo, jobNodeInfo);
        } catch (Exception e) {
            // Use the class logger instead of printStackTrace so the failure
            // appears in the application log with full stack trace and context.
            log.error("Failed to delete job shell file on HDFS", e);
        } finally {
            if (null != hdfsUtil) {
                hdfsUtil.close();
            }
        }

    }


    /**
     * @Description Verifies that no path in the link list visits the same node
     *              twice, i.e. that the graph contains no cycle. On the first
     *              duplicate key found along a path, the display names of the
     *              offending edge are resolved and an RRException is thrown.
     * @Author hujz
     * @Date 2019/8/14 11:23
     * @Param linkList expanded paths of node keys  linkJsonObjects raw link definitions  nodeJsonObjects raw node definitions (for display names)
     * @Return
     * @Exception
     */
    private void checkLinkToLink(List<List<String>> linkList, List<JSONObject> linkJsonObjects, List<JSONObject> nodeJsonObjects) {

        if (null == linkList || linkList.isEmpty() || null == linkJsonObjects || linkJsonObjects.isEmpty()) {
            return;
        }
        // A repeated key inside a single path means the path loops back on itself.
        for (List<String> path : linkList) {
            for (int first = 0; first < path.size() - 1; first++) {
                for (int second = first + 1; second < path.size(); second++) {
                    if (!Objects.equals(path.get(first), path.get(second))) {
                        continue;
                    }
                    String errorFromMsg = "";
                    String errorToMsg = "";
                    if (null != nodeJsonObjects && !nodeJsonObjects.isEmpty()) {
                        // Resolve the display text of the edge that closes the cycle.
                        final int dup = second;
                        List<JSONObject> fromNodes = nodeJsonObjects.stream().filter(nodeJsonObject -> Objects.equals(nodeJsonObject.getString("key"), path.get(dup - 1))).collect(Collectors.toList());
                        List<JSONObject> toNodes = nodeJsonObjects.stream().filter(nodeJsonObject -> Objects.equals(nodeJsonObject.getString("key"), path.get(dup))).collect(Collectors.toList());
                        if (!fromNodes.isEmpty()) {
                            errorFromMsg = fromNodes.get(0).getString("text");
                        }
                        if (!toNodes.isEmpty()) {
                            errorToMsg = toNodes.get(0).getString("text");
                        }
                    }
                    throw new RRException(String.format("存在循环指向%s任务节点到%s任务节点！", errorFromMsg, errorToMsg));
                }
            }
        }
    }

    /**
     * @Description Build every ordered pairwise combination of the given keys:
     *              for each index pair (i, j) with i &lt; j one JSON object
     *              {"from": strings[i], "to": strings[j]} is produced.
     * @Author hujz
     * @Date 2019/8/14 11:31
     * @Param strings node keys in path order
     * @Return all pairwise combinations, or null when the input is null or empty
     *         (callers are expected to null-check)
     * @Exception
     */
    private List<JSONObject> getLinkTwo(List<String> strings) {
        if (null == strings || strings.isEmpty()) {
            return null;
        }
        int size = strings.size();
        List<JSONObject> pairs = new ArrayList<>();
        for (int from = 0; from < size - 1; from++) {
            for (int to = from + 1; to < size; to++) {
                JSONObject pair = new JSONObject();
                pair.putIfAbsent("from", strings.get(from));
                pair.putIfAbsent("to", strings.get(to));
                pairs.add(pair);
            }
        }
        return pairs;
    }

    /**
     * @Description Resolve every traversal path through the job graph starting from
     *              the given begin node. The graph arrives as two fastjson lists in
     *              GoJS GraphLinksModel shape: nodeJsonObjects holds node entries
     *              ({"category":..., "text":..., "key":...}) and linkJsonObjects holds
     *              directed edges ({"from": key, "to": key, ...}). The actual
     *              expansion is delegated to the recursive reducLinkNodeList.
     * @Author hujz
     * @Date 2019/8/13 11:22
     * @Param linkJsonObjects link definitions  nodeJsonObjects node definitions  beginNodeKey key of the start node
     * @Return List<List<String>> every resolved path as an ordered list of node keys
     * @Exception RRException when either list is null or empty (no usable job configuration)
     */
    private List<List<String>> getLinkDataList(List<JSONObject> linkJsonObjects, List<JSONObject> nodeJsonObjects, String beginNodeKey) {

        //both the links and the nodes must be present, otherwise the job has no usable node configuration
        if (null == linkJsonObjects || linkJsonObjects.isEmpty() || null == nodeJsonObjects || nodeJsonObjects.isEmpty()) {
            throw new RRException(DataDevelopmentBizExceptionEnum.JOB_NODE_NO_CONF_EXIST.getMessage());
        }

        //seed the traversal with a single path that contains only the begin node key
        List<String> seedPath = new ArrayList<>();
        seedPath.add(beginNodeKey);
        List<List<String>> seedPaths = new ArrayList<>();
        seedPaths.add(seedPath);

        //and a single frontier entry wrapping the begin node key
        JSONObject seedNode = new JSONObject();
        seedNode.putIfAbsent("key", beginNodeKey);
        List<JSONObject> frontier = new ArrayList<>();
        frontier.add(seedNode);

        //recursively extend the seed path along outgoing links
        return reducLinkNodeList(linkJsonObjects, frontier, seedPaths);
    }

    /**
     * @Description Recursively expand link paths by following each frontier node's
     *              outgoing links. inputList holds the paths built so far: each path
     *              is an ordered list of node keys ending at one of the frontier
     *              nodes in beginNodeList. Each pass extends every such path by one
     *              hop along every outgoing link; a path whose last node has no
     *              outgoing link is carried forward unchanged (its node stays in the
     *              frontier). Recursion stops when nothing was produced, when the
     *              output equals the input (no path grew), or when the longest path
     *              holds more keys than links exist - which is only possible if the
     *              links contain a cycle; the caller (checkLinkToLink) then detects
     *              the cycle via the repeated keys in the returned paths.
     * @Author hujz
     * @Date 2019/8/13 13:39
     * @Param linkJsonObjects link definitions ({"from": key, "to": key, ...})
     *        beginNodeList frontier nodes ({"key": nodeKey}) to expand this pass
     *        inputList paths accumulated so far
     * @Return the fully expanded list of paths
     * @Exception
     */
    private List<List<String>> reducLinkNodeList(List<JSONObject> linkJsonObjects, List<JSONObject> beginNodeList, List<List<String>> inputList) {
        List<List<String>> tempList = new ArrayList<>();
        if (null == linkJsonObjects || linkJsonObjects.isEmpty() || null == beginNodeList || beginNodeList.isEmpty()) {
            return inputList;
        }

        Set<JSONObject> beginSet = new HashSet<>();
        for (JSONObject beginNodeData : beginNodeList) {
            //all links leaving the current frontier node
            List<JSONObject> collect = linkJsonObjects.stream().filter(linkJsonObject -> Objects.equals(linkJsonObject.getString("from"), beginNodeData.getString("key"))).collect(Collectors.toList());
            //the already-recorded paths that currently end at this frontier node
            List<List<String>> pingList = inputList.stream().filter(dataList -> Objects.equals(dataList.get(dataList.size() - 1), beginNodeData.getString("key"))).collect(Collectors.toList());
            for (List<String> strings : pingList) {
                if (collect.isEmpty()) {
                    //no outgoing link: keep this node in the next frontier ...
                    JSONObject newObject = new JSONObject();
                    newObject.putIfAbsent("key", beginNodeData.getString("key"));
                    beginSet.add(newObject);

                    //... and carry the finished path forward unchanged
                    tempList.add(strings);

                }
                for (JSONObject jsonObject : collect) {
                    List<String> keyTempList = new ArrayList<>(strings);
                    keyTempList.add(jsonObject.getString("to"));
                    //record the path extended by one hop
                    tempList.add(keyTempList);

                    //the target node joins the next frontier; the HashSet de-duplicates
                    //NOTE(review): dedup relies on JSONObject's map-based equals/hashCode - confirm
                    JSONObject newObject = new JSONObject();
                    newObject.putIfAbsent("key", jsonObject.getString("to"));
                    beginSet.add(newObject);
                }
            }
        }

        List<JSONObject> beginNodeTempList = new ArrayList<>(beginSet);

        //nothing produced at all: stop and return the paths unchanged
        if (tempList.isEmpty()) {
            return inputList;
        }

        //output identical to input: every path ended at a node without outgoing links, stop
        if (tempList.size() == inputList.size() && tempList.containsAll(inputList)) {
            return inputList;
        }

        //cycle guard: once the longest path holds more keys than the total number of links
        //(+1 because a maximal acyclic path over k links contains k+1 keys), the graph must
        //contain a cycle - stop here and let the caller report the duplicated keys
        int maxCountSize = tempList.get(0).size();
        for (int i = 1; i < tempList.size(); i++) {
            if (tempList.get(i).size() > maxCountSize) {
                maxCountSize = tempList.get(i).size();
            }
        }
        //the +1 is required: key count = link count + 1 on a maximal acyclic path
        if (maxCountSize >= linkJsonObjects.size() + 1) {
            return tempList;
        }

        //recurse with the new frontier and the extended paths
        return reducLinkNodeList(linkJsonObjects, beginNodeTempList, tempList);
    }


}
