package cn.getech.data.development.utils.oozie;

import cn.getech.data.development.config.properties.*;
import cn.getech.data.development.constant.*;
import cn.getech.data.development.dto.JobNodeSQLDto;
import cn.getech.data.development.dto.SapConnectFieldToSparkDto;
import cn.getech.data.development.entity.*;
import cn.getech.data.development.utils.HdfsUtil;
import cn.getech.data.intelligence.common.exception.RRException;
import cn.hutool.core.collection.CollectionUtil;
import cn.hutool.core.map.MapUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import org.apache.commons.lang.StringUtils;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Field;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;


public class JobConfUtil {

    // Job configuration shared statically across the utility's static helpers.
    // NOTE(review): set from the instance constructor below — presumably a trick to
    // expose a container-injected config to static methods; confirm only one instance
    // is ever created, otherwise the last construction wins.
    private static BdpJobConfig bdpJobConfig;


    /**
     * Stores the injected {@link BdpJobConfig} into the static field used by the
     * static helpers of this class.
     */
    public JobConfUtil(BdpJobConfig bdpJobConfig) {
        // FIX: write the static field through the class name, not `this` —
        // the original `this.bdpJobConfig = ...` compiles but is misleading
        // (it is a static assignment, not an instance one).
        JobConfUtil.bdpJobConfig = bdpJobConfig;
    }

    /**
     * Converts a job node's schedule configuration into a 5-field unix/Oozie crontab string.
     *
     * <p>Two modes:
     * <ul>
     *   <li>REPEAT (fixed time): expression JSON such as
     *       {"type":"month","day":1,"hour":12,"minute":10} becomes "10 12 1 * *"</li>
     *   <li>INTERVAL (every N units): "every N minutes/hours" using the "*&#47;N" cron syntax</li>
     * </ul>
     *
     * <p>Field values are range-checked via {@link #checkNumber}. Returns {@code null} for
     * run-once nodes, a missing/unparsable expression, or an unknown or missing "type"
     * (previously a missing "type" caused a NullPointerException in the switch).
     *
     * @param jobNodeConfig node whose schedule expression is read
     * @return a 5-field crontab string, or null when no recurring schedule applies
     */
    public static String resolveCrontab(JobNodeConfig jobNodeConfig) {
        String crontab = null;
        if (jobNodeConfig.getScheduleOrInterval() == ScheduleRepeatType.REPEAT.getCode()) {
            // Fixed-time schedule, e.g. {"type":"month","day":1,"hour":12,"minute":10}
            JSONObject expression = JSONObject.parseObject(jobNodeConfig.getExpression());
            // FIX: guard against a missing "type" key — switching on null throws an NPE.
            String type = expression == null ? null : expression.getString("type");
            if (type != null) {
                switch (type) {
                    case "hour":
                        checkNumber(expression.getString("minute"), 59, 0, "分钟(minute)");
                        crontab = String.format("%s * * * *", expression.getString("minute"));
                        break;
                    case "day":
                        checkNumber(expression.getString("minute"), 59, 0, "分钟(minute)");
                        checkNumber(expression.getString("hour"), 23, 0, "小时(hour)");
                        crontab = String.format("%s %s * * *", expression.getString("minute")
                                , expression.getString("hour"));
                        break;
                    case "week":
                        checkNumber(expression.getString("week"), 7, 1, "周(week)");
                        checkNumber(expression.getString("minute"), 59, 0, "分钟(minute)");
                        checkNumber(expression.getString("hour"), 23, 0, "小时(hour)");
                        crontab = String.format("%s %s * * %s", expression.getString("minute")
                                , expression.getString("hour"), expression.getString("week"));
                        break;
                    case "month":
                        checkNumber(expression.getString("day"), 31, 1, "日(day)");
                        checkNumber(expression.getString("minute"), 59, 0, "分钟(minute)");
                        checkNumber(expression.getString("hour"), 23, 0, "小时(hour)");
                        crontab = String.format("%s %s %s * *", expression.getString("minute")
                                , expression.getString("hour"), expression.getString("day"));
                        break;
                    default:
                        break;
                }
            }
        } else if (jobNodeConfig.getScheduleOrInterval() == ScheduleRepeatType.INTERVAL.getCode()) {
            // Interval schedule: "every N minutes" / "every N hours"
            JSONObject expression = JSONObject.parseObject(jobNodeConfig.getExpression());
            String type = expression == null ? null : expression.getString("type");
            if (type != null) {
                switch (type) {
                    case "hour":
                        // Lower bound of 6 enforces a minimum 6-minute interval — TODO confirm intent.
                        checkNumber(expression.getString("minute"), 59, 6, "分钟(minute)");
                        crontab = String.format("*/%s * * * *", expression.getString("minute"));
                        break;
                    case "day":
                        checkNumber(expression.getString("hour"), 23, 1, "小时(hour)");
                        // NOTE(review): "* */N * * *" fires every minute of every Nth hour —
                        // verify this is the intended "every N hours" behavior.
                        crontab = String.format("* */%s * * *", expression.getString("hour"));
                        break;
                    default:
                        break;
                }
            }
        }
        return crontab;
    }

    /**
     * Validates that {@code number} is a parseable integer within [minNum, maxNum].
     *
     * @param number   raw value taken from the schedule expression
     * @param maxNum   inclusive upper bound
     * @param minNum   inclusive lower bound
     * @param typeName human-readable field name used in the error message
     * @throws RRException when the value is blank, non-numeric, or out of range
     */
    public static void checkNumber(String number, Integer maxNum, Integer minNum, String typeName) {
        if (StringUtils.isBlank(number)) {
            throw new RRException("请输入" + typeName + "的值");
        }
        final int num;
        try {
            num = Integer.parseInt(number);
        } catch (NumberFormatException e) {
            // FIX: a non-numeric value previously escaped as a raw NumberFormatException;
            // surface it as the same user-facing range error instead.
            throw new RRException(typeName + "的值请在[" + minNum + "~" + maxNum + "]之间");
        }
        if (num > maxNum || num < minNum) {
            throw new RRException(typeName + "的值请在[" + minNum + "~" + maxNum + "]之间");
        }
    }

    /**
     * Converts a schedule-expression JSON string into a Quartz-style (6-field,
     * seconds-first) cron expression, e.g.
     * {"type":"month","day":1,"hour":12,"minute":10} becomes "0 10 12 1 * ?".
     *
     * <p>Returns {@code null} for a blank/unparsable expression or an unknown or
     * missing "type" (previously a missing "type" caused a NullPointerException
     * in the switch).
     *
     * @param expressionValue JSON schedule expression, may be null/empty
     * @return Quartz cron string, or null when none can be derived
     */
    public static String resolveCrontab(String expressionValue) {
        String crontab = null;
        if (StringUtils.isNotEmpty(expressionValue)) {
            JSONObject expression = JSONObject.parseObject(expressionValue);
            // FIX: guard against a missing "type" key — switching on null throws an NPE.
            String type = expression == null ? null : expression.getString("type");
            if (type != null) {
                switch (type) {
                    case "hour":
                        crontab = String.format("0 %s * * * ?", expression.getString("minute"));
                        break;
                    case "day":
                        crontab = String.format("0 %s %s * * ?", expression.getString("minute")
                                , expression.getString("hour"));
                        break;
                    case "week":
                        // Quartz uses named weekdays; translate the numeric value.
                        crontab = String.format("0 %s %s ? * %s", expression.getString("minute")
                                , expression.getString("hour"), getWeekData(expression.getString("week")));
                        break;
                    case "month":
                        crontab = String.format("0 %s %s %s * ?", expression.getString("minute")
                                , expression.getString("hour"), expression.getString("day"));
                        break;
                    default:
                        break;
                }
            }
        }
        return crontab;
    }

    /**
     * Maps a numeric weekday string ("1".."7", Monday-first) to the Quartz
     * day-of-week name ("MON".."SUN").
     *
     * <p>Blank, non-numeric, or out-of-range input falls back to "MON" — matching the
     * original behavior, but the parse failure is no longer silently swallowed by an
     * empty catch block; it is explicitly handled as the documented fallback.
     * Package-private (was private) so it can be unit-tested; behavior unchanged.
     *
     * @param week weekday number as a string, may be null
     * @return three-letter Quartz weekday name
     */
    static String getWeekData(String week) {
        final String[] dayNames = {"MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"};
        int day = 1; // default: Monday
        if (week != null && !week.isEmpty()) {
            try {
                day = Integer.parseInt(week.trim());
            } catch (NumberFormatException ignored) {
                // Non-numeric input: keep the Monday default (was an empty catch before).
                day = 1;
            }
        }
        if (day < 1 || day > 7) {
            day = 1;
        }
        return dayNames[day - 1];
    }

    /**
     * Wraps a generated shell command with the standard prologue and epilogue.
     *
     * <p>The prologue sources the environment, captures the Oozie-provided
     * {@code time_hour} into {@code param} (defaulting to the current timestamp
     * when unset — note the double quotes around {@code $param}: single quotes
     * would never be empty and broke the emptiness test), and echoes a begin
     * marker; the epilogue echoes an end marker. Job begin/end HTTP callbacks
     * were removed in favor of a separate end-trigger job (a failing job body
     * would otherwise skip the end callback).
     *
     * @param shell the command body to embed
     * @return prologue + shell + epilogue
     */
    public String wrapShell(String shell) {
        final String prologue =
                "source /etc/profile\n"
                        + "param=${time_hour}\n"
                        + "stat_date=`date +%Y-%m-%d`\n"
                        + "if [ -z \"$param\" ]; then\n"
                        + "param=`date '+%Y-%m-%d %H:%M:%S'`\n"
                        + "fi\n"
                        + "echo 'begin '\n";
        final String epilogue = "\necho 'end '";
        return new StringBuilder(prologue.length() + shell.length() + epilogue.length())
                .append(prologue)
                .append(shell)
                .append(epilogue)
                .toString();
    }


    /**
     * Scans the given texts for dynamic date placeholders (e.g. {@code ${yyyy-mm-dd}})
     * and, for each distinct token found, produces the shell snippet that resolves it
     * at runtime via the ParseDateTime scala helper into a shell variable whose name
     * is the token with dashes replaced by underscores.
     *
     * @param contents any number of raw strings (null entries are skipped)
     * @return map from the original placeholder token to its resolver shell snippet
     */
    public Map<String, String> extraParams(String... contents) {
        // Resolver template: %1$s = the raw token, %2$s = its shell-variable name.
        // (scala is invoked with an explicit -cp because a bare `scala` was not found.)
        final String resolverTemplate = "res=$(/opt/scala-2.11.12/bin/scala -cp data-development-job-1.0.jar:joda-time-2.8.1.jar  cn.getech.data.development.job.util.ParseDateTime '%s' \"$param\" ) || exit 10\n" +
                "%s=`echo $res |awk '{print $1}'`\n";
        final Pattern tokenPattern = Pattern.compile(
                "(\\$\\{y{0,4}[:-]*q{0,2}[:-]*m{0,2}[:-]*w{0,2}[:-]*d{0,2}[: -]*h{0,2}[:-]*(mi)?(-(\\d)+?)*?\\})");
        Map<String, String> resolvers = new HashMap<>();
        for (String text : contents) {
            if (text == null) {
                continue;
            }
            Matcher matcher = tokenPattern.matcher(text);
            while (matcher.find()) {
                String token = matcher.group(1);
                String shellVar = token.replace("${", "").replace("}", "").replace("-", "_");
                resolvers.put(token, String.format(resolverTemplate, token, shellVar));
            }
        }
        return resolvers;
    }

    /**
     * Builds the spark-submit command for a "procedure" job node (main jar on HDFS
     * under the node's directory) and wraps it with the standard begin/end scaffolding.
     *
     * <p>Optional positional arguments — input, output, extra args — are appended in
     * that order, each with its date placeholders normalized to underscore form.
     *
     * @param jobNodeConfig node supplying class name, resources and jar location
     * @param oozieConfig   unused here, kept for signature compatibility
     * @param bdpJobConfig  supplies the HDFS job-lib base path
     * @param params        date placeholder tokens to normalize in the arguments
     * @return the wrapped shell script
     */
    public String gentProcExeShell(JobNodeConfig jobNodeConfig, OozieConfig oozieConfig, BdpJobConfig bdpJobConfig, Set<String> params) {
        StringBuilder submit = new StringBuilder("spark-submit --class ")
                .append(jobNodeConfig.getBase_proc_main_class()).append(" \\\n")
                .append("--master yarn \\\n")
                .append("--deploy-mode cluster \\\n")
                .append("--driver-memory ").append(jobNodeConfig.getResource_dm()).append(" \\\n")
                .append("--driver-cores ").append(jobNodeConfig.getResource_dc()).append(" \\\n")
                .append("--executor-memory ").append(jobNodeConfig.getResource_em()).append(" \\\n")
                .append("--executor-cores ").append(jobNodeConfig.getResource_ec()).append(" \\\n")
                .append("--queue default \\\n")
                .append("hdfs://").append(bdpJobConfig.getJoblib())
                .append('/').append(jobNodeConfig.getJobNodeId())
                .append('/').append(jobNodeConfig.getBase_proc_main_file());

        if (StringUtils.isNotEmpty(jobNodeConfig.getBase_proc_main_in())) {
            submit.append(' ').append(replace_date_to_normal(jobNodeConfig.getBase_proc_main_in(), params));
        }
        if (StringUtils.isNotEmpty(jobNodeConfig.getBase_proc_main_out())) {
            submit.append(' ').append(replace_date_to_normal(jobNodeConfig.getBase_proc_main_out(), params));
        }
        if (StringUtils.isNotEmpty(jobNodeConfig.getBase_proc_main_args())) {
            submit.append(' ').append(replace_date_to_normal(jobNodeConfig.getBase_proc_main_args(), params));
        }

        return wrapShell(submit.toString());
    }

    /**
     * Generates the shell script that executes a SQL job node.
     *
     * <p>Flow: resolve dynamic date placeholders in the SQL into shell pre-commands
     * (appended to {@code paramCmd}), pick the query engine (beeline/Hive or
     * impala-shell), emit a script that downloads the uploaded .hql from HDFS,
     * sends it to the REST endpoint for system-parameter substitution, and runs
     * the substituted script; finally uploads the raw SQL to HDFS and, when drop
     * tables are configured, brackets the script with the drop-table pre-check
     * and post-drop calls.
     *
     * @param paramCmd       accumulator the caller provides; date-resolver snippets are appended
     * @param jobNodeConfig  node supplying SQL text, engine choice and system parameters
     * @param bdpJobConfig   supplies HDFS config/lib paths
     * @param userId         suffix for the per-user ranger account name
     * @param dropHiveTables table ids to drop after a successful run (may be empty)
     * @param queueName      YARN queue appended to the Hive JDBC URL
     * @return the wrapped shell script
     * @throws RRException when uploading the SQL file to HDFS fails
     */
    public String genSQLShell(StringBuilder paramCmd, JobNodeConfig jobNodeConfig, BdpJobConfig bdpJobConfig, DataDevelopmentConfig dataDevelopmentConfig, Long userId, HiveConfig hiveConfig, Set<Integer> dropHiveTables, OozieConfig oozieConfig, String queueName, ImplaConfig implaConfig) {

        String shell;
        List<String> cmdList = new ArrayList<>();
        // Build the command (legacy spark-submit variant kept below for reference)
//        cmdList.add("/bin/spark-submit");
//        cmdList.add("--class cn.getech.data.development.job.SqlExecutionJob");
////        cmdList.add("--jars /usr/share/java/mysql-connector-java.jar,/opt/cloudera/parcels/CDH/jars/libthrift-0.9.3.jar");
//        cmdList.add(String.format("hdfs://%s/data-development-job-1.0.jar", bdpJobConfig.getJoblib()));
//        cmdList.add(jobNodeConfig.getJobNodeId().toString());
//        cmdList.add("\"${param}\"");
//
//        shell = String.join(" ", cmdList.toArray(new String[cmdList.size()]));

        // Prepend scripts that resolve the dynamic date variables referenced in the SQL text
        Map<String, String> paramsMap = extraParams(jobNodeConfig.getSql_statment());
        for (String p : paramsMap.values()
        ) {
            paramCmd.append(p);
        }
        StringBuilder hiveconfs=new StringBuilder("");
        for (String p : paramsMap.keySet()
        ) {
            // Token "${yyyy-mm-dd}" becomes "--hivevar yyyy-mm-dd=${yyyy_mm_dd}"
            // (the underscore name is the shell variable set by the resolver snippet).
            p = p.replace("${", "").replace("}", "");
            String oldeP = p.replace("-","_");
            hiveconfs.append(String.format(" --hivevar %s=${%s}",p,oldeP));
        }
        String beelineCmd = "";
       // jobNodeConfig.setQuery_engine(1);
        // Hive query engine (default: query_engine is null or 1)
        if(null == jobNodeConfig.getQuery_engine()  || Objects.equals(jobNodeConfig.getQuery_engine(),1)){
           beelineCmd = String.format("/bin/beeline -u %s -n %s -p %s"
                    ,hiveConfig.getPressionUrl().substring(0,hiveConfig.getPressionUrl().length() - 1) + "?mapreduce.job.queuename=" + queueName
                    ,dataDevelopmentConfig.getUserPreffix() + userId
                    ,dataDevelopmentConfig.getUserPassword());
        }else{
            // Impala query engine (jobNodeConfig.getQuery_engine() == 2)
            // Permission fix: set PYTHON_EGG_CACHE before invoking impala-shell
            beelineCmd = String.format("export PYTHON_EGG_CACHE=/tmp\n/usr/bin/impala-shell -u %s -i %s",
                    dataDevelopmentConfig.getUserPreffix() + userId,
                    implaConfig.getPressionUrl());
            // Impala does not support --hivevar, so the flags are reset entirely
            hiveconfs = new StringBuilder("");
        }
//        String beelineCmd = String.format("/bin/beeline -u %s -n %s -p %s"
//                ,hiveConfig.getPressionUrl().substring(0,hiveConfig.getPressionUrl().length() - 1) + "?mapreduce.job.queuename=" + queueName
//                ,dataDevelopmentConfig.getUserPreffix() + userId
//                ,dataDevelopmentConfig.getUserPassword());

        // System-parameter variables: varStr holds shell assignments, hiveVars the matching flags
        StringBuilder varStr = new StringBuilder();
        StringBuilder hiveVars = new StringBuilder();
        if(CollectionUtil.isNotEmpty(jobNodeConfig.getSysParam())){
            varStr.append("\nparam_str=''${param}''");
            initVarStrs(varStr,hiveVars,jobNodeConfig.getSysParam());
        }

//        shell =  String.format(
//                "%s" +
//                        "\n%s" +
//                        "\nrm -rf ./%s ||exit 10" +
//                        "\nmkdir ./%s ||exit 10" +
//                        "\ncd ./%s ||exit 10" +
//                        "\nrm -f script_%s.hql ||exit 10" +
//                        "\nhdfs dfs -get  %s/%s/jobid_%s.hql  script_%s.hql ||exit 10 " +
//                        "\nsed -i ':a;N;$!ba;s/\\n/%s/g'  script_%s.hql ||exit 10 " +
//                        "\ndataline=$(cat ./script_%s.hql | tr \"\\\"\" \"\'\")" +
//                        "\nnewSql=`curl -X POST %s/runSysparams --connect-timeout 20 -H \"Content-Type:application/json;charset=UTF-8\"  -d \"{\\\"sql\\\":\\\"${dataline}\\\",\\\"jobNodeId\\\":\\\"%s\\\"}\"`" +
//                        "\necho \"${newSql}\"" +
//                        "\nstrError=\"\\\"code\\\":500\"" +
//                        "\nrestttt=$(echo $newSql | grep \"${strError}\")" +
//                        "\nif [[ \"$restttt\" != \"\" ]];then" +
//                        "\n\techo 'repleace sysparam error'" +
//                        "\nelse" +
//                        "\n\trm -f script_%s.hql ||exit 10" +
//                        "\n\techo \"${newSql}\" > script_%s.hql ||exit 10" +
//                        "\nfi" +
//                        "\nsed -i 's/%s/\\n/g' script_%s.hql ||exit 10" +
////                         "\necho \"\"\"%s\"\"\" > script_%s.hql || exit 10" +
//                        "\n%s -f script_%s.hql %s %s ||exit 10"
//                , paramCmd.toString()
//                , varStr.toString()
//                , jobNodeConfig.getJobNodeId()
//                , jobNodeConfig.getJobNodeId()
//                , jobNodeConfig.getJobNodeId()
//                , jobNodeConfig.getJobNodeId()
//                , bdpJobConfig.getJobconfig()
//                , JobType.SQL.getCode()
//                , jobNodeConfig.getJobNodeId()
//                , jobNodeConfig.getJobNodeId()
//
//                , dataDevelopmentConfig.getRandData()
//                , jobNodeConfig.getJobNodeId()
//
//                , jobNodeConfig.getJobNodeId()
//                , oozieConfig.getRest_url()
//                , jobNodeConfig.getJobNodeId()
//                , jobNodeConfig.getJobNodeId()
//                , jobNodeConfig.getJobNodeId()
//
//                , dataDevelopmentConfig.getRandData()
//                , jobNodeConfig.getJobNodeId()
////                ,replace_date_to_normal(jobNodeConfig.getSql_statment(), paramsMap.keySet())
////                , jobNodeConfig.getJobNodeId()
//                , beelineCmd
//                , jobNodeConfig.getJobNodeId()
//                , hiveconfs.toString()
//                , hiveVars.toString());


        // File-upload variant: send the whole .hql to the REST endpoint for
        // system-parameter substitution instead of inlining the SQL into JSON.
        shell =  String.format(
                "%s" +
                        "\n%s" +
                        "\nrm -rf ./%s ||exit 10" +
                        "\nmkdir ./%s ||exit 10" +
                        "\ncd ./%s ||exit 10" +
                        "\nrm -f script_%s.hql ||exit 10" +
                        "\nhdfs dfs -get  %s/%s/jobid_%s.hql  script_%s.hql ||exit 10 " +
//                        "\nsed -i ':a;N;$!ba;s/\\n/%s/g'  script_%s.hql ||exit 10 " +
//                        "\ndataline=$(cat ./script_%s.hql | tr \"\\\"\" \"\'\")" +
                        "\nnewSql=`curl  --connect-timeout %s --max-time %s -XPOST -F \"file=@script_%s.hql\" -F \"jobNodeId=%s\" %s/runFileSysparams`"+
                        "\necho \"${newSql}\"" +
                        "\nstrError=\"\\\"code\\\":500\"" +
                        "\nrestttt=$(echo $newSql | grep \"${strError}\")" +
                        "\nif [[ \"$restttt\" != \"\" ]];then" +
                        "\n\techo 'repleace sysparam error'" +
                        "\nelse" +
                        "\n\trm -f script_%s.hql ||exit 10" +
                        "\n\techo \"${newSql}\" > script_%s.hql ||exit 10" +
                        "\nfi" +
//                         "\necho \"\"\"%s\"\"\" > script_%s.hql || exit 10" +
                        "\n%s -f script_%s.hql %s %s ||exit 10"
                , paramCmd.toString()
                , varStr.toString()
                , jobNodeConfig.getJobNodeId()
                , jobNodeConfig.getJobNodeId()
                , jobNodeConfig.getJobNodeId()
                , jobNodeConfig.getJobNodeId()
                , bdpJobConfig.getJobconfig()
                , JobType.SQL.getCode()
                , jobNodeConfig.getJobNodeId()
                , jobNodeConfig.getJobNodeId()
                , dataDevelopmentConfig.getCurlconnectTime()
                , dataDevelopmentConfig.getCurlMaxTime()

                , jobNodeConfig.getJobNodeId()
                , jobNodeConfig.getJobNodeId()
                , oozieConfig.getRest_url()

                , jobNodeConfig.getJobNodeId()
                , jobNodeConfig.getJobNodeId()

//                ,replace_date_to_normal(jobNodeConfig.getSql_statment(), paramsMap.keySet())
//                , jobNodeConfig.getJobNodeId()
                , beelineCmd
                , jobNodeConfig.getJobNodeId()
                , hiveconfs.toString()
                , hiveVars.toString());


        // Upload the raw SQL script to HDFS so the generated shell can fetch it
        try {
//            JobNodeSQLDto jobNodeSQLDto = new JobNodeSQLDto();
//            jobNodeSQLDto.setSqlStatment(jobNodeConfig.getSql_statment());
//            jobNodeSQLDto.setUserName(dataDevelopmentConfig.getUserPreffix() + userId);
//            jobNodeSQLDto.setUserPassword(dataDevelopmentConfig.getUserPassword());
//            jobNodeSQLDto.setHiveserverurl(dataDevelopmentConfig.getHiveserverurl());
//            upLoadSQLJobConf(jobNodeSQLDto, bdpJobConfig, jobNodeConfig.getJobNodeId());
//            upLoadSQL(bdpJobConfig, jobNodeConfig.getJobNodeId(),replace_date_to_normal_forsql(jobNodeConfig.getSql_statment(), paramsMap.keySet()));
            upLoadSQL(bdpJobConfig, jobNodeConfig.getJobNodeId(),jobNodeConfig.getSql_statment());
        } catch (Exception e) {
            throw new RRException(DataDevelopmentBizExceptionEnum.UPLOAD_JOB_CONF_ERROR.getMessage());
        }

        // Bracket the script with the drop-table pre-check and post-drop calls
        if(CollectionUtil.isNotEmpty(dropHiveTables)){
            shell = upAndDownShell(shell,jobNodeConfig.getJobNodeId(),dropHiveTables,oozieConfig);
        }

        return wrapShell(shell);
    }

    /**
     * Appends, for each user-defined system parameter, a shell variable definition
     * to {@code varStr} and the matching "--hivevar" flag to {@code hiveVars}.
     *
     * <p>Value handling: numbers are assigned as-is; values starting with "${" are
     * treated as date tokens and resolved at runtime via the ParseDateTime helper;
     * any other value is single-quoted for SQL usage. Every variable is echoed for
     * debugging of the generated script.
     *
     * @param varStr   accumulator for shell assignments (mutated)
     * @param hiveVars accumulator for --hivevar flags (mutated)
     * @param sysParam list of single-entry maps {paramName -> value}; may be null/empty
     */
    private void initVarStrs(StringBuilder varStr, StringBuilder hiveVars, List<Map<String, Object>> sysParam) {
        if (CollectionUtil.isEmpty(sysParam)) {
            return;
        }
        for (Map<String, Object> map : sysParam) {
            if (MapUtil.isEmpty(map)) {
                continue;
            }
            for (Map.Entry<String, Object> entry : map.entrySet()) {
                String s = entry.getKey();
                Object va = entry.getValue();
                if (va == null) {
                    // FIX: a null value previously reached va.toString() and threw an NPE; skip it.
                    continue;
                }
                if (isNumber(va)) {
                    // Numeric literal: assign unquoted.
                    varStr.append("\n").append("res_").append(s).append("=").append(va);
                } else {
                    String vas = va.toString();
                    if (vas.startsWith("${")) {
                        // Date token: resolve it at job runtime via the scala helper.
                        varStr.append("\n").append(s).append("='").append(va).append("'");
                        varStr.append("\n").append("res_").append(s).append("=`/opt/scala-2.11.12/bin/scala -cp data-development-job-1.0.jar:joda-time-2.8.1.jar  cn.getech.data.development.job.util.ParseDateTime ${").append(s).append("} \"${param_str}\" ` || exit 10");
                    } else {
                        // Plain string: wrap in single quotes for SQL.
                        varStr.append("\n").append("res_").append(s).append("=\"'").append(va).append("'\"");
                    }
                }
                varStr.append("\necho ").append("\"res_").append(s).append(":${res_").append(s).append("}\"");
                hiveVars.append(" --hivevar ").append(s).append("=").append("${res_").append(s).append("}");
            }
        }
    }


    /**
     * Tells whether {@code obj} is numeric: either a {@link Number} instance, or a
     * String that {@link Double#parseDouble(String)} accepts. Anything else —
     * including null — is not numeric.
     */
    boolean isNumber(Object obj) {
        if (obj instanceof Number) {
            return true;
        }
        if (!(obj instanceof String)) {
            return false;
        }
        try {
            Double.parseDouble((String) obj);
            return true;
        } catch (Exception ignored) {
            // Non-numeric string (NumberFormatException); treat as not a number.
            return false;
        }
    }

    /**
     * Brackets the job shell with a pre-check REST call ("may these tables be dropped?",
     * fails the job on error) and a post-run REST call that drops the given tables.
     *
     * @param jobNodeId      node id passed to the pre-check endpoint
     * @param dropHiveTables table ids joined with commas for the drop endpoint
     * @return pre-check + shell + drop call
     */
    private String upAndDownShell(String shell, Integer jobNodeId, Set<Integer> dropHiveTables, OozieConfig oozieConfig) {
        // FIX: build "1,2,3" with a separator-aware join instead of appending a trailing
        // comma and substring-ing it off — the old code threw StringIndexOutOfBounds on
        // an empty set (callers currently guard, but this no longer depends on it).
        StringBuilder ids = new StringBuilder();
        for (Integer tableId : dropHiveTables) {
            if (ids.length() > 0) {
                ids.append(',');
            }
            ids.append(tableId);
        }
        // "cheackIsDrop" is the real endpoint name (typo preserved on the server side).
        String begin = "curl --connect-timeout 10 -X GET " + oozieConfig.getRest_url() + "/cheackIsDrop/" + jobNodeId + " || exit 10\n";
        String end = "\ncurl --connect-timeout 10 -X POST " + oozieConfig.getRest_url() + "/dropTables/" + ids + " || exit 10";
        return begin + shell + end;
    }

//    public String genConfigFile(JobNodeConfig jobNodeConfig, ConfConnect conf, ConnectTypeEnum confConnectType, Set<String> params) {
//        StringBuilder config = new StringBuilder();
//        // sql查询, jobNodeConfig.getInput_input_type().equals("sql")
//        String tabAndLoc = genTableLocation(jobNodeConfig);
//        if (StringUtils.isEmpty(jobNodeConfig.getHight_thread())) {
//            jobNodeConfig.setHight_thread("1");
//        }
//        if (StringUtils.isEmpty(jobNodeConfig.getHight_file_num())) {
//            jobNodeConfig.setHight_file_num("1");
//        }
//        config.append(String.format("exec: {max_threads: %s, min_output_tasks: %s}\n", jobNodeConfig.getHight_thread(), jobNodeConfig.getHight_file_num()));
//
//
//        //关系型数据的实现
//        if (confConnectType.getType().getCode() == ConTypeType.JDBC.getCode()) {
//            config.append(String.format("in:\n" +
//                            "  type: %s\n" +
//                            "  host: %s\n" +
//                            "  port: %s\n" +
//                            "  user: %s\n" +
//                            "  password: %s\n" +
//                            "  database: %s\n" +
//                            "  query: %s\n" +
//                            "  use_raw_query_with_incremental: false\n" +
//                            "  options: {useLegacyDatetimeCode: false, serverTimezone: CST}\n",
//                    confConnectType.getName(),
//                    conf.getHost(),
//                    conf.getPort(),
//                    conf.getUsername(),
//                    conf.getPassword(),
//                    conf.getDbname(),
//                    replace_date_to_normal(jobNodeConfig.getInput_input_content(), params)));
//        } else {
//            config.append(String.format("in:\n" +
//                            "  type: ftp\n" +
//                            "  host: %s\n" +
//                            "  port: %s\n" +
//                            "  user: %s\n" +
//                            "  password: \"%s\"\n" +
//                            "  path_prefix: %s\n" +
//                            "\n" +
//                            "  ssl: false\n" +
//                            "  ssl_verify: false\n",
//                    conf.getHost(),
//                    conf.getPort(),
//                    conf.getUsername(),
//                    conf.getPassword(),
//                    replace_date_to_normal(jobNodeConfig.getFtpFileLocation(), params)));
//        }
//
//
//        if (DataFormat.parquet.getName().equals(conf.getFormat())) {
//            config.append(String.format("out:\n" +
//                    "  type: parquet\n" +
//                    "  config_files: [/etc/hadoop/conf/core-site.xml, /etc/hadoop/conf/hdfs-site.xml]\n" +
//                    "  config: {fs.defaultFS: 'hdfs://%s', fs.hdfs.impl: org.apache.hadoop.hdfs.DistributedFileSystem,\n" +
//                    "    fs.file.impl: org.apache.hadoop.fs.LocalFileSystem}\n" +
//                    "  path_prefix: /user/hive/warehouse/%s/\n" +
//                    "  file_ext: text${BUILD_NUMBER}\n" +
//                    "  mode: overwrite\n", bdpJobConfig.getNamespace(), replace_date_to_normal(tabAndLoc, params)));
//        } else {
//            config.append(String.format("out:\n" +
//                    "  type: hdfs\n" +
//                    "  config_files: [/etc/hadoop/conf/core-site.xml, /etc/hadoop/conf/hdfs-site.xml]\n" +
//                    "  config: {fs.defaultFS: 'hdfs://%s', fs.hdfs.impl: org.apache.hadoop.hdfs.DistributedFileSystem,\n" +
//                    "    fs.file.impl: org.apache.hadoop.fs.LocalFileSystem}\n" +
//                    "  path_prefix: /user/hive/warehouse/%s/\n" +
//                    "  file_ext: text${BUILD_NUMBER}\n" +
//                    "  mode: overwrite\n", bdpJobConfig.getNamespace(), replace_date_to_normal(tabAndLoc, params)));
//        }
//
//
//        if (DataFormat.text.getName().equals(conf.getFormat())) {
//            config.append(String.format(
//                    "  formatter:\n" +
//                            "    type: csv\n" +
//                            "    encoding: UTF-8\n" +
//                            "    header_line: false\n" +
//                            "    delimiter: '%s'\n", DelimitFormat.ObjOf(conf.getDelimitId()).getExpress()));
//        } else if (DataFormat.json.getName().equals(conf.getFormat())) {
//            config.append(String.format(
//                    "  formatter: {type: jsonl, encoding: UTF-8}"));
//        }
//        return config.toString();
//    }


    /**
     * Rewrites every date placeholder in {@code content} to its shell-safe form by
     * replacing dashes with underscores inside each token, e.g.
     * "${yyyy-mm-dd}" becomes "${yyyy_mm_dd}".
     *
     * @param content text containing zero or more placeholder tokens
     * @param params  the exact tokens to rewrite
     * @return the rewritten text
     */
    public String replace_date_to_normal(String content, Set<String> params) {
        String result = content;
        for (String token : params) {
            result = result.replace(token, token.replace("-", "_"));
        }
        return result;
    }

    /**
     * Rewrites every date placeholder in {@code content} to its hivevar reference form:
     * dashes become underscores and the "${" prefix becomes "${hivevar:", e.g.
     * "${yyyy-mm-dd}" becomes "${hivevar:yyyy_mm_dd}".
     *
     * @param content SQL text containing zero or more placeholder tokens
     * @param params  the exact tokens to rewrite
     * @return the rewritten SQL text
     */
    public String replace_date_to_normal_forsql(String content, Set<String> params) {
        String result = content;
        for (String token : params) {
            String hivevarRef = token.replace("-", "_").replace("${", "${hivevar:");
            result = result.replace(token, hivevarRef);
        }
        return result;
    }

    /**
     * Builds the warehouse-relative table path for the node's output table,
     * optionally suffixed with "/"-joined partition segments, e.g.
     * "mydb.db/mytable/dt=2020/hr=01". The "default" database maps to the bare
     * table name.
     *
     * @param jobNodeConfig node supplying db name, table name and partition spec
     * @return table path, with partition suffix when partitions are configured
     */
    public String genTableLocation(JobNodeConfig jobNodeConfig) {
        String tabName = "default".equals(jobNodeConfig.getOutput_db_name())
                ? jobNodeConfig.getOutput_table_name()
                : jobNodeConfig.getOutput_db_name() + ".db/" + jobNodeConfig.getOutput_table_name();

        String partition = null;
        if (StringUtils.isNotEmpty(jobNodeConfig.getOutput_data_partition())) {
            ArrayList<String> arrayList = new ArrayList<String>();
            for (String item : StringUtils.split(jobNodeConfig.getOutput_data_partition(), ",")) {
                // FIX: the original called StringUtils.trimToNull(item) and discarded the
                // result, so untrimmed segments were added anyway. Apply the trim and
                // skip segments that are blank.
                String trimmed = StringUtils.trimToNull(item);
                if (trimmed != null) {
                    arrayList.add(trimmed);
                }
            }
            partition = StringUtils.join(arrayList.toArray(), "/");
        }
        return (partition == null) ? tabName : tabName + "/" + partition;
    }


    /**
     * Generates the shell that exports Hive data to a relational database: a
     * spark-submit of ExportHiveToRDB driven by a job config file uploaded to HDFS.
     *
     * <p>Also resolves dynamic date placeholders into pre-commands (appended to
     * {@code paramCmd}) and, when drop tables are configured, brackets the script
     * with the drop-table pre-check and post-drop calls.
     *
     * <p>Side effects: mutates {@code jobNodeConfig} (ranger credentials, SQL
     * statement, node id) before uploading its config.
     *
     * @param paramCmd  accumulator the caller provides; date-resolver snippets are appended
     * @param jobNodeId node id written into the config before upload
     * @return the wrapped shell script
     * @throws RRException when uploading the job config to HDFS fails
     */
    public String genOutPutShell(JobNodeConfig jobNodeConfig, ConfConnect conf, ConnectTypeEnum confConnectType, BdpJobConfig bdpJobConfig, Long userId, DataDevelopmentConfig dataDevelopmentConfig, StringBuilder paramCmd, Set<Integer> dropHiveTables, OozieConfig oozieConfig, String queueName, Integer jobNodeId) {
        String shell;

        List<String> cmdList = new ArrayList<>();
        // Build the spark-submit command; first resolve date placeholders from all inputs
        Map<String, String> paramsMap = extraParams(jobNodeConfig.getInput_file_position(), jobNodeConfig.getInput_input_content(), jobNodeConfig.getOutput_data_partition(), jobNodeConfig.getFtpFileLocation());
        for (String p : paramsMap.values()
        ) {
            paramCmd.append(p);
        }
        cmdList.add(String.format("%s \n", paramCmd.toString()));
        cmdList.add("/bin/spark-submit  --master yarn --deploy-mode cluster --queue " + queueName);
        cmdList.add("--class cn.getech.data.development.job.ExportHiveToRDB");
//        cmdList.add(String.format("--num-executors %s",jobNodeConfig.getHight_thread()));
        cmdList.add(String.format("--driver-memory %sG",jobNodeConfig.getResource_dm()));
        cmdList.add(String.format("--driver-cores %s",jobNodeConfig.getResource_dc()));
        cmdList.add(String.format("--executor-cores %s",jobNodeConfig.getResource_ec()));
        cmdList.add(String.format("--executor-memory %sG",jobNodeConfig.getResource_em()));
//        cmdList.add("--jars /usr/share/java/mysql-connector-java.jar,/usr/share/java/sqljdbc4-chs-4.0.2206.100.jar,/usr/share/java/ojdbc6-11.2.0.3.jar,/opt/cloudera/parcels/CDH/jars/libthrift-0.9.3.jar");
        // Pick the job jar matching the target's JDBC driver generation (MySQL 5.x needs the 5.0 build)
        String jdbcVersion = conf.getJdbcVersion();
        if(StringUtils.isNotEmpty(jdbcVersion) && "5.1".equalsIgnoreCase(jdbcVersion)){
            cmdList.add(String.format("hdfs://%s/data-development-job-1.0-mysql5.0.jar", bdpJobConfig.getJoblib()));
        }else{
            cmdList.add(String.format("hdfs://%s/data-development-job-1.0.jar", bdpJobConfig.getJoblib()));
        }
        cmdList.add(jobNodeConfig.getJobNodeId().toString());
        cmdList.add("\"${param}\" ||exit 10");

        shell = String.join(" ", cmdList.toArray(new String[cmdList.size()]));


        // Upload the job config file to HDFS
        try {
            jobNodeConfig.setRangerUserName(dataDevelopmentConfig.getUserPreffix() + userId);
            jobNodeConfig.setRangerUserPassword(dataDevelopmentConfig.getUserPassword());
            // NOTE(review): sql_statment is overwritten with the output pre-statement here —
            // presumably the export job runs it before extraction; confirm against ExportHiveToRDB.
            jobNodeConfig.setSql_statment(jobNodeConfig.getOutput_pre_statment());
            jobNodeConfig.setJobNodeId(jobNodeId);
            upLoadJobConf(jobNodeConfig, conf, confConnectType, bdpJobConfig,queueName);
        } catch (Exception e) {
            throw new RRException(DataDevelopmentBizExceptionEnum.UPLOAD_JOB_CONF_ERROR.getMessage());
        }

        // Bracket the script with the drop-table pre-check and post-drop calls
        if(CollectionUtil.isNotEmpty(dropHiveTables)){
            shell = upAndDownShell(shell,jobNodeConfig.getJobNodeId(),dropHiveTables,oozieConfig);
        }

        return wrapShell(shell);
    }

    /**
     * Builds the shell command that submits the ML algorithm job
     * (AlgorithmAppJob) to YARN through spark-submit, with a fixed
     * resource allocation.
     *
     * @param paramCmd accumulates the exported parameter assignments; mutated in place
     * @return the complete wrapped shell script
     */
    public String genAlgorithmShell(JobNodeConfig jobNodeConfig,StringBuilder paramCmd, String queueName) {
        // Export every extracted parameter ahead of the spark-submit call.
        Map<String, String> params = extraParams(jobNodeConfig.getInput_file_position(),
                jobNodeConfig.getInput_input_content(),
                jobNodeConfig.getOutput_data_partition(),
                jobNodeConfig.getFtpFileLocation());
        for (String value : params.values()) {
            paramCmd.append(value);
        }

        List<String> parts = new ArrayList<>();
        parts.add(String.format("%s \n", paramCmd.toString()));
        parts.add("/bin/spark-submit  --master yarn --deploy-mode cluster --queue " + queueName);
        parts.add("--class cn.getech.data.development.ml.task.AlgorithmAppJob");
        // Algorithm jobs always run with this fixed resource footprint.
        parts.add("--driver-memory 1G");
        parts.add("--driver-cores 1");
        parts.add("--executor-cores 2");
        parts.add("--executor-memory 4G");
        parts.add(String.format("hdfs://%s/data-development-ml-job-1.0.jar", bdpJobConfig.getJoblib()));
        parts.add(jobNodeConfig.getJobNodeId().toString());
        parts.add(jobNodeConfig.getModelId().toString());
        parts.add("\"${param}\" ||exit 10");

        return wrapShell(String.join(" ", parts));
    }


    /**
     * Collects the output-side configuration shared by all ingest shells:
     * write mode/format, target db/table, partition spec, file count,
     * ranger user name and the affected table ids.
     *
     * @return a fresh map of output configuration entries for the Spark job
     */
    public Map<String, Object> generateOutPutConfMap(JobNodeConfig jobNodeConfig,String output_write_format, DataDevelopmentConfig dataDevelopmentConfig, String restTableIds,Long userId){
        Map<String, Object> outputConf = new HashMap<>();
        outputConf.put("output_write_model", jobNodeConfig.getOutput_write_model());
        outputConf.put("output_data_partition", jobNodeConfig.getOutput_data_partition());
        outputConf.put("output_write_format", output_write_format);
        outputConf.put("output_db_name", jobNodeConfig.getOutput_db_name());
        outputConf.put("output_table_name", jobNodeConfig.getOutput_table_name());
        Integer engineType = jobNodeConfig.getOutput_storage_engine_type();
        // When the storage engine is kudu (type 2), storage goes through
        // impala, so the table name must be qualified as "impala::db.table".
        if (engineType != null && engineType == 2) {
            outputConf.put("output_table_name",
                    "impala::" + jobNodeConfig.getOutput_db_name() + "." + jobNodeConfig.getOutput_table_name());
            // Kudu tables carry no hive partition spec — clear it.
            outputConf.put("output_data_partition", null);
        }
        outputConf.put("hight_file_num", jobNodeConfig.getHight_file_num());
        outputConf.put("rangerUserName", dataDevelopmentConfig.getUserPreffix() + userId);
        outputConf.put("allTableIds", restTableIds);
        return outputConf;
    }

    /**
     * Builds the shell command for importing FTP files into hive: writes the
     * FTP connection/output configuration to HDFS as a json file, then
     * assembles the spark-submit command for FtpData2Hive.
     *
     * @param paramCmd accumulates the exported parameter assignments; mutated in place
     * @return the complete wrapped shell script
     */
    public String getFTPInPutShell(JobNodeConfig jobNodeConfig, ConfConnect conf, BdpJobConfig bdpJobConfig, String output_write_format, StringBuilder paramCmd, String restTableIds, DataDevelopmentConfig dataDevelopmentConfig, String queueName,Long userId){
        Map<String, Object> ftpConf = new HashMap<>();
        ftpConf.put("ftp_port", conf.getPort());
        ftpConf.put("ftp_user", conf.getUsername());
        ftpConf.put("ftp_passwd", conf.getPassword());
        ftpConf.put("ftp_file_path", jobNodeConfig.getFtpFileLocation());
        ftpConf.put("ftpFileType", jobNodeConfig.getFtpFileType());
        ftpConf.put("ftpFileMatchType", jobNodeConfig.getFtpFileMatchType());
        ftpConf.put("ftpFileName", jobNodeConfig.getFtpFileName());
        ftpConf.put("ftp_host", conf.getHost());
        ftpConf.put("output_storage_engine_type", jobNodeConfig.getOutput_storage_engine_type());
        ftpConf.putAll(generateOutPutConfMap(jobNodeConfig, output_write_format, dataDevelopmentConfig, restTableIds, userId));
        // 0 marks a non-standard file layout; flag it for the Spark job.
        Integer nonStandard = jobNodeConfig.getNonStandardFormat();
        if (nonStandard != null && nonStandard.equals(0)) {
            ftpConf.put("no_standard_format", true);
        }

        // Upload the FTP job configuration to HDFS.
        String ftpJsonName = uploadConf(jobNodeConfig, ftpConf);

        // Assemble the command.
        generateCommend(jobNodeConfig, paramCmd);
        String className = "cn.getech.data.development.job.FtpData2Hive --master yarn --deploy-mode cluster --queue " + queueName;
        String shell = generateShell(new ArrayList<>(), paramCmd, jobNodeConfig, ftpJsonName, className);
        return wrapShell(shell);
    }



    /**
     * Builds the shell command for importing SFTP files into hive: writes the
     * SFTP connection/output configuration to HDFS as a json file, then
     * assembles the spark-submit command for SftpData2Hive.
     *
     * @param paramCmd accumulates the exported parameter assignments; mutated in place
     * @return the complete wrapped shell script
     */
    public String getSFTPInPutShell(JobNodeConfig jobNodeConfig, ConfConnect conf, BdpJobConfig bdpJobConfig, String output_write_format, StringBuilder paramCmd, String restTableIds, DataDevelopmentConfig dataDevelopmentConfig, String queueName,Long userId){
        Map<String, Object> sftpConf = new HashMap<>();
        sftpConf.put("sftp_port", conf.getPort());
        sftpConf.put("sftp_user", conf.getUsername());
        sftpConf.put("sftp_passwd", conf.getPassword());
        sftpConf.put("sftp_file_path", jobNodeConfig.getFtpFileLocation());
        sftpConf.put("sftpFileType", jobNodeConfig.getFtpFileType());
        sftpConf.put("sftpFileMatchType", jobNodeConfig.getFtpFileMatchType());
        sftpConf.put("sftpFileName", jobNodeConfig.getFtpFileName());
        sftpConf.put("sftp_host", conf.getHost());
        sftpConf.put("output_storage_engine_type", jobNodeConfig.getOutput_storage_engine_type());
        sftpConf.putAll(generateOutPutConfMap(jobNodeConfig, output_write_format, dataDevelopmentConfig, restTableIds, userId));
        // 0 marks a non-standard file layout; flag it for the Spark job.
        Integer nonStandard = jobNodeConfig.getNonStandardFormat();
        if (nonStandard != null && nonStandard.equals(0)) {
            sftpConf.put("no_standard_format", true);
        }

        // Upload the SFTP job configuration to HDFS.
        String sftpJsonName = uploadConf(jobNodeConfig, sftpConf);

        // Assemble the command.
        generateCommend(jobNodeConfig, paramCmd);
        String className = "cn.getech.data.development.job.SftpData2Hive --master yarn --deploy-mode cluster --queue " + queueName;
        String shell = generateShell(new ArrayList<>(), paramCmd, jobNodeConfig, sftpJsonName, className);
        return wrapShell(shell);
    }



    /**
     * Builds the shell command for importing HTTP-interface (API) data into
     * hive: materialises the request configuration, uploads it to HDFS and
     * assembles the spark-submit command for InterfaceData2Hive.
     *
     * @param paramCmd accumulates the exported parameter assignments; mutated in place
     * @return the complete wrapped shell script
     */
    public String getJieKouInPutShell(JobNodeConfig jobNodeConfig, ConfConnect conf, BdpJobConfig bdpJobConfig, String output_write_format, StringBuilder paramCmd, String restTableIds, DataDevelopmentConfig dataDevelopmentConfig, String queueName,Long userId){
        SapConnectField sapConnectFieldInfo = jobNodeConfig.getSapConnectFieldInfo();
        List<JSONObject> inputParams = (sapConnectFieldInfo == null) ? null : sapConnectFieldInfo.getAinputParams();

        Map<String, Object> apiConf = new HashMap<>();
        apiConf.put("interface_url", conf.getInterfaceUrl());
        apiConf.put("request_type", RequetTypeEnum.ObjOf(conf.getRequestType()).getName());
        apiConf.put("format", conf.getFormat());
        apiConf.put("code", CodeType.ObjOf(conf.getCode()).getName());
        apiConf.put("output_storage_engine_type", jobNodeConfig.getOutput_storage_engine_type());
        apiConf.put("path", conf.getFtpFileLocation());
        apiConf.put("start_page_count", jobNodeConfig.getStartPageCount());
        apiConf.put("enable_incresement", jobNodeConfig.getEnableIncresement());
        apiConf.putAll(generateOutPutConfMap(jobNodeConfig, output_write_format, dataDevelopmentConfig, restTableIds, userId));
        // Resolve the page/endName placeholders inside the request parameters.
        reSetInterFaceParam(inputParams, apiConf, jobNodeConfig);

        // Upload the API job configuration to HDFS.
        String apiJsonName = uploadConf(jobNodeConfig, apiConf);

        // Assemble the command: export every extracted parameter first.
        Map<String, String> params = extraParams(jobNodeConfig.getInput_file_position(),
                jobNodeConfig.getInput_input_content(),
                jobNodeConfig.getOutput_data_partition(),
                jobNodeConfig.getFtpFileLocation());
        for (String value : params.values()) {
            paramCmd.append(value);
        }

        String className = "cn.getech.data.development.job.InterfaceData2Hive --master yarn --deploy-mode cluster --queue " + queueName;
        return wrapShell(generateShell(new ArrayList<>(), paramCmd, jobNodeConfig, apiJsonName, className));
    }
    /**
     * Rewrites the interface request parameters in place: any parameter whose
     * value is one of the placeholders {{pageNum}}, {{pageSize}} or
     * {{endName}} gets the corresponding JobNodeConfig field value injected,
     * then the list is exposed to the Spark job under "filed_infos".
     *
     * @param inputParams request parameter objects; may be null or empty (no-op)
     * @param apiConnectFiledToSpark configuration map handed to the Spark job; mutated in place
     */
    public void reSetInterFaceParam(List<JSONObject> inputParams,Map<String, Object> apiConnectFiledToSpark,JobNodeConfig jobNodeConfig){
        if (inputParams == null || inputParams.isEmpty()) {
            return;
        }
        for (JSONObject jsonObject : inputParams) {
            // Iterate over a snapshot of the keys: the loop body puts new keys
            // ("fieldType"/"fieldValue"/"fildEntry") into jsonObject, and
            // mutating a map while iterating its live key set risks a
            // ConcurrentModificationException.
            for (Object key : new ArrayList<>(jsonObject.keySet())) {
                Object value = jsonObject.get(key);
                if (!(value instanceof String)) {
                    continue;
                }
                String str = (String) value;
                if (str.equals("{{pageNum}}") || str.equals("{{pageSize}}") || str.equals("{{endName}}")) {
                    // Strip the {{ }} wrapper to get the config field name.
                    String field = str.replaceAll("\\{\\{", "").replaceAll("\\}\\}", "");
                    jsonObject.put("fieldType", jsonObject.getInteger("fieldType"));
                    jsonObject.put("fieldValue", getFieldValue(jobNodeConfig, field));
                    // NOTE(review): "fildEntry" looks like a typo of "fieldEntry",
                    // but it is a wire key the Spark job reads — kept as-is.
                    jsonObject.putIfAbsent("fildEntry", field);
                    break;
                }
            }
        }
        apiConnectFiledToSpark.put("filed_infos", inputParams);
    }
    /**
     * Reflectively reads the value of the named field from a JobNodeConfig
     * instance (used to resolve {{placeholder}} values).
     *
     * @param fieldName the declared field name to look up
     * @return the field's value, or -1 when no declared field matches
     * @throws RRException when the field exists but cannot be read
     */
    public Object getFieldValue(JobNodeConfig jobNodeConfig, String fieldName){
        for (Field field : jobNodeConfig.getClass().getDeclaredFields()) {
            // Only touch accessibility on the field we actually read; the
            // original toggled every field and never restored the match.
            if (field.getName().equals(fieldName)) {
                try {
                    field.setAccessible(true);
                    return field.get(jobNodeConfig);
                } catch (IllegalAccessException e) {
                    throw new RRException(DataDevelopmentBizExceptionEnum.DATA_SAVE_ERROR.getMessage());
                }
            }
        }
        // Sentinel for "no such field" — callers rely on -1, not null.
        return -1;
    }

    /**
     * Builds the shell command for importing SOAP/WebService data into hive
     * (SapPIData2Hive): assembles the request configuration (with a separate
     * parameter set for POST-raw requests), uploads it to HDFS, and produces
     * the spark-submit command line.
     *
     * @param paramCmd accumulates the exported parameter assignments; mutated in place
     * @return the complete wrapped shell script
     */
    public String getWebServiceInPutShell(JobNodeConfig jobNodeConfig, ConfConnect conf, BdpJobConfig bdpJobConfig, StringBuilder paramCmd, String restTableIds, DataDevelopmentConfig dataDevelopmentConfig, String queueName,Long userId){
        String shell;
        List<String> cmdList = new ArrayList<>();
        Map<String, Object> apiConnectFiledToSpark = new HashMap<>();
        if(Objects.equals(conf.getRequestType(),3)){
            // Request type 3 = POST-raw; raw requests take a different parameter set.
            apiConnectFiledToSpark.put("request_type", "POST-raw");
            // Request content type: 2 = application/json, otherwise application/xml.
            String reqType = Objects.equals(jobNodeConfig.getReqType(),2) ? "application/json" : "application/xml";
            apiConnectFiledToSpark.put("reqType", reqType);
            // Raw request payload (xml body).
            apiConnectFiledToSpark.put("webserviceText", jobNodeConfig.getWebserviceText());
            // Response format: 2 = json, otherwise xml.
            String resType = Objects.equals(jobNodeConfig.getResType(),2) ? "json" : "xml";
            apiConnectFiledToSpark.put("resType", resType);
            // Path used to resolve table names from the xml response.
            apiConnectFiledToSpark.put("anaTableNameUrl", jobNodeConfig.getAnaTableNameUrl());
        }else{
            if(Objects.equals(conf.getRequestType(),1)){
                apiConnectFiledToSpark.put("request_type", "GET");
            }
            if(Objects.equals(conf.getRequestType(),2)){
                apiConnectFiledToSpark.put("request_type", "POST");
            }
            apiConnectFiledToSpark.put("target_name_space", conf.getTargetNameSpace());
            apiConnectFiledToSpark.put("function_name", conf.getFunctionName());
            apiConnectFiledToSpark.put("username", conf.getUsername());
            apiConnectFiledToSpark.put("password", conf.getPassword());
            apiConnectFiledToSpark.put("format", conf.getFormat());
            // Convert the configured input parameters into ConnectFieldInfo rows.
            if (null!=jobNodeConfig.getSapConnectFieldInfo()&&CollectionUtil.isNotEmpty(jobNodeConfig.getSapConnectFieldInfo().getAinputParams())){
                List<JSONObject> ainputParams = jobNodeConfig.getSapConnectFieldInfo().getAinputParams();
                List<ConnectFieldInfo> connectFieldInfos = new ArrayList<>();
                for (JSONObject a:ainputParams){
                    ConnectFieldInfo connectFieldInfo = new ConnectFieldInfo();
                    connectFieldInfo.setConnectId(jobNodeConfig.getInput_connect_id());
                    connectFieldInfo.setFieldAlias(a.getString("fieldAlias"));
                    connectFieldInfo.setFieldName(a.getString("fieldName"));
                    connectFieldInfo.setFieldValue(a.getString("fieldValue"));
                    connectFieldInfos.add(connectFieldInfo);
                }
                apiConnectFiledToSpark.put("filed_infos", connectFieldInfos);
            }
            apiConnectFiledToSpark.put("outputFirstTableName", conf.getOutputTableName());
        }
        apiConnectFiledToSpark.put("web_service_url", conf.getWebServiceUrl());
        apiConnectFiledToSpark.put("code", CodeType.ObjOf(conf.getCode()).getName());
//        apiConnectFiledToSpark.put("filed_infos", conf.getConnectFieldInfos());
        apiConnectFiledToSpark.put("output_storage_engine_type",jobNodeConfig.getOutput_storage_engine_type());
        // NOTE(review): generateOutPutConfMap also writes "output_table_name";
        // it is overwritten with the list built below — confirm intended.
        apiConnectFiledToSpark.putAll(generateOutPutConfMap(jobNodeConfig,null,dataDevelopmentConfig,restTableIds,userId));

        List<String> tableNames = new ArrayList<>();
        // Zero or more output table names may be supplied; when the list is
        // empty the job stores all tables into hive by default.
        for (String outTableName : jobNodeConfig.getSap_output_table_name()
        ) {
            // When the storage engine is kudu (type 2), impala storage requires
            // the qualified form "impala::db.table".
            if(jobNodeConfig.getOutput_storage_engine_type() != null && jobNodeConfig.getOutput_storage_engine_type() == 2){
                tableNames.add("impala::"+outTableName);
            }else {
                tableNames.add(outTableName.substring(outTableName.indexOf(".") + 1));
            }
        }
        apiConnectFiledToSpark.put("output_table_name", tableNames);


        // Upload the job configuration to HDFS.
        String apiJsonName =uploadConf(jobNodeConfig,apiConnectFiledToSpark);

        // Assemble the command.
        generateCommend(jobNodeConfig,paramCmd);
        String className = "cn.getech.data.development.job.SapPIData2Hive --master yarn --deploy-mode cluster --queue " + queueName;
        shell = generateShell(cmdList,paramCmd,jobNodeConfig,apiJsonName,className);

        return wrapShell(shell);
    }

    /**
     * Builds the shell command for importing data from an Oracle (NTS) JDBC
     * source into hive: writes the JDBC connection/output configuration to
     * HDFS and assembles the spark-submit command for DBData2Hive.
     *
     * @param paramCmd accumulates the exported parameter assignments; mutated in place
     * @return the complete wrapped shell script
     */
    public String getJDBCOracleNtsInPutShell(JobNodeConfig jobNodeConfig, ConfConnect conf,
                                             BdpJobConfig bdpJobConfig, String outPutTableInfoFormat, StringBuilder paramCmd,
                                             String tableIds, DataDevelopmentConfig dataDevelopmentConfig, String queueName, Integer jobNodeId, Long userId) {
        Map<String, Object> jdbcConf = new HashMap<>();
        jdbcConf.put("passwd", conf.getPassword());
        ConnectTypeEnum connectType = ConnectTypeEnum.ObjOf(jobNodeConfig.getInput_connect_type());
        jdbcConf.put("driverName", connectType.getDriverClass());
        // Pick the url template with or without a database-name segment.
        if (StringUtils.isNotEmpty(conf.getDbname())) {
            jdbcConf.put("url", connectType.getUrl()
                    .replace("<host>", conf.getHost())
                    .replace("<port>", conf.getPort())
                    .replace("<db_name>", conf.getDbname()));
        } else {
            jdbcConf.put("url", connectType.getUrl2()
                    .replace("<host>", conf.getHost())
                    .replace("<port>", conf.getPort()));
        }

        jdbcConf.put("user", conf.getUsername());
        jdbcConf.put("input_table_name", jobNodeConfig.getInput_table_name());
        jdbcConf.put("input_input_content", jobNodeConfig.getInput_input_content());
        jdbcConf.put("input_db_name", jobNodeConfig.getInput_db_name());
        // Storage engine for the ingested data.
        jdbcConf.put("output_storage_engine_type", jobNodeConfig.getOutput_storage_engine_type());
        jdbcConf.putAll(generateOutPutConfMap(jobNodeConfig, outPutTableInfoFormat, dataDevelopmentConfig, tableIds, userId));
        if (CollectionUtil.isNotEmpty(jobNodeConfig.getSysParam())) {
            jdbcConf.put("newSysparam", JSON.toJSONString(changeObj(jobNodeConfig.getSysParam())));
        }
        jdbcConf.put("jobNodeId", jobNodeId);

        // Upload the JDBC job configuration to HDFS.
        String jdbcJsonName = uploadConf(jobNodeConfig, jdbcConf);

        // Assemble the command.
        generateCommend(jobNodeConfig, paramCmd);
        String className = "cn.getech.data.development.job.DBData2Hive --master yarn --deploy-mode cluster --queue " + queueName;
        String shell = generateShell(new ArrayList<>(), paramCmd, jobNodeConfig, jdbcJsonName, className);
        return wrapShell(shell);
    }

    /**
     * Builds the shell command for importing data from a JDBC source into
     * hive: writes the JDBC connection/output configuration to HDFS and
     * assembles the spark-submit command for DBData2Hive, choosing the
     * mysql-5.x job jar when the connection declares JDBC version 5.1.
     *
     * @param paramCmd accumulates the exported parameter assignments; mutated in place
     * @return the complete wrapped shell script
     */
    public String getJDBCInPutShell(JobNodeConfig jobNodeConfig, ConfConnect conf, BdpJobConfig bdpJobConfig, String output_write_format, StringBuilder paramCmd, String restTableIds, DataDevelopmentConfig dataDevelopmentConfig, String queueName, Integer jobNodeId,Long userId){
        Map<String, Object> jdbcConf = new HashMap<>();
        jdbcConf.put("passwd", conf.getPassword());
        ConnectTypeEnum connectType = ConnectTypeEnum.ObjOf(jobNodeConfig.getInput_connect_type());
        jdbcConf.put("driverName", connectType.getDriverClass());
        // Pick the url template with or without a database-name segment.
        if (StringUtils.isNotEmpty(conf.getDbname())) {
            jdbcConf.put("url", connectType.getUrl()
                    .replace("<host>", conf.getHost())
                    .replace("<port>", conf.getPort())
                    .replace("<db_name>", conf.getDbname()));
        } else {
            jdbcConf.put("url", connectType.getUrl2()
                    .replace("<host>", conf.getHost())
                    .replace("<port>", conf.getPort()));
        }
        jdbcConf.put("user", conf.getUsername());
        jdbcConf.put("input_table_name", jobNodeConfig.getInput_table_name());
        jdbcConf.put("input_input_content", jobNodeConfig.getInput_input_content());
        jdbcConf.put("input_db_name", jobNodeConfig.getInput_db_name());
        // Optional extra JDBC parameters configured on the node (default 0 = not set).
        jdbcConf.put("isSetJdbcParam", jobNodeConfig.getIsSetJdbcParam() != null ? jobNodeConfig.getIsSetJdbcParam() : 0);
        jdbcConf.put("jobNodeConfJdbcParamList", jobNodeConfig.getJobNodeConfJdbcParamList());
        // Storage engine for the ingested data.
        jdbcConf.put("output_storage_engine_type", jobNodeConfig.getOutput_storage_engine_type());
        jdbcConf.putAll(generateOutPutConfMap(jobNodeConfig, output_write_format, dataDevelopmentConfig, restTableIds, userId));
        if (CollectionUtil.isNotEmpty(jobNodeConfig.getSysParam())) {
            jdbcConf.put("newSysparam", JSON.toJSONString(changeObj(jobNodeConfig.getSysParam())));
        }
        jdbcConf.put("jobNodeId", jobNodeId);

        // Upload the JDBC job configuration to HDFS.
        String jdbcJsonName = uploadConf(jobNodeConfig, jdbcConf);

        // Assemble the command; 5.1 connections need the mysql-5.x job jar.
        generateCommend(jobNodeConfig, paramCmd);
        String className = "cn.getech.data.development.job.DBData2Hive --master yarn --deploy-mode cluster --queue " + queueName;
        List<String> cmdList = new ArrayList<>();
        String shell = "5.1".equalsIgnoreCase(conf.getJdbcVersion())
                ? generateShellVersion(cmdList, paramCmd, jobNodeConfig, jdbcJsonName, className)
                : generateShell(cmdList, paramCmd, jobNodeConfig, jdbcJsonName, className);
        return wrapShell(shell);
    }

    /**
     * Assembles the spark-submit line that runs the mysql-5.x build of the
     * data-development job jar (used when the connection's JDBC version is 5.1).
     *
     * @param cmdList receives the command fragments; mutated in place
     * @return the joined spark-submit command line
     */
    private String generateShellVersion(List<String> cmdList, StringBuilder paramCmd, JobNodeConfig jobNodeConfig, String jsonName, String className) {
        cmdList.add(String.format("%s \n", paramCmd.toString()));
        cmdList.add("/bin/spark-submit");
        cmdList.add(String.format("--class %s", className));
        // Spark resources come from the node's configured quotas.
        cmdList.add(String.format("--driver-memory %sG", jobNodeConfig.getResource_dm()));
        cmdList.add(String.format("--driver-cores %s", jobNodeConfig.getResource_dc()));
        cmdList.add(String.format("--executor-cores %s", jobNodeConfig.getResource_ec()));
        cmdList.add(String.format("--executor-memory %sG", jobNodeConfig.getResource_em()));
        cmdList.add(String.format("hdfs://%s/data-development-job-1.0-mysql5.0.jar", bdpJobConfig.getJoblib()));
        cmdList.add(jsonName);
        cmdList.add("\"${param}\"  || exit 10");
        return String.join(" ", cmdList);
    }

    /**
     * Builds the shell command for importing ElasticSearch data into hive:
     * writes the ES query/output configuration to HDFS and assembles the
     * spark-submit command for ImportEsData.
     *
     * @param paramCmd accumulates the exported parameter assignments; mutated in place
     * @return the complete wrapped shell script
     */
    public String getEsShell(JobNodeConfig jobNodeConfig, ConfConnect conf, BdpJobConfig bdpJobConfig, String outPutTableInfoFormat, StringBuilder paramCmd, String tableIds, DataDevelopmentConfig dataDevelopmentConfig, String queueName, Long userId) {
        Map<String, Object> esConf = new HashMap<>();
        esConf.put("esSqlCondition", jobNodeConfig.getEsSqlCondition()); // ES query condition
        esConf.put("esIndex", jobNodeConfig.getEsIndex());               // index name
        esConf.put("esIndexType", jobNodeConfig.getEsIndexType());       // index type
        esConf.put("esPageSize", jobNodeConfig.getEsPageSize());         // page size
        esConf.put("esPageTime", jobNodeConfig.getEsPageTime());         // page time, in seconds
        esConf.put("passwd", conf.getPassword());
        // Storage engine for the ingested data.
        esConf.put("output_storage_engine_type", jobNodeConfig.getOutput_storage_engine_type());
        esConf.putAll(generateOutPutConfMap(jobNodeConfig, outPutTableInfoFormat, dataDevelopmentConfig, tableIds, userId));
        if (CollectionUtil.isNotEmpty(jobNodeConfig.getSysParam())) {
            esConf.put("newSysparam", JSON.toJSONString(changeObj(jobNodeConfig.getSysParam())));
        }

        // Upload the ES job configuration to HDFS.
        String esJsonName = uploadConf(jobNodeConfig, esConf);

        // Assemble the command.
        generateCommend(jobNodeConfig, paramCmd);
        String className = "cn.getech.data.development.job.ImportEsData --master yarn --deploy-mode cluster --queue " + queueName;
        String shell = generateShell(new ArrayList<>(), paramCmd, jobNodeConfig, esJsonName, className);
        return wrapShell(shell);
    }


    public String genSapOutPutShell(JobNodeConfig jobNodeConfig, JobNodeInfo param, ConfConnect conf, BdpJobConfig bdpJobConfig, String restTableIds, StringBuilder paramCmd, String rangerUserName, DataDevelopmentConfig dataDevelopmentConfig, String queueName,Long userId) {
        String shell;
        SapConnectField sapConnectField = param.getJobNodeConfig().getSapConnectFieldInfo();
        if (sapConnectField==null){
            throw new RRException(jobNodeConfig.getJobNodeId()+"的sapConnectField的信息为空");
        }
        List<String> cmdList = new ArrayList<>();
        List<JSONObject> ainputParams = sapConnectField.getAinputParams();
        List<JSONObject> aoutputParams = sapConnectField.getAoutputParams();
        String inputTableName = sapConnectField.getInputTableName();
        String outputTableName = sapConnectField.getOutputTableName();
        String rfcFunc = sapConnectField.getRfcFunc();

        List<String> inputFieldNames = new ArrayList<>();
        List<String> inputFieldValues = new ArrayList<>();
        List<String> inputFieldAlias = new ArrayList<>();
        List<String> inputDataTypes = new ArrayList<>();
        List<String> outputFieldNames = new ArrayList<>();
        List<String> outputFieldValues = new ArrayList<>();
        List<String> outputFieldAlias = new ArrayList<>();
        List<String> outputDataTypes = new ArrayList<>();

        SapConnectFieldToSparkDto sapConnectFieldToSparkDto = new SapConnectFieldToSparkDto();
        sapConnectFieldToSparkDto.setAllTableIds(restTableIds);
        sapConnectFieldToSparkDto.setRangerUserName(rangerUserName);

        Integer paramType = sapConnectField.getParamType();
        sapConnectFieldToSparkDto.setParamType(paramType);
        //判断是单表单行还是多表多行
        if (paramType == 2) { //多表多行
            sapConnectFieldToSparkDto.setAinputMoreTableParams(sapConnectField.getAinputMoreTableParams());
        }else {
            for (JSONObject j : ainputParams) {
                inputFieldNames.add((String) j.get("fieldName"));
                inputFieldValues.add(j.get("fieldValue").toString());
                inputFieldAlias.add((String) j.get("fieldAlias"));
                inputDataTypes.add(j.get("fieldDataType").toString());
            }
            if (null != aoutputParams && !aoutputParams.isEmpty()) {
                for (JSONObject j : aoutputParams) {
                    outputFieldNames.add((String) j.get("fieldName"));
                    outputFieldValues.add(j.get("fieldValue").toString());
                    outputFieldAlias.add((String) j.get("fieldAlias"));
                    outputDataTypes.add(j.get("fieldDataType").toString());
                }
            } else {
                outputFieldNames.add("null");
                outputFieldValues.add("null");
                outputFieldAlias.add("null");
                outputDataTypes.add("null");
            }
        }

        sapConnectFieldToSparkDto.setInputTableName(inputTableName);
        sapConnectFieldToSparkDto.setOutputTableName(outputTableName);
        sapConnectFieldToSparkDto.setInputFieldNames(inputFieldNames);
        sapConnectFieldToSparkDto.setInputFieldValues(inputFieldValues);
        sapConnectFieldToSparkDto.setInputFieldAlias(inputFieldAlias);
        sapConnectFieldToSparkDto.setInputFieldDataTypes(inputDataTypes);
        sapConnectFieldToSparkDto.setOutputFieldNames(outputFieldNames);
        sapConnectFieldToSparkDto.setOutputFieldValues(outputFieldValues);
        sapConnectFieldToSparkDto.setOutputFieldAlias(outputFieldAlias);
        sapConnectFieldToSparkDto.setOutputFieldDataTypes(outputDataTypes);
        sapConnectFieldToSparkDto.setRfcFunc(rfcFunc);

        if(null == sapConnectField.getIsGroup() || 0 == sapConnectField.getIsGroup()){
            sapConnectFieldToSparkDto.setIsGroup("0");
            sapConnectFieldToSparkDto.setJCO_ASHOST(conf.getHost());
            sapConnectFieldToSparkDto.setJCO_SYSNR(sapConnectField.getSysnr());
            sapConnectFieldToSparkDto.setJCO_MSHOST("null");
            sapConnectFieldToSparkDto.setJCO_GROUP("null");
            sapConnectFieldToSparkDto.setJCO_R3NAME("null");
        }else if(1 == sapConnectField.getIsGroup()){
            sapConnectFieldToSparkDto.setIsGroup("1");
            sapConnectFieldToSparkDto.setJCO_MSHOST(conf.getHost());
            sapConnectFieldToSparkDto.setJCO_GROUP(sapConnectField.getGroupName());
            sapConnectFieldToSparkDto.setJCO_R3NAME(sapConnectField.getR3Name());
            sapConnectFieldToSparkDto.setJCO_ASHOST("null");
            sapConnectFieldToSparkDto.setJCO_SYSNR("null");
        }
        sapConnectFieldToSparkDto.setJCO_LANG(sapConnectField.getLang());
        sapConnectFieldToSparkDto.setJCO_CLIENT(conf.getPort());
        sapConnectFieldToSparkDto.setJCO_PASSWD(conf.getPassword());
        sapConnectFieldToSparkDto.setJCO_USER(conf.getUsername());

        sapConnectFieldToSparkDto.setInput_connect_id(jobNodeConfig.getInput_connect_id().toString());
        sapConnectFieldToSparkDto.setHight_file_num(jobNodeConfig.getHight_file_num());
//        sapConnectFieldToSparkDto.setHight_resource(jobNodeConfig.getHight_resource());
        List<String> tableNames = new ArrayList<>();
        //可以不传值，或者可以传多个;list为空，默认将所有的表的数据存入hive
        for (String outTableName : jobNodeConfig.getSap_output_table_name()
        ) {
            //选择数据引擎是kudu的时候默认impala存储需要将表名拼接成"impala::yhc.test_meiqin"
            if(jobNodeConfig.getOutput_storage_engine_type() != null && jobNodeConfig.getOutput_storage_engine_type() == 2){
                tableNames.add("impala::"+outTableName);
            }else {
                tableNames.add(outTableName.substring(outTableName.indexOf(".") + 1));
            }
        }
        sapConnectFieldToSparkDto.setOutput_table_name(tableNames);
        sapConnectFieldToSparkDto.setInput_input_content(jobNodeConfig.getInput_input_content());
        sapConnectFieldToSparkDto.setJobNodeId(jobNodeConfig.getJobNodeId().toString());
        sapConnectFieldToSparkDto.setInput_connect_type(jobNodeConfig.getInput_connect_type().toString());
        sapConnectFieldToSparkDto.setOutput_write_model(jobNodeConfig.getOutput_write_model());
        if (null == jobNodeConfig.getOutput_data_partition() || "".equals(jobNodeConfig.getOutput_data_partition())) {
            sapConnectFieldToSparkDto.setOutput_data_partition("null");
        } else {
            String output_data_partition = jobNodeConfig.getOutput_data_partition();
//            if(!output_data_partition.contains("/")){
//                throw new RRException(BizExceptionEnum.PARTITION_FORMAT_ERROR);
//            }
//            output_data_partition = output_data_partition.replaceAll("/",",");
//            output_data_partition = output_data_partition.substring(0,output_data_partition.length() - 1);
            sapConnectFieldToSparkDto.setOutput_data_partition(output_data_partition);
        }
        sapConnectFieldToSparkDto.setOutput_db_name(jobNodeConfig.getOutput_db_name());
        sapConnectFieldToSparkDto.setJobNodeId(jobNodeConfig.getJobNodeId().toString());
//        sapConnectFieldToSparkDto.setHight_thread(jobNodeConfig.getHight_thread());
//        sapConnectFieldToSparkDto.setTableFieldDtos(tableFieldDtos);

        sapConnectFieldToSparkDto.setIs_open_lood_access(jobNodeConfig.getIsOpenLoodAccess());
        sapConnectFieldToSparkDto.setBatch_size(jobNodeConfig.getBatchSize());
        sapConnectFieldToSparkDto.setBegin_GDA(jobNodeConfig.getBeginGDA());
        sapConnectFieldToSparkDto.setEnd_GDA(jobNodeConfig.getEndGDA());
        sapConnectFieldToSparkDto.setStop_sign(jobNodeConfig.getStopSign());
        sapConnectFieldToSparkDto.setStop_sign_val(jobNodeConfig.getStopSignVal());
        //数据接入存储引擎
        sapConnectFieldToSparkDto.setOutput_storage_engine_type(jobNodeConfig.getOutput_storage_engine_type());
        //如果是kudu引擎，则不需要分区
        if(Objects.equals(jobNodeConfig.getOutput_storage_engine_type(),StorageEngineEnum.KUDU.getCode())){
            sapConnectFieldToSparkDto.setOutput_data_partition(null);
        }
        sapConnectFieldToSparkDto.setRangerUserName(dataDevelopmentConfig.getUserPreffix() + userId);
        //上传sap相关配置文件到hdfs上
        String sapJsonName = uploadConf(jobNodeConfig,sapConnectFieldToSparkDto);

//        cmdList.add("cd /data \n");
        // 构造命令
        generateCommend(jobNodeConfig,paramCmd);
        String className = "cn.getech.data.development.job.SapData2Hive --master yarn --deploy-mode cluster --queue " + queueName;
        shell = generateShell(cmdList,paramCmd,jobNodeConfig,sapJsonName,className);
        return wrapShell(shell);
    }

    /**
     * Appends every extracted runtime-parameter fragment to the command buffer.
     *
     * @param jobNodeConfig node configuration supplying the raw parameter sources
     * @param paramCmd      builder that accumulates the shell parameter text
     */
    public void generateCommend(JobNodeConfig jobNodeConfig,StringBuilder paramCmd){
        Map<String, String> extracted = extraParams(jobNodeConfig.getInput_file_position(), jobNodeConfig.getInput_input_content(), jobNodeConfig.getOutput_data_partition(), jobNodeConfig.getFtpFileLocation());
        // Concatenate every fragment in the map's iteration order.
        extracted.values().forEach(paramCmd::append);
    }
    /**
     * Assembles the spark-submit command line for a data-ingestion job node.
     *
     * @param cmdList       command fragments accumulated so far; extended in place
     * @param paramCmd      shell variable assignments to emit before spark-submit
     * @param jobNodeConfig per-node driver/executor resource settings
     * @param jsonName      name stem of the job's JSON config on HDFS
     * @param className     fully-qualified main class plus extra spark-submit flags
     * @return all fragments joined with single spaces
     */
    public String generateShell(List<String> cmdList, StringBuilder paramCmd,JobNodeConfig jobNodeConfig,String jsonName,String className){
        //scala -cp 报错 scala命令不存在 (kept: "scala -cp" fails, scala command unavailable)
        // Parameter assignments first, newline-terminated so they run as
        // standalone shell statements before the spark-submit invocation.
        cmdList.add(String.format("%s \n", paramCmd.toString()));
        cmdList.add("/bin/spark-submit");
        cmdList.add(String.format("--class %s",className));
        // Driver/executor resources come from the node configuration (values in GB/cores).
        cmdList.add(String.format("--driver-memory %sG",jobNodeConfig.getResource_dm()));
        cmdList.add(String.format("--driver-cores %s",jobNodeConfig.getResource_dc()));
        cmdList.add(String.format("--executor-cores %s",jobNodeConfig.getResource_ec()));
        cmdList.add(String.format("--executor-memory %sG",jobNodeConfig.getResource_em()));
        cmdList.add(String.format("hdfs://%s/data-development-job-1.0.jar", bdpJobConfig.getJoblib()));
        cmdList.add(jsonName);
        // Propagate the runtime parameter; fail the Oozie action (exit 10) on error.
        cmdList.add("\"${param}\"  || exit 10");
        // String.join accepts any Iterable<CharSequence>; the old
        // toArray(new String[size]) copy was an unnecessary allocation.
        return String.join(" ", cmdList);
    }



    /**
     * Serializes the given DTO to JSON and writes it to the HDFS job-config
     * directory as {@code jobid_<nodeId>.json} under the INPUT job type.
     * Best-effort: failures are logged and swallowed, matching the original
     * contract of this method.
     *
     * @param jobNodeConfig supplies the connect id and the job node id
     * @param o             DTO to serialize (e.g. SapConnectFieldToSparkDto)
     * @return the file name stem (the job node id as a string)
     */
    public String uploadConf(JobNodeConfig jobNodeConfig,Object o){
        JSONObject jsonObject = (JSONObject) JSONObject.toJSON(o);
        /***添加连接池id*/
        // Attach the connection-pool id so the Spark job can resolve the datasource.
        jsonObject.put("connect_id",jobNodeConfig.getInput_connect_id());
        String jsonName = jobNodeConfig.getJobNodeId().toString();

        HdfsUtil hdfsUtil = null;
        try {
            hdfsUtil = new HdfsUtil(bdpJobConfig);
        } catch (Exception e) {
            // Unreachable HDFS: leave hdfsUtil null and skip the write below.
            e.printStackTrace();
        }
        if (null != hdfsUtil) {
            try {
                hdfsUtil.writeFile(jsonObject.toJSONString().getBytes("utf-8"), String.format("%s/%s/%s%s.json", bdpJobConfig.getJobconfig(), JobType.INPUT.getCode(), "jobid_", jsonName));
            } catch (UnsupportedEncodingException e) {
                e.printStackTrace();
            } finally {
                // Fix: close the filesystem handle even when writeFile throws an
                // unchecked exception (previously only the happy path closed it).
                try {
                    hdfsUtil.dfs.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        return jsonName;
    }
    /**
     * Serializes the job node config plus flattened connection details to JSON
     * and writes it to HDFS as {@code jobid_<nodeId>.json} under the OUTPUT
     * job type.
     *
     * @param jobNodeConfig   node configuration to serialize
     * @param conf            datasource connection (host/port/user/password)
     * @param confConnectType connection type supplying name and JDBC driver class
     * @param bdpJobConfig    HDFS locations (shadows the static field on purpose)
     * @param queueName       YARN queue the job should run in
     * @throws Exception on HDFS access or write failure
     */
    public void upLoadJobConf(JobNodeConfig jobNodeConfig, ConfConnect conf, ConnectTypeEnum confConnectType, BdpJobConfig bdpJobConfig, String queueName) throws Exception {
        JSONObject jsonObject = (JSONObject) JSONObject.toJSON(jobNodeConfig);
        /***添加连接池id*/
        // Attach the connection-pool id and the flattened connection details.
        jsonObject.put("connect_id",jobNodeConfig.getInput_connect_id());
        jsonObject.put("type", confConnectType.getName());
        jsonObject.put("host", conf.getHost());
        jsonObject.put("port", conf.getPort());
        jsonObject.put("username", conf.getUsername());
        jsonObject.put("password", conf.getPassword());
        jsonObject.put("dbname", jobNodeConfig.getInput_db_name());
        jsonObject.put("queueName", queueName);
        jsonObject.put("driverClass", confConnectType.getDriverClass());
        if(CollectionUtil.isNotEmpty(jobNodeConfig.getSysParam())){
            // System parameters are quoted/normalized before being embedded.
            JSONObject sysParams = changeObj(jobNodeConfig.getSysParam());
            jsonObject.put("newSysparam", JSON.toJSONString(sysParams));
        }
        HdfsUtil util = new HdfsUtil(bdpJobConfig);
        try {
            util.writeFile(jsonObject.toJSONString().getBytes("utf-8"), String.format("%s/%s/jobid_%s.json", bdpJobConfig.getJobconfig(), JobType.OUTPUT.getCode(), jobNodeConfig.getJobNodeId()));
        } finally {
            // Fix: the old "if (null != util)" check was dead (the constructor
            // either returns an instance or throws) and the handle leaked when
            // writeFile failed; closing in finally covers both paths.
            util.close();
        }
    }

    /**
     * Flattens the system-parameter maps into a single JSON object. Numeric
     * values are kept as-is; all other values are wrapped in single quotes so
     * they can be substituted directly into SQL text.
     *
     * @param sysParam list of key/value maps from the node config (may be null or empty)
     * @return merged parameters; later maps overwrite earlier duplicate keys
     */
    private JSONObject changeObj(List<Map<String, Object>> sysParam) {
        JSONObject jsonObject = new JSONObject();
        if(CollectionUtil.isNotEmpty(sysParam)){
            for (Map<String, Object> map : sysParam) {
                if(MapUtil.isNotEmpty(map)){
                    // Iterate entries directly instead of keySet() + get():
                    // same behavior, no per-key second lookup.
                    for (Map.Entry<String, Object> entry : map.entrySet()) {
                        Object value = entry.getValue();
                        if(null != value){
                            if(isNumber(value)){
                                jsonObject.put(entry.getKey(), value);
                            }else{
                                jsonObject.put(entry.getKey(), "'" + value + "'");
                            }
                        }
                    }
                }
            }
        }
        return jsonObject;
    }

    /**
     * Uploads the SQL job node config as {@code jobid_<nodeId>.json} under the
     * SQL job type on HDFS.
     *
     * @param jobNodeConfig node configuration to serialize
     * @param bdpJobConfig  HDFS locations
     * @throws Exception on HDFS access or write failure
     */
    public void upLoadSQLJobConf(JobNodeConfig jobNodeConfig, BdpJobConfig bdpJobConfig) throws Exception {
        JSONObject jsonObject = (JSONObject) JSONObject.toJSON(jobNodeConfig);
        HdfsUtil util = new HdfsUtil(bdpJobConfig);
        try {
            // Fix: encode explicitly as UTF-8 — the bare getBytes() used the
            // platform default charset, unlike every sibling upload method.
            util.writeFile(jsonObject.toJSONString().getBytes("utf-8"), String.format("%s/%s/jobid_%s.json", bdpJobConfig.getJobconfig(), JobType.SQL.getCode(), jobNodeConfig.getJobNodeId()));
        } finally {
            // Fix: close in finally (handle leaked on write failure); the old
            // null-check was dead code.
            util.close();
        }
    }

    /**
     * Uploads the SQL DTO as {@code jobid_<jobNodeId>.json} under the SQL job
     * type on HDFS.
     *
     * @param jobNodeSQLDto DTO to serialize
     * @param bdpJobConfig  HDFS locations
     * @param jobNodeId     node id used in the destination file name
     * @throws Exception on HDFS access or write failure
     */
    public void upLoadSQLJobConf(JobNodeSQLDto jobNodeSQLDto, BdpJobConfig bdpJobConfig, Integer jobNodeId) throws Exception {
        JSONObject jsonObject = (JSONObject) JSONObject.toJSON(jobNodeSQLDto);
        HdfsUtil util = new HdfsUtil(bdpJobConfig);
        try {
            // Fix: encode explicitly as UTF-8 — the bare getBytes() used the
            // platform default charset, unlike every sibling upload method.
            util.writeFile(jsonObject.toJSONString().getBytes("utf-8"), String.format("%s/%s/jobid_%s.json", bdpJobConfig.getJobconfig(), JobType.SQL.getCode(), jobNodeId));
        } finally {
            // Fix: close in finally (handle leaked on write failure); the old
            // null-check was dead code.
            util.close();
        }
    }

    /**
     * Stores the raw SQL statement by itself as {@code jobid_<jobNodeId>.hql}
     * under the SQL job type on HDFS (UTF-8 encoded).
     *
     * @param bdpJobConfig HDFS locations
     * @param jobNodeId    node id used in the destination file name
     * @param sqlStatment  SQL text to store
     * @throws Exception on HDFS access or write failure
     */
    public void upLoadSQL(BdpJobConfig bdpJobConfig, Integer jobNodeId,String sqlStatment) throws Exception {
        HdfsUtil util = new HdfsUtil(bdpJobConfig);
        try {
            util.writeFile(sqlStatment.getBytes("utf-8"), String.format("%s/%s/jobid_%s.hql", bdpJobConfig.getJobconfig(), JobType.SQL.getCode(), jobNodeId));
        } finally {
            // Fix: close in finally (handle leaked on write failure); the old
            // null-check was dead code.
            util.close();
        }
    }


    /**
     * Stores a shell-node script as {@code nodeShell_<jobNodeId>.sh} under the
     * SHELL job type on HDFS (UTF-8 encoded).
     *
     * @param bdpJobConfig HDFS locations
     * @param jobNodeId    node id used in the destination file name
     * @param sqlStatment  shell script text to store
     * @throws Exception on HDFS access or write failure
     */
    public void upLoadShell(BdpJobConfig bdpJobConfig, Integer jobNodeId,String sqlStatment) throws Exception {
        HdfsUtil util = new HdfsUtil(bdpJobConfig);
        try {
            util.writeFile(sqlStatment.getBytes("utf-8"), String.format("%s/%s/nodeShell_%s.sh", bdpJobConfig.getJobconfig(), JobType.SHELL.getCode(), jobNodeId));
        } finally {
            // Fix: close in finally (handle leaked on write failure); the old
            // null-check was dead code.
            util.close();
        }
    }

    /**
     * Deletes the stored {@code jobid_<id>.hql} for the given node from HDFS.
     *
     * @param jobNodeInfo  supplies the node id whose .hql file is removed
     * @param bdpJobConfig HDFS locations
     * @throws Exception on HDFS access or delete failure
     */
    public void deleteSQLJobConf(JobNodeInfo jobNodeInfo, BdpJobConfig bdpJobConfig) throws Exception {
        String path = String.format("%s/%s/jobid_%s.hql", bdpJobConfig.getJobconfig(), JobType.SQL.getCode(), jobNodeInfo.getId());
        HdfsUtil util = new HdfsUtil(bdpJobConfig);
        try {
            util.delete(path);
        } finally {
            // Fix: close in finally (handle leaked on delete failure); the old
            // null-check was dead code.
            util.close();
        }
    }

    /**
     * Prepares an Oozie shell node: uploads the node's script to HDFS, then
     * returns the wrapper shell that triggers it through the REST interface.
     *
     * @return Oozie-ready wrapper shell text
     * @throws RRException when uploading the node script to HDFS fails
     */
    public String genShellNode(JobNodeConfig jobNodeConfig, BdpJobConfig bdpJobConfig, Long userId, DataDevelopmentConfig dataDevelopmentConfig, StringBuilder paramCmd, OozieConfig oozieConfig, String queueName, BussessConfigEntity bussessConfigEntity) {
        // Step 1: push the user's shell script to HDFS.
        try {
            upLoadShell(bdpJobConfig, jobNodeConfig.getJobNodeId(), jobNodeConfig.getSql_statment());
        } catch (Exception e) {
            throw new RRException(DataDevelopmentBizExceptionEnum.UPLOAD_JOB_CONF_ERROR.getMessage());
        }
        // Step 2: build the snippet that runs the script via the REST endpoint
        // (replaces the older ssh/scp-based pingShellLog approach).
        String restWrapper = pingShellLogInterfer(oozieConfig, jobNodeConfig.getJobNodeId());
        // Step 3: wrap for Oozie execution.
        return wrapShell(restWrapper);
    }

    /**
     * Builds a small shell snippet that triggers the node's script through the
     * REST service and exits with code 10 when the response contains the
     * "[error-oozi]" marker.
     *
     * @param oozieConfig supplies the REST base URL
     * @param jobNodeId   node to run
     * @return the generated shell text
     */
    private String pingShellLogInterfer(OozieConfig oozieConfig, Integer jobNodeId) {
        // Capture the REST response into $shellRest for inspection below.
        String restCall = "shellRest=`curl -X GET " + oozieConfig.getRest_url() + "/runShellNodes/" + jobNodeId + "`\n";
        StringBuilder script = new StringBuilder(restCall);
        script.append("echo ${shellRest}\n")
                .append("fixShelllog=\"\\[error-oozi\\]\"\n")
                .append("if [[ $shellRest == *$fixShelllog* ]]; then\n\t")
                .append("exit 10\n")
                .append("fi\n");
        return script.toString();
    }

    /**
     * Builds a legacy ssh/scp-based wrapper shell for a shell node: downloads
     * the node script from HDFS, asks the REST service to substitute system
     * parameters, copies the result to a remote host, runs it there, and
     * echoes the collected log back to the Oozie console. Exit codes: 1 =
     * download failed, 2 = scp failed, 3 = remote execution failed.
     * NOTE(review): its only visible call site (in genShellNode) is commented
     * out — apparently superseded by pingShellLogInterfer; confirm before removal.
     *
     * @param oozieConfig         supplies the REST base URL for parameter substitution
     * @param bdpJobConfig        supplies the HDFS path of the uploaded node script
     * @param jobNodeId           node id; used in script/log file names
     * @param bussessConfigEntity remote host connection details (host/port/user/password)
     * @return the generated shell text
     */
    private String pingShellLog(OozieConfig oozieConfig, BdpJobConfig bdpJobConfig, Integer jobNodeId, BussessConfigEntity bussessConfigEntity) {
        String path = String.format("%s/%s/nodeShell_%s.sh", bdpJobConfig.getJobconfig(), JobType.SHELL.getCode(), jobNodeId);
        StringBuilder shell = new StringBuilder();
        //shell.append("Date=`date +'%Y-%m-%d'`").append("\n");
        shell.append("ScriptDir=./sDate/scripts/").append(jobNodeId).append("\n");
        shell.append("LogDir=./sDate/scripts/").append(jobNodeId).append("/logs").append("\n");
        shell.append("Host=").append(bussessConfigEntity.getHostUrl()).append("\n");
        shell.append("Port=").append(bussessConfigEntity.getPort()).append("\n");
        shell.append("User=").append(bussessConfigEntity.getUsername()).append("\n");
        shell.append("Password=").append(bussessConfigEntity.getPassword()).append("\n");
        shell.append("ScriptsUrl=").append(path).append("\n");
        shell.append("ts=").append(jobNodeId).append("\n");
        // Remove any leftover script/log dirs from a previous run (recreated below).
        shell.append("rm -rf $ScriptDir").append("\n");
        shell.append("rm -rf $LogDir").append("\n");
        // Helper that generates an ssh key pair non-interactively via expect.
        shell.append("genkey() {").append("\n");
        shell.append("expect << EOF").append("\n");
        shell.append("spawn ssh-keygen").append("\n");
        shell.append("expect \"ssh/id_rsa\"").append("\n");
        shell.append("send \"\\r\"").append("\n");
        shell.append("expect \"passphrase\"").append("\n");
        shell.append("send \"\\r\"").append("\n");
        shell.append("expect \"again\"").append("\n");
        shell.append("send \"\\r\"").append("\n");
        shell.append("expect \"#\"").append("\n");
        shell.append("send \"exit\\r\"").append("\n");
        shell.append("EOF").append("\n");
        shell.append("}").append("\n");

        shell.append("if [[ ! -e ~/.ssh/id_rsa ]]; then").append("\n\t");
        shell.append("genkey").append("\n");
        shell.append("fi").append("\n");

        // Copy the public key to the remote host for password-less ssh/scp.
        shell.append("\n");
        shell.append("expect << EOF").append("\n");
        shell.append("spawn ssh-copy-id -p $Port $User@$Host").append("\n");
        shell.append("set timeout -1").append("\n");
        shell.append("expect {").append("\n\t");
        shell.append("-re \"password\" {send \"$Password\\r\"}").append("\n\t");
        shell.append("-re \"yes/no\" {send \"yes\\r\";exp_continue}").append("\n");
        shell.append("}").append("\n");
        shell.append("expect \"#\"").append("\n");
        shell.append("send \"exit\\r\"").append("\n");
        shell.append("EOF").append("\n");
        shell.append("\n");

        shell.append("mkdir -p $LogDir").append("\n");
        //#step1 — download the node script from HDFS; exit 1 on failure.
        shell.append("echo \"get hdfs data start...\"").append("\n");
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] Download shell now.\" >> $LogDir/script_$ts.log").append("\n");
        shell.append("hdfs dfs -get $ScriptsUrl $ScriptDir/script_$ts.sh").append("\n");
//        shell.append("wget $ScriptsUrl -O $ScriptDir/script_$ts.sh").append("\n");
        shell.append("if [ $? == 0 ]; then").append("\n\t");
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] Download shell succeed.\" >> $LogDir/script_$ts.log").append("\n");
        shell.append("else").append("\n\t");
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [ERROR] Download shell failed.\" >> $LogDir/script_$ts.log").append("\n\t");
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] exit now.\" >> $LogDir/script_$ts.log").append("\n\t");
        // Dump the log to the console before bailing out.
        shell.append("datalogline=$(cat $LogDir/script_$ts.log)").append("\n\t");
        shell.append("echo \"log:${datalogline}\"").append("\n\t");
        shell.append("exit 1").append("\n");
        shell.append("fi").append("\n");
        shell.append("hdfsData=$(cat $ScriptDir/script_$ts.sh)").append("\n");
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] Download shell content:${hdfsData} \"").append("\n");

        //#step2 — have the REST service resolve system parameters inside the script.
        // Newlines are flattened to a sentinel token so the script survives the
        // JSON round-trip; they are restored after the call.
        shell.append("echo \"replace sysparam start...\"").append("\n");
        shell.append("dataline=$(cat $ScriptDir/script_$ts.sh | tr \"\\n\" \"HHIIIJJ3JKKKLLLMMMN4NNOOOPPP5QQQRRRSSSTTT\")").append("\n");
        shell.append("newSql=`curl -X POST ").append(oozieConfig.getRest_url()).append("/runSysparams ").append("--connect-timeout 20 -H \"Content-Type:application/json;charset=UTF-8\"  -d \"{\\\"sql\\\":\\\"${dataline}\\\",\\\"jobNodeId\\\":\\\"").append(jobNodeId).append("\\\"}\"`").append("\n");
        shell.append("strError=\"\\\"code\\\":500\"").append("\n");
        shell.append("restttt=$(echo $newSql | grep \"${strError}\")").append("\n");
        shell.append("if [[ \"$restttt\" != \"\" ]];then").append("\n\t");
        shell.append("echo 'repleace sysparam error'").append("\n");
        shell.append("else").append("\n\t");
        shell.append("rm -f $ScriptDir/script_$ts.sh").append("\n\t");
        shell.append("newSql=$(echo \"${newSql}\" | tr \"HHIIIJJ3JKKKLLLMMMN4NNOOOPPP5QQQRRRSSSTTT\" \"\\n\")").append("\n\t");
        shell.append("echo \"${newSql}\" > $ScriptDir/script_$ts.sh").append("\n");
        shell.append("fi").append("\n");
        shell.append("repalceHdfsData=$(cat $ScriptDir/script_$ts.sh)").append("\n");
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] replace shell content:${repalceHdfsData} \"").append("\n");

        //#step3 — scp the resolved script to the remote host; exit 2 on failure.
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] sending file to remote host.\" >> $LogDir/script_$ts.log").append("\n");
        shell.append("echo \"scp shellNode start...\"").append("\n");
        shell.append("scp -P $Port $ScriptDir/script_$ts.sh $User@$Host:/tmp/").append("\n");
        shell.append("if [ $? != 0 ]; then").append("\n\t");
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [ERROR] file send to remote host failed.\" >> $LogDir/script_$ts.log").append("\n\t");
        // Dump the log to the console before bailing out.
        shell.append("datalogline=$(cat $LogDir/script_$ts.log)").append("\n\t");
        shell.append("echo \"log:${datalogline}\"").append("\n\t");
        shell.append("exit 2").append("\n");
        shell.append("fi").append("\n");
        //#step4 — run the script remotely over ssh; exit 3 if the failure marker appears.
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] running shell on remote host.\" >> $LogDir/script_$ts.log").append("\n");
        shell.append("echo \"ssh shellNode start...\"").append("\n");
        shell.append("ssh -p $Port $User@$Host \"source /etc/profile;bash /tmp/script_$ts.sh || echo `date +'%Y-%m-%d %H:%M:%S'` [ERROR] running shell on remote host failed \" &>> $LogDir/script_$ts.log").append("\n");
        shell.append("chk=`grep 'running shell on remote host failed' $LogDir/script_$ts.log`").append("\n");
        shell.append("if [[ ! -z $chk ]]; then").append("\n\t");
        // Dump the log to the console before bailing out.
        shell.append("datalogline=$(cat $LogDir/script_$ts.log)").append("\n\t");
        shell.append("echo \"log:${datalogline}\"").append("\n\t");
        shell.append("exit 3").append("\n");
        shell.append("fi").append("\n");
        shell.append("echo \"`date +'%Y-%m-%d %H:%M:%S'` [INFO] End.\" >> $LogDir/script_$ts.log").append("\n");

        // Dump the full log to the console on success as well.
        shell.append("datalogline=$(cat $LogDir/script_$ts.log)").append("\n");
        shell.append("echo \"log:${datalogline}\"").append("\n");

        return shell.toString();
    }



}
