package com.kingsoft.dc.khaos.plugin.core.dts;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.kingsoft.dc.khaos.extender.meta.MetaClient;
import com.kingsoft.dc.khaos.extender.model.ParamConfig;
import com.kingsoft.dc.khaos.plugin.core.constants.Contants;
import com.kingsoft.dc.khaos.plugin.core.exception.MetaException;
import com.kingsoft.dc.khaos.plugin.core.utils.*;
import com.ksyun.kbdp.dts.common.utils.Props;
import com.ksyun.kbdp.dts.common.utils.StringUtil;
import com.ksyun.kbdp.dts.job.core.ProcessJob;
import com.ksyun.kbdp.dts.job.core.util.CommandUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;

import java.io.File;
import java.util.*;
import java.util.stream.Collectors;

import static com.kingsoft.dc.khaos.plugin.core.constants.ScheduleConstants.COMMAND;
import static com.kingsoft.dc.khaos.plugin.core.dts.DtsContants.SCRIPT_TYPE;
import static com.ksyun.kbdp.dts.job.core.JavaProcessJob.JAVA_COMMAND;

/**
 * Created by jing on 19/8/2.
 */
public class ScriptPluginSubmit extends ProcessJob {
    // Base name for the generated user-script file (language suffix appended by getFileName()).
    final String USER_SCRIPT_FILENAME = "user_script";
    // Name of the wrapper entry script shipped in the plugin dir (run.py / run.pl, see getCommand()).
    final String MAIN_SCRIPT_FILENAME = "run";
    // sysProps key for the Kerberos KDC service URL (read for hive jobs in getFileArgs()).
    final String KERBEROS_KDC_URL = "kerberos.kdc.url";


    public ScriptPluginSubmit(String jobid, Props sysProps, Props jobProps, Logger logger) {
        super(jobid, sysProps, jobProps, logger);
        // Keeps a local copy of the logger. NOTE(review): the parent constructor also
        // receives the logger — confirm whether this shadow field is actually needed.
        this.logger=logger;
    }
    // Declared after the constructor that assigns it; kept as-is (field order is legal in Java).
    private Logger logger;

    @Override
    public void beforeExec() throws Exception {
        // No extra setup beyond the parent's preparation.
        super.beforeExec();
    }

    /**
     * Returns the content written to the user-script file: the raw script code
     * stored under the COMMAND job property.
     */
    @Override
    protected String getFileContent() {
        // String.valueOf mirrors StringBuffer.append's null handling: a missing
        // property still yields the literal text "null", as before.
        return String.valueOf(jobProps.get(COMMAND));
    }

    /**
     * Builds the user-script file name: {@code user_script} plus a suffix derived
     * from the configured script type, falling back to {@code .command}, or an
     * explicit suffix override from sysProps.
     *
     * @return file name such as {@code user_script.py}
     */
    @Override
    protected String getFileName() {
        String type = sysProps.get(SCRIPT_TYPE);
        String scriptSuffix = ".command"; // default when the type is unknown
        for (ScriptType st : ScriptType.values()) {
            // Reversed comparison so a missing/null script type cannot throw an NPE.
            if (st.name.equalsIgnoreCase(type)) {
                scriptSuffix = st.suffix;
                break;
            }
        }
        if (sysProps.containsKey(DtsContants.SCRIPT_SUFFIX)) {
            // FIX: read the configured value; the previous code assigned the
            // property-key constant itself as the suffix.
            scriptSuffix = sysProps.get(DtsContants.SCRIPT_SUFFIX);
        }
        return USER_SCRIPT_FILENAME + scriptSuffix;
    }

    /**
     * Assembles the shell command that runs the plugin wrapper script with the
     * generated user script and a JSON argument blob.
     */
    @Override
    protected String getCommand() {
        // Interpreter, e.g. "python2", optionally followed by extra options such as "-u".
        String interpreter = this.sysProps.getString("command.type");
        if (this.sysProps.containsKey("command.opt")) {
            interpreter = interpreter + " " + this.sysProps.get("command.opt");
        }
        // Materialize the user's code as user_script.<suffix> inside the job's executing dir.
        String executingDir = this.jobProps.getString("job.executing.dir");
        File userScript = CommandUtil.command2File(executingDir, this.getFileContent(), this.getFileName());
        // JSON arguments consumed by the wrapper script.
        String args = getFileArgs(userScript.getAbsolutePath());
        // The wrapper lives in the plugin dir and shares the user script's suffix:
        // ${pluginDir}/run.py, ${pluginDir}/run.pl, ...
        String scriptName = userScript.getName();
        String suffix = scriptName.substring(scriptName.lastIndexOf("."));
        String wrapper = sysProps.get("plugin.dir") + "/" + MAIN_SCRIPT_FILENAME + suffix;
        return String.format("%s %s %s", interpreter, wrapper, args);
    }


    /**
     * Builds the java launch command (JVM args, heap sizes, classpath, main class)
     * without trailing main arguments.
     */
    protected String getJAVACommand() {
        StringBuilder cmd = new StringBuilder(JAVA_COMMAND).append(' ');
        cmd.append(getJVMArguments()).append(' ');
        cmd.append("-Xms").append(getInitialMemorySize()).append(' ');
        cmd.append("-Xmx").append(getMaxMemorySize()).append(' ');
        cmd.append("-cp ").append(createArguments(this.getClassPaths(), ":")).append(' ');
        cmd.append(getJavaClass()).append(' ');
        return cmd.toString();
    }

    /** Initial heap (-Xms) for the launched JVM; defaults to 64M. */
    protected String getInitialMemorySize() {
        return getJobProps().getString("Xms", "64M");
    }

    /**
     * Combines global and per-job JVM arguments; job-level "jvm.args" are appended
     * after "global.jvm.args" when the latter is present.
     */
    protected String getJVMArguments() {
        Props props = this.getJobProps();
        String jobArgs = props.getString("jvm.args", "");
        String globalArgs = props.getString("global.jvm.args", (String) null);
        if (globalArgs == null) {
            return jobArgs;
        }
        return globalArgs + " " + jobArgs;
    }

    /**
     * Joins the given tokens with the separator (e.g. classpath entries with ":").
     *
     * Replaces a hand-rolled append-then-trim loop that always removed exactly one
     * trailing character, which corrupted the result for multi-character
     * separators. For the single-character separators used by callers the output
     * is unchanged; null elements still render as "null".
     *
     * @param arguments tokens to join; null or empty yields ""
     * @param separator string placed between tokens
     * @return joined string, or "" when there is nothing to join
     */
    protected String createArguments(List<String> arguments, String separator) {
        if (arguments == null || arguments.isEmpty()) {
            return "";
        }
        return String.join(separator, arguments);
    }

    /** Fully-qualified main class read from "java.class" (no default — required). */
    protected String getJavaClass() {
        return getJobProps().getString("java.class");
    }


    /** Maximum heap (-Xmx) for the launched JVM; defaults to 256M. */
    protected String getMaxMemorySize() {
        return getJobProps().getString("Xmx", "256M");
    }

    /**
     * Builds the JSON argument string handed to the wrapper script (run.py / run.pl):
     * script/plugin paths, runtime env, data-source type and connection info, and
     * auth/meta query parameters. For hive data sources it additionally embeds the
     * spark-submit arguments and java launch command.
     *
     * @param userScriptName absolute path of the generated user-script file
     * @return the argument map serialized twice — the outer pass produces a quoted,
     *         escaped JSON string literal, presumably so the whole blob survives as
     *         a single command-line token; confirm against the wrapper script
     */
    private String getFileArgs(String userScriptName) {
        Map<String, Object> argMap = new HashMap<>();
        argMap.put("user_script_file", userScriptName);
        argMap.put("plugin_dir", sysProps.get("plugin.dir"));
        // NOTE(review): getCommand() reads "job.executing.dir" from jobProps, while
        // this reads it from sysProps — confirm both are populated consistently.
        argMap.put("working_dir", sysProps.get("job.executing.dir"));
        argMap.put("run_env", jobProps.get(DtsContants.RUN_ENV));
        // Data-source descriptor is embedded in the "plugin_param" job property.
        DsInfo dsInfo = DtsArgParserUtils.extractDsInfo(jobProps.get("plugin_param"));
        if (dsInfo != null && dsInfo.getDsId() != -1) {
            argMap.put("ds_type", dsInfo.getDsType());
        } else {
            // Placeholder when no data source is configured.
            argMap.put("ds_type", "-");
        }

        if (argMap.get("ds_type").equals("hive")) {
            // Hive scripts run through spark-sql: pass spark-submit arguments and the
            // java command line needed to launch it.
            argMap.put("spark_args", getMainArguments());
            argMap.put("java_cmd", getJAVACommand());
            // For the python plugin running hive this setting reportedly does not take
            // effect; the URL below is the hard-coded default fallback.
            argMap.put("krbUrl", getSysProps().getString(KERBEROS_KDC_URL, "http://kerberosapi.innerapi.sdns.kscbigdata.cloud/kerberos"));
        }

        argMap.put("connect", getConnectInfo(dsInfo));
        if (sysProps.containsKey("psql.home")) {
            argMap.put("psql.home", sysProps.get("psql.home"));
        }
        if (dsInfo != null) {
            // Variants without AK/SK credentials — these params are passed to the script.
            argMap.put("auth_param", buildAuthQueryParamWithoutAkSk(dsInfo));
            argMap.put("meta_param", buildMetaQueryParamWithoutAkSk(dsInfo));
        } else {
            argMap.put("auth_param", "");
            argMap.put("meta_param", "");
        }
        return JSON.toJSONString(JSON.toJSONString(argMap));
    }

    // TODO 后期去掉dts-plugin代码
    /**
     * Builds the -cp entries for the launched JVM: inherited classpath, this class's
     * source path, jobtype jars, the resource conf dir (hadoop core-site.xml,
     * yarn-site.xml, ...) and global jobtype jars, de-duplicated in first-seen order.
     *
     * FIX: the previous code called {@code classPath.addAll(helper(classPath))} even
     * though both helpers mutate the list in place and return it, so the whole list
     * was appended to itself twice; the trailing {@code distinct()} hid the
     * duplication. Dropping the redundant self-appends yields the identical result.
     *
     * @return de-duplicated classpath entries
     */
    protected List<String> getClassPaths() {
        List<String> classPath = getFatherClassPaths();
        classPath.add(ClassPathUtils.getSourcePathFromClass(this.getClass()));
        addJobTypeClasspath(classPath); // mutates classPath in place
        String resourceConf = getResourceConfDir();
        classPath.add(resourceConf);
        addGlobalClassPath(classPath); // mutates classPath in place
        debug("Final classpath: " + classPath);
        return classPath.stream().distinct().collect(Collectors.toList());
    }

    /** Directory holding resource config files (hadoop core-site.xml, yarn-site.xml, ...). */
    private String getResourceConfDir() {
        return getJobProps().get("resource.conf.dir");
    }

    /**
     * Appends "jobtype.global.classpath" entries to the given list, skipping
     * entries already present. Mutates and returns the same list.
     */
    private List<String> addGlobalClassPath(List<String> classPath) {
        List<String> globalEntries =
                getSysProps().getStringList("jobtype.global.classpath", null, ",");
        info("Adding jobtype.global.classpath: " + globalEntries);
        if (globalEntries == null) {
            return classPath;
        }
        for (String jar : globalEntries) {
            if (classPath.contains(jar)) {
                continue;
            }
            classPath.add(jar);
        }
        return classPath;
    }

    /**
     * Collects the inherited classpath: "global.classpaths" first, then either the
     * explicit "classpath" entries or — when absent — the *.jar names found in this
     * job's path, plus the optional "kdts.job.resource.dir".
     *
     * Cleaned up from decompiled output (raw ArrayList, index/iterator loops) and
     * FIX: {@code path.listFiles()} returns null when the path is not a readable
     * directory, which previously caused an NPE; now guarded.
     *
     * @return mutable list of classpath entries
     */
    protected List<String> getFatherClassPaths() {
        Collection<String> classPaths = this.getJobProps().getStringList("classpath", Collections.emptyList(), ",");
        List<String> classpathList = new ArrayList<>();
        List<String> globalClasspath = this.getJobProps().getStringList("global.classpaths", Collections.emptyList());

        for (String global : globalClasspath) {
            this.getLog().info("Adding to global classpath:" + global);
            classpathList.add(global);
        }

        if (classPaths == null) {
            // Fall back to scanning the job path for jars.
            File path = new File(this.getPath());
            this.getLog().info("No classpath specified. Trying to load classes from " + path);
            File[] entries = path.listFiles();
            if (entries != null) { // null when the path does not exist or is not a directory
                for (File file : entries) {
                    if (file.getName().endsWith(".jar")) {
                        classpathList.add(file.getName());
                    }
                }
            }
        } else {
            classpathList.addAll(classPaths);
        }

        String resourceDir = this.getJobProps().getString("kdts.job.resource.dir", (String) null);
        if (StringUtil.isNotBlank(resourceDir)) {
            classpathList.add(resourceDir);
        }

        return classpathList;
    }

    /**
     * Appends "jobtype.classpath" entries (resolved to absolute paths, relative
     * entries anchored at the plugin dir) to the given list, skipping duplicates.
     * Mutates and returns the same list.
     */
    private List<String> addJobTypeClasspath(List<String> classPath) {
        String pluginDir = getSysProps().get("plugin.dir");
        List<String> typeClassPath = getSysProps().getStringList("jobtype.classpath", null, ",");
        debug("Adding jobtype.classpath: " + typeClassPath);
        if (typeClassPath == null) {
            return classPath;
        }
        // Filled in when this jobtype is loaded.
        for (String entry : typeClassPath) {
            File jarFile = new File(entry);
            if (!jarFile.isAbsolute()) {
                // Relative entries resolve against the plugin directory.
                jarFile = new File(pluginDir + File.separatorChar + entry);
            }
            String absolute = jarFile.getAbsolutePath();
            if (!classPath.contains(absolute)) {
                classPath.add(absolute);
            }
        }
        return classPath;
    }

    /**
     * Convenience overload: builds the spark-submit arguments from the merged
     * sys/job properties.
     */
    protected String getMainArguments() {
        Props merged = getMergedProps();
        return getMainArguments(merged, getJobProps(), getSysProps(), merged.get("plugin.dir"), getLog());
    }

    /**
     * Overlays job properties on top of system properties in a fresh Props.
     * Job props are applied second — assumed to win on key collisions; confirm
     * Props.putAll overwrite semantics.
     */
    private Props getMergedProps() {
        Props merged = new Props();
        merged.putAll(getSysProps());
        merged.putAll(getJobProps());
        return merged;
    }

    /**
     * Assembles the spark-submit argument list from the merged job/system
     * properties, dispatching each JobArg to its handler.
     *
     * @param mergedProps sysProps overlaid with jobProps
     * @param jobProps    raw job properties (resource sizing)
     * @param sysProps    raw system properties
     * @param pluginDir   plugin install dir, anchors relative jar/file specs
     * @param log         job logger
     * @return arguments joined with JobArg.delimiter
     */
    String getMainArguments(Props mergedProps, Props jobProps, Props sysProps, String pluginDir,
                            Logger log) {

        // if we ever need to recreate a failure scenario in the test case

        List<String> argList = new ArrayList<String>();

        // Special-case handling for DRIVER_JAVA_OPTIONS: the default java opts are
        // communicated through the hadoop conf and added in HadoopSecureSparkWrapper.
        StringBuilder driverJavaOptions = new StringBuilder();
        if (mergedProps.containsKey(JobArg.DRIVER_JAVA_OPTIONS.dtsPropName)) {
            driverJavaOptions.append(" " + mergedProps.getString(JobArg.DRIVER_JAVA_OPTIONS.dtsPropName));
            argList.add(JobArg.DRIVER_JAVA_OPTIONS.sparkParamName);
            argList.add(driverJavaOptions.toString());
        }

        // Populate driver/executor resource properties before emitting the args.
        driverResourceHelper();
        executorResourceHelper(jobProps, log);
        // execution_jar and params must appear, in order, as the last 2 params;
        // JobArg's declaration order guarantees this.
        // NOTE(review): some branches use the mergedProps parameter while others call
        // getMergedProps() — equivalent for the current caller, but confirm before
        // invoking this overload with a custom mergedProps.
        for (JobArg jobArg : JobArg.values()) {
            if (!jobArg.needSpecialTreatment) {
                handleStandardArgument(getMergedProps(), argList, jobArg);
            } else if (jobArg.equals(JobArg.YARN_QUEUE)) {
                handleStandardArgument(mergedProps, argList, jobArg);
            } else if (jobArg.equals(JobArg.JARS)) {
                sparkJarsHelper(getMergedProps(), pluginDir, log, argList);
            } else if (jobArg.equals(JobArg.FILES)) {
                sparkFilesHelper(getMergedProps(), pluginDir, log, argList);
            } else if (jobArg.equals(JobArg.SPARK_CONF_PREFIX)) {
                sparkConfPrefixHelper(getMergedProps(), argList);
            } else if (jobArg.equals(JobArg.DRIVER_JAVA_OPTIONS)) {
                // do nothing because already handled above
            } else if (jobArg.equals(JobArg.SPARK_FLAG_PREFIX)) {
                sparkFlagPrefixHelper(getMergedProps(), argList);
            } else if (jobArg.equals(JobArg.EXECUTION_JAR)) {
                executionJarHelper(getMergedProps(), pluginDir, log, argList);
            } else if (jobArg.equals(JobArg.PARAMS)) {
                paramsHelper(getJobProps(), getSysProps(), argList, log);
            } else if (jobArg.equals(JobArg.SPARK_VERSION)) {
                // do nothing since this arg is not a spark-submit argument;
                // it is only used in getClassPaths()
            }
        }
        return StringUtils.join((Collection<String>) argList, JobArg.delimiter);
    }

    /**
     * Stores the spark driver memory on jobProps. With advanced YARN resource
     * settings enabled the size scales with the configured driver CU count,
     * otherwise the default is used.
     */
    private void driverResourceHelper() {
        boolean advanced = jobProps.containsKey(Contants.YARN_ADVANCED_ENABLE)
                && jobProps.getBoolean(Contants.YARN_ADVANCED_ENABLE);
        if (!advanced) {
            this.jobProps.put(Contants.DRIVER_MEMORY, ResourceUtils.getDriverMemory());
            return;
        }
        // Advanced resource settings: parse the detail json supplied by the user.
        String detailJson = jobProps.getString(Contants.YARN_RESOURCE_DETAIL);
        Map<String, Object> detail =
                JSONObject.toJavaObject(JSONObject.parseObject(detailJson), Map.class);
        int driverCuNum = Integer.parseInt(detail.get(Contants.DRIVER_CU) + "");
        this.jobProps.put(Contants.DRIVER_MEMORY, ResourceUtils.getDriverMemory(driverCuNum));
    }

    /**
     * Computes and stores the spark executor resource settings on jobProps and
     * collects a human-readable summary in {@code resourceStr}.
     *
     * Three cases:
     *  - dynamic allocation + advanced YARN settings: sizes come from the
     *    YARN_RESOURCE_DETAIL json (executor count / CU / resource strategy);
     *  - dynamic allocation only: defaults derived from the CU budget, with one
     *    CU reserved for the driver;
     *  - static allocation: only NUM_EXECUTORS is set.
     *
     * FIX: in the static-allocation branch the summary previously printed
     * {@code getExecutorNum(numCU) - 1}, which differs from the value actually
     * stored ({@code getExecutorNum(numCU - 1)}); the log now reports the stored
     * value.
     *
     * @param jobProps job properties the configuration is read from
     * @param log      job logger
     */
    private void executorResourceHelper(Props jobProps, Logger log) {
        // Total CU (compute-unit) budget granted to the job.
        int numCU = jobProps.getInt(Contants.DTS_RESOURCE_USAGE);
        log.info("资源分配：dts.ext.yarn.resource.usage=" + numCU);
        StringBuilder resourceStr = new StringBuilder();
        if (jobProps.containsKey(Contants.SPARK_DYNAMICALLOCATION_ENABLE) && jobProps
                .getBoolean(Contants.SPARK_DYNAMICALLOCATION_ENABLE)) {
            this.jobProps.put(Contants.SPARK_DYNAMICALLOCATION_MIN_EXECUTORS, ResourceUtils.getMinExecutorNum());
            resourceStr.append(Contants.SPARK_DYNAMICALLOCATION_MIN_EXECUTORS).append(" ")
                    .append(ResourceUtils.getMinExecutorNum()).append(" ");

            boolean yarnAdvancedEnable = jobProps.containsKey(Contants.YARN_ADVANCED_ENABLE)
                    && jobProps.getBoolean(Contants.YARN_ADVANCED_ENABLE);
            if (yarnAdvancedEnable) {
                // Advanced settings: parse the detail json supplied by the user.
                String yarnResourceDetailJsonStr = jobProps
                        .getString(Contants.YARN_RESOURCE_DETAIL);
                JSONObject yarnResourceDetailJson = JSONObject
                        .parseObject(yarnResourceDetailJsonStr);
                Map<String, Object> yarnResourceDetailMap = JSONObject
                        .toJavaObject(yarnResourceDetailJson, Map.class);

                // Parsed for validation only; driver memory itself is handled in
                // driverResourceHelper().
                int driverCuNum = Integer
                        .parseInt(yarnResourceDetailMap.get(Contants.DRIVER_CU) + "");
                // Worker (executor) node count.
                Integer executorNUM = Integer
                        .parseInt(yarnResourceDetailMap.get(Contants.EXECUTOR_NUM) + "");
                this.jobProps.put(Contants.SPARK_DYNAMICALLOCATION_MAX_EXECUTORS,
                        ResourceUtils.getExecutorNum(executorNUM));
                resourceStr.append(Contants.SPARK_DYNAMICALLOCATION_MAX_EXECUTORS).append(" ")
                        .append(ResourceUtils.getExecutorNum(executorNUM)).append(" ");

                // CU per executor node and the CPU-vs-memory priority strategy.
                Integer executorCuNum = Integer
                        .parseInt(yarnResourceDetailMap.get(Contants.EXECUTOR_CU) + "");
                String resourceStrategy =
                        yarnResourceDetailMap.get(Contants.RESOURCE_STRATEGY) + "";

                // Cores per executor: with CPU priority a worker node can run two
                // executors at 4GB each; with memory priority it runs a single
                // executor at 8GB — per the strategy encoded in ResourceUtils.
                this.jobProps.put(Contants.EXECUTOR_CORES,
                        ResourceUtils.getNumCoresPerExecutor(executorCuNum, resourceStrategy));
                resourceStr.append(Contants.EXECUTOR_CORES).append(" ")
                        .append(ResourceUtils.getNumCoresPerExecutor(executorCuNum, resourceStrategy))
                        .append(" ");

                // Memory per executor.
                this.jobProps.put(Contants.EXECUTOR_MEMORY,
                        ResourceUtils.getMemPerExecutorWithUnit(executorCuNum, resourceStrategy));
                resourceStr.append(Contants.EXECUTOR_MEMORY).append(" ").append(
                        ResourceUtils.getMemPerExecutorWithUnit(executorCuNum, resourceStrategy))
                        .append(" ");

            } else {
                // Defaults: one CU reserved for the driver.
                int executorNum = ResourceUtils.getExecutorNum(numCU - 1);
                this.jobProps.put(Contants.SPARK_DYNAMICALLOCATION_MAX_EXECUTORS, executorNum);
                resourceStr.append(Contants.SPARK_DYNAMICALLOCATION_MAX_EXECUTORS).append(" ")
                        .append(executorNum).append(" ");
                log.info("----fansl maxExecutors=" + executorNum);

                // Default cores per executor.
                this.jobProps.put(Contants.EXECUTOR_CORES, ResourceUtils.getNumCoresPerExecutor());
                resourceStr.append(Contants.EXECUTOR_CORES).append(" ")
                        .append(ResourceUtils.getNumCoresPerExecutor()).append(" ");

                // Memory for a single CU; does not scale with the CU count.
                this.jobProps
                        .put(Contants.EXECUTOR_MEMORY, ResourceUtils.getMemPerExecutorWithUnit());
                resourceStr.append(Contants.EXECUTOR_MEMORY).append(" ")
                        .append(ResourceUtils.getMemPerExecutorWithUnit()).append(" ");
            }

        } else {
            // Static allocation: one CU reserved for the driver.
            int executorNum = ResourceUtils.getExecutorNum(numCU - 1);
            this.jobProps.put(Contants.NUM_EXECUTORS, executorNum);
            // FIX: log the value actually stored (was getExecutorNum(numCU) - 1).
            resourceStr.append(Contants.NUM_EXECUTORS).append(" ")
                    .append(executorNum).append(" ");
        }

        if (PluginUtils.isDevelopEnv()) {
            System.out.println("申请资源详情：\n" + resourceStr.toString());
        }
    }

    /**
     * Emits "--param value" for a standard JobArg when the corresponding
     * property is present.
     */
    private void handleStandardArgument(Props jobProps,
                                        List<String> argList, JobArg sparkJobArg) {
        if (!jobProps.containsKey(sparkJobArg.dtsPropName)) {
            return;
        }
        argList.add(sparkJobArg.sparkParamName);
        argList.add(jobProps.getString(sparkJobArg.dtsPropName));
    }

    /**
     * Resolves the --jars spec (wildcards expanded, relative paths anchored at the
     * plugin dir) and appends it to the argument list when non-empty.
     */
    private void sparkJarsHelper(Props jobProps, String pluginDir,
                                 Logger log, List<String> argList) {
        String propSparkJars = jobProps.getString(JobArg.JARS.dtsPropName, "");
        // Absolute specs resolve as-is; relative ones against the plugin dir.
        String basePath = propSparkJars.startsWith(File.separator) ? "" : pluginDir;
        String jarList = JarUtils.resolveWildCardForJarSpec(basePath, propSparkJars, log);
        String formatJarList = HadoopJobUtils.resolvedJarWithAbsolutePathSpec(pluginDir, jarList, log);
        if (!formatJarList.isEmpty()) {
            argList.add(JobArg.JARS.sparkParamName);
            argList.add(formatJarList);
        }
    }

    /**
     * Resolves the --files spec (wildcards expanded, relative paths anchored at the
     * plugin dir) and appends it to the argument list when non-empty.
     */
    private void sparkFilesHelper(Props jobProps, String pluginDir,
                                  Logger log, List<String> argList) {
        String propSparkFiles = jobProps.getString(JobArg.FILES.dtsPropName, "");
        // Absolute specs resolve as-is; relative ones against the plugin dir.
        String basePath = propSparkFiles.startsWith(File.separator) ? "" : pluginDir;
        String fileList = JarUtils.resolveWildCardForJarSpec(basePath, propSparkFiles, log);
        String formatFileList = HadoopJobUtils.resolvedJarWithAbsolutePathSpec(pluginDir, fileList, log);
        if (!formatFileList.isEmpty()) {
            argList.add(JobArg.FILES.sparkParamName);
            argList.add(formatFileList);
        }
    }

    /**
     * Emits one {@code --conf "key=value"} pair per property carrying the spark
     * conf prefix.
     */
    private void sparkConfPrefixHelper(Props jobProps, List<String> argList) {
        Map<String, String> confEntries =
                jobProps.getMapByPrefix(JobArg.SPARK_CONF_PREFIX.dtsPropName);
        for (Map.Entry<String, String> entry : confEntries.entrySet()) {
            argList.add(JobArg.SPARK_CONF_PREFIX.sparkParamName);
            // Escape embedded double quotes so the value survives shell parsing.
            String escaped = entry.getValue().replaceAll("\"", "\\\\\"");
            argList.add(String.format("\"%s=%s\"", entry.getKey(), escaped));
        }
    }

    /**
     * Emits one flag argument per prefix property whose value is "true"
     * (case-insensitive).
     */
    private void sparkFlagPrefixHelper(Props jobProps, List<String> argList) {
        Map<String, String> flags =
                jobProps.getMapByPrefix(JobArg.SPARK_FLAG_PREFIX.dtsPropName);
        for (Map.Entry<String, String> entry : flags.entrySet()) {
            if ("true".equalsIgnoreCase(entry.getValue())) {
                argList.add(JobArg.SPARK_FLAG_PREFIX.sparkParamName + entry.getKey());
            }
        }
    }

    /**
     * Resolves the execution jar name (relative names anchored at the plugin dir)
     * and appends it to the argument list when configured.
     */
    private void executionJarHelper(Props jobProps, String pluginDir,
                                    Logger log, List<String> argList) {
        if (!jobProps.containsKey(JobArg.EXECUTION_JAR.dtsPropName)) {
            return;
        }
        String jarName = jobProps.getString(JobArg.EXECUTION_JAR.dtsPropName);
        String basePath = jarName.startsWith(File.separator) ? "" : pluginDir;
        argList.add(JarUtils.resolveExecutionJarName(basePath, jarName, log));
    }

    /**
     * Splits the raw PARAMS property on single spaces and forwards each token to
     * the argument list. sysProps and log are currently unused but kept for
     * signature compatibility.
     */
    private void paramsHelper(Props jobProps, Props sysProps, List<String> argList, Logger log) {
        if (jobProps.containsKey(JobArg.PARAMS.dtsPropName)) {
            String params = jobProps.getString(JobArg.PARAMS.dtsPropName);
            Collections.addAll(argList, params.split(" "));
        }
    }

    /**
     * Copies all job properties into a map, minus the JobArg-managed keys and any
     * key carrying the spark conf prefix — i.e. the user-level parameters only.
     *
     * Simplified from the original, which used a manual copy loop, a redundant
     * containsKey-then-remove pair and an extra HashSet snapshot for safe removal;
     * {@code keySet().removeIf} produces the identical result.
     *
     * @param jobProps job properties to filter
     * @return mutable map of user-level parameters
     */
    private Map<String, Object> getParamsMapByProps(Props jobProps) {
        Map<String, Object> objectMap = new HashMap<>(jobProps.getMapByPrefix(""));
        for (JobArg jobArg : JobArg.values()) {
            objectMap.remove(jobArg.dtsPropName); // no-op when absent
        }
        objectMap.keySet()
                .removeIf(key -> key.startsWith(JobArg.SPARK_CONF_PREFIX.dtsPropName));
        return objectMap;
    }
    //TODO END

    /**
     * Builds the request parameters for the data-authorization service,
     * including the executor AK/SK credentials.
     *
     * @param dsInfo data-source descriptor (must be non-null)
     * @return populated ParamConfig
     */
    private ParamConfig buildAuthQueryParam(DsInfo dsInfo) {
        ParamConfig cfg = new ParamConfig();
        cfg.setDsId(dsInfo.getDsId());
        cfg.setDsType(dsInfo.getDsType());
        cfg.setDsName(dsInfo.getDsName());
        cfg.setRunEnv(jobProps.get(DtsContants.RUN_ENV));
        cfg.setAuthUrl(jobProps.get(DtsContants.AUTH_SERVER_KEY));
        cfg.setAuthAk(jobProps.get(DtsContants.DTS_EXECUTOR_AK));
        cfg.setAuthSk(jobProps.get(DtsContants.DTS_EXECUTOR_SK));
        cfg.setUrl(jobProps.get(DtsContants.META_RIGHT_QUERY_URL_KEY));
        cfg.setJobProjectId(jobProps.getInt(DtsContants.PROJECT_ID));
        cfg.setJobProjectName(jobProps.getString(DtsContants.PROJECT_NAME, null));
        return cfg;
    }

    /**
     * Builds the request parameters for the data-authorization service without
     * the executor AK/SK credentials (variant embedded into the script args).
     *
     * @param dsInfo data-source descriptor (must be non-null)
     * @return populated ParamConfig
     */
    private ParamConfig buildAuthQueryParamWithoutAkSk(DsInfo dsInfo) {
        ParamConfig cfg = new ParamConfig();
        cfg.setDsId(dsInfo.getDsId());
        cfg.setDsType(dsInfo.getDsType());
        cfg.setDsName(dsInfo.getDsName());
        cfg.setRunEnv(jobProps.get(DtsContants.RUN_ENV));
        cfg.setAuthUrl(jobProps.get(DtsContants.AUTH_SERVER_KEY));
        cfg.setUrl(jobProps.get(DtsContants.META_RIGHT_QUERY_URL_KEY));
        cfg.setJobProjectId(jobProps.getInt(DtsContants.PROJECT_ID));
        cfg.setJobProjectName(jobProps.getString(DtsContants.PROJECT_NAME, null));
        return cfg;
    }

    /**
     * Builds the metadata-service query parameters, including the executor
     * AK/SK credentials (used by getConnectInfoFromDmg).
     *
     * @param dsInfo data-source descriptor (must be non-null)
     * @return populated ParamConfig
     */
    private ParamConfig buildMetaQueryParam(DsInfo dsInfo) {
        ParamConfig cfg = new ParamConfig();
        cfg.setDsId(dsInfo.getDsId());
        cfg.setDsType(dsInfo.getDsType());
        cfg.setDsName(dsInfo.getDsName());
        cfg.setRunEnv(jobProps.get(DtsContants.RUN_ENV));
        cfg.setAuthUrl(jobProps.get(DtsContants.AUTH_SERVER_KEY));
        cfg.setAuthAk(jobProps.get(DtsContants.DTS_EXECUTOR_AK));
        cfg.setAuthSk(jobProps.get(DtsContants.DTS_EXECUTOR_SK));
        cfg.setUrl(jobProps.get(DtsContants.META_QUERY_URL_KEY));
        cfg.setJobProjectId(jobProps.getInt(DtsContants.PROJECT_ID));
        cfg.setJobProjectName(jobProps.getString(DtsContants.PROJECT_NAME, null));
        return cfg;
    }

    /**
     * Builds the metadata-service query parameters without the executor AK/SK
     * credentials (variant embedded into the script args).
     *
     * @param dsInfo data-source descriptor (must be non-null)
     * @return populated ParamConfig
     */
    private ParamConfig buildMetaQueryParamWithoutAkSk(DsInfo dsInfo) {
        ParamConfig cfg = new ParamConfig();
        cfg.setDsId(dsInfo.getDsId());
        cfg.setDsType(dsInfo.getDsType());
        cfg.setDsName(dsInfo.getDsName());
        cfg.setRunEnv(jobProps.get(DtsContants.RUN_ENV));
        cfg.setAuthUrl(jobProps.get(DtsContants.AUTH_SERVER_KEY));
        cfg.setUrl(jobProps.get(DtsContants.META_QUERY_URL_KEY));
        cfg.setJobProjectId(jobProps.getInt(DtsContants.PROJECT_ID));
        cfg.setJobProjectName(jobProps.getString(DtsContants.PROJECT_NAME, null));
        return cfg;
    }

    /**
     * Returns the data-source connection info as a map, falling back to a
     * placeholder ("-"/0/default) when no data source is configured.
     *
     * FIX: removed the dead store {@code connectMap = new HashMap()} that was
     * unconditionally overwritten by the JSON parse, and the redundant
     * reassignment of {@code connectStr} to its initial value in the dsId == -1
     * branch. Behavior is unchanged.
     *
     * @param dsInfo data-source descriptor, may be null
     * @return map parsed from the connection json (raw Map kept for caller compatibility)
     */
    private Map getConnectInfo(DsInfo dsInfo) {
        String defaultConnectStr = "{\"host\":\"-\",\"port\":\"0\",\"username\":\"-\",\"password\":\"-\",\"instanceName\":\"default\"}";
        String connectStr = defaultConnectStr;
        if (dsInfo != null) {
            if (dsInfo.getDsId() == -1) {
                warn("未配置数据源信息");
            } else {
                connectStr = getConnectInfoFromDmg(dsInfo, defaultConnectStr);
            }
        } else {
            info("未配置数据源信息");
        }
        return JSON.parseObject(connectStr, Map.class);
    }


    /**
     * Fetches the data-source connection info from the metadata service.
     *
     * @param dsInfo            data-source descriptor
     * @param defaultConnectStr json returned when the service yields nothing
     * @return connection info json string
     * @throws MetaException when the metadata service call fails
     */
    private String getConnectInfoFromDmg(DsInfo dsInfo, String defaultConnectStr) {
        String connectStr = defaultConnectStr;
        try {
            MetaClient client = new MetaClient();
            ParamConfig paramConfig = buildMetaQueryParam(dsInfo);
            info("paramConfig" + paramConfig.toString());
            connectStr = client.getConnect(paramConfig);
            if (connectStr == null || connectStr.isEmpty()) {
                // Fall back to the placeholder when the service returns nothing.
                connectStr = defaultConnectStr;
                error("未获取到数据源连接信息");
            }
        } catch (Exception e) {
            error(e.getMessage(), e);
            // NOTE(review): the cause is logged but not chained into the thrown
            // exception; if MetaException offers a (String, Throwable) constructor,
            // prefer passing e along.
            throw new MetaException("获取数据源信息异常");
        }
        return connectStr;
    }

    @Override
    public void cancel() throws InterruptedException {
        // Delegate process termination to the parent, then log the cancellation.
        super.cancel();
        info("Cancel called.");
        // Disabled: killing spawned hadoop jobs discovered via the job log.
//        final String logFilePath = super.getJobLogPath();
//        HadoopJobUtils.proxyUserKillAllSpawnedHadoopJobs(logFilePath, getMergedProps(), getLog());
    }
}
