package avicit.bdp.dds.server.worker.task.spark;

import avicit.bdp.common.dto.BdpPrmEngineResourceDTO;
import avicit.bdp.common.dto.CalculateEngineConf;
import avicit.bdp.common.utils.uploads.FileAdapterUtils;
import avicit.bdp.common.utils.uploads.IFileAdapter;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.core.constant.ProcessConstants;
import avicit.bdp.dds.dispatch.process.Property;
import avicit.bdp.dds.dispatch.process.ResourceInfo;
import avicit.bdp.dds.dispatch.task.AbstractParameters;
import avicit.bdp.dds.dispatch.task.spark.SparkParameters;
import avicit.bdp.dds.common.utils.OSUtils;
import avicit.bdp.dds.common.utils.ParameterUtils;
import avicit.bdp.dds.dao.entity.Resource;
import avicit.bdp.dds.server.entity.TaskExecutionContext;
import avicit.bdp.dds.server.utils.ParamUtils;
import avicit.bdp.dds.server.utils.ParseHadoopXmlUtils;
import avicit.bdp.dds.server.utils.SparkArgsUtils;
import avicit.bdp.dds.server.worker.task.AbstractYarnTask;
import com.alibaba.fastjson2.JSON;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * spark task
 */
public class SparkTask extends AbstractYarnTask {

    /**
     * spark-submit command, resolved from the worker's PATH
     */
    private static final String SPARK_COMMAND = "spark-submit";

    /**
     * spark1 command (submit via SPARK_HOME1)
     */
    private static final String SPARK1_COMMAND = "${SPARK_HOME1}/bin/spark-submit";

    /**
     * spark2 command (submit via SPARK_HOME2)
     */
    private static final String SPARK2_COMMAND = "${SPARK_HOME2}/bin/spark-submit";

    /**
     * spark parameters, parsed from the task definition JSON in {@link #init()}
     */
    private SparkParameters sparkParameters;

    /**
     * calculate-engine configuration of the target cluster; stays null when the
     * cluster has no engine resource configured
     */
    CalculateEngineConf calculateEngine = null;

    /**
     * taskExecutionContext
     */
    private final TaskExecutionContext taskExecutionContext;

    public SparkTask(TaskExecutionContext taskExecutionContext, Logger logger) {
        super(taskExecutionContext, logger);
        this.taskExecutionContext = taskExecutionContext;
    }

    /**
     * Parse and validate the spark task parameters, resolve the engine
     * configuration and the main jar, substitute parameter placeholders in the
     * main args and append any generated json-file arguments.
     *
     * @throws RuntimeException if the task parameters are missing or invalid
     */
    @Override
    public void init() {

        logger.info("spark task params {}", taskExecutionContext.getTaskParams());

        sparkParameters = JSON.parseObject(taskExecutionContext.getTaskParams(), SparkParameters.class);

        if (sparkParameters == null || !sparkParameters.checkParameters()) {
            throw new RuntimeException("spark task params is not valid");
        }

        BdpPrmEngineResourceDTO engineResource = processService.findBdpPrmEngineResourceById(sparkParameters.getClusterId());
        if (engineResource != null) {
            calculateEngine = engineResource.getCalculateEngineConf();
        }

        sparkParameters.setQueue(taskExecutionContext.getQueue());

        setMainJarName();

        String args = sparkParameters.getMainArgs();

        if (StringUtils.isNotEmpty(args)) {
            // replace ${...} placeholders with global/local/schedule-time parameters
            Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()),
                    taskExecutionContext.getDefinedParams(),
                    sparkParameters.getLocalParametersMap(),
                    taskExecutionContext.getCmdTypeIfComplement(),
                    taskExecutionContext.getScheduleTime());

            if (paramsMap != null) {
                args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap));
            }
        }

        args = handleTaskJson(args);

        logger.info("args: {}", args);

        if (StringUtils.isNotEmpty(args)) {
            sparkParameters.setMainArgs(args);
        }
    }

    /**
     * Handle the json payloads carried in the task's local parameters and
     * append the resulting file arguments to the main args.
     *
     * @param args main-method arguments, may be null
     * @return the (possibly extended) main-method arguments
     */
    private String handleTaskJson(String args) {
        Map<String, Property> localParametersMap = sparkParameters.getLocalParametersMap();

        // data-quality (measure) task: write env.json / dq.json and append their paths
        if (localParametersMap.containsKey(ProcessConstants.MEASURE_TASK_ENV) && localParametersMap.containsKey(ProcessConstants.MEASURE_TASK_JSON)) {
            if (args == null) {
                args = "";
            }
            // env.json
            Property measureTaskEnv = localParametersMap.get(ProcessConstants.MEASURE_TASK_ENV);
            String envJsonFilePath = writeJson2File("env.json", measureTaskEnv.getValue());
            args += " " + envJsonFilePath;

            // dq.json
            Property measureTaskProperty = localParametersMap.get(ProcessConstants.MEASURE_TASK_JSON);
            String measureTaskJson = measureTaskProperty.getValue();
            if (StringUtils.isNotBlank(measureTaskJson)) {
                String dqJsonFilePath = writeJson2File("dq.json", measureTaskJson);
                args += " " + dqJsonFilePath;
            }
        }

        // calculate task: job.json plus the PROCESS instance id
        if (localParametersMap.containsKey(ProcessConstants.CALCULATE_TASK_JSON)) {
            args = appendJobJson(args,
                    localParametersMap.get(ProcessConstants.CALCULATE_TASK_JSON),
                    String.valueOf(taskExecutionContext.getProcessInstanceId()));
        }

        // increment task: job.json plus the TASK instance id
        if (localParametersMap.containsKey(ProcessConstants.INCREMENT_TASK_JSON)) {
            args = appendJobJson(args,
                    localParametersMap.get(ProcessConstants.INCREMENT_TASK_JSON),
                    String.valueOf(taskExecutionContext.getTaskInstanceId()));
        }
        return args;
    }

    /**
     * Write the job json to disk, append "&lt;fileName&gt; &lt;instanceId&gt;" to the main
     * args and register the file with spark-submit via --files.
     * <p>
     * Note: in yarn-cluster mode --files (and the arg passed to the job) must be
     * the relative file name, otherwise the driver/executors cannot locate it.
     * Note: setOthers() overwrites any previously set value (original behavior).
     *
     * @param args       current main-method arguments, may be null
     * @param property   local parameter holding the json payload
     * @param instanceId process- or task-instance id to pass to the job
     * @return the (possibly extended) main-method arguments
     */
    private String appendJobJson(String args, Property property, String instanceId) {
        if (args == null) {
            args = "";
        }
        if (StringUtils.isNotBlank(property.getValue())) {
            String jsonFilePath = writeJson2File("job.json", property.getValue());
            File jsonFile = new File(jsonFilePath);
            args += " " + jsonFile.getName() + " " + instanceId;
            sparkParameters.setOthers(" --files " + jsonFile.getName());
        }
        return args;
    }

    /**
     * Write the given json to &lt;executePath&gt;/&lt;taskAppId&gt;_&lt;fileName&gt;, creating the
     * file only when it does not already exist. A write failure is logged and
     * left non-fatal (original behavior).
     *
     * @param fileName logical file name, e.g. "job.json"
     * @param json     json content to write
     * @return the absolute path of the json file
     */
    private String writeJson2File(String fileName, String json) {
        String filePath = String.format("%s/%s_%s", taskExecutionContext.getExecutePath(), taskExecutionContext.getTaskAppId(), fileName);
        File jsonFile = new File(filePath);
        if (!jsonFile.exists()) {
            try {
                FileUtils.writeStringToFile(jsonFile, json, StandardCharsets.UTF_8);
            } catch (IOException e) {
                logger.error("failed to write json file {}", filePath, e);
            }
        }
        // Bug fix: previously the bare fileName was returned when the file
        // already existed, while the absolute path was returned otherwise —
        // callers that append the return value to args got inconsistent paths.
        return filePath;
    }

    /**
     * create command
     *
     * @return the full spark-submit command line
     */
    @Override
    protected String buildCommand() {
        // isKerberos already implies calculateEngine != null
        boolean isKerberos = calculateEngine != null && "kerberos".equals(calculateEngine.getAuthType());
        List<String> args = new ArrayList<>();
        args.add("\n");
        // export the hadoop conf dir of the target cluster so spark-submit picks it up
        if (calculateEngine != null && StringUtils.isNotBlank(calculateEngine.getConfigPath())) {
            // handleConfigXml(calculateEngine, args);

            if (OSUtils.isWindows()) {
                args.add("set HADOOP_CONF_DIR=" + calculateEngine.getConfigPath() + "\n");
            } else {
                args.add("export HADOOP_CONF_DIR=" + calculateEngine.getConfigPath() + "\n");
            }
        }

        // spark version
        args.add(SPARK_COMMAND);

        // kerberos support
        if (isKerberos) {
            args.add("--keytab " + calculateEngine.getKeytabPath());
            args.add("--principal " + calculateEngine.getKerberosUser());
        }

        // remaining parameters (deploy mode, resources, main class, main args, ...)
        args.addAll(SparkArgsUtils.buildArgs(sparkParameters));

        String command = ParameterUtils.convertParameterPlaceholders(String.join(" ", args), taskExecutionContext.getDefinedParams());

        logger.info("spark task command : {}", command);

        return command;
    }

    /**
     * Parse hadoop xml config files and add the settings as spark-submit
     * "--conf spark.hadoop.*" arguments. Currently unused (call in
     * buildCommand is commented out).
     * https://spark.apache.org/docs/3.1.2/configuration.html#inheriting-hadoop-cluster-configuration
     *
     * @param calculateEngine engine configuration holding the config path
     * @param args            command argument list to append to
     */
    private void handleConfigXml(CalculateEngineConf calculateEngine, List<String> args) {
        // only these hadoop config files are forwarded to spark
        Set<String> hadoopConfXmlSet = new HashSet<>();
        hadoopConfXmlSet.add("hdfs-site.xml");
        hadoopConfXmlSet.add("core-site.xml");
        String configPath = calculateEngine.getConfigPath();

        if (StringUtils.isNotBlank(configPath)) {
            String template = "--conf spark.hadoop.%s=%s";
            try {
                File configFilePath = new File(configPath);
                if (configFilePath.isDirectory()) {
                    // Bug fix: endsWith("xml") also matched names like "fooxml"
                    File[] files = configFilePath.listFiles(pathname -> pathname.getName().endsWith(".xml"));
                    if (files != null) {
                        for (File file : files) {
                            if (hadoopConfXmlSet.contains(file.getName())) {
                                String xmlFilePath = file.getAbsolutePath();
                                ParseHadoopXmlUtils.parseHadoopXml(args, template, xmlFilePath);
                            }
                        }
                    }
                }
            } catch (Exception e) {
                logger.error("failed to parse hadoop xml config files", e);
            }
        }
    }

    /**
     * Resolve the main jar: when the jar references a server-side resource,
     * copy it into the task execute path and rewrite the resource name to the
     * bare file name. A copy failure is logged and left non-fatal (original
     * behavior).
     */
    @Override
    protected void setMainJarName() {
        // main jar
        ResourceInfo mainJar = sparkParameters.getMainJar();
        if (mainJar != null) {
            // resource path on the file server
            String resourceId = mainJar.getId();
            String resourceName = null;
            if (StringUtils.isBlank(resourceId)) {
                resourceName = mainJar.getRes();
            } else {
                resourceName = resourceId.substring(resourceId.lastIndexOf("/") + 1);
                try {
                    IFileAdapter fileAdapter = FileAdapterUtils.getFileAdapterByProject(taskExecutionContext.getProjectId());
                    fileAdapter.copyServerToLocal(resourceId,
                            taskExecutionContext.getExecutePath() + "/" + resourceName,
                            false, true);
                } catch (IOException e) {
                    logger.error("failed to copy main jar {} to execute path", resourceId, e);
                }
            }

            mainJar.setRes(resourceName);
            sparkParameters.setMainJar(mainJar);
        }
    }

    @Override
    public AbstractParameters getParameters() {
        return sparkParameters;
    }
}
