package avicit.bdp.dds.server.worker.task.algorithm;

import avicit.bdp.common.dto.CalculateEngineConf;
import avicit.bdp.common.utils.SpringApplicationContext;
import avicit.bdp.common.utils.uploads.FileAdapterUtils;
import avicit.bdp.common.utils.uploads.IFileAdapter;
import avicit.bdp.dds.dispatch.enums.ProgramType;
import avicit.bdp.dds.dispatch.process.ResourceInfo;
import avicit.bdp.dds.dispatch.task.AbstractParameters;
import avicit.bdp.dds.dispatch.task.algorithm.AlgorithmParameters;
import avicit.bdp.dds.dispatch.task.spark.SparkParameters;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dds.api.service.ProcessResourceConfigService;
import avicit.bdp.dds.common.Constants;
import avicit.bdp.dds.common.utils.FileUtils;
import avicit.bdp.dds.common.utils.OSUtils;
import avicit.bdp.dds.common.utils.ParameterUtils;
import avicit.bdp.dds.dao.entity.AlgoNodes;
import avicit.bdp.dds.dao.entity.Resource;
import avicit.bdp.dds.dao.entity.SparkConfig;
import avicit.bdp.dds.dao.entity.TaskNodes;
import avicit.bdp.dds.server.entity.TaskExecutionContext;
import avicit.bdp.dds.server.utils.SparkArgsUtils;
import avicit.bdp.dds.server.worker.task.AbstractYarnTask;
import avicit.bdp.dds.service.process.ProcessService;
import com.alibaba.fastjson2.JSONObject;
import org.apache.commons.collections4.MapUtils;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;

/**
 * @author xugb
 * algorithm task
 */
public class AlgorithmTask extends AbstractYarnTask {

    /**
     * spark submit command
     */
    private static final String SPARK_COMMAND = "spark-submit";

    /**
     * Node id of the ad-hoc pyspark script node, which needs a generated driver script.
     */
    private static final String PYSPARK_SCRIPT_NODE_ID = "102";

    /**
     * Node id of the criterion-calculation (判据计算) node, whose task info is
     * persisted into the resource's other_info column.
     */
    private static final String AIRCRAFT_TASK_NODE_ID = "104";

    /**
     * Algorithm parameters parsed from the task params JSON.
     */
    private AlgorithmParameters algorithmParameters;

    /**
     * Spark parameters assembled from the process-level spark config.
     */
    private SparkParameters sparkParameters;

    /**
     * taskExecutionContext
     */
    private final TaskExecutionContext taskExecutionContext;

    private final ProcessService processService;

    private final ProcessResourceConfigService processResourceConfigService;

    /**
     * Calculation engine configuration; may legitimately be null (see buildCommand).
     */
    private CalculateEngineConf calculateEngine = null;

    /**
     * The class (or python module) name actually executed, appended to the main args.
     */
    private String execClassName = "";

    public AlgorithmTask(TaskExecutionContext taskExecutionContext, Logger logger) {
        super(taskExecutionContext, logger);
        this.taskExecutionContext = taskExecutionContext;

        this.processService = SpringApplicationContext.getBean(ProcessService.class);
        this.processResourceConfigService = SpringApplicationContext.getBean(ProcessResourceConfigService.class);
    }

    /**
     * Parses the task parameters, builds the spark submit configuration, generates the
     * algorithm input JSON file, and (for criterion-calculation nodes) records the task
     * info on the target resource.
     *
     * @throws IOException      if the input JSON file cannot be generated
     * @throws RuntimeException if the task parameters are missing or invalid
     */
    @Override
    public void init() throws IOException {
        logger.info("algorithm task params {}", taskExecutionContext.getTaskParams());

        algorithmParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), AlgorithmParameters.class);
        if (algorithmParameters == null || !algorithmParameters.checkParameters()) {
            // BUGFIX: message used to say "spark task params" in the algorithm task
            throw new RuntimeException("algorithm task params is not valid");
        }

        logger.info("algorithm task params {}", JSONUtils.toJson(algorithmParameters));

        calculateEngine = this.taskExecutionContext.getCalculateEngineConf();

        // Build spark parameters from the process-level config, falling back to defaults.
        SparkConfig sparkConfig = processResourceConfigService.getSparkConfigByProcessDefinitionId(taskExecutionContext.getProcessDefineId());
        if (sparkConfig == null) {
            sparkConfig = new SparkConfig();
        }
        sparkParameters = new SparkParameters();
        sparkParameters.setQueue(sparkConfig.getQueue() == null ? taskExecutionContext.getQueue() : sparkConfig.getQueue());
        sparkParameters.setDeployMode(sparkConfig.getDeployMode() == null ? "cluster" : sparkConfig.getDeployMode());
        sparkParameters.setDriverCores(sparkConfig.getDriverCores() == null ? 1 : sparkConfig.getDriverCores());
        sparkParameters.setDriverMemory(sparkConfig.getDriverMemory() == null ? "1g" : sparkConfig.getDriverMemory() + "g");
        sparkParameters.setExecutorCores(sparkConfig.getExecutorCores() == null ? 1 : sparkConfig.getExecutorCores());
        sparkParameters.setExecutorMemory(sparkConfig.getExecutorMemory() == null ? "1g" : sparkConfig.getExecutorMemory() + "g");
        sparkParameters.setNumExecutors(sparkConfig.getNumExecutors() == null ? 1 : sparkConfig.getNumExecutors());

        setMainJarName();

        // Generate the algorithm input JSON file and ship it with --files.
        String jsonFilePath = InputJsonUtils.buildAlgoJsonFile(taskExecutionContext, logger, algorithmParameters, processService);
        String others = " --files " + jsonFilePath;
        sparkParameters.setOthers(others);

        // The main args are the bare file name plus, when present, the class to execute.
        String fileName = jsonFilePath.substring(jsonFilePath.lastIndexOf("/") + 1);
        if (StringUtils.isNotBlank(execClassName)) {
            fileName += " " + execClassName;
        }
        sparkParameters.setMainArgs(fileName);

        // Criterion-calculation nodes record their task info into Resource.other_info.
        updateAircraftTaskInfo();
    }

    /**
     * create command
     *
     * @return the full spark-submit command line with placeholders resolved
     */
    @Override
    protected String buildCommand() {
        // BUGFIX: log the object directly — calling toString() NPEs when the engine is null,
        // and null is explicitly handled below.
        logger.info("engine: {}", calculateEngine);

        boolean isKerberos = calculateEngine != null
                && "kerberos".equals(calculateEngine.getAuthType());

        List<String> args = new ArrayList<>();
        args.add("\n");

        // Export the HADOOP_CONF_DIR environment variable for the spark yarn submit.
        if (calculateEngine != null) {
            String setKeyword = OSUtils.isWindows() ? "set " : "export ";
            args.add(setKeyword + "HADOOP_CONF_DIR=" + calculateEngine.getConfigPath() + "\n");
        }

        args.add(SPARK_COMMAND);

        // kerberos support
        if (isKerberos) {
            args.add("--keytab " + calculateEngine.getKeytabPath());
            args.add("--principal " + calculateEngine.getKerberosUser());
        }

        // other parameters
        args.addAll(SparkArgsUtils.buildArgs(sparkParameters));

        String command = ParameterUtils.convertParameterPlaceholders(
                String.join(" ", args),
                taskExecutionContext.getDefinedParams());

        logger.info("spark task command : {}", command);

        return command;
    }

    /**
     * Resolves the main jar/py resource for the submit: stages the resource into the
     * execution directory, fixes the program type and unified entry class, and sets the
     * class name actually executed.
     */
    @Override
    protected void setMainJarName() {
        ResourceInfo mainJar = sparkParameters.getMainJar();
        if (mainJar == null) {
            mainJar = new ResourceInfo();
        }

        Integer type = algorithmParameters.getType();
        String nodeId = algorithmParameters.getNodeId();

        String resourceName;
        if (AlgorithmParameters.CUSTOM_ALGORITHM_LIBRARY.equals(type)) {
            // Algorithm-library algorithm: the resource is downloaded from the file server.
            resourceName = prepareAlgorithmLibraryResource(nodeId);
        } else {
            // Built-in task node: the resource ships with the server under /libs.
            resourceName = prepareTaskNodeResource(nodeId);
        }

        if (PYSPARK_SCRIPT_NODE_ID.equals(nodeId)) {
            // Ad-hoc pyspark node: generate the driver script from the user's raw script.
            resourceName = generatePysparkScript(resourceName);
        }

        if (sparkParameters.getProgramType() == ProgramType.PYTHON) {
            // Ship the dependency jars under libs/ alongside the python job.
            String jarsPath = FileUtils.copyResourceDirectory(taskExecutionContext.getExecutePath(), "libs", "jar");
            sparkParameters.setJars(jarsPath);
        }

        mainJar.setRes(resourceName);
        sparkParameters.setMainJar(mainJar);
    }

    /**
     * Stages an algorithm-library resource: downloads it, applies its program type and
     * records its executable class name.
     *
     * @param nodeId algorithm node id
     * @return the resource name (relative path) to submit
     * @throws RuntimeException if the referenced resource does not exist
     */
    private String prepareAlgorithmLibraryResource(String nodeId) {
        AlgoNodes algoNodes = processService.getTaskAlgoNodesById(nodeId);
        Resource resource = processService.getResourceById(algoNodes.getResourceId());
        if (resource == null) {
            logger.error("resource id: {} not exist", algoNodes.getResourceId());
            throw new RuntimeException(String.format("resource id: %s not exist", algoNodes.getResourceId()));
        }
        downloadResource(taskExecutionContext.getExecutePath(), resource.getFullName());

        applyProgramType(algoNodes.getProgramType());
        execClassName = algoNodes.getClassName();

        return resource.getFullName().replaceFirst("/", "");
    }

    /**
     * Stages a built-in task-node resource: copies it from the server's /libs directory,
     * applies its program type and records its executable class name.
     *
     * @param nodeId task node id
     * @return the resource name (relative path) to submit
     */
    private String prepareTaskNodeResource(String nodeId) {
        TaskNodes taskNodes = processService.getTaskNodesById(nodeId);
        // Built-in node files ship with the server, so copy instead of downloading.
        FileUtils.copyResourceFile(taskExecutionContext.getExecutePath(), "/libs/" + taskNodes.getNodeFilePath());

        applyProgramType(taskNodes.getProgramType());
        execClassName = taskNodes.getExecClassName();

        return "libs/" + taskNodes.getNodeFilePath();
    }

    /**
     * Sets the spark program type (defaulting to JAVA when unspecified), sets the unified
     * entry class for that type, and for PYTHON copies the pyspark driver file into the
     * execution directory.
     *
     * @param programTypeCode nullable program type code from the node definition
     */
    private void applyProgramType(Integer programTypeCode) {
        ProgramType programType = programTypeCode == null ? ProgramType.JAVA : ProgramType.of(programTypeCode);
        sparkParameters.setProgramType(programType);
        if (programType == ProgramType.PYTHON) {
            // Unified pyspark entry file, copied so spark-submit can launch it.
            sparkParameters.setMainClass(Constants.ALGORITHM_PYSPARK_MAINFILE);
            FileUtils.copyResourceFile(taskExecutionContext.getExecutePath(), Constants.ALGORITHM_PYSPARK_MAINFILE);
        } else {
            // Unified java entry class of the algorithm package (also the non-java fallback,
            // matching the original identical else-if/else branches).
            sparkParameters.setMainClass(Constants.ALGORITHM_JAVA_MAINCLASS);
        }
    }

    /**
     * Generates the tmp_pyspark.py driver script from the template and the user's raw
     * script, and appends it to the submitted resources.
     *
     * @param resourceName the resource name staged so far
     * @return the resource name with the generated script appended
     * @throws RuntimeException if the script file cannot be written
     */
    private String generatePysparkScript(String resourceName) {
        String template = FileUtils.readResourceFile(Constants.ALGORITHM_PYSPARK_TMPFILE);
        Map<String, Object> infoMap = JSONUtils.toObjectMap(algorithmParameters.getSettingParams());
        String rawScript = MapUtils.getString(infoMap, "rawScript", "");
        String script = template.replace("${rawScript}", rawScript);
        List<String> scriptLines = Arrays.asList(script.split("\n"));
        try {
            File scriptFile = new File(taskExecutionContext.getExecutePath(), "tmp_pyspark.py");
            org.apache.commons.io.FileUtils.writeLines(scriptFile, scriptLines);
        } catch (IOException e) {
            // BUGFIX: previously only printed the stack trace and continued, letting the
            // submit fail later without the script; fail fast with the cause preserved.
            logger.error("failed to write tmp_pyspark.py", e);
            throw new RuntimeException("failed to write tmp_pyspark.py: " + e.getMessage(), e);
        }
        execClassName = "tmp_pyspark.tmp_pyspark";
        return resourceName + ",tmp_pyspark.py";
    }

    /**
     * Downloads a resource from the project's file server into the execution directory,
     * skipping the download when the file already exists locally.
     *
     * @param execLocalPath local execution directory
     * @param fullName      full resource name on the file server
     * @throws RuntimeException if the download fails
     */
    private void downloadResource(String execLocalPath, String fullName) {
        File resFile = new File(execLocalPath, fullName);
        if (!resFile.exists()) {
            try {
                IFileAdapter fileAdapter = FileAdapterUtils.getFileAdapterByProject(taskExecutionContext.getProjectId());
                fileAdapter.copyServerToLocal(fullName,
                        execLocalPath + File.separator + fullName,
                        false,
                        true);
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
                // BUGFIX: preserve the cause chain instead of only the message.
                throw new RuntimeException(e.getMessage(), e);
            }
        } else {
            logger.info("file : {} exists ", resFile.getName());
        }
    }

    @Override
    public AbstractParameters getParameters() {
        return sparkParameters;
    }

    /**
     * For criterion-calculation tasks, updates the associated Resource's other_info field
     * (keyed by resourceId) with:
     * (1) data info: dataOutTableId
     * (2) criterion calculation result info: tagOutTableId
     * (3) task management info: processDefineId / processInstanceId / taskInstanceId
     * (4) criterion info: ruleId
     */
    private void updateAircraftTaskInfo() {
        // step1: only nodeId=104 (criterion-calculation task) updates the task state;
        // constant-first equals is also null-safe on the node id.
        if (!AIRCRAFT_TASK_NODE_ID.equals(this.algorithmParameters.getNodeId())) {
            return;
        }

        JSONObject params = new JSONObject();

        // step2: task management info
        setTaskMngInfo(params);

        // step3: data table and tag table
        setCalculateResultInfo(params);

        // step4: criterion (rule) info
        setRuleInfo(params);

        // step5: persist into Resource.otherInfo
        updateResouceInfo(params);
    }

    /**
     * Puts the task management identifiers (process definition/instance, task instance)
     * into the criterion-calculation task info.
     *
     * @param params accumulator for the other_info payload
     */
    private void setTaskMngInfo(JSONObject params) {
        params.put(Constants.PROCESS_DEFINE_ID_KEY, this.taskExecutionContext.getProcessDefineId());
        params.put(Constants.PROCESS_INSTANCE_ID_KEY, this.taskExecutionContext.getProcessInstanceId());
        params.put(Constants.TASK_INSTANCE_ID_KEY, this.taskExecutionContext.getTaskInstanceId());
    }

    /**
     * Puts the calculation result info (data output table id and tag output table id)
     * into the criterion-calculation task info.
     *
     * @param params accumulator for the other_info payload
     * @throws RuntimeException if any required output configuration is missing
     */
    private void setCalculateResultInfo(JSONObject params) {
        String outConf = this.algorithmParameters.getOutputParams();
        if (StringUtils.isBlank(outConf)) {
            throw new RuntimeException("未配置判据计算任务outputParams信息.");
        }

        Map<String, Object> outMap = JSONUtils.toObjectMap(outConf);

        // data table info
        String modelConfig = MapUtils.getString(outMap, Constants.DATA_OUT_KEY);
        if (StringUtils.isBlank(modelConfig)) {
            throw new RuntimeException("未配置判据计算任务outputParams中dataOutput信息.");
        }

        Map<String, Object> modelOutMap = JSONUtils.toObjectMap(modelConfig);
        String dataOutTableId = MapUtils.getString(modelOutMap, Constants.TABLE_ID_KEY);
        if (StringUtils.isBlank(dataOutTableId)) {
            throw new RuntimeException("未配置判据计算任务dataOutput中tableId信息.");
        }

        params.put(Constants.DATA_OUT_TABLE_ID_KEY, dataOutTableId);

        // tag table info
        String tagConfig = MapUtils.getString(outMap, Constants.TAG_OUT_KEY);
        if (StringUtils.isBlank(tagConfig)) {
            throw new RuntimeException("未配置判据计算任务outputParams中tagOutput信息.");
        }

        Map<String, Object> tagOutMap = JSONUtils.toObjectMap(tagConfig);
        String tagOutTableId = MapUtils.getString(tagOutMap, Constants.TABLE_ID_KEY);
        if (StringUtils.isBlank(tagOutTableId)) {
            throw new RuntimeException("未配置判据计算任务tagOutput中tableId信息.");
        }

        params.put(Constants.TAG_OUT_TABLE_ID_KEY, tagOutTableId);
    }

    /**
     * Puts the associated criterion (rule) ids into the criterion-calculation task info.
     *
     * @param params accumulator for the other_info payload
     * @throws RuntimeException if the rules configuration is missing
     */
    private void setRuleInfo(JSONObject params) {
        String settingConf = this.algorithmParameters.getSettingParams();
        if (StringUtils.isBlank(settingConf)) {
            throw new RuntimeException("未配置判据计算任务settingParams信息.");
        }

        Map<String, Object> settingMap = JSONUtils.toObjectMap(settingConf);
        String ruleStrs = MapUtils.getString(settingMap, Constants.RULES_KEY);
        if (StringUtils.isBlank(ruleStrs)) {
            throw new RuntimeException("未配置判据计算任务settingParams中rules信息.");
        }
        params.put(Constants.RULE_IDS_KEY, ruleStrs);
    }

    /**
     * Persists the collected task info into the otherInfo field of the Resource referenced
     * by the task's inputParams.
     *
     * @param params the other_info payload to store
     * @throws RuntimeException if the input configuration or the resource is missing
     */
    private void updateResouceInfo(JSONObject params) {
        String inputConf = this.algorithmParameters.getInputParams();
        if (StringUtils.isBlank(inputConf)) {
            throw new RuntimeException("未配置判据计算任务inputParams信息.");
        }

        Map<String, Object> inputMap = JSONUtils.toObjectMap(inputConf);
        String resourceId = MapUtils.getString(inputMap, Constants.RESOURCE_ID_KEY);
        if (StringUtils.isBlank(resourceId)) {
            throw new RuntimeException("未配置判据计算任务inputParams中resourceId信息.");
        }

        Resource resource = this.processService.getResourceById(resourceId);
        if (resource == null) {
            throw new RuntimeException("未找到资源信息,resourceId=" + resourceId);
        }

        resource.setOtherInfo(params.toJSONString());
        this.processService.updateResource(resource);
    }
}
