package avicit.bdp.dds.server.worker.task.spark;

import avicit.bdp.common.dto.BdpPrmEngineResourceDTO;
import avicit.bdp.common.dto.CalculateEngineConf;
import avicit.bdp.common.service.service.FileOperateCommonService;
import avicit.bdp.common.utils.SpringApplicationContext;
import avicit.bdp.common.utils.file.StorageConfThreadLocal;
import avicit.bdp.core.constant.Constants;
import avicit.bdp.dds.dispatch.process.Property;
import avicit.bdp.dds.dispatch.process.ResourceInfo;
import avicit.bdp.dds.dispatch.task.AbstractParameters;
import avicit.bdp.dds.dispatch.task.spark.SparkParameters;
import avicit.bdp.core.util.json.JSONUtils;
import avicit.bdp.dds.common.utils.ParameterUtils;
import avicit.bdp.dds.dao.entity.Resource;
import avicit.bdp.dds.server.entity.TaskExecutionContext;
import avicit.bdp.dds.server.entity.YarnSubmitConditions;
import avicit.bdp.dds.server.utils.ParamUtils;
import avicit.bdp.dds.server.utils.SparkArgsUtils;
import avicit.bdp.dds.server.worker.adapter.yarn.SparkOnYarnAdapter;
import avicit.bdp.dds.server.worker.task.AbstractYarnClientTask;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.slf4j.Logger;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Spark task submitted through a YARN client.
 *
 * <p>Parses {@link SparkParameters} from the task context, resolves the main jar
 * resource to a local file, and assembles the {@link YarnSubmitConditions}
 * (resource-manager address, kerberos auth, HDFS addressing) used for the submit.
 *
 * @author DIGITAL-MAYANJ
 */
public class SparkOnYarnTask extends AbstractYarnClientTask {

    /**
     * spark parameters, parsed from the task params JSON in {@link #init()}
     */
    private SparkParameters sparkParameters;

    /**
     * service used to look up engine resources and copy files from remote storage
     */
    private final FileOperateCommonService fileOperateCommonService;

    /**
     * taskExecutionContext
     */
    private final TaskExecutionContext taskExecutionContext;

    public SparkOnYarnTask(TaskExecutionContext taskExecutionContext, Logger logger) {
        super(taskExecutionContext, logger);

        this.yarnAdapter = SparkOnYarnAdapter.newInstance();
        this.taskExecutionContext = taskExecutionContext;
        this.fileOperateCommonService = SpringApplicationContext.getBean(FileOperateCommonService.class);
    }

    /**
     * Parses and validates the spark parameters, resolves the main jar to a local
     * file, and substitutes user-defined parameter placeholders into the main args.
     *
     * @throws RuntimeException if the task params are missing or fail validation
     */
    @Override
    public void init() {

        logger.info("spark task params {}", taskExecutionContext.getTaskParams());

        sparkParameters = JSONUtils.parseObject(taskExecutionContext.getTaskParams(), SparkParameters.class);

        if (sparkParameters == null || !sparkParameters.checkParameters()) {
            throw new RuntimeException("spark task params is not valid");
        }
        sparkParameters.setQueue(taskExecutionContext.getQueue());

        setMainJarName();

        if (StringUtils.isNotEmpty(sparkParameters.getMainArgs())) {
            String args = sparkParameters.getMainArgs();

            // replace placeholders in main args with user-defined / local /
            // complement-schedule parameters
            Map<String, Property> paramsMap = ParamUtils.convert(ParamUtils.getUserDefParamsMap(taskExecutionContext.getDefinedParams()),
                    taskExecutionContext.getDefinedParams(),
                    sparkParameters.getLocalParametersMap(),
                    taskExecutionContext.getCmdTypeIfComplement(),
                    taskExecutionContext.getScheduleTime());

            if (paramsMap != null) {
                args = ParameterUtils.convertParameterPlaceholders(args, ParamUtils.convert(paramsMap));
            }
            sparkParameters.setMainArgs(args);
        }
    }

    /**
     * Builds the YARN submit conditions: main class/jar, compute-engine
     * (cluster) addressing, optional kerberos credentials, and the remaining
     * spark-submit arguments.
     *
     * @return the populated submit conditions
     */
    @Override
    protected YarnSubmitConditions buildYarnSubmitConditions() {

        YarnSubmitConditions conditions = new YarnSubmitConditions();

        conditions.setMainClass(sparkParameters.getMainClass());
        conditions.setApplicationJar(sparkParameters.getMainJar().getRes());

        // resolve the compute engine (cluster) configuration, if one is bound
        String clusterId = sparkParameters.getClusterId();
        if (StringUtils.isNotBlank(clusterId)) {
            BdpPrmEngineResourceDTO engineResource = fileOperateCommonService.findBdpPrmEngineResourceById(clusterId);
            if (engineResource != null && engineResource.getCalculateEngineConf() != null) {
                CalculateEngineConf calculateEngine = engineResource.getCalculateEngineConf();
                yarnAddress = calculateEngine.getYarnAddress();
                conditions.setYarnResourcemanagerAddress(yarnAddress);

                // kerberos authentication, when the engine requires it
                if (Constants.KERBEROS.equalsIgnoreCase(calculateEngine.getAuthType())) {
                    conditions.setKeytab(calculateEngine.getKeytabPath());
                    conditions.setPrincipal(calculateEngine.getKerberosUser());
                    conditions.setAuthType(Constants.KERBEROS);
                }

                handleConfigXml(conditions, calculateEngine);
            }
        }

        // other spark-submit arguments
        List<String> otherArgs = new ArrayList<>(SparkArgsUtils.buildArgs(sparkParameters));
        conditions.setOtherArgs(otherArgs);

        // TODO: apply user-defined parameter placeholder substitution to otherArgs

        logger.info("spark conditions: {}", JSONUtils.toJsonString(conditions));
        return conditions;
    }

    /**
     * Resolves HDFS addressing ({@code dfs.nameservices} / {@code fs.defaultFS})
     * from the hadoop config xml files (core-site.xml, hdfs-site.xml) found
     * under the engine's configured path. Best effort: on any failure the
     * addressing is simply left unset.
     *
     * @param conditions      submit conditions to populate
     * @param calculateEngine engine config carrying the hadoop config path
     */
    private void handleConfigXml(YarnSubmitConditions conditions, CalculateEngineConf calculateEngine) {
        // only these files carry the HDFS addressing we need
        // (plain HashSet instead of double-brace init, which creates an
        // anonymous inner class holding a reference to this task instance)
        Set<String> hadoopConfXmlSet = new HashSet<>();
        hadoopConfXmlSet.add("hdfs-site.xml");
        hadoopConfXmlSet.add("core-site.xml");

        String configPath = calculateEngine.getConfigPath();
        if (StringUtils.isNotBlank(configPath)) {
            try {
                File configFilePath = new File(configPath);
                if (configFilePath.isDirectory()) {
                    // ".xml" (was "xml") so names such as "fooxml" are not matched
                    File[] files = configFilePath.listFiles(pathname -> pathname.getName().endsWith(".xml"));
                    if (files != null) {
                        Configuration hdfsConf = new Configuration();
                        for (File file : files) {
                            if (hadoopConfXmlSet.contains(file.getName())) {
                                // Configuration.addResource(String) treats its argument
                                // as a CLASSPATH resource name, so the absolute path was
                                // silently ignored; pass the file URL so the local file
                                // is actually loaded
                                hdfsConf.addResource(file.toURI().toURL());
                            }
                        }

                        conditions.setFsNameservices(hdfsConf.get("dfs.nameservices"));
                        conditions.setDefaultFS(hdfsConf.get("fs.defaultFS"));
                    }
                }
            } catch (Exception e) {
                logger.error("failed to parse hadoop config xml files from path: {}", configPath, e);
            }
        }
    }

    /**
     * Resolves the main jar: unwraps the resource id, looks the resource up,
     * copies it from remote storage into the local execute path, and writes the
     * relative resource name back into the spark parameters.
     *
     * @throws RuntimeException if the resource does not exist or cannot be
     *                          copied to the local execute path
     */
    @Override
    protected void setMainJarName() {
        // main jar
        ResourceInfo mainJar = sparkParameters.getMainJar();
        if (mainJar != null) {
            String resourceId = mainJar.getId();
            // null guard: ids may be absent; presumably ids arrive wrapped like
            // ["xxx"] and are unwrapped to the bare id — TODO confirm with caller
            if (resourceId != null && resourceId.startsWith(Constants.LEFT_SQUARE_BRACKET)) {
                resourceId = resourceId.substring(2, resourceId.length() - 2);
            }
            String resourceName;
            if (StringUtils.isBlank(resourceId)) {
                resourceName = mainJar.getRes();
            } else {
                Resource resource = processService.getResourceById(resourceId);
                if (resource == null) {
                    logger.error("resource id: {} not exist", resourceId);
                    throw new RuntimeException(String.format("resource id: %s not exist", resourceId));
                }
                resourceName = resource.getFullName().replaceFirst(Constants.SINGLE_SLASH, "");

                try {
                    fileOperateCommonService.getStorageAndSetThreadLocal(taskExecutionContext.getProjectId());
                    fileOperateCommonService.copyServerToLocal(resource.getFullName(),
                            taskExecutionContext.getExecutePath() + resource.getFullName(),
                            false, true);

                } catch (IOException e) {
                    // was e.printStackTrace(): a missing main jar must fail the task
                    // now, with the cause preserved, rather than mysteriously later
                    logger.error("copy main jar {} to local failed", resource.getFullName(), e);
                    throw new RuntimeException(
                            String.format("copy main jar %s to local failed", resource.getFullName()), e);
                } finally {
                    StorageConfThreadLocal.removeThreadLocal();
                }
            }

            mainJar.setRes(resourceName);
            sparkParameters.setMainJar(mainJar);
        }
    }

    @Override
    public AbstractParameters getParameters() {
        return sparkParameters;
    }
}
