package cn.lite.flow.executor.kernel.container.impl;

import cn.lite.flow.common.conf.HadoopConfig;
import cn.lite.flow.common.model.Tuple;
import cn.lite.flow.common.model.consts.CommonConstants;
import cn.lite.flow.common.utils.JSONUtils;
import cn.lite.flow.common.utils.YarnHolder;
import cn.lite.flow.executor.common.consts.Constants;
import cn.lite.flow.executor.common.exception.ExecutorRuntimeException;
import cn.lite.flow.executor.common.utils.ExecutorLoggerFactory;
import cn.lite.flow.executor.kernel.conf.ExecutorMetadata;
import cn.lite.flow.executor.kernel.utils.JobUtils;
import cn.lite.flow.executor.kernel.utils.YarnUtils;
import cn.lite.flow.executor.model.basic.ExecutorJob;
import cn.lite.flow.executor.model.consts.ContainerStatus;
import cn.lite.flow.executor.model.kernel.AsyncContainer;
import cn.lite.flow.executor.service.ExecutorJobService;
import cn.lite.flow.executor.service.utils.ExecutorServiceUtils;
import com.alibaba.fastjson.JSONObject;
import com.google.common.collect.Lists;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.logaggregation.LogCLIHelpers;
import org.apache.spark.SparkConf;
import org.apache.spark.deploy.yarn.Client;
import org.apache.spark.deploy.yarn.ClientArguments;
import org.slf4j.Logger;

import java.io.File;
import java.io.FileOutputStream;
import java.io.PrintStream;
import java.util.List;

/**
 * @description: spark on yarn
 * @author: yueyunyue
 * @create: 2019-04-10
 **/
public class SparkOnYarnContainer extends AsyncContainer {

    /** Per-job logger; writes into the job's own log file (see {@code JobUtils.getLogger}). */
    protected final Logger logger;

    /** Spark configuration assembled in {@link #run()} right before submission. */
    private SparkConf sparkConf;

    public SparkOnYarnContainer(ExecutorJob executorJob) {
        super(executorJob);
        this.logger = JobUtils.getLogger(executorJob);
    }

    /**
     * Parse the yarn application id currently bound to this job.
     *
     * @return the {@link ApplicationId}, or {@code null} when the job has not
     *         been submitted to yarn yet
     */
    private ApplicationId getApplicationId() {
        String appId = this.executorJob.getApplicationId();
        return YarnUtils.convertApplicationId(appId);
    }

    /**
     * Poll yarn for the application's state and propagate a terminal state
     * (FINISHED / FAILED / KILLED) to the job service and this container's
     * status. On any terminal state the yarn aggregated logs are dumped into
     * the local job log. Non-terminal states are ignored until the next poll;
     * errors are logged and swallowed so the status-check loop keeps running.
     */
    @Override
    public void checkStatus() {
        ApplicationId applicationId = this.getApplicationId();
        if (applicationId == null) {
            // not yet submitted to yarn, nothing to check
            return;
        }
        try {
            ApplicationReport applicationReport = YarnHolder.getYarnClient().getApplicationReport(applicationId);
            ExecutorJobService executorJobService = ExecutorServiceUtils.getExecutorJobService();
            YarnApplicationState yarnApplicationState = applicationReport.getYarnApplicationState();

            logger.info("check job:{} applicationId:{} status is {}",
                    executorJob.getId(), executorJob.getApplicationId(), yarnApplicationState.name());

            boolean isDumpLog = false;
            switch (yarnApplicationState) {
                case FINISHED:
                    executorJobService.success(this.executorJob.getId());
                    this.setStatus(ContainerStatus.SUCCESS);
                    isDumpLog = true;
                    break;
                case FAILED:
                    // surface yarn's diagnostics as the failure reason
                    executorJobService.fail(this.executorJob.getId(), applicationReport.getDiagnostics());
                    this.setStatus(ContainerStatus.FAIL);
                    isDumpLog = true;
                    break;
                case KILLED:
                    executorJobService.fail(this.executorJob.getId(), "killed by other");
                    this.setStatus(ContainerStatus.FAIL);
                    isDumpLog = true;
                    break;
                default:
                    // NEW / SUBMITTED / ACCEPTED / RUNNING: wait for the next poll
                    return;
            }

            if (isDumpLog) {
                this.dumpLog2Local();
            }

        } catch (Throwable e) {
            logger.error("job:{} check status error", executorJob.getId(), e);
        }
    }

    /**
     * Build the {@link SparkConf} for this job from its JSON config:
     * application name, yarn queue, driver/executor resources, executor
     * parallelism (dynamic allocation when enabled cluster-wide) and the
     * files distributed to the yarn containers (hadoop/hive configs plus
     * the generated per-job config file).
     *
     * @param configObj job configuration; must already contain {@code Constants.JOB_CONFIG_PATH}
     * @return a fully populated spark configuration
     */
    private SparkConf initSparkConf(JSONObject configObj) {
        String jobName = configObj.getString(CommonConstants.PARAM_EXECUTOR_JOB_NAME);
        String yarnQueue = configObj.getString(CommonConstants.SPARK_PARAM_YARN_QUEUE);
        String instanceNum = configObj.getString(CommonConstants.SPARK_PARAM_INSTANCE_NUM);

        SparkConf conf = new SparkConf();
        conf.setAppName(jobName);
        conf.set("spark.app.name", jobName);
        conf.set("spark.yarn.queue", yarnQueue);

        conf.set("spark.driver.cores", configObj.getString(CommonConstants.SPARK_PARAM_DRIVER_CORES));
        conf.set("spark.driver.memory", configObj.getString(CommonConstants.SPARK_PARAM_DRIVER_MEMORY) + CommonConstants.SPARK_PARAM_MEMORY_UNIT);
        conf.set("spark.executor.cores", configObj.getString(CommonConstants.SPARK_PARAM_EXECUTOR_CORES));
        conf.set("spark.executor.memory", configObj.getString(CommonConstants.SPARK_PARAM_EXECUTOR_MEMORY) + CommonConstants.SPARK_PARAM_MEMORY_UNIT);

        // Executor parallelism: with cluster-wide dynamic allocation enabled the
        // configured instance number becomes the upper bound, otherwise it is fixed.
        // String.valueOf keeps the historical "null" fallback when the key is absent.
        Boolean isDynamicAllocation = HadoopConfig.getHadoopConf().getIsDynamicAllocation();
        if (isDynamicAllocation != null && isDynamicAllocation) {
            conf.set("spark.shuffle.service.enabled", "true");
            conf.set("spark.dynamicAllocation.enabled", "true");
            conf.set("spark.dynamicAllocation.minExecutors", "1");
            conf.set("spark.dynamicAllocation.maxExecutors", String.valueOf(instanceNum));
        } else {
            conf.set("spark.executor.instances", String.valueOf(instanceNum));
        }

        /**
         * hadoop/hive config files plus the generated per-job config file are
         * shipped to every yarn container
         */
        String hadoopFiles = HadoopConfig.getHadoopConf().getSparkYarnDistFiles();
        conf.set("spark.yarn.dist.files", hadoopFiles + CommonConstants.COMMA + configObj.getString(Constants.JOB_CONFIG_PATH));

        return conf;
    }

    /**
     * Build the spark-on-yarn client arguments: main class, main jar, and the
     * generated job config file name passed as the single user argument.
     *
     * @param configObj job configuration; must already contain {@code Constants.CONFIG_FILE_NAME}
     * @return arguments for the spark yarn {@link Client}
     */
    private ClientArguments getArgs(JSONObject configObj) {
        List<String> argList = Lists.newArrayList(
                Constants.YARN_PARAM_CLASS, configObj.getString(CommonConstants.SPARK_PARAM_YARN_MAIN_CLASS),
                Constants.YARN_PARAM_JAR, configObj.getString(CommonConstants.SPARK_PARAM_YARN_MAIN_JAR),
                Constants.YARN_PARAM_ARG, configObj.getString(Constants.CONFIG_FILE_NAME));
        return new ClientArguments(argList.toArray(new String[0]));
    }

    /**
     * Submit the job to yarn: write the job config to the workspace, build the
     * spark configuration and client arguments, submit the application, mark
     * the container RUNNING and persist the returned application id.
     *
     * @throws Exception if the spark yarn client fails to submit
     */
    @Override
    public void run() throws Exception {

        ExecutorJob executorJob = this.getExecutorJob();

        // Fix: a blank config previously left configObj null and caused an NPE
        // on configObj.put(...) below; fall back to an empty object instead.
        String config = executorJob.getConfig();
        JSONObject configObj = StringUtils.isNotBlank(config)
                ? JSONObject.parseObject(config)
                : new JSONObject();

        // Materialize the config on disk: the file is both distributed to the
        // yarn containers (initSparkConf) and named as a user arg (getArgs).
        Tuple<String, String> configTuple = this.generateConfigFile(JSONUtils.toJSONStringWithoutCircleDetect(configObj));
        String configName = configTuple.getA();
        String configPath = configTuple.getB();

        configObj.put(Constants.CONFIG_FILE_NAME, configName);
        configObj.put(Constants.JOB_CONFIG_PATH, configPath);

        /**
         * init spark conf and user arguments
         */
        this.sparkConf = initSparkConf(configObj);
        ClientArguments clientArgs = getArgs(configObj);

        /**
         * submit to yarn and remember the application id for later
         * status checks / kill
         */
        Client client = new Client(clientArgs, this.sparkConf);
        ApplicationId applicationId = client.submitApplication();
        String appId = applicationId.toString();
        logger.info("{} get yarn applicationId:{}", executorJob.getId(), appId);

        /**
         * mark running and persist the job -> applicationId binding
         */
        this.setStatus(ContainerStatus.RUNNING);
        executorJob.setApplicationId(appId);
        ExecutorServiceUtils.getExecutorJobService().bindApplicationIdAndRun(executorJob.getId(), appId);
    }

    /**
     * Kill the yarn application backing this job. A job that was never
     * submitted (no application id yet) is a silent no-op.
     *
     * @throws Exception wrapped as {@link ExecutorRuntimeException} on yarn failure
     */
    @Override
    public void kill() throws Exception {
        ApplicationId applicationId = this.getApplicationId();
        if (applicationId == null) {
            return;
        }
        try {
            YarnHolder.getYarnClient().killApplication(applicationId);
        } catch (Throwable e) {
            logger.error("kill job:{} error", executorJob.getId(), e);
            // NOTE(review): only the message survives here, the stack trace is
            // lost to callers; pass the cause too if ExecutorRuntimeException
            // offers a (String, Throwable) constructor
            throw new ExecutorRuntimeException(e.getMessage());
        }
    }

    /**
     * Write the serialized job config into the job workspace as
     * {@code <workspace>/<jobId><CONFIG_FILE_SUFFIX>}.
     *
     * @param config serialized job configuration (JSON)
     * @return tuple of (config file name, absolute config file path)
     * @throws ExecutorRuntimeException when the file cannot be written
     */
    private Tuple<String, String> generateConfigFile(String config) {
        String workDirPath = ExecutorMetadata.getJobWorkspace(executorJob.getId());
        String configFileName = executorJob.getId() + Constants.CONFIG_FILE_SUFFIX;
        String configFilePath = workDirPath + CommonConstants.FILE_SPLIT + configFileName;
        try {
            FileUtils.write(new File(configFilePath), config, CommonConstants.UTF8);
        } catch (Throwable e) {
            logger.error("generate config file error", e);
            // NOTE(review): cause is dropped here as well — same remark as kill()
            throw new ExecutorRuntimeException(e.getMessage());
        }
        return Tuple.of(configFileName, configFilePath);
    }

    /**
     * Dump the yarn aggregated container logs of this application into the
     * job's local log file, so the full application output sits alongside the
     * executor's own log. Failures are logged (and echoed into the dump file
     * when the stream was opened) but never propagated.
     */
    private void dumpLog2Local() {

        ApplicationId applicationId = this.getApplicationId();
        if (applicationId == null) {
            return;
        }
        PrintStream out = null;
        try {
            String jobWorkspace = ExecutorMetadata.getJobWorkspace(executorJob.getId());
            String logFilePath = ExecutorLoggerFactory.getLogFile(jobWorkspace, executorJob.getId());
            out = new PrintStream(new FileOutputStream(new File(logFilePath)));

            // aggregated logs live under the configured hadoop user
            HadoopConfig hadoopConf = HadoopConfig.getHadoopConf();
            String user = hadoopConf.getHadoopUserName();

            LogCLIHelpers logCLIHelpers = new LogCLIHelpers();
            logCLIHelpers.setConf(YarnHolder.getYarnConfiguration());
            int code = logCLIHelpers.dumpAllContainersLogs(applicationId, user, out);

            if (code != 0) {
                // out is guaranteed non-null on this path (assigned above)
                String errorMsg = "job:" + executorJob.getId() + " dumpLog2Local error, result code is " + code;
                out.println(errorMsg);
                logger.error(errorMsg);
            }
        } catch (Throwable e) {
            String errorMsg = "job:" + executorJob.getId() + " dumpLog2Local error, error msg is " + e.getMessage();
            if (out != null) {
                // out is still null when opening the log file itself failed
                out.println(errorMsg);
            }
            logger.error(errorMsg, e);
        } finally {
            IOUtils.closeQuietly(out);
        }

    }

}
