package org.dromara.hodor.actuator.jobtype.bigdata.asyncSpark;

import com.google.common.collect.Lists;
import java.util.List;
import java.util.Objects;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.logging.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.deploy.yarn.Client;
import org.apache.spark.deploy.yarn.ClientArguments;
import org.dromara.hodor.actuator.jobtype.api.exception.JobExecutionException;
import org.dromara.hodor.actuator.jobtype.api.utils.JSONUtils;
import org.dromara.hodor.actuator.jobtype.bigdata.javautils.JobUtils;
import org.dromara.hodor.actuator.jobtype.bigdata.javautils.RegexUtil;
import org.dromara.hodor.actuator.jobtype.bigdata.javautils.YarnSubmitArguments;
import org.dromara.hodor.common.utils.StringUtils;

/**
 * Submits, kills and monitors Spark applications running on YARN
 * (spark submit to yarn in cluster mode).
 *
 * @author tomgs
 * @since 1.0
 **/
public class SparkOnYarn {

    private final Logger logger;

    public SparkOnYarn(Logger logger) {
        this.logger = logger;
    }

    /**
     * Submits a Spark application to YARN.
     *
     * @param conditions submit arguments: application jar, main class, app args and cluster config
     * @return the YARN application id as a string (e.g. {@code application_1234_0001})
     * @throws JobExecutionException if the submission fails
     */
    public String submitSpark(YarnSubmitArguments conditions) {
        logger.info(StringUtils.format("请求参数:{}.", conditions));

        // Build the argument list understood by org.apache.spark.deploy.yarn.ClientArguments.
        logger.info("初始化spark on yarn客户端");
        List<String> args = Lists.newArrayList("--jar", conditions.getApplicationJar(), "--class",
            conditions.getMainClass());
        if (conditions.getAppArgs() != null && !conditions.getAppArgs().isEmpty()) {
            for (String arg : conditions.getAppArgs()) {
                // each user argument is passed through verbatim via its own --arg flag
                args.add("--arg");
                args.add(arg);
            }
        }

        // identify that you will be using Spark as YARN mode
        System.setProperty("SPARK_YARN_MODE", "true");
        // Spark configuration derived from the submit arguments.
        SparkConf sparkConf = SparkConfUtils.getSparkConf(conditions);
        // Hadoop/YARN configuration derived from the submit arguments.
        Configuration conf = JobUtils.getHadoopConfiguration(conditions);
        ClientArguments cArgs = new ClientArguments(args.toArray(new String[0]));
        Client client = new Client(cArgs, conf, sparkConf);
        logger.info("提交任务，HadoopJavaJobRunnerMain任务名称：" + conditions.getJobName());

        try {
            ApplicationId appId = client.submitApplication();
            return appId.toString();
        } catch (Exception e) {
            logger.error("提交spark任务失败", e);
            throw new JobExecutionException("提交spark任务失败", e);
        } finally {
            // always release client resources, whether submission succeeded or not
            client.stop();
        }
    }

    /**
     * Kills a running Spark application on YARN. Failures are logged, not rethrown
     * (best-effort cancellation, preserved from the original contract).
     *
     * @param yarnResourceManagerAddress yarn资源管理器地址， 例如：master:8032，查看yarn集群获取具体地址
     * @param appIdStr                   需要取消的任务id
     */
    public void killJob(String yarnResourceManagerAddress, String appIdStr) {
        logger.info(StringUtils.format("取消spark任务,任务id：{}", appIdStr));
        Configuration cf = buildYarnConfiguration(yarnResourceManagerAddress);
        try (YarnClient yarnClient = YarnClient.createYarnClient()) {
            // YarnClient exposes the kill API; init + start before any RPC call
            yarnClient.init(cf);
            yarnClient.start();
            yarnClient.killApplication(getAppId(appIdStr));
        } catch (Exception e) {
            // best-effort: log and swallow, matching the original behavior
            logger.error("取消spark任务失败", e);
        }
    }

    /**
     * Fetches the current state of a Spark application from YARN.
     * If the report cannot be obtained, a FAILED state with zero progress is returned.
     *
     * @param yarnResourceManagerAddress yarn资源管理器地址， 例如：master:8032，查看yarn集群获取具体地址
     * @param appIdStr                   需要查询状态的任务id
     * @return the task state (app id, yarn state, progress, final status); state fields may be
     *         unset if the YARN client itself could not be started
     */
    public SparkTaskState getStatus(String yarnResourceManagerAddress, String appIdStr) {
        logger.info(StringUtils.format("获取任务状态启动，任务id：{}", appIdStr));
        Configuration cf = buildYarnConfiguration(yarnResourceManagerAddress);
        logger.info(StringUtils.format("获取任务状态，任务id: {}", appIdStr));

        SparkTaskState taskState = new SparkTaskState();
        taskState.setAppId(appIdStr);

        try (YarnClient yarnClient = YarnClient.createYarnClient()) {
            yarnClient.init(cf);
            yarnClient.start();
            ApplicationReport report = null;
            try {
                report = yarnClient.getApplicationReport(getAppId(appIdStr));
            } catch (Exception e) {
                logger.error("获取spark任务状态失败", e);
            }

            if (report != null) {
                taskState.setState(report.getYarnApplicationState());
                // execution progress in [0.0, 1.0]
                taskState.setProgress(report.getProgress());
                taskState.setFinalStatus(report.getFinalApplicationStatus());
            } else {
                // no report available: surface an explicit FAILED state instead of nulls
                taskState.setState(YarnApplicationState.FAILED);
                taskState.setProgress(0.0f);
                taskState.setFinalStatus(FinalApplicationStatus.FAILED);
            }
            logger.info(StringUtils.format("获取任务状态结束，任务状态： {}", JSONUtils.toJSON(taskState)));
        } catch (Exception e) {
            // BUGFIX: was logged as "取消spark任务失败" (kill failure) — this is a status query
            logger.error("获取spark任务状态失败", e);
        }
        return taskState;
    }

    /**
     * Builds the YARN client {@link Configuration} for the given resource manager address.
     * Extracted to remove the duplicated setup previously repeated in killJob/getStatus.
     *
     * @param yarnResourceManagerAddress resource manager address, e.g. {@code master:8032};
     *                                   may contain a second address for the standby RM
     * @return configuration with cross-platform submission and RM HA addresses set
     */
    private Configuration buildYarnConfiguration(String yarnResourceManagerAddress) {
        Configuration cf = new Configuration();
        // 配置使用跨平台提交任务 (needed when submitting from Windows hosts)
        boolean crossPlatform = System.getProperty("os.name").contains("Windows");
        cf.setBoolean("mapreduce.app-submission.cross-platform", crossPlatform);
        // 启用yarn的高可用（默认关闭），使下面的 rm1/rm2 地址生效
        cf.setBoolean("yarn.resourcemanager.ha.enabled", true);
        cf.set("yarn.resourcemanager.ha.rm-ids", "rm1,rm2");
        // 设置yarn资源，不然会使用localhost:8032
        String[] hostPort = RegexUtil.getResourceHostPort(yarnResourceManagerAddress);
        cf.set("yarn.resourcemanager.address.rm1",
            Objects.requireNonNull(hostPort, "yarn resource manager address illegal")[0]);
        // optional standby resource manager address
        if (hostPort.length >= 2) {
            cf.set("yarn.resourcemanager.address.rm2", hostPort[1]);
        }
        return cf;
    }

    /**
     * 根据spark的任务id字符串获取对象
     *
     * @param appIdStr String类型的id (e.g. {@code application_1234_0001})
     * @return 应用id的对象
     */
    public ApplicationId getAppId(String appIdStr) {
        // ConverterUtils.toApplicationId is deprecated; use the static factory instead
        return ApplicationId.fromString(appIdStr);
    }

}
