package com.sui.bigdata.rtcadmin.util;


import com.sui.bigdata.flink.table.client.JobClient;
import com.sui.bigdata.rtcadmin.constant.AppConstant;
import com.sui.bigdata.rtcadmin.exception.SparkApplicationException;
import com.sui.bigdata.rtcadmin.model.dto.SparkAppDto;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.spark.launcher.SparkAppHandle;
import org.apache.spark.launcher.SparkLauncher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;

import static com.sui.bigdata.rtcadmin.util.SendMsgUtils.sendYarnDisconnectMsg;

/**
 * @Author: TingWuHuang
 * @Date: 2020/5/6 15:30
 * @description
 */
@Component
public class YarnUtil {

    private static final Logger logger = LoggerFactory.getLogger(YarnUtil.class);

    /** Directory holding the YARN/Hadoop client configuration (yarn-site.xml etc.). */
    @Value("${yarn.conf.path}")
    private String yarnConfPath;

    /** Local Spark installation root handed to {@link SparkLauncher}. */
    @Value("${spark.home}")
    private String sparkHome;

    /**
     * Submits a Spark application to YARN via {@link SparkLauncher} and blocks
     * until YARN assigns an application id, then fills the id, tracking URL and
     * ACCEPTED status into the returned DTO.
     *
     * <p>In cluster mode the local launcher child process is killed once the
     * application id is known — the driver keeps running on the cluster.
     *
     * @param sparkApp submission parameters; optional fields fall back to the
     *                 defaults in {@link AppConstant}
     * @return the same {@code sparkApp} instance, enriched with appId,
     *         trackingUrl and status
     * @throws SparkApplicationException if the launcher fails to start, the
     *         application reaches a final state before an id is assigned, or
     *         the waiting thread is interrupted
     */
    public SparkAppDto luncherSubmit(SparkAppDto sparkApp) throws SparkApplicationException {
        HashMap<String, String> envParams = new HashMap<>(8);
        envParams.put("YARN_CONF_DIR", yarnConfPath);
        envParams.put("HADOOP_CONF_DIR", yarnConfPath);
        envParams.put("SPARK_HOME", sparkHome);
        // Makes spark-submit echo the final launch command to stderr for debugging.
        envParams.put("SPARK_PRINT_LAUNCH_COMMAND", "1");
        SparkLauncher sparkLauncher = new SparkLauncher(envParams)
                .setMaster(AppConstant.SPARK_DEFAULT_MASTER)
                .setAppResource(sparkApp.getAppResource())
                .setMainClass(sparkApp.getMainClass())
                // NOTE(review): an empty JAVA_HOME is passed through to the
                // launcher — confirm this is intentional and not a leftover.
                .setJavaHome("")
                .setAppName(sparkApp.getAppName())
                .setDeployMode(sparkApp.getModel() == null ? AppConstant.SPARK_CLUSTER_MODEL : sparkApp.getModel())
                .setConf(AppConstant.SPARK_METRICS_NAMESPACE, AppConstant.METRICS_PREFIX + sparkApp.getAppName())
                .addSparkArg(AppConstant.DRIVER_MEMORY, sparkApp.getDriverMemory() == null ?
                        AppConstant.SPARK_DRIVER_MEMORY_DEFAULT : sparkApp.getDriverMemory())
                .addSparkArg(AppConstant.EXECUTOR_MEMORY, sparkApp.getExecutorMemory() == null ?
                        AppConstant.SPARK_EXECUTOR_MEMORY_DEFAULT : sparkApp.getExecutorMemory())
                .addSparkArg(AppConstant.EXECUTOR_CORES, sparkApp.getExecutorCores() == null ?
                        AppConstant.SPARK_EXECUTOR_CORES_DEFAULT : sparkApp.getExecutorCores())
                .addSparkArg(AppConstant.QUEUE, sparkApp.getQueue() == null ? AppConstant.DEFAULT_QUEUE : sparkApp.getQueue());
        if (StringUtils.isNotEmpty(sparkApp.getNumExecutors())) {
            sparkLauncher.addSparkArg(AppConstant.NUM_EXECUTORS, sparkApp.getNumExecutors());
        }
        if (StringUtils.isNotEmpty(sparkApp.getAppArgs())) {
            sparkLauncher.addAppArgs(sparkApp.getAppArgs().split(AppConstant.SPARK_APP_ARGS_SPLIT));
        }
        if (StringUtils.isNotEmpty(sparkApp.getDriverJavaOption())) {
            sparkLauncher.addSparkArg(AppConstant.DRIVER_JAVA_OPTIONS, sparkApp.getDriverJavaOption());
        }
        if (StringUtils.isNotEmpty(sparkApp.getExecutorJavaOption())) {
            sparkLauncher.setConf(AppConstant.SPARK_EXECUTOR_EXTRAJAVAOPTIONS, sparkApp.getExecutorJavaOption());
        }
        SparkArgumentsUtil.addextendParam(sparkLauncher, sparkApp.getExtendParam());
        if (AppConstant.SPARK_CLIENT_MODEL.equals(sparkApp.getModel())) {
            // In client mode the driver runs inside the launcher child process;
            // discard its output so it does not block on full pipe buffers.
            sparkLauncher.redirectError(new File("/dev/null"));
            sparkLauncher.redirectOutput(new File("/dev/null"));
        }
        SparkAppHandle handler;
        try {
            handler = sparkLauncher.startApplication();
        } catch (IOException e) {
            // SLF4J: pass the throwable directly (no placeholder) so the stack trace is logged.
            logger.error("sparkLauncher提交任务异常", e);
            throw new SparkApplicationException("sparkLauncher提交任务异常！");
        }

        logger.info("开始执行。。。");
        // Poll until YARN assigns an application id; bail out if the app
        // reaches a final state (FAILED/KILLED/...) before that happens.
        while (handler.getAppId() == null) {
            logger.info("waitting for appId:{} ,state:{}", handler.getAppId(), handler.getState());
            if (handler.getState().isFinal()) {
                throw new SparkApplicationException("提交并等待获取任务状态异常，state: " + handler.getState());
            }
            try {
                Thread.sleep(AppConstant.GET_STATUS_WAITTING_TIME);
            } catch (InterruptedException e) {
                // Restore the interrupt flag instead of swallowing it, and stop waiting.
                Thread.currentThread().interrupt();
                logger.error("interrupted while waiting for application id", e);
                throw new SparkApplicationException("interrupted while waiting for application id");
            }
        }

        // Cluster mode: the driver lives on the cluster, so kill the local
        // launcher child process and release its connection.
        if (AppConstant.SPARK_CLUSTER_MODEL.equals(sparkApp.getModel())) {
            handler.kill();
        }
        sparkApp.setAppId(handler.getAppId());
        sparkApp.setTrackingUrl(AppConstant.WEB_PREFIX + sparkApp.getAppId() + AppConstant.WEB_SUFFIX);
        sparkApp.setAppStatus(AppConstant.APP_STATUS_ACCEPT);

        return sparkApp;
    }

    /**
     * Queries YARN for the current state of an application.
     *
     * @param appId the YARN application id to look up
     * @return a DTO carrying the appId and its {@link YarnApplicationState} as a string
     * @throws SparkApplicationException if the application is unknown to YARN,
     *         YARN is unreachable, or any other lookup error occurs
     */
    public SparkAppDto applicationsStatus(String appId) throws SparkApplicationException {

        YarnApplicationState yarnStatus;
        try {
            yarnStatus = (YarnApplicationState) JobClient.getYarnStatus(appId, yarnConfPath).get(0);
        } catch (ApplicationNotFoundException e1) {
            logger.error("获取应用状态异常", e1);
            throw new SparkApplicationException(appId + " not found exception .");
        } catch (YarnException e2) {
            // NOTE(review): sendYarnDisconnectMsg is statically imported but never
            // called — consider notifying here, or removing the import.
            logger.error(" yarn disconnect: ", e2);
            throw new SparkApplicationException(" yarn disconnect!");
        } catch (Exception e) {
            logger.error("获取应用状态异常", e);
            throw new SparkApplicationException("gets application status error");
        }

        SparkAppDto sparkApp = new SparkAppDto();
        sparkApp.setAppId(appId);
        sparkApp.setAppStatus(yarnStatus.toString());

        return sparkApp;
    }

    /**
     * Kills a running YARN application.
     *
     * @param appId the YARN application id; must be non-empty
     * @return {@code true} if the kill request was issued successfully
     * @throws SparkApplicationException if {@code appId} is empty or the kill fails
     */
    public boolean applicationStop(String appId) throws SparkApplicationException {
        if (StringUtils.isEmpty(appId)) {
            throw new SparkApplicationException("appId is null!");
        }

        try {
            JobClient.killApplication(appId, yarnConfPath);
        } catch (Exception e) {
            logger.error("stop application error", e);
            throw new SparkApplicationException("stop application error!");
        }

        return true;
    }


}
