package avicit.bdp.dds.server.worker.adapter.yarn;

import avicit.bdp.core.constant.Constants;
import avicit.bdp.dds.server.entity.YarnSubmitConditions;
import com.alibaba.fastjson2.JSON;
import com.google.common.collect.Lists;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.spark.SparkConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.Map;

/**
 * yarn容器对任务的管理，使用此类的api的时候一定要提交一个任务创建一个对象，不可重复使用对象 单例模式
 *
 * @author DIGITAL-MAYANJ
 */
public class SparkOnYarnAdapter extends YarnAdapter {

    private static final Logger logger = LoggerFactory.getLogger(SparkOnYarnAdapter.class);
    private static SparkOnYarnAdapter yarnAdapter = null;

    private SparkOnYarnAdapter() {
    }

    /**
     * Returns the lazily-created singleton instance.
     * <p>
     * Per the class contract, callers should obtain a fresh logical "session" per
     * submission; the adapter itself holds no per-job state, so sharing the
     * instance is safe. Synchronized to make lazy initialization thread-safe.
     *
     * @return the shared {@link SparkOnYarnAdapter} instance
     */
    public synchronized static SparkOnYarnAdapter newInstance() {
        if (yarnAdapter == null) {
            yarnAdapter = new SparkOnYarnAdapter();
        }
        return yarnAdapter;
    }

    /**
     * Prepares a Spark-on-YARN submission from the given conditions.
     * <p>
     * Builds the Spark client arguments, the {@link SparkConf} and the Hadoop
     * {@link Configuration} (ResourceManager address, defaultFS, optional HDFS
     * nameservice, Kerberos credentials when the auth type is Kerberos).
     * <p>
     * NOTE(review): the actual submission through Spark's YARN {@code Client}
     * is not wired up yet (it was commented out pending HA/host resolution
     * work), so this method currently only assembles the configuration and
     * returns {@code null}.
     *
     * @param conditions YARN/Spark/HDFS connection details, jars, files and job arguments
     * @return the YARN applicationId of the submitted job, or {@code null} while
     *         submission is not implemented or on failure
     */
    @Override
    public String submitTask(YarnSubmitConditions conditions) {
        logger.info("初始化yarn参数:{}", JSON.toJSONString(conditions));

        List<String> args = Lists.newArrayList("--jar", conditions.getApplicationJar(),
                "--class", conditions.getMainClass());

        if (CollectionUtils.isNotEmpty(conditions.getOtherArgs())) {
            for (String s : conditions.getOtherArgs()) {
                args.add("--arg");
                // Null-safe: the original single-element join coerced null to "".
                args.add(StringUtils.defaultString(s));
            }
        }

        // Identify that Spark will run in YARN mode.
        System.setProperty("SPARK_YARN_MODE", "true");

        SparkConf sparkConf = new SparkConf();

        if (StringUtils.isNotEmpty(conditions.getJobName())) {
            sparkConf.setAppName(conditions.getJobName());
        }

        if (StringUtils.isNotBlank(conditions.getYarnJars())) {
            sparkConf.set("spark.yarn.jars", conditions.getYarnJars());
        }

        if (conditions.getAdditionalJars() != null && conditions.getAdditionalJars().length > 0) {
            sparkConf.set("spark.jars", StringUtils.join(conditions.getAdditionalJars(), ","));
        }

        if (conditions.getFiles() != null && conditions.getFiles().length > 0) {
            sparkConf.set("spark.files", StringUtils.join(conditions.getFiles(), ","));
        }

        // Pass through any extra caller-supplied Spark properties verbatim.
        if (conditions.getOtherProperties() != null) {
            for (Map.Entry<Object, Object> e : conditions.getOtherProperties().entrySet()) {
                sparkConf.set(e.getKey().toString(), e.getValue().toString());
            }
        }

        if (Constants.KERBEROS.equals(conditions.getAuthType())) {
            sparkConf.set("spark.kerberos.keytab", conditions.getKeytab());
            sparkConf.set("spark.kerberos.principal", conditions.getPrincipal());
        }

        // Keep staged jars after the job finishes: submission is fully code-configured
        // (no spark config files on the cluster), and deleting the staged jars breaks reruns.
        sparkConf.set("spark.yarn.preserve.staging.files", "true");
        sparkConf.set("spark.sql.session.timeZone", "Asia/Shanghai");

        // Base YARN configuration (cross-platform flag set when running on Windows).
        Configuration configuration = newYarnConfiguration();

        // Point at the real ResourceManager; otherwise the client falls back to localhost:8032.
        // TODO(review): HA (rm1/rm2) resolution was removed pending RegexUtil host parsing —
        // only a single-RM address is supported here for now.
        configuration.set("yarn.resourcemanager.address", conditions.getYarnResourcemanagerAddress());

        // Set the NameNode address explicitly so jars are not needlessly re-distributed.
        configuration.set("fs.defaultFS", conditions.getDefaultFS());

        if (StringUtils.isNotBlank(conditions.getFsNameservices())) {
            configuration.set("dfs.nameservices", conditions.getFsNameservices());
        }

        logger.info("初始化yarn客户端...");
        // TODO(review): submission via org.apache.spark.deploy.yarn.Client(cArgs, configuration,
        // sparkConf).submitApplication() is intentionally not enabled yet; until it is, this
        // method returns null and `args` is prepared but unused by the submission path.
        return null;
    }

    /**
     * Kills a running YARN application.
     * <p>
     * Bug fix: the previous implementation dereferenced a {@code null} host/port
     * array (left over from a TODO), so this method threw a NullPointerException
     * before ever contacting YARN. It now configures the ResourceManager address
     * directly from the parameter, mirroring {@link #submitTask}.
     * Failures are logged rather than rethrown (best-effort cancellation).
     *
     * @param yarnResourcemanagerAddress ResourceManager address, e.g. {@code master:8032}
     * @param appIdStr                   id of the application to kill
     */
    @Override
    public void killJob(String yarnResourcemanagerAddress, String appIdStr) {
        logger.info("取消任务,任务id：{}", appIdStr);
        // Base YARN configuration (cross-platform flag set when running on Windows).
        Configuration cf = newYarnConfiguration();
        // Point at the real ResourceManager; otherwise the client falls back to localhost:8032.
        // HA (rm1/rm2) is left disabled, consistent with submitTask.
        cf.set("yarn.resourcemanager.address", yarnResourcemanagerAddress);

        YarnClient yarnClient = null;
        try {
            // The YARN client exposes the kill-application API.
            yarnClient = YarnClient.createYarnClient();
            yarnClient.init(cf);
            yarnClient.start();
            // Resolve the application id string and kill the application.
            yarnClient.killApplication(getAppId(appIdStr));
        } catch (Exception e) {
            logger.error("取消任务失败", e);
        } finally {
            // Always release client resources.
            if (yarnClient != null) {
                yarnClient.stop();
            }
        }
    }

    /**
     * Builds a base Hadoop {@link Configuration} shared by submit and kill paths,
     * enabling cross-platform submission when this client runs on Windows.
     *
     * @return a new {@link Configuration} with the cross-platform flag set
     */
    private Configuration newYarnConfiguration() {
        Configuration configuration = new Configuration();
        String os = System.getProperty("os.name");
        boolean crossPlatform = os != null && os.contains(Constants.OS_WINDOWS);
        configuration.setBoolean("mapreduce.app-submission.cross-platform", crossPlatform);
        return configuration;
    }
}
