package cn.org.intelli.zjgflink.example;

import org.apache.commons.lang3.StringUtils;
import org.apache.flink.client.deployment.ClusterDeploymentException;
import org.apache.flink.client.deployment.ClusterSpecification;
import org.apache.flink.client.deployment.application.ApplicationConfiguration;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.client.program.ClusterClientProvider;
import org.apache.flink.configuration.*;
import org.apache.flink.yarn.YarnClientYarnClusterInformationRetriever;
import org.apache.flink.yarn.YarnClusterDescriptor;
import org.apache.flink.yarn.YarnClusterInformationRetriever;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.flink.yarn.configuration.YarnDeploymentTarget;
import org.apache.flink.yarn.configuration.YarnLogConfigUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

import java.io.IOException;
import java.util.Collections;

/**
 * Submits a Flink job to a YARN cluster in application mode.
 *
 * <p>Loads {@code flink-conf.yaml} from a local configuration directory, overlays the
 * deployment settings (user jar, provided libs, flink-dist jar, checkpoint directory),
 * deploys the application via {@link YarnClusterDescriptor#deployApplicationCluster},
 * and prints the resulting YARN {@link ApplicationId}.
 */
public class YarnMain {

    /** Local directory holding flink-conf.yaml and the log4j config shipped to YARN. */
    private static final String CONFIGURATION_DIRECTORY = "D:\\code\\ZJG\\flink_config\\conf";

    public static void main(String[] args) throws IOException {
        // Hadoop picks the submitting user from this property on Windows clients.
        System.setProperty("HADOOP_USER_NAME", "root");

        YarnClient yarnClient = YarnClient.createYarnClient();
        YarnConfiguration yarnConfiguration = new YarnConfiguration();
        yarnClient.init(yarnConfiguration);
        yarnClient.start();
        try {
            YarnClusterInformationRetriever clusterInformationRetriever =
                    YarnClientYarnClusterInformationRetriever.create(yarnClient);

            Configuration flinkConfiguration = buildFlinkConfiguration();
            YarnLogConfigUtil.setLogConfigFileInConfig(flinkConfiguration, CONFIGURATION_DIRECTORY);

            ClusterSpecification clusterSpecification =
                    new ClusterSpecification.ClusterSpecificationBuilder().createClusterSpecification();
            // Entry-point class of the user jar; args are forwarded unchanged.
            ApplicationConfiguration appConfig =
                    new ApplicationConfiguration(args, "org.example.ExceptionStreamDemo");

            // YarnClusterDescriptor is Closeable and was leaked in the original code.
            // sharedYarnClient=true, so closing the descriptor does NOT stop yarnClient.
            try (YarnClusterDescriptor yarnClusterDescriptor = new YarnClusterDescriptor(
                    flinkConfiguration,
                    yarnConfiguration,
                    yarnClient,
                    clusterInformationRetriever,
                    true)) {
                ClusterClientProvider<ApplicationId> clusterClientProvider;
                try {
                    clusterClientProvider =
                            yarnClusterDescriptor.deployApplicationCluster(clusterSpecification, appConfig);
                } catch (ClusterDeploymentException e) {
                    // Fail fast with the cause preserved. The original printed the stack
                    // trace, kept going, and then NPE'd on the null provider below.
                    throw new IllegalStateException(
                            "Failed to deploy Flink application cluster on YARN", e);
                }
                ClusterClient<ApplicationId> clusterClient = clusterClientProvider.getClusterClient();
                ApplicationId applicationId = clusterClient.getClusterId();
                System.out.println(applicationId);
            }
        } finally {
            // Always release the YARN client, even when deployment fails.
            yarnClient.stop();
        }
    }

    /**
     * Loads the base configuration from {@link #CONFIGURATION_DIRECTORY} and overlays
     * the settings required for YARN application-mode deployment.
     *
     * @return the fully prepared Flink {@link Configuration}
     */
    private static Configuration buildFlinkConfiguration() {
        Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration(CONFIGURATION_DIRECTORY);
        flinkConfiguration.set(CheckpointingOptions.CHECKPOINTS_DIRECTORY,
                "hdfs://192.168.19.121:9000/flink/checkpoint3/abc");

        String savePoint = null;
        if (StringUtils.isNotEmpty(savePoint)) {
            // NOTE(review): this overrides the checkpoint *directory*, not a restore path.
            // Restoring from a savepoint is normally configured via
            // "execution.savepoint.path" (SavepointConfigOptions.SAVEPOINT_PATH) — confirm
            // intent before wiring a real value into savePoint.
            flinkConfiguration.set(CheckpointingOptions.CHECKPOINTS_DIRECTORY, savePoint);
        }

        // User application jar, already staged on HDFS.
        flinkConfiguration.set(PipelineOptions.JARS,
                Collections.singletonList("hdfs://192.168.19.121:9000/flink/jar/ZJGStream-7.0-SNAPSHOT.jar"));
        // Pre-uploaded Flink libs so the client does not re-ship the distribution.
        Path remoteLib = new Path("hdfs://192.168.19.121:9000/flink/libs");
        flinkConfiguration.set(YarnConfigOptions.PROVIDED_LIB_DIRS,
                Collections.singletonList(remoteLib.toString()));
        flinkConfiguration.set(YarnConfigOptions.FLINK_DIST_JAR,
                "hdfs://192.168.19.121:9000/flink/libs/flink-yarn-1.17.2.jar");

        flinkConfiguration.set(DeploymentOptions.TARGET, YarnDeploymentTarget.APPLICATION.getName());
        flinkConfiguration.set(TaskManagerOptions.NUM_TASK_SLOTS, 3);
        flinkConfiguration.set(YarnConfigOptions.APPLICATION_NAME, "fromPC2");
        return flinkConfiguration;
    }
}
