package cn.org.intelli.zjgflink.example;


import org.apache.flink.api.common.JobID;
import org.apache.flink.client.program.ClusterClient;
import org.apache.flink.client.program.ClusterClientProvider;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.core.execution.SavepointFormatType;
import org.apache.flink.runtime.client.JobStatusMessage;
import org.apache.flink.yarn.YarnClientYarnClusterInformationRetriever;
import org.apache.flink.yarn.YarnClusterClientFactory;
import org.apache.flink.yarn.YarnClusterDescriptor;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

import java.util.Collection;
import java.util.Iterator;
import java.util.concurrent.CompletableFuture;

/**
 * Stops a Flink job running on YARN with a savepoint, then kills the YARN cluster.
 *
 * <p>Usage: {@code StopYarnMain [applicationId] [savepointDir]}
 * <ul>
 *   <li>{@code args[0]} — YARN application id (default: {@code application_1736500205863_0046})</li>
 *   <li>{@code args[1]} — HDFS savepoint target directory
 *       (default: {@code hdfs://192.168.19.121:9000/flink/savepoint})</li>
 * </ul>
 */
public class StopYarnMain {
    public static void main(String[] args) throws Exception {
        // Allow overriding the hard-coded defaults from the command line.
        String appId = args.length > 0 ? args[0] : "application_1736500205863_0046";
        String savepointDir = args.length > 1 ? args[1] : "hdfs://192.168.19.121:9000/flink/savepoint";

        // Impersonate root for HDFS/YARN access (no Kerberos assumed here).
        System.setProperty("HADOOP_USER_NAME", "root");

        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(new YarnConfiguration());
        yarnClient.start();
        try {
            YarnClientYarnClusterInformationRetriever retriever =
                    YarnClientYarnClusterInformationRetriever.create(yarnClient);

            Configuration flinkConfiguration = new Configuration();
            flinkConfiguration.set(YarnConfigOptions.APPLICATION_ID, appId);

            // sharedYarnClient=true: descriptor must not close the client we own.
            try (YarnClusterDescriptor yarnClusterDescriptor = new YarnClusterDescriptor(
                    flinkConfiguration,
                    new YarnConfiguration(),
                    yarnClient,
                    retriever,
                    true)) {

                ApplicationId applicationId =
                        new YarnClusterClientFactory().getClusterId(flinkConfiguration);
                ClusterClientProvider<ApplicationId> clusterClientProvider =
                        yarnClusterDescriptor.retrieve(applicationId);

                try (ClusterClient<ApplicationId> clusterClient =
                        clusterClientProvider.getClusterClient()) {

                    Collection<JobStatusMessage> jobStatusMessages = clusterClient.listJobs().get();
                    System.out.println("Jobs found: " + jobStatusMessages.size());

                    // Guard against an empty cluster: the original code would have
                    // passed a null JobID into stopWithSavepoint and thrown an NPE.
                    Iterator<JobStatusMessage> iterator = jobStatusMessages.iterator();
                    if (iterator.hasNext()) {
                        JobID jobId = iterator.next().getJobId();
                        // advanceToEndOfEventTime=true: drain the pipeline before stopping.
                        CompletableFuture<String> completableFuture = clusterClient.stopWithSavepoint(
                                jobId, true, savepointDir, SavepointFormatType.DEFAULT);
                        // Block until the savepoint completes; its final path is the result.
                        String savepointPath = completableFuture.get();
                        System.out.println(savepointPath);
                    } else {
                        System.out.println("No running jobs on application " + appId
                                + "; skipping savepoint.");
                    }
                }

                // Tear down the whole YARN application once the job is stopped.
                yarnClusterDescriptor.killCluster(applicationId);
            }
        } finally {
            // Always release the YARN client, even if anything above failed.
            yarnClient.stop();
        }
    }
}
