package me.wang;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.SaslOutputStream;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.*;

/**
 * Demonstrates querying a YARN cluster through {@code YarnClient}: lists all
 * RUNNING applications and all RUNNING NodeManager reports, reading cluster
 * configuration from {@code $HADOOP_HOME/etc/hadoop}.
 *
 * @author wangyifei
 * @since 2025-06-17
 */
public class YarnClientTest {
    private static final Logger logger = LoggerFactory.getLogger(YarnClientTest.class);

    /**
     * Connects to the YARN ResourceManager using the configuration files under
     * {@code $HADOOP_HOME/etc/hadoop}, then prints all RUNNING applications and
     * all RUNNING NodeManager reports to stdout.
     *
     * @param args unused
     * @throws IllegalStateException if the {@code HADOOP_HOME} environment variable is not set
     * @throws RuntimeException wrapping any {@link IOException} or {@link YarnException}
     *         raised while talking to the cluster
     */
    public static void main(String[] args) {
        YarnConfiguration conf = new YarnConfiguration();
        String hadoopHome = System.getenv("HADOOP_HOME");
        // Fail fast with a clear message instead of an NPE on the Path concatenation below.
        if (hadoopHome == null || hadoopHome.isEmpty()) {
            throw new IllegalStateException("HADOOP_HOME environment variable is not set");
        }
        logger.info("HADOOP_HOME = {}", hadoopHome);
        conf.addResource(new Path(hadoopHome + "/etc/hadoop/core-site.xml"));
        conf.addResource(new Path(hadoopHome + "/etc/hadoop/yarn-site.xml"));
        conf.addResource(new Path(hadoopHome + "/etc/hadoop/hdfs-site.xml"));
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // YarnClient is AutoCloseable; try-with-resources guarantees it is stopped.
        try (YarnClient yarnClient = YarnClient.createYarnClient()) {
            yarnClient.init(conf);
            yarnClient.start();

            // List applications currently in the RUNNING state.
            EnumSet<YarnApplicationState> states = EnumSet.of(YarnApplicationState.RUNNING);
            List<ApplicationReport> applications = yarnClient.getApplications(states);
            for (ApplicationReport app : applications) {
                System.out.printf("App ID: %s , Name: %s , Status: %s %n",
                        app.getApplicationId(), app.getName(), app.getYarnApplicationState());
            }

            // List NodeManagers currently in the RUNNING state.
            // getNodeReports takes NodeState varargs, so pass the state directly.
            List<NodeReport> nodeReports = yarnClient.getNodeReports(NodeState.RUNNING);
            for (NodeReport nodeReport : nodeReports) {
                System.out.printf(
                        "NodeManager ID: %s , httpAddress: %s , node status: %s , containers num : %s " +
                                ", node total resource: %s , used resource: %s %n",
                        nodeReport.getNodeId(),
                        nodeReport.getHttpAddress(),
                        nodeReport.getNodeState(),
                        nodeReport.getNumContainers(),
                        // Memory in MB; newer Hadoop versions prefer getMemorySize() — confirm
                        // against the project's Hadoop version before switching.
                        nodeReport.getCapability().getMemory(),
                        nodeReport.getUsed().getMemory());
            }
        } catch (IOException | YarnException e) {
            // Preserve the original exception as the cause so the stack trace is complete.
            throw new RuntimeException("Failed to query YARN cluster state", e);
        }
    }
}
