package com.guchenbo.spark.demo;

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.security.UserGroupInformation;

/**
 * @author guchenbo
 * @date 2021/10/22
 */
public class HdfsDemo {

    /** Hadoop client configuration loaded from the {@code ark32/} resources; set by {@link #init()}. */
    private Configuration configuration;

    /**
     * Loads the Hadoop client configuration from classpath resources under {@code ark32/}.
     * Must be called before {@link #demo(String)} or {@link #proxyDemo()}.
     */
    public void init() {
        configuration = new Configuration();
        // Resources are resolved from the classpath. Note: Configuration.addResource does not
        // fail for a missing resource — a bad path only shows up later as connection errors.
        configuration.addResource("ark32/core-site.xml");
        configuration.addResource("ark32/hdfs-site.xml");
        configuration.addResource("ark32/hive-site.xml");
        configuration.addResource("ark32/yarn-site.xml");
    }

    /**
     * Checks whether the given path exists on the configured HDFS and prints the result.
     *
     * @param path an HDFS path string, e.g. {@code /user/guchenbo/test}
     * @throws IllegalStateException if {@link #init()} has not been called
     * @throws Exception if the file system cannot be reached
     */
    public void demo(String path) throws Exception {
        requireInitialized();
        // FileSystem.get returns a JVM-wide cached instance, so it is deliberately not closed here.
        FileSystem fileSystem = FileSystem.get(configuration);
        boolean flag = fileSystem.exists(new Path(path));
        System.out.println(flag);
        System.out.println("done");
    }

    /**
     * Demonstrates proxy-user impersonation: the current (super) user performs HDFS operations
     * on behalf of user {@code gcb} — creates {@code /user/guchenbo/test2} and lists its contents.
     *
     * <p>The super user must be authorized to impersonate {@code gcb} via the
     * {@code hadoop.proxyuser.<superuser>.hosts/groups} settings in core-site.xml.
     *
     * @throws IllegalStateException if {@link #init()} has not been called, or if the
     *         directory cannot be created
     * @throws Exception if the impersonated action fails
     */
    public void proxyDemo() throws Exception {
        requireInitialized();
        UserGroupInformation superUgi = UserGroupInformation.getCurrentUser();
        UserGroupInformation proxyUgi = UserGroupInformation.createProxyUser("gcb", superUgi);
        proxyUgi.doAs((PrivilegedExceptionAction<Void>) () -> {
            FileSystem fs = FileSystem.get(configuration);
            Path path = new Path("/user/guchenbo/test2");
            // mkdirs returning false means the directory could not be created. The original
            // guarded this with `assert` (disabled by default) and silently skipped the
            // listing — fail loudly instead so the misconfiguration is visible.
            if (!fs.mkdirs(path)) {
                throw new IllegalStateException("mkdirs failed for " + path);
            }
            for (FileStatus fileStatus : fs.listStatus(path)) {
                System.out.println(fileStatus);
            }
            return null;
        });
    }

    /** Fails fast with a clear message when {@link #init()} was not called first. */
    private void requireInitialized() {
        if (configuration == null) {
            throw new IllegalStateException("init() must be called before using HdfsDemo");
        }
    }

    public static void main(String[] args) throws Exception {
        HdfsDemo demo = new HdfsDemo();
        demo.init();
        demo.proxyDemo();
    }
}
