package me.wang;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hadoop.fs.FileSystem;

import java.io.IOException;
import java.nio.file.Paths;
import java.util.Objects;

/**
 * Smoke test that writes a small UTF string to a file in the current user's
 * HDFS home directory and prints the resulting {@link org.apache.hadoop.fs.FileStatus}.
 *
 * @author wangyifei
 * @since 2025/6/18 15:51
 */
public class TestHDFS {
    // SLF4J class logger; final per convention and actually used below
    // (the original declared it but printed via System.out instead).
    private static final Logger logger = LoggerFactory.getLogger(TestHDFS.class);

    /**
     * Writes a small UTF-encoded string to {@code <home>/test/javaOptos} on HDFS
     * with permission 0710, then logs the file's status.
     *
     * <p>Requires the {@code HADOOP_HOME} environment variable to point at a
     * Hadoop installation whose {@code etc/hadoop} directory contains
     * {@code core-site.xml} and {@code hdfs-site.xml}.
     *
     * @param args unused
     * @throws IllegalStateException if {@code HADOOP_HOME} is not set
     * @throws RuntimeException wrapping any {@link IOException} from HDFS
     */
    public static void main(String[] args) {
        // Fail fast with a clear message instead of silently building a
        // "null/etc/hadoop/..." path when the env var is missing.
        String hadoopHome = System.getenv("HADOOP_HOME");
        if (Objects.isNull(hadoopHome) || hadoopHome.isEmpty()) {
            throw new IllegalStateException("HADOOP_HOME environment variable is not set");
        }

        Configuration conf = new Configuration();
        conf.addResource(new Path(hadoopHome + "/etc/hadoop/core-site.xml"));
        conf.addResource(new Path(hadoopHome + "/etc/hadoop/hdfs-site.xml"));
        // Force the HDFS implementation class; useful when the hdfs client jar's
        // service loader entries are shaded/missing. TODO confirm still needed.
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");

        // try-with-resources closes both the output stream and the FileSystem;
        // the original leaked the FileSystem and hand-rolled the stream close
        // in a finally block.
        try (FileSystem fs = FileSystem.get(conf)) {
            Path path = new Path(fs.getHomeDirectory(), "test/javaOptos");
            // FileSystem.create(fs, path, perm) creates the file and applies
            // the permission (0710: owner rwx, group x) in one call.
            try (FSDataOutputStream out =
                         FileSystem.create(fs, path, new FsPermission((short) 0710))) {
                out.writeUTF("/export/bigdata/hadoop");
            }
            FileStatus fileStatus = fs.getFileStatus(path);
            // Route through the configured logging backend rather than stdout.
            logger.info("{}", fileStatus);
        } catch (IOException e) {
            // Preserve the cause so the full HDFS stack trace survives.
            throw new RuntimeException("HDFS write/status failed for test/javaOptos", e);
        }
    }
}
