package com.share.bigdata.example.hdfs;

import com.share.common.utils.FileUtil;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import java.net.URI;
import java.util.Properties;

/**
 * Example program: connects to an HDFS cluster and recursively prints the
 * names of all files under the root directory ("/").
 *
 * <p>The HDFS URI is read from the {@code hdfs.uri} property in
 * {@code application-dev.properties} on the classpath (typically of the form
 * {@code hdfs://namenode:port}).
 */
public class MainHdfs {
    /**
     * Entry point: lists every file under HDFS root recursively and prints
     * each file's base name to stdout.
     *
     * @param args unused
     * @throws Exception if the properties file cannot be read, the URI is
     *                   malformed, or any HDFS operation fails
     */
    public static void main(String[] args) throws Exception {
        // Load the HDFS URI from classpath configuration, e.g. hdfs://localhost:8020
        Properties properties = FileUtil.readPropertiesFromCp("application-dev.properties");
        String hdfsUri = properties.getProperty("hdfs.uri");

        Configuration conf = new Configuration();

        // try-with-resources guarantees the FileSystem handle is released even
        // if listing or iteration throws (the original bare close() leaked it
        // on any exception).
        try (FileSystem fileSystem = FileSystem.get(new URI(hdfsUri), conf)) {
            // Recursively list all files (not directories) under the root path.
            RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(new Path("/"), true);

            while (files.hasNext()) {
                LocatedFileStatus next = files.next();
                System.out.println(next.getPath().getName());
            }
        }
    }
}
