package src.mian.java;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * Connects to an HDFS cluster and recursively prints metadata (path,
 * permission, size, last-modified time) for every file under {@code /test}.
 */
public class HDFSFileList {

    // Cached, thread-safe formatter. The original re-created a SimpleDateFormat
    // for every file inside the loop; DateTimeFormatter is immutable and is
    // the recommended replacement for SimpleDateFormat.
    private static final DateTimeFormatter TIMESTAMP_FORMAT =
            DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss").withZone(ZoneId.systemDefault());

    public static void main(String[] args) {
        String hdfsUri = "hdfs://192.168.254.100:8020"; // HDFS NameNode URI
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", hdfsUri);

        // try-with-resources guarantees the FileSystem is closed even when
        // listing throws (the original leaked it on exception, because
        // fs.close() was only reached on the success path).
        try (FileSystem fs = FileSystem.get(conf)) {
            Path directory = new Path("/test"); // directory to inspect
            listFiles(fs, directory);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Recursively walks {@code directory}, printing each subdirectory path and,
     * for each regular file, its path, permission, size in bytes, and
     * last-modified timestamp.
     *
     * @param fs        open HDFS file system handle (caller owns its lifecycle)
     * @param directory directory to list
     * @throws Exception if the listing RPC fails (propagated from Hadoop)
     */
    private static void listFiles(FileSystem fs, Path directory) throws Exception {
        FileStatus[] fileStatuses = fs.listStatus(directory);
        for (FileStatus fileStatus : fileStatuses) {
            if (fileStatus.isDirectory()) {
                System.out.println("Directory: " + fileStatus.getPath());
                listFiles(fs, fileStatus.getPath()); // recurse into subdirectory
            } else {
                System.out.println("File Path: " + fileStatus.getPath());
                System.out.println("File Permission: " + fileStatus.getPermission());
                System.out.println("File Size: " + fileStatus.getLen() + " bytes");
                // getModificationTime() is the last-modified time (epoch millis),
                // not the creation time — the original label "Creation Time"
                // was misleading.
                System.out.println("File Modification Time: "
                        + TIMESTAMP_FORMAT.format(Instant.ofEpochMilli(fileStatus.getModificationTime())));
            }
        }
    }

}
