package org.example;

import org.apache.hadoop.fs.*;

import java.io.IOException;
import java.util.Arrays;

/**
 * Demo: recursively lists every file under the HDFS root ("/") and prints
 * each file's metadata — permissions, owner, group, length, modification
 * time, replication factor, block size, and file name — followed by its
 * block locations.
 *
 * <p>Obtains the {@code FileSystem} from the project's
 * {@code HadoopConnectUtil} and closes it via try-with-resources.
 */
public class 获取文件详情 {
    public static void main(String[] args) throws IOException {
        // try-with-resources guarantees the FileSystem handle is closed
        // even if iteration throws.
        try (FileSystem fs = HadoopConnectUtil.getFileSystem()) {
            // second arg `true` => recurse into subdirectories;
            // listFiles returns files only (no directory entries).
            RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);

            while (listFiles.hasNext()) {
                LocatedFileStatus fileStatus = listFiles.next();
                System.out.println("========" + fileStatus.getPath() + "==========");
                System.out.println(fileStatus.getPermission());
                System.out.println(fileStatus.getOwner());
                System.out.println(fileStatus.getGroup());
                System.out.println(fileStatus.getLen());
                System.out.println(fileStatus.getModificationTime());
                System.out.println(fileStatus.getReplication());
                System.out.println(fileStatus.getBlockSize());
                System.out.println(fileStatus.getPath().getName());

                // Block locations: which hosts hold each block of this file.
                BlockLocation[] blockLocations = fileStatus.getBlockLocations();
                System.out.println(Arrays.toString(blockLocations));
            }
        }
    }
}

