package com.jc.demo.hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

import java.net.URI;

/**
 * Lists the files and directories directly under the given HDFS paths
 * (one level only, non-recursive) and prints each entry's full URI.
 *
 * <p>Usage: pass one or more HDFS paths as program arguments. When no
 * arguments are supplied, the demo falls back to {@code /opt/} and
 * {@code /dir}.
 */
public class ListStatus {

    public static void main(String[] args) throws Exception {
        // Namenode endpoint used to resolve the FileSystem instance.
        String uri = "hdfs://s1.jevoncode.com:9000/";
        // Fall back to the demo paths only when the caller supplied none,
        // so real command-line arguments are no longer silently ignored.
        if (args.length == 0) {
            args = new String[]{"/opt/", "/dir"};
        }
        Configuration conf = new Configuration();
        // try-with-resources closes the FileSystem (and its underlying RPC
        // connections) even if listStatus throws — the original leaked it.
        try (FileSystem fs = FileSystem.get(URI.create(uri), conf)) {
            Path[] paths = new Path[args.length];
            for (int i = 0; i < paths.length; i++) {
                paths[i] = new Path(args[i]);
            }
            // listStatus over multiple paths returns the merged one-level listing.
            FileStatus[] status = fs.listStatus(paths);
            // Convert the FileStatus array to a Path array for printing.
            Path[] listedPaths = FileUtil.stat2Paths(status);
            for (Path p : listedPaths) {
                System.out.println(p);
            }
        }
    }
}
/**
 * output:
 * 06-10 14:29:16 [main] DEBUG o.a.h.s.a.util.KerberosName - Kerberos krb5 configuration not found, setting default realm to empty
 * 06-10 14:29:16 [main] DEBUG o.a.hadoop.util.PerformanceAdvisory - Falling back to shell based
 * 06-10 14:29:18 [main] DEBUG o.a.hadoop.util.PerformanceAdvisory - Both short-circuit local reads and UNIX domain socket are disabled.
 * 06-10 14:29:18 [main] DEBUG o.a.h.h.p.d.s.DataTransferSaslUtil - DataTransferProtocol not using SaslPropertiesResolver, no QOP found in configuration for dfs.data.transfer.protection
 * hdfs://s1.jevoncode.com:9000/opt/command
 * hdfs://s1.jevoncode.com:9000/opt/java
 */