package cn.humaohua.study.hadoop.cmdrunner.c3;

import cn.humaohua.study.hadoop.cmdrunner.Test;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.springframework.stereotype.Component;

import java.net.URI;

@Component
public class FsGlobalStatus implements Test {

    /**
     * Demonstrates {@link FileSystem#globStatus(Path, org.apache.hadoop.fs.PathFilter)}:
     * lists every entry under {@code /user/hmh/input} on the local HDFS instance,
     * excluding any path whose name ends in {@code pig}.
     *
     * @throws Exception if the HDFS connection or the glob operation fails
     */
    @Override
    public void test() throws Exception {
        Configuration conf = new Configuration();
        // Must bind the FileSystem to the hdfs:// URI explicitly. A plain
        // FileSystem.get(conf) resolves against the default (local) filesystem and
        // fails with: IllegalArgumentException: Wrong FS: hdfs://localhost/...,
        // expected: file:///
        FileSystem fs = FileSystem.get(URI.create("hdfs://localhost/"), conf);

        // List everything under the input directory except entries named "pig".
        // The filter regex matches the full path, so anchor on the trailing segment.
        FileStatus[] fileStatuses = fs.globStatus(
                new Path("hdfs://localhost/user/hmh/input/*"),
                new RegexExcludePathFilter("^.*/pig$"));

        // globStatus returns null when the path does not exist; stat2Paths(null)
        // would then NPE inside the loop, so guard explicitly.
        if (fileStatuses == null) {
            System.out.println("No matching paths found under /user/hmh/input");
            return;
        }

        Path[] paths = FileUtil.stat2Paths(fileStatuses);
        for (Path p : paths) {
            System.out.println(p.toString());
        }
    }
}
