package com.xx.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

import java.io.IOException;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.Set;

/**
 * Thin wrapper around a Hadoop {@link FileSystem} connection that can
 * recursively enumerate every file and directory under the HDFS root.
 *
 * <p>Connection settings are supplied as a map with the keys
 * {@code "ip"}, {@code "port"} and {@code "name"} (the HDFS user name) —
 * presumably strings; verify against the callers. The raw {@code Map}
 * type is kept for compatibility with existing callers.
 */
public class HdfsInfo {
    /** Handle to HDFS; opened in {@link #init()} and never closed here. */
    private FileSystem fs = null;
    /** Result of the last {@link #testFileAndDirectory()} traversal. */
    Set<FileStatus> set;
    /** Helper built on the open file system. NOTE(review): HdfsDemo is defined elsewhere — semantics not visible here. */
    public HdfsDemo demo;
    /** Connection settings ("ip", "port", "name"). */
    private Map map;

    /**
     * Stores the settings and immediately opens the HDFS connection.
     *
     * @param map connection settings; must contain "ip", "port" and "name"
     * @throws IOException if the file system cannot be opened
     * @throws NullPointerException if {@code map} is null
     */
    public HdfsInfo(Map map) throws IOException {
        this.map = Objects.requireNonNull(map, "map");
        init();
    }

    /**
     * Builds the Hadoop configuration from the settings map, opens the
     * file system and constructs the {@link HdfsDemo} helper.
     *
     * @throws IOException if {@link FileSystem#get(Configuration)} fails
     */
    public void init() throws IOException {
        Configuration conf = new Configuration();
        // Build the URI once instead of duplicating the concatenation.
        String defaultFs = "hdfs://" + map.get("ip").toString() + ":" + map.get("port").toString();
        conf.set("fs.defaultFS", defaultFs);
        // Hadoop picks up the acting user from this system property.
        System.setProperty("HADOOP_USER_NAME", map.get("name").toString());
        // Fixed diagnostic: the old message had no separator and appended a stray "8020".
        System.out.println("fs.defaultFS=" + defaultFs);
        fs = FileSystem.get(conf);
        demo = new HdfsDemo(fs);
    }

    /**
     * Recursively collects the status of every file and directory under
     * the HDFS root ("/") into {@link #set}, replacing any previous result.
     *
     * @throws IOException if listing a directory fails
     */
    public void testFileAndDirectory() throws IOException {
        set = new HashSet<>();
        RemoteIterator<LocatedFileStatus> fileStatuses = fs.listLocatedStatus(new Path("/"));
        while (fileStatuses.hasNext()) {
            add(fileStatuses.next(), set);
        }
    }

    /**
     * Runs a fresh traversal and returns the collected statuses.
     * Note: this re-lists the whole tree on every call.
     *
     * @return the set of all file and directory statuses under "/"
     * @throws IOException if listing fails
     */
    public Set<FileStatus> getSet() throws IOException {
        testFileAndDirectory();
        return set;
    }

    /**
     * Adds {@code status} to {@code set} and, if it is a directory,
     * recurses into its children.
     *
     * @param status the entry to record
     * @param set    the accumulator for all visited entries
     * @throws IOException if listing a child directory fails
     */
    public void add(FileStatus status, Set<FileStatus> set) throws IOException {
        set.add(status);
        if (status.isDirectory()) {
            RemoteIterator<LocatedFileStatus> fileStatuses = fs.listLocatedStatus(status.getPath());
            while (fileStatuses.hasNext()) {
                add(fileStatuses.next(), set);
            }
        }
    }
}
