package com.zyx.hadoopdemo.hdfsdemo;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;

/**
 * Demonstrates how to obtain an HDFS {@code FileSystem} connection.
 * <p>
 * {@code FileSystem.get} accepts three argument combinations:
 * <ol>
 *   <li>only a {@code Configuration}</li>
 *   <li>a {@code URI} plus a {@code Configuration}</li>
 *   <li>a {@code URI}, a {@code Configuration}, and a user name</li>
 * </ol>
 *
 * @author Yaxi.Zhang
 * @since 2021/5/19 12:23
 */
public class HdfsTest {

    /** NameNode address shared by both demo methods. */
    private static final String HDFS_URI = "hdfs://bigdata021:8020";

    public static void main(String[] args) throws Exception {
        getFsMethodWithConf();
    }

    /**
     * Connects via {@code FileSystem.get(Configuration)} and prints the files
     * directly under {@code /tmp} (non-recursive listing).
     *
     * @throws Exception if connecting to HDFS or listing the directory fails
     */
    private static void getFsMethodWithConf() throws Exception {
        Configuration conf = new Configuration();
        // Point the client at the NameNode; otherwise the default (local) FS is used.
        conf.set("fs.defaultFS", HDFS_URI);
        // try-with-resources guarantees the FileSystem is closed even if
        // listFiles or the iteration throws (original leaked fs on failure).
        try (FileSystem fs = FileSystem.get(conf)) {
            RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/tmp"), false);
            while (files.hasNext()) {
                System.out.println(files.next());
            }
        }
    }

    /**
     * Connects via {@code FileSystem.get(URI, Configuration, String)} as user
     * {@code root} and prints the files directly under {@code /tmp}
     * (non-recursive listing).
     *
     * @throws Exception if connecting to HDFS or listing the directory fails
     */
    private static void getFsMethodWithUrl() throws Exception {
        Configuration conf = new Configuration();
        URI uri = new URI(HDFS_URI);
        // Passing the user explicitly avoids permission errors when the local
        // OS user differs from the intended HDFS user.
        try (FileSystem fs = FileSystem.get(uri, conf, "root")) {
            RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path("/tmp"), false);
            while (files.hasNext()) {
                System.out.println(files.next());
            }
        }
    }

}
