package cc.lucien.hadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

/**
 * Reads a file from HDFS line by line through the HDFS Java API.
 */
public class MyReader {
    public static void main(String[] args) throws IOException {
        // Identify the client as user "hadoop" so HDFS permission checks pass
        System.setProperty("HADOOP_USER_NAME", "hadoop");
        // Build a configuration object pointing at the NameNode
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://10.24.130.132:9000");
        // Pin the HDFS implementation class; avoids "No FileSystem for scheme: hdfs" in shaded jars
        conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        // Obtain a FileSystem handle for the configured HDFS cluster
        FileSystem fs = FileSystem.get(conf);
        // Print the FileSystem instance as a quick sanity check
        System.out.println("fs=" + fs);
        // The relative path "test" resolves against the user's HDFS home directory (/user/hadoop)
        Path file = new Path("test");
        System.out.println("file=" + file);
        FSDataInputStream in = fs.open(file);
        BufferedReader reader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8));
        // Read and print the file line by line
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println(line);
        }

        // Release resources
        reader.close();
        in.close();
        fs.close();
    }
}
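
// Note: since "test" is a relative path, HDFS resolves it to /user/hadoop/test.
// A minimal sketch of seeding that file before running this class, assuming the
// HDFS shell is on the PATH and configured for the same cluster (the file
// contents here are illustrative):
//
//   echo "hello hdfs" | hdfs dfs -put - /user/hadoop/test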
