package org.bjtu.hadooptest.hadoop;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;

/**
 * Thin wrapper around {@link FSDataInputStream} with a static helper that
 * reads a whole HDFS file line by line into a single string.
 */
public class MyFSDataInputStream extends FSDataInputStream {

    public MyFSDataInputStream(InputStream in) {
        super(in);
    }

    /**
     * Reads the entire file at {@code remoteFilePath}, appending {@code '\n'}
     * after every line (including the last one).
     *
     * @param conf           Hadoop configuration pointing at the target file system
     * @param remoteFilePath path of the file to read (e.g. an HDFS path)
     * @return the file contents, or {@code null} if an I/O error occurred
     */
    public static String readline(Configuration conf, String remoteFilePath) {
        // try-with-resources closes reader -> stream -> filesystem even when
        // readLine() throws; the original leaked both streams on a mid-read error.
        try (FileSystem fs = FileSystem.get(conf);
             FSDataInputStream in = fs.open(new Path(remoteFilePath));
             // Explicit charset: the no-arg InputStreamReader would silently
             // use the platform default, breaking non-UTF-8 hosts.
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8))) {
            // StringBuilder avoids the O(n^2) cost of += concatenation in a loop.
            StringBuilder result = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                result.append(line).append('\n');
            }
            return result.toString();
        } catch (IOException e) {
            e.printStackTrace();
            return null; // preserve original contract: null signals failure
        }
    }

    public static void main(String[] args) {
        Configuration conf = new Configuration();
        // "fs.defaultFS" replaces the deprecated "fs.default.name" key
        // (Hadoop still honors the old one, but warns).
        conf.set("fs.defaultFS", "hdfs://172.31.42.153:9000");
        String remoteFilePath = "/user/root/text.txt"; // HDFS path

        System.out.println("读取文件: " + remoteFilePath);
        System.out.println(MyFSDataInputStream.readline(conf, remoteFilePath));
        System.out.println("\n读取完成");
    }

//    Author: Tiny_16
//    Link: https://www.jianshu.com/p/0663d74b79b5
//    Source: Jianshu (简书)
//    Copyright belongs to the author. Contact the author for permission before
//    commercial reuse; credit the source for non-commercial reuse.
}
