package com.tledu.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSClient;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

public class HdfsStudy {
    /**
     * Reads a file from HDFS and prints its contents to stdout as UTF-8 text.
     *
     * @param args args[0] is the HDFS path of the file to read
     * @throws IOException if the file system cannot be reached or the file cannot be read
     */
    public static void main(String[] args) throws IOException {
        // Guard against a missing argument instead of failing with an
        // ArrayIndexOutOfBoundsException and no hint.
        if (args.length < 1) {
            System.err.println("Usage: HdfsStudy <hdfs-file-path>");
            System.exit(1);
        }
        // 1. Path of the file to read, from the command line.
        String filePath = args[0];
        // 2. HDFS configuration (loads core-site.xml / hdfs-site.xml from the classpath).
        Configuration conf = new Configuration();
        // 3-5. try-with-resources closes both the FileSystem and the input stream
        // even if reading fails; the original leaked both.
        try (FileSystem fs = FileSystem.newInstance(conf);
             FSDataInputStream inputStream = fs.open(new Path(filePath))) {
            // 6-7. Drain the stream and print the decoded text.
            System.out.println(readToString(inputStream));
        }
    }

    /** Drains the stream fully and decodes the accumulated bytes as UTF-8. */
    private static String readToString(FSDataInputStream inputStream) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        byte[] buffer = new byte[8192];
        int len;
        // -1 is the documented EOF sentinel of InputStream.read.
        while ((len = inputStream.read(buffer)) != -1) {
            bos.write(buffer, 0, len);
        }
        // StandardCharsets.UTF_8 instead of the "utf8" string literal:
        // compile-time safe, no UnsupportedEncodingException path.
        return new String(bos.toByteArray(), StandardCharsets.UTF_8);
    }
}
