package com.yiidata.example.hadoop.hdfs;


import com.google.common.io.CharStreams;

import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;

/**
 * <pre>
 *
 * Created by zhenqin.
 * User: zhenqin
 * Date: 2019-01-27
 * Time: 12:21
 * Vendor: NowledgeData
 * To change this template use File | Settings | File Templates.
 *
 * </pre>
 *
 * @author zhenqin
 */
public class HDFSTest {


    /** Shared HDFS client, initialized once from the default (classpath) configuration. */
    static FileSystem fs = null;

    static {
        try {
            fs = FileSystem.get(new Configuration());
        } catch (IOException e) {
            // Fail fast: every method here depends on a usable FileSystem.
            // The original empty catch left fs == null and deferred the failure
            // to a confusing NullPointerException at first use.
            throw new ExceptionInInitializerError(e);
        }
    }

    /**
     * Reads the HDFS file at {@code path} as UTF-8 text and prints its full
     * contents to stdout. Read errors are caught and logged so a caller
     * iterating over many files keeps going (matches original behavior).
     *
     * @param path HDFS path of a regular file, e.g. {@code /user/foo/part-00000}
     * @throws Exception declared for caller compatibility; I/O failures are
     *                   handled internally and do not propagate
     */
    public static void testRead(String path) throws Exception {
        // try-with-resources closes the stream even on error — replaces the
        // manual finally + IOUtils.closeQuietly pattern.
        try (FSDataInputStream in = fs.open(new Path(path));
             BufferedReader reader = new BufferedReader(
                     new InputStreamReader(in, StandardCharsets.UTF_8))) {
            System.out.println(CharStreams.toString(reader));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    /**
     * Lists {@code /user} and prints the contents of each regular file found.
     */
    public static void main(String[] args) throws Exception {
        // Reuse the shared client instead of opening a second FileSystem
        // that shadowed the static field in the original.
        FileStatus[] files = fs.listStatus(new Path("/user"));
        for (FileStatus file : files) {
            System.out.println(file.getPath());
            // /user typically contains per-user directories; fs.open on a
            // directory throws, so only attempt to read regular files.
            if (file.isFile()) {
                testRead(file.getPath().toString());
            }
        }
    }

}
