import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;


import java.net.URI;

/**
 * @author xyr
 * @date 2021/10/12 9:17
 */
public class DemoTwo {

    /**
     * Demo 2: reads a text file from HDFS and streams its contents to stdout.
     *
     * <p>Opens {@code /test/17demo.txt} (the file written by DemoOne) on the
     * HDFS cluster at {@code 192.168.9.11:9000} and copies its bytes to
     * {@code System.out}.
     *
     * @param args unused
     */
    public static void main(String[] args) {

        // Point the Hadoop client at a local installation (needed on Windows)
        // and act as the "root" HDFS user.
        System.setProperty("hadoop.home.dir", "D:\\code\\hadoop-2.7.3");
        System.setProperty("HADOOP_USER_NAME", "root");

        // Read back the file that DemoOne wrote via the API.
        String readUrl = "hdfs://192.168.9.11:9000/test/17demo.txt";

        // Load the default HDFS client configuration.
        Configuration conf = new Configuration();

        // try-with-resources guarantees the HDFS connection and the input
        // stream are both closed even when the copy fails (the original code
        // leaked both resources).
        try (FileSystem fileSystem = FileSystem.get(URI.create(readUrl), conf);
             FSDataInputStream in = fileSystem.open(new Path(readUrl))) {
            // Stream the file to stdout in 4 KB chunks; closeStream=false
            // because try-with-resources already handles closing.
            IOUtils.copyBytes(in, System.out, 4096, false);
        } catch (Exception e) {
            // Demo code: surface the failure on stderr. A real application
            // would log this via SLF4J and/or rethrow a domain exception.
            e.printStackTrace();
        }
    }
}
