package org.shj.hadoop.fs;

import java.io.InputStream;
import java.net.URI;
import java.net.URL;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Demonstrates two ways of reading a file from HDFS and printing it to stdout:
 * <ol>
 *   <li>via {@link java.net.URL#openStream()}, which requires registering
 *       Hadoop's {@link FsUrlStreamHandlerFactory}, and</li>
 *   <li>via the {@link FileSystem} API, which additionally supports random
 *       access through {@link FSDataInputStream#seek(long)} (the file is
 *       printed twice to demonstrate rewinding).</li>
 * </ol>
 */
public class HdfsIO {

	static {
		// Register Hadoop's handler so java.net.URL understands hdfs:// URLs.
		// setURLStreamHandlerFactory may be invoked at most once per JVM,
		// which is why this lives in a static initializer.
		URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
	}

	/** Default HDFS file read when no path is supplied on the command line. */
	private static final String DEFAULT_PATH =
			"hdfs://192.168.56.101:9000/user/shj/output/part-r-00000";

	/**
	 * Reads an HDFS file and writes its contents to {@code System.out}.
	 *
	 * @param args optional; {@code args[0]} overrides the default HDFS path
	 * @throws Exception if the URL cannot be opened or the filesystem
	 *         cannot be reached
	 */
	public static void main(String[] args) throws Exception {
		// Generalized: the target path may be passed as the first argument;
		// falling back to the original hard-coded path keeps old usage working.
		String path = args.length > 0 ? args[0] : DEFAULT_PATH;

		// 1. Read via java.net.URL (handler factory registered above).
		// try-with-resources guarantees the stream is closed even on failure.
		try (InputStream in = new URL(path).openStream()) {
			IOUtils.copyBytes(in, System.out, 4096, false);
		}

		// 2. Read via the FileSystem API, which exposes seekable streams.
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(URI.create(path), conf);
		try (FSDataInputStream fsin = fs.open(new Path(path))) {
			IOUtils.copyBytes(fsin, System.out, 4096, false);
			fsin.seek(0); // rewind to the start of the file
			IOUtils.copyBytes(fsin, System.out, 4096, false); // print twice
		}
	}
}
