package test;
import java.io.BufferedInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.fs.FileSystem;

/**
 * Reads data from HDFS and writes it to standard output.
 *
 * @author wanyi
 */
public class GetDataFromHdfs {

	/**
	 * Streams the contents of every regular file directly under {@code uri}
	 * to {@code System.out}.
	 *
	 * <p>Directories returned by {@code listStatus} are skipped: calling
	 * {@code fs.open()} on a directory throws in HDFS, and the original code
	 * attempted exactly that in its {@code isDir()} branch.
	 *
	 * @param uri HDFS path to list, e.g. {@code hdfs://localhost:9000/user/x/output}
	 * @throws Exception if the filesystem cannot be reached or a file cannot be read
	 */
	public static void get(String uri) throws Exception {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(URI.create(uri), conf);
		try {
			FileStatus[] statuses = fs.listStatus(new Path(uri));
			for (FileStatus status : statuses) {
				// Opening a directory would throw — skip it instead.
				if (status.isDir()) {
					continue;
				}
				FSDataInputStream in = fs.open(status.getPath());
				try {
					// 4096-byte (4 KB) copy buffer; 'false' leaves System.out open
					// so subsequent files can still be written.
					IOUtils.copyBytes(in, System.out, 4096, false);
				} finally {
					// Close each stream inside the loop — the original reassigned
					// 'in' every iteration and leaked all but the last stream.
					IOUtils.closeStream(in);
				}
			}
		} finally {
			// NOTE(review): FileSystem.get may return a cached instance; closing is
			// fine for this standalone utility, but verify if the class is reused
			// inside a larger application sharing the FS cache.
			fs.close();
		}
	}

	/** Entry point: dumps the hard-coded example output directory to stdout. */
	public static void main(String[] args) {
		String uri = "hdfs://localhost:9000/user/tuxinhui/output";
		try {
			GetDataFromHdfs.get(uri);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
}
