package test;

import java.io.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
//import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.io.IOUtils;


/**
 * Small utility for reading and writing text data on HDFS.
 *
 * <p>{@code ReaderHDFS} recursively concatenates the contents of every file
 * under a directory; {@code WriterHDFS} writes a string to a single HDFS path.
 *
 * @author dxl (revised by wanyi)
 */
public class ReaderFromHDFS {

	/** Platform line separator (kept public-visible for compatibility). */
	final static String CRLN = System.getProperty("line.separator");

	/**
	 * Recursively reads all files under {@code dir} on HDFS and returns their
	 * contents concatenated into a single string.
	 *
	 * <p>Fixes from the original version: per-file contents are now appended to
	 * the accumulator instead of overwriting a single {@code result} variable
	 * (the old code returned only the last file read and discarded recursive
	 * directory results), and every opened stream is closed.
	 *
	 * @param dir HDFS URI of a directory (or file), e.g. {@code hdfs://host:port/path}
	 * @return concatenated contents of all files found under {@code dir}
	 * @throws IOException if the filesystem cannot be reached or a file cannot be read
	 */
	public String ReaderHDFS(String dir) throws IOException
	{
		Configuration config = new Configuration();
		FileSystem fs = FileSystem.get(URI.create(dir), config);
		// StringBuilder: no synchronization needed for this local accumulator.
		StringBuilder sb = new StringBuilder();
		FileStatus[] status = fs.listStatus(new Path(dir));
		for (FileStatus entry : status)
		{
			if (entry.isDir())
			{
				// Descend into sub-directories and keep their contents too.
				sb.append(ReaderHDFS(entry.getPath().toString()));
			}
			else
			{
				ByteArrayOutputStream out = new ByteArrayOutputStream();
				InputStream in = null;
				try
				{
					in = fs.open(entry.getPath());
					// 4 KB copy buffer (the original used the conf default of 64 MB otherwise).
					IOUtils.copyBytes(in, out, 4096);
				}
				finally
				{
					// Close quietly even if copyBytes already closed the stream.
					IOUtils.closeStream(in);
				}
				// NOTE(review): bytes are decoded with the platform default charset,
				// matching the original ByteArrayOutputStream.toString() behavior.
				sb.append(out.toString());
			}
		}
		return sb.toString();
	}

	/**
	 * Writes {@code content} to the HDFS file at {@code path}, overwriting it.
	 *
	 * <p>Fixes from the original version: the output stream is now closed (an
	 * unclosed HDFS stream may never flush its data), and the bytes are encoded
	 * as UTF-8 instead of the platform default charset.
	 *
	 * @param content text to write
	 * @param path    HDFS URI of the destination file
	 * @throws IOException if the filesystem cannot be reached or the write fails
	 */
	public void WriterHDFS(String content, String path) throws IOException
	{
		Configuration config = new Configuration();
		FileSystem fs = FileSystem.get(URI.create(path), config);
		OutputStream out = null;
		try
		{
			out = fs.create(new Path(path));
			out.write(content.getBytes(StandardCharsets.UTF_8));
		}
		finally
		{
			if (out != null)
			{
				out.close();
			}
		}
	}

	/**
	 * Demo entry point: dumps the contents of a hard-coded HDFS directory.
	 */
	public static void main(String[] args) throws Exception
	{
		ReaderFromHDFS reader = new ReaderFromHDFS();
		String result = reader.ReaderHDFS("hdfs://localhost:9000/user/tuxinhui/clustering");
		System.out.println(result);
	}

}


