package com.pub.util;

import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

/**
 * Small utility class for moving files in and out of HDFS: copy a local file up,
 * write an arbitrary {@link InputStream} to an HDFS path, delete a path, and open
 * an HDFS file for reading.
 *
 * <p>Each method builds a fresh {@link Configuration}, so the Hadoop settings are
 * taken from the classpath (core-site.xml etc.) on every call.
 *
 * <p>Usage example (serving HDFS-stored images through a servlet):
 * http://blog.myspace.cn/e/406592866.htm
 */
public class FileHadoopUtils
{
	 /**
	  * Command-line entry point.
	  *
	  * @param args args[0] = local source path, args[1] = HDFS destination path,
	  *             args[2] = tag: "1" copies src to dst, anything else deletes dst.
	  */
	 public static void main(String[] args)
	 {
		 // Guard against missing arguments instead of throwing ArrayIndexOutOfBoundsException.
		 if (args.length < 3) {
			 System.err.println("Usage: FileHadoopUtils <src> <dst> <tag>  (tag=1: copy src to dst; otherwise: delete dst)");
			 return;
		 }

		 String src = args[0];
		 String dst = args[1];
		 String tag = args[2];
		 FileHadoopUtils util = new FileHadoopUtils();

		 try {
			 // "1".equals(tag) is null-safe, so no separate null check is needed.
			 if ("1".equals(tag)) {
				 System.out.println(util.createFile(src, dst));
			 } else {
				 // Print the outcome for delete too, consistent with the copy branch.
				 System.out.println(util.delete(dst, true));
			 }
		 } catch (IOException e) {
			 e.printStackTrace();
		 }
	 }

	 /**
	  * Copies one local file to HDFS.
	  *
	  * @param localFile  local file name
	  * @param hadoopFile HDFS destination path
	  * @return true if the copy succeeded, false otherwise
	  * @throws IOException if the copy fails with an I/O error
	  */
	 public  boolean createFile(String localFile,String hadoopFile) throws IOException
	 {
		 Configuration conf=new Configuration();

		 FileSystem src = FileSystem.getLocal(conf);
		 FileSystem dst = FileSystem.get(conf);

		 Path srcpath = new Path(localFile);
		 Path dstpath = new Path(hadoopFile);

		 // Propagate FileUtil.copy's success flag instead of unconditionally returning true.
		 // The last boolean keeps the original behavior: do NOT delete the local source.
		 return FileUtil.copy(src, srcpath, dst, dstpath, false, conf);
	 }


	 /**
	  * Writes the contents of an input stream to an HDFS file.
	  *
	  * <p>The input stream is always closed, even when an I/O error occurs mid-copy.
	  *
	  * @param inStream   input stream to read from (closed by this method)
	  * @param hadoopFile HDFS destination path
	  * @return true if the write completed
	  * @throws IOException if reading or writing fails
	  */
	 public boolean createFileByInputStream(InputStream inStream,String hadoopFile) throws IOException
	 {
		 Configuration conf=new Configuration();

		 FileSystem dst = FileSystem.get(conf);
		 Path dstpath 	= new Path(hadoopFile);

		 // try-with-resources guarantees both streams are closed on every path
		 // (the original leaked them if read/write threw).
		 try (InputStream in = inStream;
		      FSDataOutputStream oStream = dst.create(dstpath)) {
			 byte[] buffer = new byte[4096];
			 int length;
			 while ((length = in.read(buffer)) != -1) {
				 oStream.write(buffer, 0, length);
			 }
		 }

		 return true;
	 }

	 /**
	  * Deletes an HDFS file or directory.
	  *
	  * @param hadoopFile path to delete
	  * @param recursive  if the path is a directory and set to true, the directory is
	  *                   deleted; otherwise an exception is thrown. For a file the
	  *                   flag may be either true or false.
	  * @return true if the delete succeeded, false otherwise
	  * @throws IOException if the delete fails with an I/O error
	  */
	 public  boolean delete(String hadoopFile, boolean recursive) throws IOException
	 {
		 Configuration conf = new Configuration();
		 FileSystem dst = FileSystem.get(conf);

		 // Propagate FileSystem.delete's success flag instead of unconditionally returning true.
		 return dst.delete(new Path(hadoopFile), recursive);
	 }

	 /**
	  * Opens an HDFS file for reading.
	  *
	  * @param hadoopFile HDFS file path
	  * @return an open input stream positioned at the start of the file; the caller
	  *         is responsible for closing it
	  * @throws IOException if the file cannot be opened
	  */
	 public FSDataInputStream getInputStream(String hadoopFile) throws IOException
	 {
		 Configuration conf = new Configuration();

		 FileSystem dst	= FileSystem.get(conf);
		 Path p 		= new Path(hadoopFile);

		 return dst.open(p);
	 }
}
