package com.qq.BFMRSE.Hdfs;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.ObjectInputStream;
import java.io.OutputStream;
import java.io.Reader;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.Logger;

import com.qq.BFMRSE.BF_MRSEController.handlerNewEncryFile;
import com.qq.BFMRSE.controllerHelper.newIndexHelper;
import com.qq.BFMRSE.model.encryptedIndex;
import com.qq.BFMRSE.model.indexTreeNode;
import com.qq.BFMRSE.util.BFConstants;
import com.qq.BFMRSE.util.objectSerializeUtil;
import com.qq.BFMRSE.util.paraseJsonUtil;

public class hdfsOper {

	// Class-wide logger (log4j is already imported by this file); replaces the
	// scattered e.printStackTrace() calls so failures reach the application log.
	private static final Logger LOG = Logger.getLogger(hdfsOper.class);

	/**
	 * 从字节流向hdfs中写文件 — copies every byte of {@code in} to the HDFS file
	 * {@code HADOOP_PATH/des}, creating or overwriting it.
	 *
	 * @param in  source stream; NOT closed here — the caller owns it
	 * @param des destination path relative to {@link BFConstants#HADOOP_PATH}
	 */
	public static void upFileToHdfs(InputStream in, String des) {
		String desName = BFConstants.HADOOP_PATH + File.separator + des;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		// try-with-resources: the old code leaked os/bos when the copy threw, and
		// the finally block NPE'd when newInstance() itself failed (hdfs == null).
		try (FileSystem hdfs = FileSystem.newInstance(URI.create(desName), conf);
				OutputStream os = hdfs.create(new Path(desName));
				BufferedOutputStream bos = new BufferedOutputStream(os)) {
			byte[] data = new byte[4096];
			int length;
			// read() signals end-of-stream with -1; the old "> 0" test terminated
			// early on a legal zero-length read.
			while ((length = in.read(data)) != -1) {
				bos.write(data, 0, length);
			}
		} catch (IOException e) {
			LOG.error("upFileToHdfs(stream) failed for " + desName, e);
		}
	}

	/**
	 * 从本地文件向hdfs中写文件 — uploads the local file {@code src} to HDFS at
	 * {@code HADOOP_PATH/des}; the local source file is kept.
	 *
	 * @param src local filesystem path of the source file
	 * @param des destination path relative to {@link BFConstants#HADOOP_PATH}
	 */
	public static void upFileToHdfs(String src, String des) {
		String desName = BFConstants.HADOOP_PATH + File.separator + des;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		try (FileSystem hdfs = FileSystem.get(URI.create(desName), conf)) {
			// delSrc=false: keep the local copy after the upload.
			hdfs.copyFromLocalFile(false, new Path(src), new Path(desName));
		} catch (IOException e) {
			LOG.error("upFileToHdfs(" + src + ") failed for " + desName, e);
		}
	}

	/**
	 * 创建文件夹 — creates {@code HADOOP_PATH/dir} (and any missing parents).
	 *
	 * @param dir directory path relative to {@link BFConstants#HADOOP_PATH}
	 * @return true if the directory exists afterwards, false on failure
	 */
	public static boolean mkdir(String dir) {
		String desName = BFConstants.HADOOP_PATH + File.separator + dir;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		boolean result = false;
		// Single close via try-with-resources; the old code closed hdfs twice
		// (once in the try body and again in finally).
		try (FileSystem hdfs = FileSystem.get(URI.create(desName), conf)) {
			result = hdfs.mkdirs(new Path(desName));
		} catch (IOException e) {
			LOG.error("mkdir failed for " + desName, e);
		}
		return result;
	}

	/**
	 * Judges whether {@code HADOOP_PATH/dir} is a directory in HDFS.
	 *
	 * @param dir path relative to {@link BFConstants#HADOOP_PATH}
	 * @return true if the path exists and is a directory, false otherwise or on error
	 */
	public static boolean isDirectory(String dir) {
		String dirName = BFConstants.HADOOP_PATH + File.separator + dir;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		boolean flag = false;
		try (FileSystem hdfs = FileSystem.get(URI.create(dirName), conf)) {
			// BUG FIX: the old code tested new Path(dir) — the bare relative name —
			// instead of the fully qualified dirName every other method uses.
			flag = hdfs.isDirectory(new Path(dirName));
		} catch (IOException e) {
			LOG.error("isDirectory failed for " + dirName, e);
		}
		return flag;
	}

	/**
	 * Judges whether the file exists in HDFS.
	 *
	 * @param fileName path relative to {@link BFConstants#HADOOP_PATH}
	 * @return true if {@code HADOOP_PATH/fileName} exists, false otherwise or on error
	 */
	public static boolean isFileExist(String fileName) {
		String desName = BFConstants.HADOOP_PATH + File.separator + fileName;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		boolean result = false;
		try (FileSystem hdfs = FileSystem.get(URI.create(desName), conf)) {
			result = hdfs.exists(new Path(desName));
		} catch (IOException e) {
			LOG.error("isFileExist failed for " + desName, e);
		}
		return result;
	}

	/**
	 * Deletes {@code HADOOP_PATH/fileName} from HDFS. The delete is RECURSIVE,
	 * so a directory is removed together with its contents.
	 *
	 * @param fileName path relative to {@link BFConstants#HADOOP_PATH}
	 * @return true if the delete succeeded, false otherwise or on error
	 */
	public static boolean deleteFile(String fileName) {
		String desName = BFConstants.HADOOP_PATH + File.separator + fileName;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		boolean isDelete = false;
		try (FileSystem hdfs = FileSystem.get(URI.create(desName), conf)) {
			// recursive=true: directories are deleted with their contents.
			isDelete = hdfs.delete(new Path(desName), true);
		} catch (IOException e) {
			LOG.error("deleteFile failed for " + desName, e);
		}
		return isDelete;
	}

	/**
	 * Reads the whole HDFS file {@code HADOOP_PATH/fileName} as text.
	 *
	 * @param fileName path relative to {@link BFConstants#HADOOP_PATH}
	 * @return the file content, or the partial content read before an error
	 */
	public static String readFile(String fileName) {
		String filePath = BFConstants.HADOOP_PATH + File.separator + fileName;
		StringBuilder sb = new StringBuilder();
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		// Decode through a Reader with an explicit charset: the old byte-wise
		// new String(buffer, 0, len) could split a multi-byte character at a
		// buffer boundary and depended on the platform default charset.
		// NOTE(review): assumes files are stored as UTF-8 — confirm with writers.
		try (FileSystem hdfs = FileSystem.get(URI.create(filePath), conf);
				Reader reader = new InputStreamReader(
						new BufferedInputStream(hdfs.open(new Path(filePath))),
						StandardCharsets.UTF_8)) {
			char[] buffer = new char[1024];
			int len;
			while ((len = reader.read(buffer)) != -1) {
				sb.append(buffer, 0, len);
			}
		} catch (IOException e) {
			LOG.error("readFile failed for " + filePath, e);
		}
		return sb.toString();
	}

	/**
	 * Downloads the HDFS file {@code HADOOP_PATH/src} to the local path {@code des}.
	 *
	 * @param src source path relative to {@link BFConstants#HADOOP_PATH}
	 * @param des local destination path
	 */
	public static void downloadFile(String src, String des) {
		String srcName = BFConstants.HADOOP_PATH + File.separator + src;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		try (FileSystem hdfs = FileSystem.get(URI.create(srcName), conf)) {
			hdfs.copyToLocalFile(new Path(srcName), new Path(des));
		} catch (IOException e) {
			LOG.error("downloadFile failed for " + srcName, e);
		}
	}

	/**
	 * 读取hdfs中序列化文件root.obj并反序列化得到indexTreeNode对象.
	 * <p>
	 * SECURITY NOTE: this performs Java native deserialization of the file
	 * content; it must only ever be used on trusted data written by this
	 * application, never on externally supplied files.
	 *
	 * @param fileName path relative to {@link BFConstants#HADOOP_PATH}
	 * @return the deserialized index tree root, or null on failure
	 */
	public static indexTreeNode readIndexTree(String fileName) {
		String filePath = BFConstants.HADOOP_PATH + File.separator + fileName;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		indexTreeNode node = null;
		// try-with-resources also closes ois, which the old code leaked.
		try (FileSystem hdfs = FileSystem.newInstance(URI.create(filePath), conf);
				FSDataInputStream fin = hdfs.open(new Path(filePath));
				ObjectInputStream ois = new ObjectInputStream(fin)) {
			node = (indexTreeNode) ois.readObject();
		} catch (IOException | ClassNotFoundException e) {
			LOG.error("readIndexTree failed for " + filePath, e);
		}
		return node;
	}

	/**
	 * Reads {@code HADOOP_PATH/fileName} as JSON text and parses it into the
	 * encrypted-index map via {@link paraseJsonUtil#Json2MapIndex(String)}.
	 *
	 * @param fileName path relative to {@link BFConstants#HADOOP_PATH}
	 * @return the parsed index map, or null on failure
	 */
	public static Map<String, encryptedIndex> readEnIndex(String fileName) {
		String filePath = BFConstants.HADOOP_PATH + File.separator + fileName;
		Map<String, encryptedIndex> enIndex = null;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		// Reader with an explicit charset avoids splitting multi-byte characters
		// at buffer boundaries (see readFile); bis/fin are closed on all paths.
		try (FileSystem hdfs = FileSystem.newInstance(URI.create(filePath), conf);
				Reader reader = new InputStreamReader(
						new BufferedInputStream(hdfs.open(new Path(filePath))),
						StandardCharsets.UTF_8)) {
			StringBuilder sb = new StringBuilder();
			char[] buffer = new char[1024];
			int len;
			while ((len = reader.read(buffer)) != -1) {
				sb.append(buffer, 0, len);
			}
			enIndex = paraseJsonUtil.Json2MapIndex(sb.toString());
		} catch (IOException e) {
			LOG.error("readEnIndex failed for " + filePath, e);
		}
		return enIndex;
	}

	/**
	 * Streams each HDFS file {@code HADOOP_PATH/userName/ENCRYDIRNAME/file} into
	 * a local zip archive at {@code zipPath} — one entry per file, named after
	 * the file's last path segment.
	 *
	 * @param files    file names to package
	 * @param userName user whose encrypted directory is read
	 * @param zipPath  local path of the zip file to create or overwrite
	 */
	public static void addAllFilesToZip(String[] files, String userName, String zipPath) {
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		String dirPrefix = BFConstants.HADOOP_PATH + File.separator + userName
				+ File.separator + BFConstants.ENCRYDIRNAME + File.separator;
		// FileOutputStream creates the target file itself, so the old
		// createNewFile() pre-step was redundant. One FileSystem serves every
		// entry instead of one per file; the old code never closed fos/zos
		// safely when the FileOutputStream constructor threw.
		try (FileSystem hdfs = FileSystem.get(URI.create(dirPrefix), conf);
				ZipOutputStream zos = new ZipOutputStream(new FileOutputStream(zipPath))) {
			byte[] buffer = new byte[1024];
			for (String fileName : files) {
				String srcPath = dirPrefix + fileName;
				// Per-file streams are closed even if one copy fails midway.
				try (InputStream in = hdfs.open(new Path(srcPath));
						BufferedInputStream bis = new BufferedInputStream(in)) {
					// Entry name = substring after the last '/' of the file name.
					zos.putNextEntry(new ZipEntry(
							fileName.substring(fileName.lastIndexOf('/') + 1)));
					int length;
					while ((length = bis.read(buffer)) != -1) {
						zos.write(buffer, 0, length);
					}
					zos.closeEntry();
				}
			}
		} catch (IOException e) {
			LOG.error("addAllFilesToZip failed for " + zipPath, e);
		}
	}

	/**
	 * get all file Info of the directory — lists the URIs of the entries
	 * directly under {@code HADOOP_PATH/dir}.
	 *
	 * @param dir directory path relative to {@link BFConstants#HADOOP_PATH}
	 * @return URIs (as strings) of the directory's children
	 * @throws IOException if the HDFS listing fails
	 */
	public static List<String> getAllFilesOfDir(String dir) throws IOException {
		List<String> resultList = new ArrayList<String>();
		String dirName = BFConstants.HADOOP_PATH + File.separator + dir;
		Configuration conf = new Configuration();
		hdfsConf.initProperHDFS(conf);
		// BUG FIX: list the fully qualified dirName (the old code listed the bare
		// relative "dir"); the FileSystem is now closed instead of leaked.
		try (FileSystem hdfs = FileSystem.get(URI.create(dirName), conf)) {
			for (FileStatus status : hdfs.listStatus(new Path(dirName))) {
				resultList.add(status.getPath().toUri().toString());
			}
		}
		return resultList;
	}

}