package com.dao;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.ServletOutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;

import com.beans.DiskFileInfo;

public class HdfsDao {

	/** HDFS user this DAO acts as for write operations (upload/mkdir/delete). */
	private static final String USER_NAME = "admin";
	/** Base URI of the HDFS namenode; every path below is resolved against it. */
	public static final String HDFS_PATH = "hdfs://zero:9000/";

	private Configuration conf = new Configuration();

	public HdfsDao() {
		//conf.set("dfs.client.socket-timeout", "600000");
		//conf.set("ipc.ping.interval", "600000");
		// Location of the Hadoop binaries (winutils) on the Windows dev box;
		// harmless on other machines. TODO(review): make configurable.
		System.setProperty("hadoop.home.dir", "D:/hadoop-2.6.0-cdh5.15.1");
	}

	/**
	 * Lists all files and directories directly under the user's home directory.
	 *
	 * @param userRoot the user's home directory name (same as the account name)
	 * @return array of file/directory entries
	 * @throws RuntimeException wrapping any HDFS access failure
	 */
	public DiskFileInfo[] getRootFileList(String userRoot) {
		return listChildren(HDFS_PATH + userRoot);
	}

	/**
	 * Lists all files and directories directly under the given parent directory.
	 * The caller passes any path; no special-casing of home directories is done.
	 *
	 * @param parent path in the form {@code admin/aaa/bbb}
	 * @return array of child entries
	 * @throws RuntimeException wrapping any HDFS access failure
	 */
	public DiskFileInfo[] getSubFileList(String parent) {
		return listChildren(HDFS_PATH + parent);
	}

	/** Shared implementation: list the immediate children of an absolute HDFS path. */
	private DiskFileInfo[] listChildren(String absolutePath) {
		// try-with-resources guarantees the FileSystem is released even when
		// listStatus throws (the original leaked it on any exception).
		try (FileSystem fs = FileSystem.get(URI.create(absolutePath), conf)) {
			FileStatus[] children = fs.listStatus(new Path(absolutePath));
			DiskFileInfo[] result = new DiskFileInfo[children.length];
			for (int i = 0; i < children.length; i++) {
				result[i] = new DiskFileInfo(children[i]);
			}
			return result;
		}
		catch (Exception ex) {
			throw new RuntimeException(ex);
		}
	}

	/**
	 * Computes how much disk space the user currently occupies.
	 *
	 * @param userRoot the user's account/home-directory name
	 * @return occupied space in bytes; 0 if the directory is missing or
	 *         unreachable (deliberately best-effort — callers treat errors as
	 *         "no usage" rather than failing the page)
	 */
	public long getUserDiskSize(String userRoot) {
		try (FileSystem fs = FileSystem.get(URI.create(HDFS_PATH), conf)) {
			return fs.getContentSummary(new Path("/" + userRoot)).getLength();
		}
		catch (Exception ex) {
			// Intentionally swallowed: size lookup must never break the UI.
			return 0;
		}
	}

	public static void main(String[] args) throws Exception {
		/*HdfsDao dao = new HdfsDao();
		dao.getDirectoryFromHdfs();
		 Historical note: the first run of this method failed with a classpath
		 conflict between Java_EE_6's weld-osgi-bundle.jar and guava-11.0.2.jar.
		 com/google/common/collect/Interners is a utility class HDFS needs when
		 creating a FileSystem; both jars contain the same package structure but
		 weld-osgi-bundle.jar lacks the Interners class, causing a runtime clash.
		 Fix: remove weld-osgi-bundle.jar (from Java EE) from the build path.*/
	}

	/**
	 * Streams a file from HDFS to the servlet response for download.
	 * The servlet stream {@code out} is flushed but NOT closed — it belongs to
	 * the servlet container.
	 *
	 * @param filePath file path in the form {@code admin/javatools/Test.txt}
	 * @param out the servlet output stream the browser downloads from
	 * @throws RuntimeException wrapping any I/O failure
	 */
	public void downLoadFileAsStream(String filePath, ServletOutputStream out) {
		// Both the FileSystem and the input stream are closed on every path;
		// the original never closed fs and leaked fsInput on copy errors.
		try (FileSystem fs = FileSystem.get(URI.create(HDFS_PATH), conf);
			 FSDataInputStream fsInput = fs.open(new Path("/" + filePath))) {
			// false = do not close the streams; out must stay open for the container.
			IOUtils.copyBytes(fsInput, out, 4096, false);
			out.flush();
		}
		catch (IOException ex) {
			throw new RuntimeException(ex);
		}
	}

	/**
	 * Uploads a file to HDFS from the web server's local disk, deleting the
	 * local copy afterwards (moveFromLocalFile semantics).
	 *
	 * @param parent destination folder on HDFS
	 * @param localPath path of the temporary file on the web server
	 * @throws RuntimeException wrapping any failure
	 */
	public void uploadFile(String parent, String localPath) {
		try (FileSystem fs = FileSystem.get(URI.create(HDFS_PATH), conf, USER_NAME)) {
			fs.moveFromLocalFile(new Path(localPath), new Path("/" + parent));
		}
		catch (Exception ex) {
			throw new RuntimeException(ex);
		}
	}

	/**
	 * Creates a home directory for a newly registered user at the HDFS root.
	 * Each user owns exactly one such directory; all of their files live under
	 * it. Account names are unique at the web layer, so collisions cannot occur.
	 *
	 * @param folderName directory name (the user's account name)
	 * @return true on success, false on failure
	 * @throws RuntimeException wrapping any HDFS access failure
	 */
	public boolean createUserRoot(String folderName) {
		try (FileSystem fs = FileSystem.get(URI.create(HDFS_PATH), conf, USER_NAME)) {
			return fs.mkdirs(new Path("/" + folderName));
		}
		catch (Exception ex) {
			throw new RuntimeException(ex);
		}
	}

	/**
	 * Creates a sub-folder under an existing directory.
	 *
	 * @param parent parent path, e.g. {@code admin} or {@code admin/xxx/yyy}
	 * @param folderName name of the new folder, e.g. {@code javatools}
	 * @return true on success, false on failure
	 * @throws RuntimeException wrapping any HDFS access failure
	 */
	public boolean createFolder(String parent, String folderName) {
		try (FileSystem fs = FileSystem.get(URI.create(HDFS_PATH), conf, USER_NAME)) {
			return fs.mkdirs(new Path("/" + parent + "/" + folderName));
		}
		catch (Exception ex) {
			throw new RuntimeException(ex);
		}
	}

	/**
	 * Deletes a file or directory on HDFS.
	 *
	 * @param filePath full path of the file or directory; a leading
	 *        {@code hdfs://...} prefix is stripped if present
	 * @return true if the deletion succeeded
	 * @throws RuntimeException wrapping any HDFS access failure
	 */
	public boolean deleteFile(String filePath) {
		try (FileSystem fs = FileSystem.get(URI.create(HDFS_PATH), conf, USER_NAME)) {
			String newPath = "/" + filePath.replace(HDFS_PATH, "");
			// recursive=true: non-empty directories cannot be deleted otherwise
			return fs.delete(new Path(newPath), true);
		}
		catch (Exception ex) {
			throw new RuntimeException(ex);
		}
	}

	// NOTE(review): two hand-rolled recursive-listing helpers used to live here
	// (commented out); fs.listFiles(path, true) replaces them, so they were removed.

	/**
	 * Recursively collects every file of a given type under a user's home
	 * directory. The type is mapped to a set of file-name extensions via
	 * {@link #typeToFileExtMap}.
	 *
	 * @param userRoot the user's home directory (same as the account name)
	 * @param type logical file type key, e.g. {@code picture}, {@code txt}
	 * @return matching files; empty list when the type is unknown
	 * @throws RuntimeException wrapping any HDFS access failure
	 */
	public List<DiskFileInfo> getFileListByType(String userRoot, String type) {
		List<DiskFileInfo> fileList = new ArrayList<DiskFileInfo>();

		// Hoisted out of the loop; the original looked it up per file and threw
		// a wrapped NPE for unknown types — now an unknown type yields no matches.
		String[] fileExtList = typeToFileExtMap.get(type);
		if (fileExtList == null) {
			return fileList;
		}

		String userPath = HDFS_PATH + userRoot;
		try (FileSystem fs = FileSystem.get(URI.create(userPath), conf, USER_NAME)) {
			// listFiles(path, true) walks the tree recursively for us
			RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path(userPath), true);
			while (files.hasNext()) {
				LocatedFileStatus file = files.next();
				String lowerName = file.getPath().getName().toLowerCase();
				for (String ext : fileExtList) {
					if (lowerName.endsWith(ext)) {
						fileList.add(new DiskFileInfo(file));
						break;
					}
				}
			}
			return fileList;
		}
		catch (Exception ex) {
			throw new RuntimeException(ex);
		}
	}

	// Maps a logical file type ("picture", "txt", ...) to the lower-case
	// file-name extensions that belong to it.
	private static Map<String, String[]> typeToFileExtMap;

	static {
		typeToFileExtMap = new HashMap<String, String[]>();
		typeToFileExtMap.put("picture", new String[]{".jpeg", ".bmp", ".gif", ".png", ".tiff", ".psd", ".eps", ".raw", ".pdf", ".png", ".pxr", ".mac", ".jpg", ".tga", ".img", ".pcd"});
		typeToFileExtMap.put("txt", new String[]{".txt", ".doc", ".docx", ".wps", ".xls", ".xlsx", ".java", ".pdf", ".c", ".h", ".cpp", ".xml"});
		typeToFileExtMap.put("avi", new String[]{".wmv", ".avi", ".wma", ".rmvb", ".rm", ".flash", ".mp4", ".mid", ".3gp", ".mpg", ".mp2v", ".mpv2", ".m1v", ".dat", ".dsa", ".mkv"});
		typeToFileExtMap.put("sound", new String[]{".mp3", ".wma", ".avi", ".rm", ".rmvb", ".flv", ".mpg", ".mov", ".mkv"});
		typeToFileExtMap.put("gz", new String[]{".gz", ".zip", ".rar", ".z", ".bz", ".bz2", ".7z", ".mov", ".mkv"});
		typeToFileExtMap.put("torrent", new String[]{".torrent"});
	}

	/**
	 * Recursively searches a user's home directory for files whose name
	 * contains the given string (case-insensitive).
	 *
	 * @param userRoot the currently logged-in account name
	 * @param fileName substring to match against file names, case-insensitive
	 * @return matching files
	 * @throws RuntimeException wrapping any HDFS access failure
	 */
	public List<DiskFileInfo> getFileListByFileName(String userRoot, String fileName) {
		String needle = fileName.toLowerCase();
		List<DiskFileInfo> fileList = new ArrayList<DiskFileInfo>();
		try (FileSystem fs = FileSystem.get(URI.create(HDFS_PATH), conf, USER_NAME)) {
			// listFiles(path, true) walks the tree recursively for us
			RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path(HDFS_PATH + userRoot), true);
			while (files.hasNext()) {
				LocatedFileStatus file = files.next();
				if (file.getPath().getName().toLowerCase().contains(needle)) {
					fileList.add(new DiskFileInfo(file));
				}
			}
			return fileList;
		}
		catch (Exception ex) {
			throw new RuntimeException(ex);
		}
	}

}
