package DFS_API;

import java.io.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import DFS_Server.*;

/**
 * Utility methods for working with an HDFS file system:
 * connect, create, upload, download, delete, and list files.
 *
 * @author wuhuayang@gmail.com
 */
public class HDFSUtil {
	 /**
     * 建立DFS文件系统
     *  
     *  
     * @param ip
     * @param port
     */  
	public synchronized static FileSystem getFileSystem(ConfReader conf) {
		FileSystem hdfs = null;
		String url = ConfReader.loadconfig().getConnString();
		Configuration config = new Configuration();
		config.set("fs.default.name", url);
		try {
			hdfs = FileSystem.get(config);
		} catch (Exception e) {
			Log.logger.error("getFileSystem failed :"+e.toString());
		}
		return hdfs;
	}

	/**
     * 创建新文件
     *  
     * @param hdfs 
     * @param path 
     * @param data 
     */  
	public synchronized static void createHDFSFile(FileSystem hdfs, String newFilepath,  
            String data) {  
			        Path dstPath = new Path(newFilepath);  
			        try {  
			             FSDataOutputStream os = hdfs.create(dstPath);  
			             os.writeUTF(data);  
			             os.close();
			             Log.logger.info("write data to " + newFilepath + " successed. ");  
		         } catch (Exception e) {  
		        	 	Log.logger.error("write data to " + newFilepath + " failed."+e.toString());  
			         }  
			    }  
	
	 /** 
	  * 创建新目录
	  *  
	  * @param hdfs 
	  * @param dirName 
	  */  
	public synchronized static void mkdirs(FileSystem hdfs, String dirName) {  
	        Path src = new Path(dirName);  
	        boolean succ;  
	         try {  
	             succ = hdfs.mkdirs(src);  
	             if (succ) {  
	            	 Log.logger.info("create directory " + dirName + " successed. ");  
	             } else {  
	            	 Log.logger.error("create directory " + dirName + " failed. ");  
	            }  
	         } catch (Exception e) {  
	        	 Log.logger.error("create directory " + dirName + " failed: "+e.toString());  
	         }  
	     }  
	
	/**
     * 删除文件或文件夹
     *  
     * @param hdfs 
     * @param dstFile
     */  
	public synchronized static void deleteHDFSFile(FileSystem hdfs, String dstFile) {  
			        Path dstPath = new Path(dstFile); 
			        try {  
			        	boolean isDeleted = hdfs.delete(dstPath, true);
			        	if (isDeleted){
			        		Log.logger.info("delete HDFS file " + dstPath + " successed. "); 
			        	}else{
			        		 Log.logger.error("delete HDFS file " + dstPath + " failed. "); 
			        	}
		         } catch (Exception e) {  
		        	 Log.logger.error("delete HDFS file " + dstPath + " failed :"+e.toString());
			         }  
			    }  
	
	 /**
     * 上传文件或文件夹  
     * @param hdfs 
     * @param srcFile 
     * @param dstFile 
     */     
	public synchronized static void uploadFileToHDFS(FileSystem hdfs, String srcFile,
			String dstFile) throws IOException {
		Path srcPath = new Path(srcFile);
		Path dstPath = new Path(dstFile);
	    try {  
		 hdfs.copyFromLocalFile(false, true, srcPath, dstPath); 
		 Log.logger.info("upload " + srcFile + " to  " + dstFile + " successed. ");  
	 } catch (Exception e) {  
		 Log.logger.error("upload " + srcFile + " to  " + dstFile + " failed :"+e.toString());   
			         }  
	}    
	
	/** 
	* 下载文件或文件夹
	*  
	* @param hdfs 
	* @param localPath 
	* @param remotePath 
	*/  
	public synchronized static void download(FileSystem fs, String localPath,  
	String remotePath) {    
	        Path dstPath = new Path(remotePath);  
	        Path srcPath = new Path(localPath);  
	        try {  
	            fs.copyToLocalFile(false, dstPath, srcPath);  
	            Log.logger.info("download from " + remotePath + " to  " + localPath  
	                    + " successed. ");  
	        } catch (Exception e) {  
	        	Log.logger.error("download from " + remotePath + " to  " + localPath + " failed :"+e.toString());  
	        }  
	    }  
	
	public synchronized static void listFile(FileSystem hdfs, String path) {
		Path dst;
		if (null == path || "".equals(path)) {
			dst = new Path(path);
		} else {
			dst = new Path(path);
		}
		try {
			String relativePath = "";
			FileStatus[] fList = hdfs.listStatus(dst);
			for (FileStatus f : fList) {
				if (null != f) {
					relativePath = new StringBuffer().append(
							f.getPath().getParent()).append("/").append(
							f.getPath().getName()).toString();
					if (f.isDir()) {
						listFile(hdfs, relativePath);
					} else {
						Log.logger.info(convertSize(f.getLen()) + "/t/t"
								+ relativePath);
					}
				}
			}
		} catch (Exception e) {
			Log.logger.error("list files of " + path + " failed :"+e.toString());
		} finally {
		}
	}
	
	/**
	 * 文件大小
	 * @param size
	 * @return
	 */
	public synchronized static String convertSize(long size) {
		String result = String.valueOf(size);
		if (size < 1024 * 1024) {
			result = String.valueOf(size / 1024) + " KB";
		} else if (size >= 1024 * 1024 && size < 1024 * 1024 * 1024) {
			result = String.valueOf(size / 1024 / 1024) + " MB";
		} else if (size >= 1024 * 1024 * 1024) {
			result = String.valueOf(size / 1024 / 1024 / 1024) + " GB";
		} else {
			result = result + " B";
		}
		return result;
	}
}
