package com.ctsi.ftp.util;

import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.PathFilter;

/**
 * Static utility wrapper around a single shared HDFS {@link FileSystem} handle.
 *
 * <p>The configuration is assembled once in the static initializer from
 * {@code core-site.xml} / {@code hdfs-site.xml} plus "key|value" overrides read
 * from {@code ConfigProperties} ({@code hdfs.conf.size} entries named
 * {@code hdfs.conf0}, {@code hdfs.conf1}, ...).
 *
 * <p>Callers must NOT close streams' underlying filesystem: the {@code hdfs}
 * handle is shared by every method of this class for the lifetime of the JVM.
 */
public class HdfsUtil {
	// Shared Hadoop configuration, populated once below.
	static Configuration conf = new Configuration();
	// Shared FileSystem handle; never closed (see class Javadoc).
	static FileSystem hdfs;

	static {
		conf.addResource(new Path("core-site.xml"));
		conf.addResource(new Path("hdfs-site.xml"));

		// Apply extra overrides declared in the application config.
		// Each entry has the form "propertyName|propertyValue".
		int confSize = Integer.parseInt(ConfigProperties.getProperty("hdfs.conf.size"));
		for (int i = 0; i < confSize; i++) {
			String[] property = ConfigProperties.getProperty("hdfs.conf" + i).split("\\|");
			conf.set(property[0], property[1]);
		}

		try {
			hdfs = FileSystem.get(conf);
		} catch (IOException e) {
			// Fail fast: with hdfs == null every method below would NPE anyway,
			// and the original printStackTrace() silently hid the root cause.
			throw new ExceptionInInitializerError(e);
		}
	}

	// Utility class — not instantiable.
	private HdfsUtil() {
	}

	/**
	 * Creates the given directory (and any missing parents) on HDFS.
	 *
	 * @param hdfsDir directory to create; may carry an explicit scheme/authority
	 * @return {@code true} if the directory exists or was created
	 * @throws IOException on communication failure
	 */
	public static boolean makeHdfsDir(Path hdfsDir) throws IOException {
		// Resolve a filesystem for the path's own URI into a LOCAL variable.
		// The original assigned it to the shared static 'hdfs', silently
		// redirecting every other method of this class.
		// (FileSystem.get caches per scheme/authority, so no close needed here.)
		FileSystem fs = FileSystem.get(URI.create(hdfsDir.toString()), conf);
		return fs.mkdirs(hdfsDir);
	}

	/**
	 * Renames (moves) an HDFS file.
	 *
	 * @return {@code true} if the rename succeeded
	 */
	public static boolean renameHdfsFile(Path hdfsSrcFile, Path hdfsDestFile) throws Exception {
		return hdfs.rename(hdfsSrcFile, hdfsDestFile);
	}

	/**
	 * Lists the files directly under the given path (non-recursive).
	 */
	public static RemoteIterator<LocatedFileStatus> listHdfsFile(String hdfsPath) throws Exception {
		return hdfs.listFiles(new Path(hdfsPath), false);
	}

	/**
	 * Lists the directories and files directly under the given path.
	 */
	public static FileStatus[] listHdfsDir(String hdfsPath) throws Exception {
		return hdfs.listStatus(new Path(hdfsPath));
	}

	/** @see #listHdfsDir(String) */
	public static FileStatus[] listHdfsDir(Path hdfsPath) throws Exception {
		return hdfs.listStatus(hdfsPath);
	}

	/**
	 * Lists entries directly under the given path, keeping only those accepted
	 * by {@code filter}.
	 */
	public static FileStatus[] listHdfsDir(String hdfsPath, PathFilter filter) throws Exception {
		return hdfs.listStatus(new Path(hdfsPath), filter);
	}

	/** @see #listHdfsDir(String, PathFilter) */
	public static FileStatus[] listHdfsDir(Path hdfsPath, PathFilter filter) throws Exception {
		return hdfs.listStatus(hdfsPath, filter);
	}

	/**
	 * Returns whether the given HDFS path exists.
	 */
	public static boolean existsHdfsPath(Path hdfsPath) throws Exception {
		return hdfs.exists(hdfsPath);
	}

	/** Returns whether the path exists and is a directory. */
	public static boolean isHdfsDirectory(Path hdfsPath) throws IOException {
		return hdfs.isDirectory(hdfsPath);
	}

	/** Returns whether the path exists and is a regular file. */
	public static boolean isHdfsFile(Path hdfsPath) throws IOException {
		return hdfs.isFile(hdfsPath);
	}

	/**
	 * Returns whether the given directory is empty.
	 * The caller must ensure the path refers to a directory.
	 */
	public static boolean isHdfsDirEmpty(Path hdfsPath) throws IOException {
		return hdfs.listStatus(hdfsPath).length == 0;
	}

	/**
	 * Recursively deletes the given directory.
	 * The caller must ensure the path refers to a directory.
	 *
	 * @return {@code true} if the delete succeeded
	 */
	public static boolean deleteDir(Path hdfsPath) throws IOException {
		return hdfs.delete(hdfsPath, true);
	}

	/**
	 * Reads the entire file at the given path into a byte array.
	 *
	 * @param hdfsFilePath file to read; must be smaller than 2 GiB
	 * @return the file's full contents
	 * @throws Exception if the file does not exist, is too large for a single
	 *                   array, or an I/O error occurs
	 */
	public static byte[] ReadHDFSFileByte(Path hdfsFilePath) throws Exception {
		if (!hdfs.exists(hdfsFilePath)) {
			throw new Exception("the file is not found .");
		}
		FileStatus stat = hdfs.getFileStatus(hdfsFilePath);
		// toIntExact throws on files > Integer.MAX_VALUE bytes instead of
		// silently truncating like the original parseInt(valueOf(...)) dance.
		byte[] buffer = new byte[Math.toIntExact(stat.getLen())];
		// try-with-resources guarantees the stream is closed even if readFully
		// throws. NOTE: the original also called hdfs.close() here, which shut
		// down the SHARED filesystem and broke every subsequent call — removed.
		try (FSDataInputStream is = hdfs.open(hdfsFilePath)) {
			is.readFully(0, buffer);
		}
		return buffer;
	}

	/**
	 * Opens the file at the given path for reading.
	 * The caller is responsible for closing the returned stream.
	 *
	 * @throws Exception if the file does not exist or an I/O error occurs
	 */
	public static FSDataInputStream ReadHDFSFileStream(Path hdfsFilePath) throws Exception {
		if (!hdfs.exists(hdfsFilePath)) {
			throw new Exception("the file is not found .");
		}
		return hdfs.open(hdfsFilePath);
	}
}
