package com.platform.apex.util;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

/**
 * Utility for common HDFS file operations: existence checks, create/delete,
 * local-to-HDFS copy, block-location lookup and text reads.
 *
 * <p>All methods log errors via {@code printStackTrace()} and return a
 * fallback value instead of throwing (preserved from the original contract).
 *
 * <p>NOTE(review): {@code conf}/{@code fs}/{@code hdfs} are static, so every
 * {@code new HadoopFileOperUtil(uri)} re-points the shared connection for ALL
 * instances. This class is therefore not safe for concurrent use against
 * different clusters — confirm callers never rely on two live instances.
 */
public class HadoopFileOperUtil {

	private static Configuration conf = new Configuration();

	private static FileSystem fs;

	private static DistributedFileSystem hdfs;

	/** User name used when opening the HDFS connection. */
	private String user = "root";

	/**
	 * No-op constructor. Does NOT connect; the shared {@code fs}/{@code hdfs}
	 * handles stay whatever a previous 1-arg construction set them to.
	 */
	public HadoopFileOperUtil() {
	}

	/**
	 * Connects to the HDFS cluster at the given URI (e.g. "hdfs://localhost:9000")
	 * as user {@code root}. Connection failures are logged and swallowed, leaving
	 * the shared handles null.
	 *
	 * @param fsDefaultFS the default file-system URI
	 */
	public HadoopFileOperUtil(String fsDefaultFS) {
		try {
			// "fs.defaultFS" is the current key; "fs.default.name" is deprecated.
			conf.set("fs.defaultFS", fsDefaultFS);
			// Disable the FileSystem cache so each construction gets a fresh connection.
			conf.setBoolean("fs.hdfs.impl.disable.cache", true);
			fs = FileSystem.get(new URI(fsDefaultFS), conf, user);
			hdfs = (DistributedFileSystem) fs;
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/** @return the shared {@link DistributedFileSystem} handle (may be null if never connected) */
	public DistributedFileSystem gethdfs() {
		return hdfs;
	}

	/**
	 * Prints the host name of every DataNode in the cluster, followed by the
	 * cluster URI, to stdout.
	 */
	public void listDataNodeInfo() {
		try {
			DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
			System.out.println("List of all the datanode in the HDFS cluster:");
			for (DatanodeInfo node : dataNodeStats) {
				System.out.println(node.getHostName());
			}
			System.out.println(hdfs.getUri().toString());
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Checks whether a path exists on HDFS.
	 *
	 * @param filePath path to test
	 * @return true if the path exists; false if it does not or an error occurred
	 */
	public Boolean checkFileExist(String filePath) {
		try {
			return fs.exists(new Path(filePath));
		} catch (Exception e) {
			e.printStackTrace();
		}
		return false;
	}

	/**
	 * Deletes the file or directory at the given path if it exists.
	 * Directories are removed recursively; errors are logged and swallowed.
	 *
	 * @param filePath path to delete
	 */
	public void deleteFile(String filePath) {
		try {
			Path f = new Path(filePath);
			if (hdfs.exists(f)) {
				// recursive=true is safe for plain files too, so no isDirectory branch is needed
				hdfs.delete(f, true);
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Creates (or overwrites) an HDFS file containing the given text, written
	 * as UTF-8.
	 *
	 * @param filePath    target HDFS path
	 * @param fileContent text to write
	 */
	public void createFile(String filePath, String fileContent) {
		try {
			// Remove any existing file first (preserved from original flow).
			deleteFile(filePath);

			Path f = new Path(filePath);
			// try-with-resources guarantees the HDFS stream is closed even if the write fails.
			try (FSDataOutputStream os = hdfs.create(f, true);
					Writer out = new OutputStreamWriter(os, StandardCharsets.UTF_8)) {
				out.write(fileContent);
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Copies a local UTF-8 text file to HDFS, overwriting any existing target.
	 * The file is copied line by line, so every line ending is normalized to
	 * {@code '\n'} and a trailing newline is always written.
	 *
	 * @param targetHdfsFilePath destination HDFS path
	 * @param localFilePath      source file on the local file system
	 */
	public void copyFileToHDFS(String targetHdfsFilePath, String localFilePath) {
		try {
			// Remove any existing target first (preserved from original flow).
			deleteFile(targetHdfsFilePath);

			Path f = new Path(targetHdfsFilePath);
			File file = new File(localFilePath);

			// try-with-resources closes both ends even on mid-copy failure.
			try (BufferedReader br = new BufferedReader(
							new InputStreamReader(new FileInputStream(file), StandardCharsets.UTF_8));
					FSDataOutputStream os = fs.create(f, true);
					Writer out = new OutputStreamWriter(os, StandardCharsets.UTF_8)) {
				String line;
				while ((line = br.readLine()) != null) {
					out.write(line + "\n");
				}
			}
			System.out.println("Write content of file " + file.getName() + " to hdfs file " + f.getName() + " success");
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Prints the host of every block replica of the given file, then its last
	 * modification time, to stdout.
	 *
	 * @param filePath HDFS path to inspect
	 */
	public void getLocation(String filePath) {
		try {
			Path f = new Path(filePath);
			FileStatus fileStatus = fs.getFileStatus(f);

			BlockLocation[] blkLocations = fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
			for (BlockLocation location : blkLocations) {
				for (String host : location.getHosts()) {
					System.out.println(host);
				}
			}

			// Last modification time of the file.
			System.out.println(new Date(fileStatus.getModificationTime()));
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Reads a UTF-8 text file from HDFS.
	 *
	 * <p>NOTE: line separators are dropped — all lines are concatenated with no
	 * delimiter (preserved original behavior; callers may depend on it).
	 *
	 * @param filePath HDFS path to read
	 * @return the file content, or null if the file does not exist or an error occurred
	 */
	public StringBuilder readFileFromHdfs(String filePath) {
		try {
			if (!checkFileExist(filePath)) {
				return null;
			}

			Path f = new Path(filePath);
			StringBuilder result = new StringBuilder();
			// try-with-resources closes the HDFS stream even if the read fails.
			try (FSDataInputStream dis = fs.open(f);
					BufferedReader br = new BufferedReader(
							new InputStreamReader(dis, StandardCharsets.UTF_8))) {
				String line;
				while ((line = br.readLine()) != null) {
					result.append(line);
				}
			}
			return result;
		} catch (Exception e) {
			e.printStackTrace();
		}
		return null;
	}
}