package tools;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;

/**
 * Static helpers for listing and reading text files stored in HDFS.
 *
 * <p>Error-handling contract (preserved from the original API): methods never
 * throw on HDFS failures; they print the stack trace and return a best-effort
 * result (a partial/empty list, or {@code null} for reader factories).
 */
public class HDFSTools {

	/**
	 * Lists the full paths of all entries directly under {@code filePath}.
	 *
	 * @param job      unused; kept for interface compatibility with existing callers
	 * @param conf     Hadoop configuration used to obtain the FileSystem
	 * @param filePath HDFS directory path; a trailing '/' is tolerated and stripped
	 * @return list of "dir/child" paths; empty (never null) if listing fails
	 */
	public static  List<String> getFilePathList(Job job, Configuration conf, String filePath){
		// Strip a trailing '/' so the "filePath + "/" + name" concatenation
		// below does not produce a double slash.
		if (filePath.endsWith("/")) {
			filePath = filePath.substring(0, filePath.length() - 1);
		}
		List<String> filePathList = new ArrayList<String>();
		try {
			// NOTE: FileSystem.get(conf) returns a cached instance shared
			// process-wide. Do NOT close it here (the original did, which
			// invalidates the FileSystem for every other user of the cache).
			FileSystem hdfs = FileSystem.get(conf);
			FileStatus[] fileList = hdfs.listStatus(new Path(filePath));
			for (int i = 0; i < fileList.length; i++) {
				filePathList.add(filePath + "/" + fileList[i].getPath().getName());
			}
		} catch (Exception e) {
			e.printStackTrace();
		}

		return filePathList;
	}

	/**
	 * Reads every file under the given HDFS directory and returns all
	 * non-blank lines (trimmed), in file-listing order.
	 *
	 * @param job      unused; kept for interface compatibility
	 * @param conf     Hadoop configuration used to list the directory
	 * @param filePath HDFS directory whose files are read as UTF-8 text
	 * @return trimmed non-empty lines; empty (never null) on failure
	 */
	public static List<String> getHDFSFileToList(Job job, Configuration conf,String filePath) {
		List<String> lineList = new ArrayList<String>();
		try {
			List<String> filePathList = HDFSTools.getFilePathList(job, conf, filePath);
			for (String path : filePathList) {
				BufferedReader br = makeBufferReader(path);
				if (br == null) {
					// makeBufferReader already printed the failure; skip this file.
					continue;
				}
				try {
					String strLineBuff;
					while ((strLineBuff = br.readLine()) != null) {
						strLineBuff = strLineBuff.trim();
						if (!"".equals(strLineBuff)) {
							lineList.add(strLineBuff);
						}
					}
				} finally {
					// BUGFIX: close every reader. The original reused one 'br'
					// variable and closed only the last reader, leaking the
					// underlying HDFS stream for every earlier file.
					killBufferReader(br);
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		return lineList;
	}

	/**
	 * Quietly closes the given reader; a null reader or a failing close is
	 * logged via printStackTrace and otherwise ignored.
	 */
	public static void killBufferReader(BufferedReader reader) {
		try {
			if (reader != null) {
				reader.close();
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
	}

	/**
	 * Opens an HDFS file for reading as UTF-8 text.
	 *
	 * @param filePath full HDFS path of the file
	 * @return a buffered reader, or {@code null} if opening failed
	 */
	public static BufferedReader makeBufferReader(String filePath)
			throws Exception {
		return makeBufferReader(filePath, "utf-8");
	}

	/**
	 * Opens an HDFS file for reading with the given charset.
	 *
	 * <p>Uses a fresh default {@link Configuration}, so the file is resolved
	 * against the default FileSystem of the classpath configuration.
	 *
	 * @param filePath full HDFS path of the file
	 * @param charset  charset name passed to {@link InputStreamReader}
	 * @return a buffered reader, or {@code null} if opening failed (the
	 *         partially-opened streams are closed before returning)
	 */
	public static BufferedReader makeBufferReader(String filePath, String charset)throws Exception {
		Configuration config = new Configuration();
		FileSystem hdfs = FileSystem.get(config);
		Path path = new Path(filePath);
		InputStreamReader isr = null;
		BufferedReader fileBR = null;

		try {
			isr = new InputStreamReader(hdfs.open(path), charset);
			fileBR = new BufferedReader(isr);
			return fileBR;
		} catch (Exception e) {
			e.printStackTrace();
			// Clean up whichever stream was opened before the failure.
			if (fileBR != null) {
				try {
					fileBR.close();
				} catch (IOException e1) {
					e1.printStackTrace();
				}
			}
			if (isr != null) {
				try {
					isr.close();
				} catch (IOException e1) {
					e1.printStackTrace();
				}
			}
			return null;
		}
	}

}
