package com.hw.hdfs.service;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

@Service
public class HDFSService {

	private static final Logger log = LoggerFactory.getLogger(HDFSService.class);

	/** Buffer size (64 MB) used when streaming bytes between files. */
	private static final int BUFFER_SIZE = 1024 * 1024 * 64;

	@Autowired
	private Configuration conf;

	// Shared HDFS handle; opened in init() and released in close().
	private FileSystem fs = null;

	/**
	 * Opens the HDFS connection once dependency injection has completed.
	 *
	 * @throws IOException if the file system cannot be reached
	 */
	@PostConstruct
	public void init() throws IOException {
		fs = FileSystem.get(conf);
	}

	/**
	 * Releases the HDFS connection before the bean is destroyed.
	 *
	 * @throws IOException if closing the file system fails
	 */
	@PreDestroy
	public void close() throws IOException {
		// Guard against init() having failed before fs was assigned.
		if (fs != null) {
			fs.close();
		}
	}

	/**
	 * Creates a directory on HDFS.
	 *
	 * @param path absolute HDFS path of the directory to create
	 * @return true if the directory was created; false when the path is empty,
	 *         the service is not initialised, or the path already exists
	 * @throws Exception if the underlying HDFS call fails
	 */
	public boolean mkdir(String path) throws Exception {
		if (StringUtils.isEmpty(path) || null == fs) {
			return false;
		}
		Path hpath = new Path(path);
		if (fs.exists(hpath)) {
			log.info("file exist: {}", path);
			return false;
		}
		return fs.mkdirs(hpath);
	}

	/**
	 * Lists the immediate children of an HDFS directory.
	 *
	 * @param path absolute HDFS path to list
	 * @return one map per entry with keys "filePath" and "fileStatus";
	 *         empty when the path is blank or does not exist
	 * @throws IOException if the listing fails
	 */
	public List<Map<String, Object>> getPathList(String path) throws FileNotFoundException, IOException {
		List<Map<String, Object>> files = new LinkedList<Map<String, Object>>();
		if (StringUtils.isNotEmpty(path)) {
			Path hpath = new Path(path);
			// Check existence BEFORE listing: listStatus() throws
			// FileNotFoundException for a missing path rather than
			// returning an empty array.
			if (fs.exists(hpath)) {
				for (FileStatus f : fs.listStatus(hpath)) {
					Map<String, Object> map = new HashMap<String, Object>();
					map.put("filePath", f.getPath());
					map.put("fileStatus", f.toString());
					files.add(map);
				}
			}
		}
		return files;
	}

	/**
	 * Uploads a multipart file to HDFS through an output stream.
	 *
	 * @param hdfsPath target HDFS directory; the original file name is appended
	 * @param file     the uploaded file
	 * @return true on success, false on bad arguments or I/O failure
	 */
	public boolean uploadFile(String hdfsPath, MultipartFile file) {
		if (StringUtils.isEmpty(hdfsPath) || null == file) {
			return false;
		}
		String fileName = file.getOriginalFilename();
		// The file lands directly under the given directory, keeping its name.
		Path newPath = new Path(hdfsPath + "/" + fileName);
		// try-with-resources closes the stream even when write() fails
		// (the original leaked it on any IOException).
		try (FSDataOutputStream outputStream = fs.create(newPath)) {
			outputStream.write(file.getBytes());
		} catch (IOException e) {
			log.error("upload to {} failed", newPath, e);
			return false;
		}
		return true;
	}

	/**
	 * Downloads a file from HDFS to the local file system through streams.
	 *
	 * @param hdfsPath  source HDFS file path
	 * @param localPath destination local file path
	 * @return true on success, false on bad arguments or I/O failure
	 */
	public boolean downLoadFile(String hdfsPath, String localPath) {
		if (StringUtils.isEmpty(hdfsPath) || StringUtils.isEmpty(localPath)) {
			return false;
		}
		// try-with-resources guarantees both streams are closed even when the
		// copy aborts half way (the original leaked them on failure).
		try (FSDataInputStream in = fs.open(new Path(hdfsPath));
				FileOutputStream out = new FileOutputStream(new File(localPath))) {
			byte[] buffer = new byte[BUFFER_SIZE];
			int len;
			while ((len = in.read(buffer)) != -1) {
				out.write(buffer, 0, len);
			}
			return true;
		} catch (IOException e) {
			log.error("download {} to {} failed", hdfsPath, localPath, e);
			return false;
		}
	}

	/**
	 * Copies a local file into HDFS using the file-system copy API.
	 *
	 * @param localPath source path on the local file system
	 * @param hdfsPath  destination path on HDFS
	 * @return true on success, false on bad arguments or I/O failure
	 */
	public boolean localToHdfs(String localPath, String hdfsPath) {
		if (StringUtils.isEmpty(localPath) || StringUtils.isEmpty(hdfsPath)) {
			return false;
		}
		// Source path
		Path clientPath = new Path(localPath);
		// Destination path
		Path serverPath = new Path(hdfsPath);
		try {
			// First argument: whether to delete the source (false = keep it).
			fs.copyFromLocalFile(false, clientPath, serverPath);
		} catch (IOException e) {
			// Log at error level (was info) and keep the stack trace.
			log.error("copy {} to {} failed", localPath, hdfsPath, e);
			return false;
		}
		return true;
	}

	/**
	 * Copies an HDFS file to the local file system using the copy API.
	 *
	 * @param hdfsPath  source path on HDFS
	 * @param localPath destination path on the local file system
	 * @return true on success, false on bad arguments or failure
	 */
	public boolean HdfsToLocal(String hdfsPath, String localPath) {
		if (StringUtils.isEmpty(hdfsPath) || StringUtils.isEmpty(localPath)) {
			return false;
		}
		// Source path
		Path clientPath = new Path(hdfsPath);
		// Destination path
		Path serverPath = new Path(localPath);
		try {
			// First argument: whether to delete the source (false = keep it).
			fs.copyToLocalFile(false, clientPath, serverPath);
		} catch (Exception e) {
			// Log at error level (was debug, which hid real failures) with stack trace.
			log.error("copy {} to {} failed", hdfsPath, localPath, e);
			return false;
		}
		return true;
	}

	/**
	 * Reads the whole content of an HDFS text file.
	 * Note: line separators are NOT preserved — lines are concatenated,
	 * matching the original behaviour callers may rely on.
	 *
	 * @param path HDFS file path
	 * @return file content without line breaks, or null when the path is
	 *         blank or does not exist
	 * @throws Exception on read failure
	 */
	public String readFile(String path) throws Exception {
		if (StringUtils.isEmpty(path)) {
			return null;
		}
		Path srcPath = new Path(path);
		if (!fs.exists(srcPath)) {
			return null;
		}
		// Explicit UTF-8 prevents mojibake on hosts whose default charset is
		// not UTF-8 (the original relied on the platform default); the
		// try-with-resources also fixes the NPE the old finally block threw
		// when fs.open() itself failed.
		try (FSDataInputStream inputStream = fs.open(srcPath);
				BufferedReader reader = new BufferedReader(
						new InputStreamReader(inputStream, StandardCharsets.UTF_8))) {
			StringBuilder sb = new StringBuilder();
			String lineTxt;
			while ((lineTxt = reader.readLine()) != null) {
				sb.append(lineTxt);
			}
			return sb.toString();
		}
	}

	/**
	 * Recursively lists all files under an HDFS path.
	 *
	 * @param path HDFS path to search
	 * @return one map per file with keys "fileName" and "filePath", or null
	 *         when the path is blank or does not exist
	 * @throws Exception if the listing fails
	 */
	public List<Map<String, String>> listFile(String path) throws Exception {
		if (StringUtils.isEmpty(path)) {
			return null;
		}
		Path srcPath = new Path(path);
		if (!fs.exists(srcPath)) {
			return null;
		}
		// true = recurse into sub-directories.
		RemoteIterator<LocatedFileStatus> filesList = fs.listFiles(srcPath, true);
		List<Map<String, String>> returnList = new ArrayList<>();
		while (filesList.hasNext()) {
			LocatedFileStatus next = filesList.next();
			Map<String, String> map = new HashMap<>();
			map.put("fileName", next.getPath().getName());
			map.put("filePath", next.getPath().toString());
			returnList.add(map);
		}
		return returnList;
	}

	/**
	 * Renames (moves) a file or directory on HDFS.
	 *
	 * @param oldName current HDFS path
	 * @param newName new HDFS path
	 * @return true if the rename succeeded
	 * @throws Exception if the underlying HDFS call fails
	 */
	public boolean renameFile(String oldName, String newName) throws Exception {
		if (StringUtils.isEmpty(oldName) || StringUtils.isEmpty(newName)) {
			return false;
		}
		return fs.rename(new Path(oldName), new Path(newName));
	}

	/**
	 * Permanently deletes an HDFS file or directory. A non-empty directory
	 * and its contents are removed because recursive deletion is enabled.
	 *
	 * @param path HDFS path to delete
	 * @return true if something was deleted; false for a blank or missing path
	 * @throws Exception if the underlying HDFS call fails
	 */
	public boolean deleteFile(String path) throws Exception {
		if (StringUtils.isEmpty(path)) {
			return false;
		}
		Path srcPath = new Path(path);
		if (!fs.exists(srcPath)) {
			return false;
		}
		return fs.delete(srcPath, true);
	}

	/**
	 * Copies a file within HDFS through streams.
	 *
	 * @param sourcePath source HDFS file path
	 * @param targetPath destination HDFS file path
	 * @throws Exception on read or write failure
	 */
	public void copyFile(String sourcePath, String targetPath) throws Exception {
		if (StringUtils.isEmpty(sourcePath) || StringUtils.isEmpty(targetPath)) {
			return;
		}
		Path oldPath = new Path(sourcePath);
		Path newPath = new Path(targetPath);
		// try-with-resources avoids the NullPointerException the original
		// finally block threw when fs.open()/fs.create() itself failed.
		try (FSDataInputStream inputStream = fs.open(oldPath);
				FSDataOutputStream outputStream = fs.create(newPath)) {
			IOUtils.copyBytes(inputStream, outputStream, BUFFER_SIZE, false);
		}
	}

	/**
	 * Reads an HDFS file fully into a byte array.
	 *
	 * @param path HDFS file path
	 * @return file bytes, or null when the path is blank or does not exist
	 * @throws Exception on read failure
	 */
	public byte[] openFileToBytes(String path) throws Exception {
		if (StringUtils.isEmpty(path)) {
			return null;
		}
		Path srcPath = new Path(path);
		if (!fs.exists(srcPath)) {
			return null;
		}
		// Close the stream after reading; the original leaked it behind an
		// empty finally block.
		try (FSDataInputStream inputStream = fs.open(srcPath)) {
			return IOUtils.readFullyToByteArray(inputStream);
		}
	}

	/**
	 * Returns the cluster block locations of an HDFS file.
	 *
	 * @param path HDFS file path
	 * @return block locations, or null when the path is blank or does not exist
	 * @throws Exception if the status lookup fails
	 */
	public BlockLocation[] getFileBlockLocations(String path) throws Exception {
		if (StringUtils.isEmpty(path)) {
			return null;
		}
		Path srcPath = new Path(path);
		if (!fs.exists(srcPath)) {
			return null;
		}
		FileStatus fileStatus = fs.getFileStatus(srcPath);
		return fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
	}

}
