package com.bigdata;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.util.StringUtils;

/**
 * Exercises HDFS through the Hadoop Java API: mkdir, upload, download,
 * delete, directory listing and cluster/datanode info.
 *
 * <p>Environment prerequisites: HADOOP_HOME and HADOOP_USER_NAME must be
 * configured. Created by shirukai on 2017/11/2.
 */

// Tell JUnit about the Spring configuration file (none wired up yet).
public class TestHadoop {

	/** Class-level SLF4J logger; {@code static final} per convention. */
	private static final Logger logger = LoggerFactory.getLogger(TestHadoop.class);

	/**
	 * Connects to the HDFS NameNode configured below.
	 *
	 * @return a connected {@link FileSystem}, or {@code null} when the
	 *         connection fails (the failure is logged)
	 */
	public FileSystem getFileSystem() {
		// fs.defaultFS must be a scheme://host:port NameNode URI; a path
		// component (the original appended /apps/hbase/data) does not belong
		// in this property.
		String nameNodeUrl = "hdfs://192.168.50.161:8020";
		String nameNodeName = "fs.defaultFS";
		FileSystem fs = null;
		Configuration configuration = new Configuration();
		try {
			configuration.set(nameNodeName, nameNodeUrl);
			fs = FileSystem.get(configuration);
			logger.info("连接成功：Path={}", fs.getFileStatus(new Path("/")));
		} catch (IOException e) {
			// Narrowed from Exception: FileSystem.get and getFileStatus only
			// throw IOException. Callers must tolerate a null return.
			logger.error("Failed to connect to HDFS at {}", nameNodeUrl, e);
		}
		return fs;
	}

	/**
	 * Creates a directory on HDFS.
	 *
	 * @throws Exception
	 *             on connection or I/O failure
	 */
	@Test
	public void mkdirFolder() throws Exception {
		FileSystem fs = getFileSystem();
		String folderName = "/input";
		boolean created = fs.mkdirs(new Path(folderName));
		logger.info("mkdirs({}) => {}", folderName, created);
	}

	/**
	 * Uploads a local file to HDFS.
	 *
	 * @throws Exception
	 *             on connection or I/O failure
	 */
	@Test
	public void uploadFile() throws Exception {
		FileSystem fs = getFileSystem();
		// Local directory containing the file to upload.
		String localFilePath = "D://test//user-dao//";
		// Name of the file to upload.
		String fileName = "pom.xml";
		// Destination folder on HDFS.
		String uploadFolder = "/input/";

		// try-with-resources guarantees both streams are closed even when
		// fs.create() throws (the original leaked `in` in that case), so
		// copyBytes no longer needs to close them itself.
		try (InputStream in = new FileInputStream(localFilePath + fileName);
				OutputStream out = fs.create(new Path(uploadFolder + fileName))) {
			IOUtils.copyBytes(in, out, 4096, false);
		}
	}

	/**
	 * Downloads a file from HDFS to the local filesystem.
	 *
	 * @throws Exception
	 *             on connection or I/O failure
	 */
	@Test
	public void getFileFromHadoop() throws Exception {
		FileSystem fs = getFileSystem();
		// HDFS directory to download from.
		String downloadPath = "/input/";
		// Name of the file to download.
		String downloadFileName = "pom.xml";
		// Local path to save to.
		String savePath = "D://test//" + downloadFileName;

		// try-with-resources closes both streams even if the second open
		// throws (the original leaked `in` in that case).
		try (InputStream in = fs.open(new Path(downloadPath + downloadFileName));
				OutputStream out = new FileOutputStream(savePath)) {
			IOUtils.copyBytes(in, out, 4096, false);
		}
	}

	/**
	 * Deletes a file from HDFS. delete(path, recursive): when recursive is
	 * true, directories are removed together with their contents; when false
	 * only the single entry is removed.
	 *
	 * @throws Exception
	 *             on connection or I/O failure
	 */
	@Test
	public void deleteFile() throws Exception {
		FileSystem fs = getFileSystem();
		// Path of the file to delete.
		String deleteFilePath = "/input/pom.xml";
		// Primitive boolean: no reason to box the result.
		boolean deleteResult = fs.delete(new Path(deleteFilePath), true);
		logger.info("删除文件：={}", deleteResult);
	}

	/**
	 * Lists every entry directly under the given HDFS directory.
	 *
	 * @throws Exception
	 *             on connection or I/O failure
	 */
	@Test
	public void getAllFile() throws Exception {
		FileSystem fs = getFileSystem();
		// Directory to list.
		String getPath = "/";
		// listStatus also accepts a PathFilter argument to restrict results.
		FileStatus[] statuses = fs.listStatus(new Path(getPath));
		for (FileStatus file : statuses) {
			// Fixed: the original message had no {} placeholder, so the file
			// name argument was silently dropped and never logged.
			logger.info("fileName={}", file.getPath().getName());
		}
	}

	/**
	 * Uploads a local file to HDFS via copyFromLocalFile.
	 *
	 * @param delSrc
	 *            whether to delete the local source after the copy
	 * @param overwrite
	 *            whether to overwrite an existing destination file
	 * @param srcFile
	 *            local source path; on Windows use a Windows-style path such
	 *            as D://hadoop/djt/weibo.txt
	 * @param destPath
	 *            destination path on HDFS
	 */
	public void copyFileToHDFS(boolean delSrc, boolean overwrite, String srcFile, String destPath) {
		Path srcPath = new Path(srcFile);

		// Optional URI prefix for the destination; currently unset, so the
		// destination resolves against fs.defaultFS.
		String hdfsUri = "";
		if (StringUtils.isNotBlank(hdfsUri)) {
			destPath = hdfsUri + destPath;
		}
		Path dstPath = new Path(destPath);

		try {
			FileSystem fs = getFileSystem();
			// Fixed: the original invoked copyFromLocalFile twice, uploading
			// the file a second time — and the first call ignored the
			// delSrc/overwrite flags, so it could fail on an existing
			// destination before the flagged call ever ran. One call with
			// the flags is sufficient.
			fs.copyFromLocalFile(delSrc, overwrite, srcPath, dstPath);
			// Release the handle.
			fs.close();
		} catch (IOException e) {
			// printStackTrace() removed: the logger already records the full
			// stack trace, now with context instead of an empty message.
			logger.error("copyFileToHDFS failed: {} -> {}", srcFile, destPath, e);
		}
	}

	/**
	 * Logs information about every datanode in the HDFS cluster.
	 */
	@Test
	public void getHDFSNodes() {
		try {
			FileSystem fs = getFileSystem();
			// Guard the downcast: a misconfigured fs.defaultFS (e.g. file://)
			// yields a non-distributed FileSystem and the blind cast in the
			// original would throw ClassCastException.
			if (fs instanceof DistributedFileSystem) {
				DistributedFileSystem hdfs = (DistributedFileSystem) fs;
				DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
				for (DatanodeInfo datanodeInfo : dataNodeStats) {
					// Logger instead of System.out so node info lands in the
					// same sink as the rest of the diagnostics.
					logger.info("{}", datanodeInfo);
				}
			} else {
				logger.warn("FileSystem is not a DistributedFileSystem: {}", fs);
			}
		} catch (IOException e) {
			logger.error("Failed to fetch datanode stats", e);
		}
	}

	/**
	 * Placeholder for further experiments; currently only opens a connection.
	 *
	 * @throws Exception
	 *             on connection failure
	 */
	@Test
	public void otherOption() throws Exception {
		FileSystem fs = getFileSystem();
		logger.info("fs={}", fs);
	}

}