package com.yutel.hdfs;

import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.text.SimpleDateFormat;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Date;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.io.IOUtils;

/**
 * Basic HDFS file operations: create, upload, rename, delete, create
 * directory, read, existence check, and datanode reporting.
 *
 * <p>Each method builds its own {@link FileSystem} from a hard-coded namenode
 * URI and closes it before returning (try-with-resources), so every method is
 * independent of the others.
 *
 * @author Administrator
 */
public class OperaHDFS {

	/** Namenode used by {@link #createFile} and {@link #getNodeMsgHdfs}. */
	private static final String MASTER_243 = "hdfs://192.168.1.243:9000";

	/** Namenode used by the remaining demo methods. */
	private static final String MASTER_56 = "hdfs://192.168.137.56:9000";

	private OperaHDFS() {
		// utility class — no instances
	}

	public static void main(String args[]) throws IOException {
		// Demo: create a new file (UTF-8 so the bytes do not depend on the
		// platform default charset)
		byte[] contents = "hello world 世界你好\n--created by eclipse\n"
				.getBytes(StandardCharsets.UTF_8);
		createFile("/eclipse/first.txt", contents); // or createFile("hdfs://192.168.137.56:9000/eclipse/first.txt", contents);

		// Demo: upload a local file
		// uploadFile("D:\\c.txt", "/eclipse/");

		// Demo: rename
		// rename("/eclipse/c.txt", "/eclipse/cc.txt");

		// Demo: delete a file
		// delete("/eclipse/cc.txt"); // relative-style path
		// delete("/eclipse2"); // delete a directory

		// Demo: create a directory
		// mkdir("/eclipse2/");

		// Demo: read a file
		// readFile("/eclipse/first.txt");

		// Demo: check whether a file exists
		// fileIsExists("/eclipse/first.txt");

//		getNodeMsgHdfs();

	}

	/**
	 * Creates {@code dst} on HDFS (e.g. {@code /eclipse/first.txt}) and writes
	 * {@code contents} into it, overwriting any existing file.
	 *
	 * @param dst      target path; absolute HDFS path or full hdfs:// URI
	 * @param contents raw bytes to write
	 * @throws IOException if the filesystem cannot be reached or written
	 */
	public static void createFile(String dst, byte[] contents)
			throws IOException {
		// User that HDFS permission checks run against on the remote cluster.
		System.setProperty("HADOOP_USER_NAME", "hadoop");
		Configuration conf = new Configuration();
		System.out.println("-----------:" + conf);
		conf.set("fs.defaultFS", MASTER_243); // master
		// try-with-resources closes the stream and the filesystem even if the
		// write fails — the original leaked both on exception
		try (FileSystem fs = FileSystem.get(conf);
				FSDataOutputStream outputStream = fs.create(new Path(dst))) {
			outputStream.write(contents);
		}
		System.out.println("文件创建成功！");
	}

	/**
	 * Copies a local file into HDFS, then lists the target directory.
	 *
	 * @param src local source path
	 * @param dst HDFS target directory
	 * @throws IOException on connection or copy failure
	 */
	public static void uploadFile(String src, String dst) throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", MASTER_56); // master
		try (FileSystem fs = FileSystem.get(conf)) {
			Path srcPath = new Path(src); // local source
			Path dstPath = new Path(dst); // HDFS target
			// first argument: whether to delete the local source after
			// copying (false = keep it)
			fs.copyFromLocalFile(false, srcPath, dstPath);

			// "fs.defaultFS" replaces the deprecated "fs.default.name" key
			System.out.println("Upload to " + conf.get("fs.defaultFS"));
			// list everything under the target directory
			System.out.println("------------list files------------" + "\n");
			for (FileStatus file : fs.listStatus(dstPath)) {
				System.out.println(file.getPath() + "--" + file.getGroup() + "--"
						+ file.getBlockSize() + "--" + file.getLen() + "--");
			}
		}
	}

	/**
	 * Renames {@code oldName} to {@code newName} on HDFS and prints the result.
	 *
	 * @param oldName existing HDFS path
	 * @param newName desired HDFS path
	 * @throws IOException on connection failure
	 */
	public static void rename(String oldName, String newName)
			throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", MASTER_56);
		try (FileSystem fs = FileSystem.get(conf)) {
			boolean isOk = fs.rename(new Path(oldName), new Path(newName));
			System.out.println(isOk ? "rename ok" : "rename failure");
		}
	}

	/**
	 * Deletes a file or directory (recursively) from HDFS and prints the
	 * result.
	 *
	 * @param filePath HDFS path of the file or directory to remove
	 * @throws IOException on connection failure
	 */
	public static void delete(String filePath) throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", MASTER_56);
		try (FileSystem fs = FileSystem.get(conf)) {
			// fs.delete reports the actual outcome immediately; the original
			// deleteOnExit only queued the path for removal at fs.close() and
			// returned true for any existing path. recursive=true matches the
			// directory-deletion use in main().
			boolean isOk = fs.delete(new Path(filePath), true);
			if (isOk) {
				System.out.println("delete ok!");
			} else {
				System.out.println("delete failture");
			}
		}
	}

	/**
	 * Creates a directory (and any missing parents) on HDFS.
	 *
	 * @param path HDFS directory path to create
	 * @throws IOException on connection failure
	 */
	public static void mkdir(String path) throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", MASTER_56);
		// the original never closed fs here — resource leak
		try (FileSystem fs = FileSystem.get(conf)) {
			boolean isOk = fs.mkdirs(new Path(path));
			if (isOk) {
				System.out.println("create dir ok!");
			} else {
				System.out.println("create dir failture");
			}
		}
	}

	/**
	 * Prints a file's metadata (modification time, path, owner, group, block
	 * size, length), its block locations, and then streams its content to
	 * stdout.
	 *
	 * @param path HDFS path of the file to read
	 * @throws IOException on connection or metadata failure
	 */
	public static void readFile(String path) throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", MASTER_56);
		try (FileSystem fs = FileSystem.get(conf)) {
			Path srcPath = new Path(path);

			// file metadata
			FileStatus fileStatus = fs.getFileStatus(srcPath);
			long lastModifyTime = fileStatus.getModificationTime(); // epoch millis
			// java.time replaces the non-thread-safe SimpleDateFormat/Date
			String modified = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss")
					.withZone(ZoneId.systemDefault())
					.format(Instant.ofEpochMilli(lastModifyTime));
			System.out.println("最后修改时间：" + modified);
			System.out.println("文件路径：" + fileStatus.getPath());
			System.out.println("文件所有者：" + fileStatus.getOwner());
			System.out.println(
					fileStatus.getGroup() + "--" + fileStatus.getBlockSize() + "--"
							+ fileStatus.getLen() + "--");

			// where each block of the file lives in the cluster
			BlockLocation[] bloLocations = fs.getFileBlockLocations(fileStatus, 0,
					fileStatus.getLen());
			for (int i = 0; i < bloLocations.length; i++) {
				String[] hosts = bloLocations[i].getHosts();
				// a block can report no live hosts; guard the [0] access
				System.out.println("block_" + i + "_location:"
						+ (hosts.length > 0 ? hosts[0] : "<no host>"));
			}

			// stream the file content to stdout; keep the original best-effort
			// behavior (log and continue) but catch the narrower IOException
			try (InputStream in = fs.open(srcPath)) {
				IOUtils.copyBytes(in, System.out, 4096, false);
			} catch (IOException e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Checks whether a path exists on HDFS and prints the result.
	 *
	 * @param path HDFS path to test
	 * @throws IOException on connection failure
	 */
	public static void fileIsExists(String path) throws IOException {
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", MASTER_56);
		// the original never closed fs here — resource leak
		try (FileSystem fs = FileSystem.get(conf)) {
			if (fs.exists(new Path(path))) {
				System.out.println("文件存在");
			} else {
				System.out.println("文件不存在");
			}
		}
	}

	/**
	 * Prints a report for every datanode in the cluster (hostname, full
	 * report, DFS usage percentage, level).
	 *
	 * @throws IOException on connection failure
	 */
	public static void getNodeMsgHdfs() throws IOException {
		System.setProperty("HADOOP_USER_NAME", "hadoop");
		Configuration conf = new Configuration();
		conf.set("fs.defaultFS", MASTER_243);
		try (FileSystem fs = FileSystem.get(conf)) {
			// guard the cast: a misconfigured fs.defaultFS would otherwise
			// throw ClassCastException here
			if (!(fs instanceof DistributedFileSystem)) {
				System.out.println("Not a DistributedFileSystem: " + fs.getClass());
				return;
			}
			DatanodeInfo[] dataInfos =
					((DistributedFileSystem) fs).getDataNodeStats();
			for (int j = 0; j < dataInfos.length; j++) {
				System.out.println(
						"DataNode_" + j + "_Name:" + dataInfos[j].getHostName()
								+ "--->" + dataInfos[j].getDatanodeReport() + "-->"
								+ dataInfos[j].getDfsUsedPercent() + "-->"
								+ dataInfos[j].getLevel());
			}
		}
	}

}
