/**
 * @Title: HdfsApiTest.java
 * @Package com.hadoop
 * @Description: HDFS API usage examples
 * @author jjw
 * @date 2018-07-25 18:01:27
 * @version V1.0
 */
package com.hadoop.hdfs.api;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

/**
 * @ClassName: HdfsApiTest
 * @Description: HDFS API tests. Reference: https://blog.csdn.net/menghuannvxia/article/details/44651061
 * @author jjw
 * @date 2018-07-25 18:01:27
 */
public class HdfsApiTest {

	Configuration conf = null;
	FileSystem fs = null;

	/** Opens the HDFS connection before each test. */
	@Before
	public void conn() throws IOException {
		// true: load the *-site.xml configuration files from the classpath;
		// false: manual configuration only
		conf = new Configuration(true);
		fs = FileSystem.get(conf); // the file-system handle used by every test
	}

	/** Releases the HDFS connection after each test. */
	@After
	public void close() throws IOException {
		fs.close();
	}

	/**
	 * Lists the entries under /user and prints basic status
	 * (path, access time, length, block size) for each one.
	 */
	@Test
	public void ls() throws Exception {
		Path path = new Path("/user");
		FileStatus[] status = fs.listStatus(path);
		for (FileStatus s : status) {
			System.out.println(s.getPath());
			System.out.println(s.getAccessTime());
			System.out.println(s.getLen());
			System.out.println(s.getBlockSize());
		}
	}

	/**
	 * Creates the directory /user/dir (including any missing parents).
	 */
	@Test
	public void mkdir() throws Exception {
		Path path = new Path("/user/dir");
		// primitive boolean — no need to box the mkdirs() result
		boolean created = fs.mkdirs(path);
		if (created) {
			System.out.println("mkdir /user/dir success~~");
		}
	}

	/**
	 * Uploads the local file E:/HDFS/a.txt to HDFS as /user/targetFile.
	 */
	@Test
	public void uploadFile() throws Exception {
		Path path = new Path("/user/targetFile");
		FSDataOutputStream out = fs.create(path); // create the target file on HDFS
		InputStream input = new BufferedInputStream(new FileInputStream(new File("E:/HDFS/a.txt")));
		// the trailing 'true' makes copyBytes close both streams when it finishes
		IOUtils.copyBytes(input, out, conf, true);
	}

	/**
	 * Prints the block locations of /user/root/hadooptest, then reads
	 * single bytes at several offsets via seek().
	 */
	@Test
	public void blockLocation() throws IOException {
		Path input = new Path("/user/root/hadooptest");
		// was getFileLinkStatus(): we want the status of the file itself
		// (following symlinks), which is what getFileStatus() returns
		FileStatus inFile = fs.getFileStatus(input);
		BlockLocation[] bls = fs.getFileBlockLocations(inFile, 0, inFile.getLen());

		for (BlockLocation b : bls) {
			// prints e.g. "0,18,localhost.localdomain"
			// 0: offset  18: length  localhost.localdomain: replica host(s)
			System.out.println(b);
		}

		// try-with-resources: the original version leaked this stream
		try (FSDataInputStream open = fs.open(input)) {
			System.out.println((char) open.readByte()); // byte at offset 0

			open.seek(6); // reposition the stream: next read starts at offset 6
			System.out.println((char) open.readByte());
			open.seek(0);
			System.out.println((char) open.readByte());
			open.seek(1);
			System.out.println((char) open.readByte());
			open.seek(7);
			System.out.println((char) open.readByte());
		}
	}

	/**
	 * Downloads /user/root/hadooptest to the local file E:/HDFS/b.txt.
	 */
	@Test
	public void download() throws IOException {
		Path path = new Path("/user/root/hadooptest");
		FSDataInputStream input = fs.open(path);
		//IOUtils.copyBytes(input, System.out, conf);// print to the console instead
		OutputStream outputStream = new FileOutputStream("E:/HDFS/b.txt");
		// NOTE: copyBytes(in, out, conf) closes both streams when it is done,
		// so neither stream may be reused afterwards (would throw "Stream closed")
		IOUtils.copyBytes(input, outputStream, conf);
	}

	/**
	 * Packs all small files under E:/HDFS/ into one SequenceFile
	 * (/user/bigfile), keyed by file name with the file text as value.
	 */
	@Test
	public void putsmall() throws Exception {
		Path path = new Path("/user/bigfile");

		File[] files = new File("E:/HDFS/").listFiles();
		if (files == null) {
			// listFiles() returns null when the directory is missing or unreadable
			return;
		}

		@SuppressWarnings("deprecation")
		SequenceFile.Writer write = new SequenceFile.Writer(fs, conf, path, Text.class, Text.class);
		try {
			for (File f : files) {
				if (f.isFile()) { // skip subdirectories; readFileToString would fail on them
					// explicit charset — the no-charset overload is deprecated and
					// silently depends on the platform default encoding
					write.append(new Text(f.getName()), new Text(FileUtils.readFileToString(f, "UTF-8")));
				}
			}
		} finally {
			// close in finally so a failed append() cannot leak the writer
			write.close();
		}
	}

	/**
	 * Reads back the SequenceFile written by putsmall() and prints
	 * every key/value pair.
	 */
	@Test
	public void getsmall() throws Exception {
		Path path = new Path("/user/bigfile");
		@SuppressWarnings("deprecation")
		SequenceFile.Reader reader = new SequenceFile.Reader(fs, path, conf);
		try {
			Text key = new Text();
			Text val = new Text();

			while (reader.next(key, val)) {
				System.out.println("111");
				System.out.println(key.toString());
				System.out.println(val.toString());
			}
		} finally {
			// actually close the reader instead of suppressing the "resource" warning
			reader.close();
		}
	}

	/**
	 * Deletes /user/bigfile from HDFS.
	 */
	@Test
	public void del() throws Exception {
		Path path = new Path("/user/bigfile");
		fs.delete(path, true); // 'true' = recursive delete
	}
}
