package test.hdfs;

import java.io.IOException;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.Test;

/**
 * Small JUnit-driven demo of the HDFS Java API: create a directory,
 * upload/download a file, delete a directory, and list file details.
 *
 * Each test opens its own {@link FileSystem} client and closes it via
 * try-with-resources, so a mid-test failure can no longer leak the
 * connection (the original {@code checkFileDetail} never closed it at all).
 */
public class HdfsClient {
	// Alternative cluster address: hdfs://cloud-41:9000
	// NameNode address; must match fs.default.name in /etc/hadoop/core-site.xml.
	private String URL = "hdfs://localhost:8000";
	// User to act as on HDFS (fixes the original "UESR" field-name typo).
	private String USER = "root";
	// Local fixture file uploaded to / downloaded from HDFS.
	private String testFile = "testfile.txt";
	// HDFS directory used by the tests.
	private String testDir = "/test";

	/**
	 * Opens an HDFS client for {@link #URL} acting as {@link #USER}.
	 * The caller must close the returned {@link FileSystem}
	 * (use try-with-resources).
	 *
	 * @throws Exception if the URI is malformed or the connection fails
	 */
	private FileSystem getFileSystem() throws Exception {
		return FileSystem.get(new URI(URL), new Configuration(), USER);
	}

	/**
	 * Generates the local test fixture file containing "12345".
	 *
	 * The original implementation shelled out with
	 * {@code Runtime.exec("cmd /c echo 12345 > testfile.txt")}, which is
	 * Windows-only, uses the deprecated single-string exec form, and never
	 * checks the child process result. Writing the file directly is
	 * portable and deterministic.
	 *
	 * @throws IOException if the file cannot be written
	 */
	@Test
	public void fileGenerate() throws IOException {
		Files.write(Paths.get(testFile), "12345".getBytes(StandardCharsets.UTF_8));
	}

	/**
	 * Creates {@link #testDir} on HDFS via the Java API.
	 * (Method name kept as-is — including the "creat" typo — to avoid
	 * breaking anything that selects tests by name.)
	 *
	 * @throws Exception on connection or filesystem errors
	 */
	@Test
	public void creatDirectory() throws Exception {
		try (FileSystem fileSystem = getFileSystem()) {
			fileSystem.mkdirs(new Path(testDir));
		}
	}

	/**
	 * Uploads the local {@link #testFile} into {@link #testDir} on HDFS.
	 *
	 * @throws Exception on connection or filesystem errors
	 */
	@Test
	public void fileUpload() throws Exception {
		try (FileSystem fileSystem = getFileSystem()) {
			fileSystem.copyFromLocalFile(new Path(testFile), new Path(testDir));
		}
	}

	/**
	 * Downloads the test file from HDFS to the local file "testbak.txt".
	 *
	 * @throws Exception on connection or filesystem errors
	 */
	@Test
	public void fileDownload() throws Exception {
		try (FileSystem fileSystem = getFileSystem()) {
			Path hdfsPath = new Path(testDir + "/" + testFile);
			Path localPath = new Path("testbak.txt");
			// delSrc=false: keep the HDFS copy;
			// useRawLocalFileSystem=true: skip writing the local .crc side-file.
			fileSystem.copyToLocalFile(false, hdfsPath, localPath, true);
		}
	}

	/**
	 * Deletes {@link #testDir} on HDFS; the directory may be non-empty.
	 *
	 * @throws Exception on connection or filesystem errors
	 */
	@Test
	public void dirDel() throws Exception {
		try (FileSystem fileSystem = getFileSystem()) {
			// recursive=true: remove the directory and everything under it.
			fileSystem.delete(new Path(testDir), true);
		}
	}

	/**
	 * Recursively lists every file under the HDFS root and prints its
	 * length, permission, group, owner, and name, tab-separated.
	 *
	 * Fixes the original resource leak: the FileSystem is now closed.
	 *
	 * @throws Exception on connection or filesystem errors
	 */
	@Test
	public void checkFileDetail() throws Exception {
		try (FileSystem fileSystem = getFileSystem()) {
			RemoteIterator<LocatedFileStatus> listFiles =
					fileSystem.listFiles(new Path("/"), true);
			while (listFiles.hasNext()) {
				LocatedFileStatus status = listFiles.next();

				System.out.print(status.getLen());
				System.out.print("\t");
				System.out.print(status.getPermission());
				System.out.print("\t");
				System.out.print(status.getGroup());
				System.out.print("\t");
				System.out.print(status.getOwner());
				System.out.print("\t");
				System.out.println(status.getPath().getName());
			}
		}
	}
}
