package com.zhl.hadoop.hdfs;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.FileOutputStream;
import java.net.URI;

/**
 * @program: demos
 * @description: HDFS file-system operation examples: upload, download,
 *               small-file merge, directory create/delete, recursive listing.
 * @author: 刘振华
 * @create: 2020-10-26 12:32
 **/
public class FileOperate {
	// NameNode RPC endpoint. Alternative HA nameservice URI: "hdfs://zhlcluster".
	public String hdfsuri = "hdfs://C111908191:8020";
	public FileSystem fs;

	// Root directory on HDFS used by the demo operations below.
	String path = "/zhltest";

	/** Opens the HDFS connection before each test. */
	@Before
	public void init() {
		fs = getFileSystem();
	}

	/** Closes the HDFS connection after each test. */
	@After
	public void after() throws Exception {
		// Guard against a failed init() so teardown does not add an NPE
		// on top of the original failure.
		if (fs != null) {
			fs.close();
		}
	}

	/**
	 * Manual driver: creates the join directory and uploads the products file.
	 * Other operations (listAllFiles, createFile, download, mergeSmallFiles,
	 * deletePath) can be invoked from here as needed.
	 */
	@Test
	public void run() throws Exception {
		mkdir("/zhltest/join/");
		upload("D:\\课程\\2.大数据\\测试数据\\in\\join\\products.txt", "/zhltest/join/prods.txt");
	}

	/**
	 * Builds a fresh (non-cached) FileSystem bound to {@link #hdfsuri} as user "zhl".
	 *
	 * @return a connected FileSystem instance, never {@code null}
	 * @throws IllegalStateException if the connection cannot be established.
	 *         Previously this method swallowed the exception and returned
	 *         {@code null}, which surfaced later as a confusing NPE.
	 */
	private FileSystem getFileSystem() {
		try {
			return FileSystem.newInstance(new URI(hdfsuri), new Configuration(), "zhl");
		} catch (Exception e) {
			// Fail fast with the original cause instead of returning null.
			throw new IllegalStateException("Unable to connect to " + hdfsuri, e);
		}
	}

	/** Recursively deletes a hard-coded test directory and prints the result. */
	public void deletePath() throws Exception {
		String dpath = "/doc_20201026";
		boolean delete = fs.delete(new Path(dpath), true);
		System.out.println(delete);
	}

	/**
	 * Concatenates every file in a local directory into a single HDFS file,
	 * so many small files become one large block-friendly file.
	 */
	public void mergeSmallFiles() throws Exception {
		String dest = path + "/bigFile";
		String localPath = "file:///D:\\课程\\2.大数据\\配置文件\\hadoop";
		LocalFileSystem local = FileSystem.getLocal(new Configuration());
		// try-with-resources guarantees the streams are closed even if a copy fails;
		// the cached LocalFileSystem is intentionally left open (shared instance).
		try (FSDataOutputStream distributeStream = fs.create(new Path(dest))) {
			for (FileStatus tfs : local.listStatus(new Path(localPath))) {
				try (FSDataInputStream localStream = local.open(tfs.getPath())) {
					IOUtils.copy(localStream, distributeStream);
				}
			}
		}
	}

	/**
	 * Uploads a local file to HDFS.
	 *
	 * @param local      local source path
	 * @param serverFile HDFS destination path
	 */
	public void upload(String local, String serverFile) throws Exception {
		fs.copyFromLocalFile(new Path(local), new Path(serverFile));
	}

	/**
	 * Downloads an HDFS file to the local file system.
	 *
	 * @param sourceFile HDFS source path
	 * @param local      local destination path
	 */
	public void download(String sourceFile, String local) throws Exception {
		fs.copyToLocalFile(new Path(sourceFile), new Path(local));
	}

	/** Stream-copies a hard-coded HDFS file to a hard-coded local path. */
	public void download1() throws Exception {
		String sourceFile = "/zhltest/a";
		String dest = "D:\\课程\\2.大数据\\测试数据\\a";
		try (FSDataInputStream sourceStream = fs.open(new Path(sourceFile));
				FileOutputStream destStream = new FileOutputStream(new File(dest))) {
			IOUtils.copy(sourceStream, destStream);
		}
	}

	/**
	 * Recursively creates a directory on HDFS (like {@code mkdir -p}) and
	 * prints whether anything was created.
	 */
	public void mkdir(String dir) throws Exception {
		boolean mkdirs = fs.mkdirs(new Path(dir));
		System.out.println(mkdirs);
	}

	/** Creates an empty file on HDFS. */
	public void createFile(String filePath) throws Exception {
		// fs.create returns an open output stream; close it immediately,
		// otherwise the stream (and its HDFS lease) is leaked.
		fs.create(new Path(filePath)).close();
	}

	/** Recursively lists every file under {@link #path} with its block count. */
	public void listAllFiles() throws Exception {
		RemoteIterator<LocatedFileStatus> iteFs = fs.listFiles(new Path(path), true);
		while (iteFs.hasNext()) {
			LocatedFileStatus next = iteFs.next();
			System.out.println(next.getPath().toString() + "===" + next.getBlockLocations().length);
		}
	}
}
