import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;

/**
 * Smoke tests for basic HDFS operations (mkdir, copy, delete, rename,
 * listing, and stream-based upload/download) against a remote cluster.
 *
 * @author dinghao
 * @since 2021-09-18
 */
public class HDFSTest {
    // 1获取文件的配置信息
    Configuration configuration = new Configuration();
    // 配置在集群上运行
    FileSystem fs;
    @Before // 配置在集群上运行
    public void testbegin() {
        try {
            fs = FileSystem.get(new URI("hdfs://192.168.1.193:9000"), configuration, "work");
        } catch (Exception e) {
            e.getMessage();
        }
    }

    @After // 关闭FileSystem
    public void testlast() {
        try {
            fs.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Test // 在hdfs上创建文件目录
    public void testMakdir() throws Exception {// 创建目录
        fs.mkdirs(new Path("/dinghao/dingding"));
    }

    @Test // 拷贝文件到本地
    public void testCopyFromLocalFile() throws Exception {
        fs.copyToLocalFile(new Path("/"), new Path("D:/source"));
    }

    @Test // 将本地文件或目录复制到hdfs上的指定目录
    public void testCopyFromLocal() throws Exception {
        fs.copyFromLocalFile(new Path("D:/source/bigdata"), new Path("/dinghao"));
        fs.copyFromLocalFile(new Path("d:/source/bigdata/test"), new Path("/dinghao"));
    }

    @Test // hdfs文件夹或文件删除
    public void testDelete() throws Exception {
        // 针对目录=》true:递归删除；false：非递归删除
        // 针对文件=》true和false都可以删除
        fs.delete(new Path("/dinghao/bigdata"), true);
        fs.delete(new Path("/dinghao/test"), true);
    }

    @Test // 文件或文件夹重命名
    public void testrename() throws Exception {
        fs.rename(new Path("/dinghao/test"), new Path("/dinghao/bangzhang1"));
        fs.rename(new Path("/dinghao/bigdata"), new Path("/dinghao/big"));
    }

    @Test // hdfs文件详情查看
    public void testListFiles() throws Exception {
// 1、递归获取所有的文件 true
        RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
        int i = 0;
        while (listFiles.hasNext()) {
            System.out.println("=================" + i++ + "================");
            LocatedFileStatus fileStatus = listFiles.next();
            // 获取文件名称
            System.out.println("文件名称：" + fileStatus.getPath().getName());
            // 获取长度
            System.out.println("文件大小：" + fileStatus.getLen());
            // 获取权限
            System.out.println("获取权限" + fileStatus.getPermission());
            // 获取分组
            System.out.println("获取分组" + fileStatus.getGroup());
            // 获取块信息
            BlockLocation[] blockLocations = fileStatus.getBlockLocations();
            for (BlockLocation blockLocation : blockLocations) {
                // 获取块存储的主机节点
                System.out.println("存储节点：" + Arrays.toString(blockLocation.getHosts()));
            }
        }
    }

    //递归获取至今文件夹下文件目录的树结构
    public void getFileMessage(Path path,int i) throws Exception {
        FileStatus[] fileStatus = fs.listStatus(path);
        if (fileStatus.length != 0) {
            i++;
            for (FileStatus fileStatus2 : fileStatus) {
                for(int t=0;t<i-1;t++) {
                    System.out.print("\t");
                }
                if (fileStatus2.isFile()) {
                    System.out.println("|-文件：" + fileStatus2.getPath().getName());
                } else {
                    System.out.println("|-目录：" + fileStatus2.getPath().getName());
                    getFileMessage(fileStatus2.getPath(),i);
                }
            }
        }
    }

    @Test//测试递归获取文件树结构
    public void testgetmessage()throws Exception{
        getFileMessage(new Path("/user"),0);
    }

    @Test//hdfs文件上传
    public void putFileToHdfs()throws Exception{
        //1、创建输入流
        FileInputStream input = new FileInputStream(new File("D:/source/hadoop.md"));
        //2、获取输出流
        FSDataOutputStream output = fs.create(new Path("/hadoop.md"));
        //3、流对拷
        IOUtils.copyBytes(input, output, configuration);
        //4、关闭资源
        IOUtils.closeStream(input);
        IOUtils.closeStream(output);
    }

    @Test//下载文件
    public void getFileHdfs()throws Exception{
        //1、创建输入流
        FSDataInputStream inputStream = fs.open(new Path("/hadoop.md"));
        //2、获取输出流
        FileOutputStream outputStream = new FileOutputStream(new File("D:/source/test.md"));
        //3、流对拷
        IOUtils.copyBytes(inputStream, outputStream, configuration);
        //4、关闭资源
        IOUtils.closeStream(inputStream);
        IOUtils.closeStream(outputStream);
    }

    @Test//定位文件读取-获取第1块文件
    public void getfile1()throws Exception{
        //1、获取输入流
        FSDataInputStream inputStream = fs.open(new Path("/hadoop.md"));
        //2、获取输出流
        FileOutputStream outputStream = new FileOutputStream(new File("d:/source/test1.md"));
        //3、流拷贝
        byte[] bs = new byte[1024];
        for(int i=0;i<5;i++) {
            inputStream.read(bs);
            outputStream.write(bs);
        }
        //4、关闭资源
        IOUtils.closeStream(inputStream);
        IOUtils.closeStream(outputStream);
    }
    @Test//定位文件读取-获取第2块文件
    public void getfile2()throws Exception{
        //1、获取输入流
        FSDataInputStream inputStream = fs.open(new Path("/hadoop.md"));
        //2、获取输出流
        FileOutputStream outputStream = new FileOutputStream(new File("d:/source/test2.md"));
        inputStream.seek(1024*5);
        //3、流拷贝
        IOUtils.copyBytes(inputStream, outputStream, configuration);
        //4、关闭资源
        IOUtils.closeStream(inputStream);
        IOUtils.closeStream(outputStream);
    }
}
