package com.honey.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class HdfsClient {

    /**
     * Builds a {@link FileSystem} handle for the given HDFS cluster, acting as the given user.
     *
     * <p>{@code new Configuration()} first loads {@code hdfs-default.xml} from the Hadoop jar,
     * then {@code hdfs-site.xml} from the classpath. Configuration precedence is:
     * values set in client code &gt; user config files on the classpath &gt; server defaults.
     *
     * <p>NOTE: the returned handle is open; callers are responsible for closing it
     * (all tests below do so via try-with-resources).
     *
     * @param hdfs_url HDFS URI, e.g. {@code hdfs://localhost:9000}
     * @param user     user name to impersonate for the connection
     * @return an open {@link FileSystem} bound to the cluster
     * @throws URISyntaxException   if {@code hdfs_url} is not a valid URI
     * @throws IOException          on connection failure
     * @throws InterruptedException if the connecting thread is interrupted
     */
    public FileSystem getHDFS(String hdfs_url, String user) throws URISyntaxException, IOException, InterruptedException {
        Configuration configuration = new Configuration();
        // Example client-side override (highest precedence):
        // configuration.set("dfs.replication", "2");
        return FileSystem.get(new URI(hdfs_url), configuration, user);
    }

    /** Uploads a local file to the HDFS root as {@code /phone.txt}. */
    @Test
    public void upfile() throws URISyntaxException, IOException, InterruptedException {
        // try-with-resources guarantees the FileSystem is closed even if the copy fails.
        try (FileSystem fileSystem = getHDFS("hdfs://localhost:9000", "root")) {
            fileSystem.copyFromLocalFile(
                    new Path("/Users/honey-shawn/workspace/hadoopWorkspace/MapReduce/src/com/honey/reduce_self/flowcount/phone.txt"),
                    new Path("/phone.txt"));
        }
        System.out.println("success!");
    }

    /** Uploads a local file to an explicit, fully-qualified HDFS destination path. */
    @Test
    public void putFileToHDFS() throws Exception {
        try (FileSystem fileSystem = getHDFS("hdfs://localhost:9000", "root")) {
            // Local source path.
            Path src = new Path("/Users/honey-shawn/workspace/Git_person/hadoop/src/main/resources/phone.txt");
            // Fully-qualified HDFS destination path.
            Path dst = new Path("hdfs://localhost:9000/user/root/input/phone.txt");
            fileSystem.copyFromLocalFile(src, dst);
        }
        System.out.println("success!");
    }

    /** Downloads a file from HDFS to the local filesystem. */
    @Test
    public void getFileFromHDFS() throws Exception {
        try (FileSystem fileSystem = getHDFS("hdfs://hadoop101:9000", "honey")) {
            // copyToLocalFile(delSrc, src, dst, useRawLocalFileSystem):
            //   delSrc                - whether to delete the source file from HDFS after copying
            //   src                   - HDFS path to download
            //   dst                   - local destination path
            //   useRawLocalFileSystem - true writes via RawLocalFileSystem, which SKIPS
            //                           creating the local .crc checksum file
            fileSystem.copyToLocalFile(false,
                    new Path("hdfs://hadoop101:9000/user/honey/docker.txt"),
                    new Path("e:/dockercopy.txt"),
                    true);
        }
        System.out.println("success!");
    }

    /** Creates a directory tree on HDFS (parents are created as needed, like mkdir -p). */
    @Test
    public void mkdirAtHDFS() throws Exception {
        try (FileSystem fileSystem = getHDFS("hdfs://localhost:9000", "root")) {
            fileSystem.mkdirs(new Path("hdfs://localhost:9000/user/root/input"));
        }
        System.out.println("success!");
    }

    /** Deletes a directory on HDFS. The second argument enables recursive deletion. */
    @Test
    public void deleteAtHDFS() throws Exception {
        try (FileSystem fileSystem = getHDFS("hdfs://localhost:9000", "honey")) {
            // recursive = true: required when the target is a non-empty directory.
            fileSystem.delete(new Path("hdfs://localhost:9000/user/root/output"), true);
        }
        System.out.println("success!");
    }

    /** Renames (moves) a file or directory on HDFS. */
    @Test
    public void renameAtHDFS() throws Exception {
        try (FileSystem fileSystem = getHDFS("hdfs://hadoop101:9000", "honey")) {
            fileSystem.rename(
                    new Path("hdfs://hadoop101:9000/user/honey/docker.txt"),
                    new Path("hdfs://hadoop101:9000/user/honey/docker-re.txt"));
        }
        System.out.println("success!");
    }

    /**
     * Recursively lists every file under the HDFS root and prints its name, block size,
     * permissions, length, and the host(s) holding each block replica.
     *
     * <p>listFiles returns a RemoteIterator rather than a List so that very large
     * directory trees can be streamed from the NameNode in batches instead of being
     * materialized in client memory all at once.
     */
    @Test
    public void readListFiles() throws Exception {
        try (FileSystem fileSystem = getHDFS("hdfs://localhost:9000", "root")) {
            RemoteIterator<LocatedFileStatus> listFiles = fileSystem.listFiles(new Path("/"), true);

            while (listFiles.hasNext()) {
                LocatedFileStatus fileStatus = listFiles.next();

                System.out.println(fileStatus.getPath().getName());
                System.out.println(fileStatus.getBlockSize());
                System.out.println(fileStatus.getPermission());
                System.out.println(fileStatus.getLen());

                // Each block may be replicated on several hosts; print every replica location.
                for (BlockLocation bl : fileStatus.getBlockLocations()) {
                    System.out.println("block-offset:" + bl.getOffset());
                    for (String host : bl.getHosts()) {
                        System.out.println(host);
                    }
                }

                System.out.println("--------------分割线--------------");
            }
        }
    }

    /** Lists the HDFS root and labels each entry as a file ("f--") or directory ("d--"). */
    @Test
    public void findAtHDFS() throws Exception, IllegalArgumentException, IOException {
        try (FileSystem fileSystem = getHDFS("hdfs://hadoop101:9000", "honey")) {
            FileStatus[] listStatus = fileSystem.listStatus(new Path("/"));

            for (FileStatus status : listStatus) {
                if (status.isFile()) {
                    System.out.println("f--" + status.getPath().getName());
                } else {
                    System.out.println("d--" + status.getPath().getName());
                }
            }
        }
    }

}
