package cn.itcast.hdfs;

import com.google.inject.internal.cglib.core.$ClassInfo;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

//hdfs的测试类
/**
 * Exercises the HDFS Java client API against a live cluster at {@code node1:8020}:
 * connecting (three styles), recursive listing, directory creation, download,
 * upload, and merging local small files into one HDFS file.
 *
 * <p>NOTE(review): every test requires network access to the NameNode; none of
 * these are runnable in isolation.
 */
public class HDFSTest {

    /** NameNode URI shared by every test (was repeated inline nine times). */
    private static final String HDFS_URI = "hdfs://node1:8020";

    /**
     * Connection style 1: set {@code fs.defaultFS} on a Configuration and call
     * {@link FileSystem#get(Configuration)}.
     *
     * @throws IOException if the client cannot be created or closed
     */
    @Test
    public void test01() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HDFS_URI);
        // try-with-resources: the FileSystem was previously leaked (never closed).
        try (FileSystem fileSystem = FileSystem.get(conf)) {
            System.out.println(fileSystem);
        }
    }

    /**
     * Connection style 2: pass the URI explicitly and use
     * {@link FileSystem#newInstance(URI, Configuration)}, which always returns
     * an uncached instance.
     *
     * @throws IOException        if the client cannot be created or closed
     * @throws URISyntaxException never — the URI literal is valid
     */
    @Test
    public void test02() throws IOException, URISyntaxException {
        Configuration conf = new Configuration();
        try (FileSystem fileSystem = FileSystem.newInstance(new URI(HDFS_URI), conf)) {
            System.out.println(fileSystem);
        }
    }

    /**
     * Connection style 3: {@link FileSystem#get(URI, Configuration, String)},
     * which additionally impersonates the given user ("root").
     *
     * @throws URISyntaxException   never — the URI literal is valid
     * @throws IOException          if the client cannot be created or closed
     * @throws InterruptedException if interrupted while acquiring the client
     */
    @Test
    public void demo03() throws URISyntaxException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), conf, "root")) {
            System.out.println(fileSystem);
        }
    }

    /**
     * Recursively lists every file (including files inside subdirectories)
     * under the HDFS root and prints each name and full path.
     *
     * @throws URISyntaxException never — the URI literal is valid
     * @throws IOException        on any HDFS communication failure
     */
    @Test
    public void demo04() throws URISyntaxException, IOException {
        // 1. Obtain the HDFS client.
        Configuration conf = new Configuration();
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), conf)) {
            System.out.println(fileSystem);

            // 2. Recursively list all files under "/" (the boolean enables recursion).
            RemoteIterator<LocatedFileStatus> locatedFile = fileSystem.listFiles(new Path("/"), true);

            // 3. Drain the iterator, printing each entry.
            while (locatedFile.hasNext()) {
                LocatedFileStatus fileStatus = locatedFile.next();
                Path path = fileStatus.getPath();
                String fileName = path.getName();
                System.out.println("文件名称为:" + fileName + ";文件路径为:" + path);
            }
        }
        // 4. Resources released by try-with-resources.
    }

    /**
     * Creates the directory {@code /export/input} on HDFS (parents included,
     * as per {@link FileSystem#mkdirs(Path)} semantics).
     *
     * @throws URISyntaxException   never — the URI literal is valid
     * @throws IOException          on any HDFS communication failure
     * @throws InterruptedException declared for parity with the other tests
     */
    @Test
    public void demo05() throws URISyntaxException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), conf)) {
            fileSystem.mkdirs(new Path("/export/input"));
        }
    }

    /**
     * Downloads {@code /ppp/1.txt} from HDFS to a local file, connecting as
     * user "root".
     *
     * @throws URISyntaxException   never — the URI literal is valid
     * @throws IOException          on any HDFS or local I/O failure
     * @throws InterruptedException if interrupted while acquiring the client
     */
    @Test
    public void demo06() throws URISyntaxException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), conf, "root")) {
            fileSystem.copyToLocalFile(new Path("/ppp/1.txt"), new Path("D:\\WorkSpace\\hadoop下载!!!\\2.txt"));
        }
    }

    /**
     * Uploads a local file into the HDFS directory {@code /ppp}.
     *
     * @throws URISyntaxException never — the URI literal is valid
     * @throws IOException        on any HDFS or local I/O failure
     */
    @Test
    public void demo07() throws URISyntaxException, IOException {
        Configuration conf = new Configuration();
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), conf)) {
            fileSystem.copyFromLocalFile(new Path("D:\\WorkSpace\\hadoop下载!!!\\aaa.txt.txt"), new Path("/ppp"));
        }
    }

    /**
     * Merges every small file in a local directory into a single HDFS file
     * ({@code /aaa/join2.xml}) by chaining a local input stream into one HDFS
     * output stream.
     *
     * @throws URISyntaxException never — the URI literal is valid
     * @throws IOException        on any HDFS or local I/O failure
     */
    @Test
    public void demo08() throws URISyntaxException, IOException {
        Configuration conf = new Configuration();
        // 1. HDFS client plus a local-filesystem client; both are Closeable.
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), conf);
             LocalFileSystem localFileSystem = FileSystem.getLocal(new Configuration());
             // 2. Create the (initially empty) destination file on HDFS.
             FSDataOutputStream outputStream = fileSystem.create(new Path("/aaa/join2.xml"))) {

            // 3. Enumerate the small files in the local source directory (non-recursive).
            RemoteIterator<LocatedFileStatus> listFiles =
                    localFileSystem.listFiles(new Path("D:\\WorkSpace\\hadoop下载!!!"), false);

            while (listFiles.hasNext()) {
                Path path = listFiles.next().getPath();
                System.out.println(path);

                // 4. Pipe each local file into the shared HDFS output stream.
                //    (flush() removed from the inner loop — close() flushes once at the end.)
                try (FSDataInputStream inputStream = localFileSystem.open(path)) {
                    byte[] buffer = new byte[1024];
                    int len;
                    while ((len = inputStream.read(buffer)) != -1) {
                        outputStream.write(buffer, 0, len);
                    }
                }
            }
        }
        // 5. All streams and clients released by try-with-resources.
    }
}
