package org.example;
/*
    Basic programming exercises using the Hadoop Java client API.
 */

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class demo_1 {

    /** HDFS NameNode address; reachable once Hadoop is running and the firewall allows it. */
    private static final String HDFS_URI = "hdfs://192.168.10.132:8020";
    /** User name under which all explicit-login HDFS operations run. */
    private static final String HDFS_USER = "root";

    /*
        Test: upload a single local file to HDFS.
     */
    @Test
    public void upload() throws URISyntaxException {
        /*
         * Creating a Configuration loads Hadoop's hdfs-site.xml and core-site.xml by
         * default. Those files carry the parameters needed to reach HDFS — most
         * importantly the file-system address (fs.defaultFS) that clients connect to.
         * In short, Configuration holds the Hadoop configuration information.
         */
        Configuration conf = new Configuration();
        // HDFS server address.
        URI uri = new URI(HDFS_URI);
        // Absolute path of the local source file.
        Path localPath = new Path("file:///E:/JavaHadoop/upload/demo1.txt");
        // Absolute destination path inside HDFS.
        Path hdfsPath = new Path("/text/");
        /*
         * Hadoop defines one abstract file-system concept, of which HDFS is a single
         * implementation. The abstraction is org.apache.hadoop.fs.FileSystem, which
         * extends org.apache.hadoop.conf.Configured and implements java.io.Closeable,
         * and offers rich operations (create directories, delete files, rename, ...).
         *
         * try-with-resources guarantees the FileSystem handle is closed even when the
         * copy fails (the original code never closed it).
         */
        try (FileSystem fs = FileSystem.get(uri, conf, HDFS_USER)) {
            fs.copyFromLocalFile(localPath, hdfsPath);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /*
        Test: download a file from HDFS to the local disk.
     */
    @Test
    public void download() {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HDFS_URI);
        // IDEA runs on Windows, so this is the local destination directory.
        Path localPath = new Path("file:///E:/JavaHadoop/download");
        // Path of the source file inside HDFS.
        Path hdfsPath = new Path("/learnDemo/a");
        // try-with-resources closes the FileSystem handle (original leaked it).
        try (FileSystem fs = FileSystem.newInstance(conf)) {
            fs.copyToLocalFile(hdfsPath, localPath);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /*
    Create a directory in HDFS.
     */
    @Test
    public void mkdir() throws URISyntaxException {
        Configuration conf = new Configuration();
        URI uri = new URI(HDFS_URI);
        Path path = new Path("/mkdir_text");
        try (FileSystem fs = FileSystem.get(uri, conf, HDFS_USER)) {
            fs.mkdirs(path);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /*
     Inspect file status under the HDFS root.
     (Method keeps the original "lisFiles" spelling so existing references still work;
     "listFiles" was presumably intended.)
     */
    @Test
    public void lisFiles() {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", HDFS_URI);
        try (FileSystem fs = FileSystem.newInstance(conf)) {
            // listFiles walks directories recursively and yields files only.
            RemoteIterator<LocatedFileStatus> iterator = fs.listFiles(new Path("/"), true);
            while (iterator.hasNext()) {
                System.out.println(iterator.next().getPath());
            }
            System.out.println("=======");
            // listStatus returns only the direct children of the given directory.
            for (FileStatus fileStatus : fs.listStatus(new Path("/"))) {
                System.out.println(fileStatus.getPath());
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /*
    Delete a path from HDFS.
     */
    @Test
    public void delete() throws URISyntaxException {
        Configuration conf = new Configuration();
        URI uri = new URI(HDFS_URI);
        Path hdfsPath = new Path("/mkdir_text");
        try (FileSystem fs = FileSystem.get(uri, conf, HDFS_USER)) {
            // true => recursive delete, so non-empty directories are removed too.
            fs.delete(hdfsPath, true);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }


    // Upload and download implemented with raw stream copies.
     /*
     Upload via stream copy.
      */
    @Test
    public void uploadByIOStream() throws URISyntaxException {
        Configuration conf = new Configuration();
        URI uri = new URI(HDFS_URI);
        // try-with-resources closes both streams and the FileSystem even when the
        // copy throws (the original only closed them on the success path).
        try (FileSystem fs = FileSystem.get(uri, conf, HDFS_USER);
             FileInputStream is = new FileInputStream("E:\\JavaHadoop\\upload\\demo_3.txt");
             // create() opens the target file for writing (creating parents as needed).
             FSDataOutputStream os = fs.create(new Path("/input/demo2.txt"))) {
            // Stream copy with a 1 KiB buffer.
            IOUtils.copyBytes(is, os, 1024);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /*
    Download via stream copy.
     */
    @Test
    public void downloadByIOStream() throws URISyntaxException, IOException, InterruptedException {
        Configuration conf = new Configuration();
        URI uri = new URI(HDFS_URI);
        // Resources close in reverse declaration order: os, is, then fs —
        // guaranteed even when copyBytes throws.
        try (FileSystem fs = FileSystem.get(uri, conf, HDFS_USER);
             FSDataInputStream is = fs.open(new Path("/input/demo1.txt")); // HDFS input stream
             FileOutputStream os = new FileOutputStream("E:\\JavaHadoop\\download\\1.txt")) {
            IOUtils.copyBytes(is, os, 1024);
        }
    }

    /*
    Batch download: copy every entry under /learnDemo to the local disk
    using stream copies.
     */
    @Test
    public void downloadFire() throws IOException,
            InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        URI uri = new URI(HDFS_URI);
        try (FileSystem fs = FileSystem.get(uri, conf, HDFS_USER)) {
            // Direct children of the HDFS source directory.
            FileStatus[] fileStatuses = fs.listStatus(new Path("/learnDemo"));
            for (int i = 0; i < fileStatuses.length; i++) {
                Path srcPath = fileStatuses[i].getPath();
                System.out.println(srcPath);
                // A nested try-with-resources per file so each stream pair closes
                // even if a later copy fails (the original leaked them on failure).
                try (FSDataInputStream is = fs.open(srcPath);
                     FileOutputStream os =
                             new FileOutputStream("E:\\JavaHadoop\\download\\" + i + ".txt")) {
                    IOUtils.copyBytes(is, os, 2048);
                }
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /*
    Batch upload: push every file from a local directory into HDFS.
     */
    @Test
    public void uploadFire() throws URISyntaxException {
        Configuration conf = new Configuration();
        URI uri = new URI(HDFS_URI);
        // Destination directory inside HDFS.
        Path hdfsPath = new Path("/input");
        // Local source directory.
        File folder = new File("E://JavaHadoop//upload//");
        File[] listOfFiles = folder.listFiles();
        // listFiles() returns null when the directory is missing or unreadable.
        if (listOfFiles == null) {
            System.err.println("Local directory not readable: " + folder);
            return;
        }
        // One FileSystem handle for the whole batch (the original opened a new
        // connection on every loop iteration and never closed any of them).
        try (FileSystem fs = FileSystem.get(uri, conf, HDFS_USER)) {
            for (File file : listOfFiles) {
                // To restrict the batch to text files, test:
                //   file.isFile() && file.getName().endsWith(".txt")
                fs.copyFromLocalFile(new Path(file.getPath()), hdfsPath);
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}











