package top.igotcha.hdfs;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.jupiter.api.Test;

import java.io.File;
import java.io.FileOutputStream;
import java.net.URI;

/**
 * Integration tests demonstrating HDFS file download and upload.
 *
 * <p>Connects to the NameNode at {@code hdfs://192.168.127.110:8020}; these
 * tests require that cluster to be reachable and the referenced HDFS/local
 * paths to exist.
 *
 * @time 2021/1/10
 * @auth Gotcha
 * @describe HDFS file transfer examples (manual stream copy and FileSystem helpers)
 */
public class HdfsFileUpAndDown {

    /**
     * Download method 1: manual stream copy from HDFS to the local filesystem.
     *
     * @throws Exception if the cluster is unreachable or either path is invalid
     */
    @Test
    public void getFileToLocal01() throws Exception {
        // try-with-resources closes the FileSystem and both streams even if the
        // copy throws; the old manual closeQuietly calls leaked on failure.
        try (FileSystem fileSystem = FileSystem.get(new URI("hdfs://192.168.127.110:8020"), new Configuration());
             // HDFS input stream for the source file
             FSDataInputStream open = fileSystem.open(new Path("/test01/hello.sh"));
             // local output stream for the destination file
             FileOutputStream fileOutputStream = new FileOutputStream(new File("..\\hello01.sh"))) {
            // copy all bytes from HDFS to the local file
            IOUtils.copy(open, fileOutputStream);
        }
    }

    /**
     * Download method 2: delegate the whole transfer to
     * {@link FileSystem#copyToLocalFile}.
     *
     * @throws Exception if the cluster is unreachable or either path is invalid
     */
    @Test
    public void getFileToLocal02() throws Exception {
        // FileSystem is Closeable; try-with-resources guarantees it is released
        try (FileSystem fileSystem = FileSystem.get(new URI("hdfs://192.168.127.110:8020"), new Configuration())) {
            fileSystem.copyToLocalFile(new Path("/test01/hello.sh"), new Path("..\\hello02.sh"));
        }
    }

    /**
     * Upload method: copy a local file into HDFS via
     * {@link FileSystem#copyFromLocalFile}.
     *
     * @throws Exception if the cluster is unreachable or either path is invalid
     */
    @Test
    public void putFileTOHDFS() throws Exception {
        try (FileSystem fileSystem = FileSystem.get(new URI("hdfs://192.168.127.110:8020"), new Configuration())) {
            // note: this uploads (original comment mistakenly said "download")
            fileSystem.copyFromLocalFile(new Path("..\\hello02.sh"), new Path("/test03/hello02.sh"));
        }
    }

}
