package cn.xiao.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * JUnit 4 smoke tests for basic HDFS operations (upload, download, delete, mkdir)
 * against a remote Hadoop cluster.
 *
 * <p>NOTE(review): these tests require a live NameNode at {@link #HDFS_URI} and the
 * local Windows paths below to exist — they are integration tests, not unit tests.
 */
public class HadoopMain {

    /** Handle to the remote HDFS; opened in {@link #init()}, closed in {@link #tearDown()}. */
    private FileSystem fs;

    /** NameNode URI of the target HDFS cluster. */
    private static final String HDFS_URI = "hdfs://192.168.95.128:9000";

    /** Local (Windows) file pushed to HDFS by {@link #testUpload()}. */
    private static final String UPLOAD_FILE_PATH = "G://Java_Envir//uploadFile.txt";

    /** HDFS path that {@link #testDownload()} copies to the local disk. */
    private static final String HDFS_FILE_PATH = "/hello.log";

    /** Local (Windows) directory that downloads are written into. */
    private static final String WIN_OUT_PATH = "G://Java_Envir";

    /**
     * Connects to HDFS as the "root" user before each test.
     *
     * @throws URISyntaxException   if {@link #HDFS_URI} is malformed
     * @throws IOException          if the connection cannot be established
     * @throws InterruptedException if the login is interrupted
     */
    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        fs = FileSystem.get(new URI(HDFS_URI), new Configuration(), "root");
    }

    /**
     * Closes the FileSystem after each test so connections are not leaked
     * (the original code never closed {@code fs}).
     *
     * @throws IOException if the close fails
     */
    @After
    public void tearDown() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }

    /**
     * Uploads a local file to HDFS as {@code /testUploadFile.txt}.
     *
     * @throws IOException if either stream fails
     */
    @Test
    public void testUpload() throws IOException {
        // try-with-resources guarantees the local input stream is closed even if
        // fs.create(...) throws — the original code leaked `in` in that case.
        try (InputStream in = new FileInputStream(UPLOAD_FILE_PATH);
             OutputStream out = fs.create(new Path("/testUploadFile.txt"))) {
            // 4096 = copy buffer size; 'false' because try-with-resources
            // already closes both streams.
            IOUtils.copyBytes(in, out, 4096, false);
        }
    }

    /**
     * Downloads {@link #HDFS_FILE_PATH} from HDFS to the local output directory.
     *
     * @throws IOException if the copy fails
     */
    @Test
    public void testDownload() throws IOException {
        fs.copyToLocalFile(new Path(HDFS_FILE_PATH), new Path(WIN_OUT_PATH + "//helloTest.log"));
    }

    /**
     * Deletes the file created by {@link #testUpload()}.
     *
     * @throws IOException if the delete fails
     */
    @Test
    public void deleteFile() throws IOException {
        // second arg 'false' = non-recursive: only valid for a single file/empty dir
        fs.delete(new Path("/testUploadFile.txt"), false);
    }

    /**
     * Creates the directory {@code /testDir} on HDFS (no-op if it already exists).
     *
     * @throws IOException if the mkdir fails
     */
    @Test
    public void mkdir() throws IOException {
        fs.mkdirs(new Path("/testDir"));
    }

    /**
     * Unused entry point kept for manual experimentation; the commented-out
     * scratch code the original carried here has been removed.
     */
    public static void main(String[] args) throws URISyntaxException, IOException {
        // intentionally empty — run the JUnit tests instead
    }

}
