package com.atguigu.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class HdfsClient {

    /** NameNode RPC endpoint of the test cluster. */
    private static final String HDFS_URI = "hdfs://hadoop102:8020";

    /** User the client authenticates as for all HDFS operations. */
    private static final String HDFS_USER = "atguigu";

    /**
     * Opens a {@link FileSystem} handle against the test cluster as {@link #HDFS_USER}.
     * Callers own the handle and must close it (use try-with-resources).
     *
     * @param configuration client-side Hadoop configuration overrides
     * @return an open FileSystem bound to {@link #HDFS_URI}
     * @throws IOException            if the connection cannot be established
     * @throws URISyntaxException     never in practice ({@link #HDFS_URI} is a constant)
     * @throws InterruptedException   if the login as {@link #HDFS_USER} is interrupted
     */
    private FileSystem connect(Configuration configuration)
            throws IOException, URISyntaxException, InterruptedException {
        return FileSystem.get(new URI(HDFS_URI), configuration, HDFS_USER);
    }

    /** Creates the directory /weiranyi/hdfs on the cluster (no-op if it already exists). */
    @Test
    public void testMkdirs() throws IOException, URISyntaxException, InterruptedException {
        // try-with-resources releases the FileSystem even if mkdirs throws
        // (the original bare close() leaked the handle on failure)
        try (FileSystem fileSystem = connect(new Configuration())) {
            fileSystem.mkdirs(new Path("/weiranyi/hdfs"));
        }
    }

    /** Uploads a local file to HDFS with a replication factor of 2. */
    @Test
    public void testUpload() throws IOException, URISyntaxException, InterruptedException {
        Configuration configuration = new Configuration();
        // Client-side override: replicate this upload 2x instead of the cluster default
        configuration.set("dfs.replication", "2");
        try (FileSystem fileSystem = connect(configuration)) {
            // delSrc=false keeps the local source; overwrite=true replaces an existing target
            fileSystem.copyFromLocalFile(false, true,
                    new Path("D:\\BaiduNetdiskDownload\\02.资料\\07_测试数据\\wcinput\\hello.txt"),
                    new Path("/weiranyi/hello.txt"));
        }
    }

    /** Downloads a file from HDFS to the local disk. */
    @Test
    public void testDownload() throws IOException, URISyntaxException, InterruptedException {
        try (FileSystem fileSystem = connect(new Configuration())) {
            // delSrc=false keeps the HDFS source; useRawLocalFileSystem=true skips
            // writing the local .crc checksum side file
            fileSystem.copyToLocalFile(false,
                    new Path("/weiranyi/hdfs/hello.txt"),
                    new Path("D:\\BaiduNetdiskDownload\\02.资料\\07_测试数据\\wcinput\\hello.txt"),
                    true);
        }
    }
}
