package com.atguigu.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class HDFSClient {

    /** HDFS NameNode RPC endpoint used by all tests. */
    private static final String HDFS_URI = "hdfs://175.178.160.34:8020";

    /** User identity to act as when talking to HDFS. */
    private static final String HDFS_USER = "hadoop";

    private FileSystem fs;

    /**
     * Initializes the {@link FileSystem} client before each test.
     *
     * @throws URISyntaxException   if {@link #HDFS_URI} is malformed
     * @throws IOException          if the connection cannot be established
     * @throws InterruptedException if acquiring the client is interrupted
     */
    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        URI uri = new URI(HDFS_URI);
        Configuration configuration = new Configuration();
        fs = FileSystem.get(uri, configuration, HDFS_USER);
    }

    /**
     * Releases the client after each test.
     *
     * @throws IOException if closing the connection fails
     */
    @After
    public void close() throws IOException {
        fs.close();
    }

    /**
     * Creates the directory {@code /input1} on HDFS.
     *
     * <p>Note: {@code mkdirs} only throws {@link IOException}; the previously
     * declared {@code URISyntaxException}/{@code InterruptedException} were
     * unreachable and have been removed.
     *
     * @throws IOException if the mkdir RPC fails
     */
    @Test
    public void testmkdir() throws IOException {
        fs.mkdirs(new Path("/input1"));
    }

    /**
     * Uploads a local file to {@code /input} on HDFS.
     *
     * <p>{@code delSrc=false} keeps the local copy;
     * {@code overwrite=false} fails if the destination already exists.
     *
     * @throws IOException if the upload fails
     */
    @Test
    public void testfile() throws IOException {
        fs.copyFromLocalFile(false, false,
                new Path("E:\\Hadoop\\WordCount.txt"), new Path("/input"));
    }

    /**
     * Recursively deletes the {@code /output} directory on HDFS.
     *
     * <p>Fixes: the original path {@code "/ouput"} was a typo, so the delete
     * silently targeted a non-existent directory — confirm {@code /output} is
     * the intended target. Also declares {@code throws IOException} instead of
     * wrapping in {@code RuntimeException}, matching the sibling tests.
     *
     * @throws IOException if the delete RPC fails
     */
    @Test
    public void testdel() throws IOException {
        fs.delete(new Path("/output"), true);
    }

}
