package codemperor.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * JUnit exercises for basic HDFS client operations: mkdir, delete, read,
 * write, rename, and upload-with-progress.
 *
 * <p>Each test talks to a live cluster at {@code hdfs://master:9000}; the
 * client is created in {@link #config()} and released in {@link #hdfsDown()}.
 */
public class HDFSApp {
    private Configuration configuration = null;
    private FileSystem fileSystem = null;

    /**
     * Builds the client configuration and opens the {@link FileSystem} handle.
     *
     * @throws IOException        if the filesystem cannot be reached
     * @throws URISyntaxException if the NameNode URI is malformed
     */
    @Before
    public void config() throws IOException, URISyntaxException {
        System.out.println("hdfs config before");
        configuration = new Configuration();
        // Replication factor can be overridden here (1 for a single-node test cluster).
        configuration.set("dfs.replication", "1");
        // For access from outside the cluster network: talk to DataNodes by hostname.
        configuration.set("dfs.client.use.datanode.hostname", "true");
        // The port is the one configured in /hadoop/etc/hadoop/core-site.xml.
        URI uri = new URI("hdfs://master:9000");
        fileSystem = FileSystem.get(uri, configuration);
    }

    /**
     * Creates the directory {@code /hdfsapi/test} (including parents).
     *
     * @throws IOException on RPC failure
     */
    @Test
    public void mkdir() throws IOException {
        Path path = new Path("/hdfsapi/test");
        boolean result = fileSystem.mkdirs(path);
        System.out.println(result);
    }

    /**
     * Recursively deletes {@code /hdfsapi} (equivalent of {@code -r}).
     *
     * @throws IOException on RPC failure
     */
    @Test
    public void removeFile() throws IOException {
        Path path = new Path("/hdfsapi");
        boolean result = fileSystem.delete(path, true);
        System.out.println(result);
    }

    /**
     * Dumps a file's contents to stdout.
     *
     * @throws IOException if the file cannot be opened or read
     */
    @Test
    public void textFile() throws IOException {
        Path path = new Path("/wordcount/output/part-r-00000");
        // try-with-resources: the original leaked the input stream (and its
        // DataNode connection) because it was never closed.
        try (FSDataInputStream fsDataInputStream = fileSystem.open(path)) {
            IOUtils.copyBytes(fsDataInputStream, System.out, 1024);
        }
    }

    /**
     * Writes a short UTF string to a new HDFS file.
     *
     * @throws Exception if the file cannot be created or written
     */
    @Test
    public void writeFile() throws Exception {
        Path path = new Path("/hdfsapi/a.txt");
        // try-with-resources guarantees the stream (and the HDFS write lease)
        // is released even if writeUTF throws; close() also flushes.
        try (FSDataOutputStream out = fileSystem.create(path)) {
            out.writeUTF("Hello wahaha ");
        }
    }

    /**
     * Renames {@code /hdfsapi/a.txt} to {@code /hdfsapi/rename.txt}.
     *
     * @throws Exception on RPC failure
     */
    @Test
    public void renameFile() throws Exception {
        Path oldPath = new Path("/hdfsapi/a.txt");
        Path newPath = new Path("/hdfsapi/rename.txt");
        boolean result = fileSystem.rename(oldPath, newPath);
        System.out.println(result);
    }

    /**
     * Uploads a local file to HDFS, printing a progress tick for each
     * write-progress callback.
     *
     * @throws Exception if the local file is missing or the upload fails
     */
    @Test
    public void copyLocalFileToHadoop() throws Exception {

        // Simple one-liner alternative without progress reporting:
//        Path oldPath = new Path("/etc/demo/a.txt");
//        Path newPath = new Path("/hdfsapi/rename.txt");
//        fileSystem.copyFromLocalFile(oldPath, newPath);

        // Upload with a progress callback.
        final int[] i = {0};
        // try-with-resources: the original never closed either stream, so the
        // local file handle leaked and the HDFS lease on the target file was
        // never released (IOUtils.copyBytes(in, out, 4096) does not close).
        try (FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/wuxi-app.zip"), new Progressable() {
                    public void progress() {
                        i[0]++;
                        System.out.println("上传中: " + i[0]);
                    }
                });
             InputStream in = new BufferedInputStream(
                     new FileInputStream(new File("/Users/zhang.lu/git/wuxi-app.zip")))) {
            IOUtils.copyBytes(in, out, 4096);
        }
    }

    /**
     * Releases the client: the original only nulled the reference, leaking the
     * open {@link FileSystem} connection. {@code closeStream} is null-safe and
     * swallows close-time IOExceptions, so the test teardown never fails here.
     */
    @After
    public void hdfsDown() {
        IOUtils.closeStream(fileSystem);
        configuration = null;
        fileSystem = null;
    }
}
