package com.example;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;

public class HDFSExample {

    /** HDFS handle, created in {@link #init()} and released in {@link #destroy()}. */
    private FileSystem fs;

    /**
     * Opens a connection to the HDFS cluster before each test.
     *
     * @throws IOException if the FileSystem cannot be obtained
     */
    @Before
    public void init() throws IOException {
        // Build the Hadoop configuration and point it at the cluster NameNode.
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://master");

        // Obtain the HDFS FileSystem handle for that configuration.
        fs = FileSystem.get(conf);
    }

    /**
     * Closes the HDFS connection after each test.
     * (Renamed from the typo "destory"; JUnit invokes it via @After, not by name.)
     *
     * @throws IOException if closing the FileSystem fails
     */
    @After
    public void destroy() throws IOException {
        // Guard: if init() threw before assigning fs, avoid an NPE here.
        if (fs != null) {
            fs.close();
        }
    }

    /**
     * Recursively lists every file under /dataexchange/ and prints its path and size.
     *
     * @throws IOException on HDFS access failure
     */
    @Test
    public void listDir() throws IOException {
        Path dirPath = new Path("/dataexchange/");
        // true = recurse into subdirectories.
        RemoteIterator<LocatedFileStatus> files = fs.listFiles(dirPath, true);
        // Bug fix: this was `if`, which printed at most ONE entry of a
        // recursive listing; iterate the whole iterator instead.
        while (files.hasNext()) {
            LocatedFileStatus status = files.next();
            // Bug fix: printf was used with no format specifiers (a '%' in a
            // file name would throw IllegalFormatException) and no newline.
            System.out.println("file:" + status.getPath().toString() + "\tsize:" + status.getLen());
        }
    }

    /**
     * Creates the directory /dataexchange/javacreate/ (and any missing parents).
     *
     * @throws IOException on HDFS access failure
     */
    @Test
    public void createDir() throws IOException {
        Path dirPath = new Path("/dataexchange/javacreate/");
        // mkdirs is true on success (also true if the directory already exists).
        boolean mkdirs = fs.mkdirs(dirPath);
        System.out.println("文件夹创建结果："+mkdirs);
    }

    /**
     * Uploads a local file into HDFS.
     * NOTE(review): the source path is a hard-coded Windows path; the file
     * must exist on the machine running the test.
     *
     * @throws IOException on local-read or HDFS-write failure
     */
    @Test
    public void uploadFile() throws IOException {
        Path srcPath = new Path("D:\\tmp.log");
        Path destPath = new Path("/dataexchange/javacreate/");
        fs.copyFromLocalFile(srcPath, destPath);
    }

    /**
     * Downloads a file from HDFS to the local filesystem.
     * NOTE(review): the destination is a hard-coded Windows path; on hosts
     * without winutils, copyToLocalFile may need the useRawLocalFileSystem
     * overload — confirm on the target environment.
     *
     * @throws IOException on HDFS-read or local-write failure
     */
    @Test
    public void downloadFile() throws IOException {
        Path srcPath = new Path("/dataexchange/javacreate/tmp.log");
        Path destPath = new Path("D:\\tmp-fromhdfs.log");
        fs.copyToLocalFile(srcPath, destPath);
    }

    /**
     * Deletes /dataexchange/javacreate and everything under it.
     *
     * @throws IOException on HDFS access failure
     */
    @Test
    public void deleteFile() throws IOException {
        Path srcPath = new Path("/dataexchange/javacreate");
        fs.delete(srcPath, true); // true = recursive delete of the directory tree
    }
}