package pack01_hdfs;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.Test;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * @ClassName Test1HDFS
 * @Description JavaLotus
 * @Author spring
 * @Date 2024/11/13 17:29
 * @Version 1.0
 **/
public class Test1HDFS {

    /** Base URI of the HDFS NameNode every test connects to. */
    private static final String HDFS_URI = "hdfs://node1:8020";

    /**
     * Obtains a {@link FileSystem} handle for the cluster and prints it.
     * Uses try-with-resources so the handle is always released
     * (the original version never closed it).
     */
    @Test
    public void test1GetFileSystem() throws Exception {
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), new Configuration())) {
            System.out.println(fileSystem);
        }
    }

    /**
     * Recursively lists every file under the HDFS root and prints each path.
     * The {@code true} flag asks {@code listFiles} to descend into subdirectories.
     */
    @Test
    public void test2ListFiles() throws Exception {
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), new Configuration())) {
            RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(new Path("/"), true);
            while (files.hasNext()) {
                LocatedFileStatus status = files.next();
                System.out.println(status.getPath().toString());
            }
        }
    }

    /**
     * Creates the directory tree {@code /hello/mydir/test} on HDFS.
     * Fails loudly if HDFS reports the directories were not created
     * (the original silently discarded the boolean result).
     */
    @Test
    public void mkdirs() throws Exception {
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), new Configuration())) {
            boolean created = fileSystem.mkdirs(new Path("/hello/mydir/test"));
            if (!created) {
                throw new IllegalStateException("mkdirs returned false for /hello/mydir/test");
            }
        }
    }

    /**
     * Streams {@code /timer.txt} from HDFS to the local file {@code e:\timer.txt}.
     * All three resources (filesystem handle, input stream, output stream) are
     * managed by try-with-resources, so they are closed even if the copy fails;
     * the original leaked them on any exception thrown before the close calls.
     */
    @Test
    public void getFileToLocal() throws Exception {
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), new Configuration());
             FSDataInputStream inputStream = fileSystem.open(new Path("/timer.txt"));
             FileOutputStream outputStream = new FileOutputStream(new File("e:\\timer.txt"))) {
            IOUtils.copy(inputStream, outputStream);
        }
    }

    /**
     * Downloads {@code /anaconda-ks.cfg} from HDFS to the local directory
     * {@code E:\test} using the high-level {@code copyToLocalFile} API.
     */
    @Test
    public void downLoadFile() throws URISyntaxException, IOException, InterruptedException {
        // 1: obtain the FileSystem handle; 3: try-with-resources releases it.
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), new Configuration())) {
            // 2: perform the download.
            fileSystem.copyToLocalFile(new Path("/anaconda-ks.cfg"), new Path("E:\\test"));
        }
    }

    /**
     * Uploads the local file {@code c:\install.log} into the HDFS directory
     * {@code /hello/mydir/test}.
     */
    @Test
    public void putData() throws Exception {
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), new Configuration())) {
            fileSystem.copyFromLocalFile(new Path("file:///c:\\install.log"), new Path("/hello/mydir/test"));
        }
    }

    /**
     * Merges every local file under {@code E:\input} into the single HDFS file
     * {@code /bigfile.txt}, connecting as user {@code root}.
     * Each per-file input stream is opened in its own try-with-resources so it
     * is closed even if the copy fails mid-loop (the original leaked the stream
     * on exception).
     */
    @Test
    public void mergeFile() throws Exception {
        // Outer resources: the remote filesystem, the merged output stream on
        // HDFS, and the local filesystem used to enumerate the input files.
        try (FileSystem fileSystem = FileSystem.get(new URI(HDFS_URI), new Configuration(), "root");
             FSDataOutputStream outputStream = fileSystem.create(new Path("/bigfile.txt"));
             LocalFileSystem local = FileSystem.getLocal(new Configuration())) {
            // List the local input files and append each to the HDFS output.
            FileStatus[] fileStatuses = local.listStatus(new Path("file:///E:\\input"));
            for (FileStatus fileStatus : fileStatuses) {
                try (FSDataInputStream inputStream = local.open(fileStatus.getPath())) {
                    IOUtils.copy(inputStream, outputStream);
                }
            }
        }
    }
}
