package org.example.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * @author Heshan On 2021/5/23
 */
/**
 * HDFS client API demo: connects to a local NameNode and exercises basic
 * file-system operations — directory creation, file upload, and seek/re-read
 * of an input stream.
 *
 * @author Heshan On 2021/5/23
 */
public class HdfsClientDemo {

    private FileSystem fs = null;

    /**
     * Opens a {@link FileSystem} handle against the local NameNode before each test.
     *
     * @throws IOException if the file system cannot be initialized
     */
    @Before
    public void init() throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://localhost:9000");
        fs = FileSystem.get(conf);
    }

    /**
     * Closes the {@link FileSystem} handle after each test.
     * The null guard prevents a NullPointerException from hiding the real
     * failure when {@link #init()} throws before assigning {@code fs}.
     *
     * @throws IOException if closing the file system fails
     */
    @After
    public void release() throws IOException {
        if (fs != null) {
            fs.close();
        }
    }

    /** Creates a directory on HDFS. */
    @Test
    public void testMkdirs() throws IOException {
        fs.mkdirs(new Path("/api_test2"));
    }

    /** Uploads a local config file into the HDFS directory created above. */
    @Test
    public void testPut() throws IOException {
        fs.copyFromLocalFile(new Path("src/main/resources/hdfs-site.xml"), new Path("/api_test2/"));
    }

    /**
     * Opens /test.txt, streams its contents to stdout, then seeks back to
     * offset 0 and streams it again to demonstrate random access on
     * {@link FSDataInputStream}.
     *
     * @throws IOException if the file cannot be opened or read — propagated so
     *     the test fails visibly instead of swallowing the error
     */
    @Test
    public void readSeek() throws IOException {
        // try-with-resources guarantees the stream is closed even on failure
        try (FSDataInputStream in = fs.open(new Path("/test.txt"))) {
            // 'false' tells copyBytes not to close the stream, so we can still seek afterwards
            IOUtils.copyBytes(in, System.out, 4096, false);
            // rewind to the beginning and read the whole file a second time
            in.seek(0);
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}
