package com.larry.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * @ProjectName HDFS
 * @Author larry
 * @Date 14:06 2022/3/22
 * @Description Stream-based HDFS upload/download tests, plus block-wise (seek) reads
 *              that split a large archive at the 128 MiB HDFS block boundary.
 **/
public class HDFSIO {

    /** HDFS block size used to split the archive into two parts (128 MiB). */
    private static final long BLOCK_SIZE = 128L * 1024 * 1024;

    private FileSystem fs;
    private Configuration conf;

    /**
     * Opens a FileSystem handle against the NameNode before each test,
     * acting as user "atguigu".
     *
     * @throws URISyntaxException   if the NameNode URI is malformed
     * @throws IOException          if the connection fails
     * @throws InterruptedException if the login is interrupted
     */
    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        conf = new Configuration();
        fs = FileSystem.get(new URI("hdfs://hadoop102:9000"), conf, "atguigu");
    }

    /** Releases the FileSystem handle after each test. */
    @After
    public void close() throws IOException {
        fs.close();
    }

    /**
     * Upload via raw streams: copies a local file into HDFS.
     * try-with-resources guarantees both streams are closed even if the copy throws
     * (the original closeStream calls were skipped on exception, leaking the streams).
     */
    @Test
    public void putFileToHDFS() throws IOException {
        try (FileInputStream fis = new FileInputStream(
                     new File("C:\\Users\\larry\\IdeaProjects\\HDFS\\src\\main\\resources\\haha.txt"));
             FSDataOutputStream fos = fs.create(new Path("/haha4.txt"))) {
            // close=false: try-with-resources owns the streams' lifecycle.
            IOUtils.copyBytes(fis, fos, conf, false);
        }
    }

    /**
     * Download via raw streams: copies an HDFS file to the local filesystem.
     */
    @Test
    public void getFileFromHDFS() throws IOException {
        try (FSDataInputStream fis = fs.open(new Path("/haha4.txt"));
             FileOutputStream fos = new FileOutputStream(
                     new File("C:\\Users\\larry\\IdeaProjects\\HDFS\\src\\main\\resources\\haha5.txt"))) {
            IOUtils.copyBytes(fis, fos, conf, false);
        }
    }

    /**
     * Positioned read, part 1: copies exactly the first HDFS block (128 MiB)
     * of the archive to a local ".p1" file.
     *
     * <p>Bug fixed: the original ignored the length returned by {@code read()}
     * and always wrote the full 1024-byte buffer, so any short read or EOF
     * produced stale/garbage bytes in the output.
     */
    @Test
    public void readFileSeek1() throws IOException {
        try (FSDataInputStream fis = fs.open(new Path("/hadoop-2.7.2.tar.gz"));
             FileOutputStream fos = new FileOutputStream(
                     new File("C:\\Users\\larry\\IdeaProjects\\HDFS\\src\\main\\resources\\hadoop-2.7.2.tar.gz.p1"))) {

            byte[] buffer = new byte[1024];
            long remaining = BLOCK_SIZE;
            while (remaining > 0) {
                // Never request more than is left in the first block.
                int read = fis.read(buffer, 0, (int) Math.min(buffer.length, remaining));
                if (read == -1) {
                    break; // EOF before a full block — file is smaller than 128 MiB
                }
                fos.write(buffer, 0, read); // write only the bytes actually read
                remaining -= read;
            }
        }
    }

    /**
     * Positioned read, part 2: seeks past the first block and copies the
     * remainder of the archive to a local ".p2" file.
     */
    @Test
    public void readFileSeek2() throws Exception {
        try (FSDataInputStream fis = fs.open(new Path("/hadoop-2.7.2.tar.gz"));
             FileOutputStream fos = new FileOutputStream(
                     new File("C:\\Users\\larry\\IdeaProjects\\HDFS\\src\\main\\resources\\hadoop-2.7.2.tar.gz.p2"))) {
            // Skip the first 128 MiB already captured by readFileSeek1.
            fis.seek(BLOCK_SIZE);
            IOUtils.copyBytes(fis, fos, conf, false);
        }
    }

}
