package com.hy.study.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.util.Locale;
import java.util.UUID;

/**
 * HDFS stream I/O examples using the Java client. Two ways to resolve HDFS permission
 * exceptions: 1. pass -DHADOOP_USER_NAME=root at run time;
 * 2. FileSystem fs = FileSystem.get(new URI("hdfs://123.57.217.217:9000"), conf, "root");
 *
 * @author huyong
 * @date 2024-01-23 17:30:49
 */
public class HdfsIO {
    private final static Logger log = Logger.getLogger(HdfsIO.class);
    private FileSystem fs = null;
    // Randomized base path so repeated runs do not collide on the cluster.
    private String basePath = "/data/file/" + UUID.randomUUID().toString().replaceAll("-", "");
    private Configuration conf = new Configuration();

    @Before
    public void before() throws Exception {
        // 1. Obtain the HDFS client.
        //  conf.set("dfs.client.use.datanode.hostname", "true");
        conf.set("dfs.replication", "1"); // replication factor 1 (single-node dev cluster)
        fs = FileSystem.get(new URI("hdfs://127.0.0.1:9000"), conf, "DIGITAL-HUYONG");
    }

    /**
     * Uploads a local file to HDFS using raw streams.
     *
     * @throws Exception if the local file cannot be read or the HDFS write fails
     */
    @Test
    public void putFileToHDFS() throws Exception {
        // try-with-resources guarantees both streams are closed even when copyBytes
        // throws; the original only closed them on the success path.
        try (FileInputStream inputStream =
                     new FileInputStream("F:\\Downloads\\datamodeler-21.4.1.349.1605-no-jre.zip");
             FSDataOutputStream fsDataOutputStream =
                     fs.create(new Path("datamodeler-21.4.1.349.1605-no-jre.zip"))) {
            // Copy the local stream into the HDFS stream.
            IOUtils.copyBytes(inputStream, fsDataOutputStream, conf);
        }
    }

    /**
     * Downloads a file from HDFS to the local filesystem using raw streams.
     *
     * @throws Exception if the HDFS read or the local write fails
     */
    @Test
    public void getFileToLocal() throws Exception {
        try (FSDataInputStream inputStream = fs.open(new Path("/data/file/FAQ.doc"));
             FileOutputStream outputStream = new FileOutputStream("G:\\file\\FAQ_1.doc")) {
            IOUtils.copyBytes(inputStream, outputStream, conf);
        }
    }


    /**
     * Downloads only the first 128 MB of a file (positioned read) — e.g. to fetch
     * just one block of a large archive; {@link #readFileSeek2()} fetches the rest.
     *
     * @throws IOException if the HDFS read or the local write fails
     */
    @Test
    public void readFileSeek1() throws IOException {
        try (FSDataInputStream inputStream =
                     fs.open(new Path("/user/DIGITAL-HUYONG/datamodeler-21.4.1.349.1605-no-jre.zip"));
             FileOutputStream outputStream = new FileOutputStream("G:\\file\\datamodeler.zip.part1")) {
            // Bug fix: the original ignored read()'s return value, writing the whole
            // 1024-byte buffer every iteration — a short read duplicated stale bytes,
            // and after EOF (read() == -1) it kept writing garbage. It also counted
            // iterations instead of bytes, so partial reads broke the 128 MB boundary
            // that readFileSeek2() relies on via seek(128 MB).
            long remaining = 1024L * 1024 * 128; // exactly 128 MB for part 1
            byte[] buff = new byte[1024];
            while (remaining > 0) {
                int len = inputStream.read(buff, 0, (int) Math.min(buff.length, remaining));
                if (len == -1) {
                    break; // file is shorter than 128 MB
                }
                outputStream.write(buff, 0, len);
                remaining -= len;
            }
        }
    }

    /**
     * Downloads the remainder of the file (everything after the first 128 MB).
     *
     * @throws IOException if the HDFS read or the local write fails
     */

    @Test
    public void readFileSeek2() throws IOException {
        try (FSDataInputStream inputStream =
                     fs.open(new Path("/user/DIGITAL-HUYONG/datamodeler-21.4.1.349.1605-no-jre.zip"));
             FileOutputStream outputStream = new FileOutputStream("G:\\file\\datamodeler.zip.part2")) {
            // Skip the 128 MB already written by readFileSeek1().
            inputStream.seek(1024 * 1024 * 128);
            IOUtils.copyBytes(inputStream, outputStream, conf);
        }
    }

    @After
    public void after() throws IOException {
        // Release the HDFS client opened in before().
        if (fs != null) {
            fs.close();
            log.info("overs");
        }
    }

    // Helper: 32-char uppercase hex id with the dashes stripped.
    private String getUUID() {
        return UUID.randomUUID().toString().replaceAll("-", "").toUpperCase(Locale.ROOT);
    }
}
