package com.xl.testHadfAPI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.net.URI;


/**
 * @ClassName testHdfsIOApi
 * @Description HDFS stream-based I/O examples: upload, download, and positioned
 *              (split) reads of a large file via raw stream copies.
 * @Author hxl
 * @Date 2021/3/22 18:07
 * @Version 1.0
 **/
public class testHdfsIOApi {

  /** NameNode URI shared by every test. */
  private static final String HDFS_URI = "hdfs://hadoop100:9000";
  /** HDFS user the tests connect as. */
  private static final String HDFS_USER = "root";
  /** Copy-buffer size in bytes for the manual split copy. */
  private static final int BUF_SIZE = 1024;
  /** Size of the first split: 128 MB (matches the HDFS block size used here). */
  private static final long FIRST_SPLIT_BYTES = 128L * 1024 * 1024;

  // Requirement: upload the local file d:/banhua.txt to the HDFS root directory
  // using raw streams (instead of fs.copyFromLocalFile).
  @Test
  public void testUploadFile() throws Exception {
    Configuration conf = new Configuration();
    // 1. Obtain the file system handle, authenticating as HDFS_USER.
    FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER);
    try (
        // 2. Input stream over the local file.
        FileInputStream fis = new FileInputStream(new File("d://banhua.txt"));
        // 3. Output stream creating the target file on HDFS.
        FSDataOutputStream fos = fs.create(new Path("/banhua.txt"))) {
      // 4. Stream copy local -> HDFS. try-with-resources closes both streams
      //    even if the copy throws (the original leaked them on failure).
      IOUtils.copyBytes(fis, fos, conf);
    } finally {
      // 5. Release the FileSystem handle.
      fs.close();
    }
  }

  // Requirement: download /banhua.txt from HDFS to the local D: drive
  // using raw streams (instead of fs.copyToLocalFile).
  @Test
  public void testDowloadFile() throws Exception {
    Configuration conf = new Configuration();
    // 1. Obtain the file system handle.
    FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER);
    try (
        // 2. Input stream over the HDFS file.
        FSDataInputStream fis = fs.open(new Path("/banhua.txt"));
        // 3. Output stream to the local destination.
        FileOutputStream fos = new FileOutputStream(new File("D://banhua.txt"))) {
      // 4. Stream copy HDFS -> local.
      IOUtils.copyBytes(fis, fos, conf);
    } finally {
      // 5. Release the FileSystem handle.
      fs.close();
    }
  }

  // Positioned read, part 1: copy only the FIRST 128 MB of the large file
  // /hadoop-2.7.2.tar.gz into a local ".part1" file.
  @Test
  public void testReadBigFile() throws Exception {
    Configuration conf = new Configuration();
    // 1. Obtain the file system handle.
    FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER);
    try (
        // 2. Input stream over the large HDFS file.
        FSDataInputStream fis = fs.open(new Path("/hadoop-2.7.2.tar.gz"));
        // 3. Output stream for the first split on the local disk.
        FileOutputStream fos = new FileOutputStream(new File("D://hadoop-2.7.2.tar.gz.part1"))) {
      // 4. Copy exactly the first 128 MB.
      //    BUG FIX: the original ignored the return value of read(buf) and
      //    always wrote the full 1024-byte buffer, so short reads duplicated
      //    stale bytes and reads past EOF kept appending garbage. Track the
      //    actual byte count and stop at EOF or at the split boundary.
      byte[] buf = new byte[BUF_SIZE];
      long remaining = FIRST_SPLIT_BYTES;
      int len;
      while (remaining > 0
          && (len = fis.read(buf, 0, (int) Math.min(buf.length, remaining))) != -1) {
        fos.write(buf, 0, len);
        remaining -= len;
      }
    } finally {
      // 5. Release the FileSystem handle.
      fs.close();
    }
  }

  // Positioned read, part 2: copy everything AFTER the first 128 MB of
  // /hadoop-2.7.2.tar.gz into a local ".part2" file.
  // (Concatenating part1 + part2 locally reproduces the original archive.)
  @Test
  public void testReadBigSecondFile() throws Exception {
    Configuration conf = new Configuration();
    // 1. Obtain the file system handle.
    FileSystem fs = FileSystem.get(new URI(HDFS_URI), conf, HDFS_USER);
    try (
        // 2. Input stream over the large HDFS file.
        FSDataInputStream fis = fs.open(new Path("/hadoop-2.7.2.tar.gz"));
        // 3. Output stream for the second split on the local disk.
        FileOutputStream fos = new FileOutputStream(new File("D://hadoop-2.7.2.tar.gz.part2"))) {
      // 4. Skip the first split, then copy the remainder of the file.
      fis.seek(FIRST_SPLIT_BYTES);
      IOUtils.copyBytes(fis, fos, conf);
    } finally {
      // 5. Release the FileSystem handle.
      fs.close();
    }
  }
}
