package com.lilei.bigdata.hdfs;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Test;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

/**
 * Demonstrates stream-based HDFS file operations: upload, download,
 * random-access (seek) read, and printing a file to stdout.
 *
 * <p>All streams are managed with try-with-resources so they are closed
 * (and, for uploads, flushed to the NameNode) even when a copy fails —
 * the original code leaked every stream it opened, which for
 * {@link #testUpload()} could leave the HDFS file empty or truncated.
 *
 * <p>NOTE(review): local paths such as {@code "g://1.txt"} are
 * Windows-drive test fixtures; adjust them for your environment.
 */
public class HdfsStreamDemo {

    FileSystem fs = null;
    Configuration conf = null;

    /**
     * Connects to the HDFS NameNode as user "root" before each test.
     *
     * @throws Exception if the filesystem handle cannot be obtained
     */
    @Before
    public void init() throws Exception {
        conf = new Configuration();
        // Client-side override of the block replication factor.
        conf.set("dfs.replication", "5");

        fs = FileSystem.get(new URI("hdfs://mini1:9000"), conf, "root");
    }

    /**
     * Uploads a local file to HDFS by copying between raw streams.
     *
     * @throws Exception on any I/O failure
     */
    @Test
    public void testUpload() throws Exception {
        // try-with-resources closes both streams; closing the
        // FSDataOutputStream is what actually commits the data to HDFS.
        try (FSDataOutputStream outputStream = fs.create(new Path("/1.txt"), true);
             FileInputStream inputStream = new FileInputStream("g://1.txt")) {
            IOUtils.copy(inputStream, outputStream);
        }
    }

    /**
     * Downloads an HDFS file to the local filesystem via streams.
     *
     * @throws Exception on any I/O failure
     */
    @Test
    public void testDownLoad() throws Exception {
        try (FSDataInputStream inputStream = fs.open(new Path("/1.txt"));
             FileOutputStream outputStream = new FileOutputStream("g://2.txt")) {
            IOUtils.copy(inputStream, outputStream);
        }
    }

    /**
     * Reads an HDFS file starting from a fixed byte offset (seek),
     * writing the remainder to a local file.
     *
     * @throws Exception on any I/O failure
     */
    @Test
    public void testRandomAccess() throws Exception {
        try (FSDataInputStream inputStream = fs.open(new Path("/1.txt"));
             FileOutputStream outputStream = new FileOutputStream("g:/3.txt")) {
            // Skip the first 1200 bytes; copy streams from there to EOF.
            inputStream.seek(1200);
            IOUtils.copy(inputStream, outputStream);
        }
    }

    /**
     * Prints the content of an HDFS file to standard output.
     *
     * <p>System.out is deliberately NOT placed in the try-with-resources —
     * closing it would break subsequent console output for the JVM.
     *
     * @throws IOException              on any I/O failure
     * @throws IllegalArgumentException if the path is invalid
     */
    @Test
    public void testCat() throws IllegalArgumentException, IOException {
        try (FSDataInputStream in = fs.open(new Path("/1.txt"))) {
            IOUtils.copy(in, System.out);
        }
    }

}
