package com.knight.hadoop.day07.hdfs;

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.Before;
import org.junit.Test;

/**
 * Demonstrates stream-based (random access) operations against HDFS.
 *
 * <p>Connects to the NameNode at {@code hdfs://hadoop4:9000} as user
 * {@code hadoop} and uploads/downloads files via raw streams rather than
 * the {@code copyFromLocalFile}/{@code copyToLocalFile} convenience APIs.
 */
public class HdfsRandomStreamAccess {
	private FileSystem fs = null;

	@Before
	public void init() throws Exception {
		Configuration conf = new Configuration();
		// Connect as user "hadoop" so we have permission to write to HDFS.
		fs = FileSystem.get(new URI("hdfs://hadoop4:9000"), conf, "hadoop");
	}

	/**
	 * Uploads a local file to HDFS using raw streams.
	 *
	 * <p>Copies {@code d://settings.txt} from the local disk into the HDFS
	 * file {@code /code2}. try-with-resources guarantees both streams are
	 * closed; closing the HDFS output stream is required for the file to be
	 * flushed and finalized on the cluster.
	 *
	 * @throws Exception if the local file is missing or the HDFS write fails
	 */
	@Test
	public void testUseStreamUploadFile() throws Exception {
		try (FSDataOutputStream outputStream = fs.create(new Path("/code2"));
				FileInputStream inputStream = new FileInputStream("d://settings.txt")) {
			IOUtils.copy(inputStream, outputStream);
		}
	}

	/**
	 * Downloads part of an HDFS file to local disk using raw streams.
	 *
	 * <p>Reads the HDFS file {@code /code2}, skipping the first 3 bytes and
	 * copying the next 20 bytes into the local file {@code D://setting} —
	 * this partial read is the "random access" aspect of the demo.
	 *
	 * @throws Exception if the HDFS file cannot be opened or the local write fails
	 */
	@Test
	public void testUseStreamDownloadFile() throws Exception {
		try (FSDataInputStream inputStream = fs.open(new Path("/code2"));
				FileOutputStream outputStream = new FileOutputStream("D://setting")) {
			// copyLarge(in, out, offset, length): skip 3 bytes, copy 20 bytes.
			IOUtils.copyLarge(inputStream, outputStream, 3, 20);
		}
	}
}
