package com.moxi.study.hadoop.hdfs;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * 完成hdfs相关操作
 * @author Administrator
 *
 */
/**
 * Demonstrates basic HDFS operations: reading files (via {@link URL} and the
 * Hadoop {@link FileSystem} API), creating directories, and writing/deleting files.
 *
 * <p>All operations target the NameNode at {@link #FS_URI}.
 *
 * @author Administrator
 */
public class TestHDFS {

	/** NameNode endpoint used by every operation in this class. */
	private static final String FS_URI = "hdfs://192.168.154.128:8020/";

	/** Tracks the one-time, JVM-wide URL stream handler registration. */
	private static boolean urlFactoryRegistered = false;

	/**
	 * Builds a {@link FileSystem} handle bound to the configured NameNode.
	 *
	 * <p>Note: {@code FileSystem.get} returns a JVM-cached instance, so callers
	 * deliberately do not close it — closing would break other users of the cache.
	 *
	 * @return the HDFS file system
	 * @throws IOException if the file system cannot be reached
	 */
	private static FileSystem getFileSystem() throws IOException {
		Configuration conf = new Configuration();
		// Point the default file system at the NameNode.
		conf.set("fs.defaultFS", FS_URI);
		return FileSystem.get(conf);
	}

	/**
	 * Reads an HDFS file through {@link java.net.URL} and prints its content.
	 *
	 * @throws Exception on any connection or read failure
	 */
	public static void readFile() throws Exception {
		// URL.setURLStreamHandlerFactory may only be called ONCE per JVM;
		// a second call throws an Error. Guard so repeated calls are safe.
		synchronized (TestHDFS.class) {
			if (!urlFactoryRegistered) {
				URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
				urlFactoryRegistered = true;
			}
		}
		URL url = new URL("hdfs://192.168.154.128:8020/home/centos/hadoop/test.txt");
		URLConnection conn = url.openConnection(); // open the connection
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		// Loop until EOF: available() is not the file length and a single
		// read() is not guaranteed to fill the buffer over a network stream.
		try (InputStream is = conn.getInputStream()) {
			byte[] buf = new byte[4096];
			int len;
			while ((len = is.read(buf)) != -1) {
				baos.write(buf, 0, len);
			}
		}
		System.out.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
	}

	/**
	 * Reads an HDFS file through the Hadoop API and prints its content.
	 *
	 * @throws IOException on any connection or read failure
	 */
	public static void readFileByApi() throws IOException {
		FileSystem fs = getFileSystem();
		Path path = new Path("/home/centos/hadoop/test.txt");
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		// try-with-resources closes the input stream even if read() throws.
		try (FSDataInputStream fis = fs.open(path)) {
			byte[] buf = new byte[1024];
			int len;
			while ((len = fis.read(buf)) != -1) {
				baos.write(buf, 0, len);
			}
		}
		System.out.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
	}

	/**
	 * Reads an HDFS file using Hadoop's {@link IOUtils} helper and prints its content.
	 *
	 * @throws IOException on any connection or read failure
	 */
	public static void readFileByApi2() throws IOException {
		FileSystem fs = getFileSystem();
		Path path = new Path("/home/centos/hadoop/test.txt");
		ByteArrayOutputStream baos = new ByteArrayOutputStream();
		// The original leaked fis; try-with-resources guarantees it is closed.
		try (FSDataInputStream fis = fs.open(path)) {
			// Copy all bytes from fis into baos with a 4 KB buffer.
			IOUtils.copyBytes(fis, baos, 4096);
		}
		System.out.println(new String(baos.toByteArray(), StandardCharsets.UTF_8));
	}

	/**
	 * Creates a directory in the Hadoop file system.
	 *
	 * @throws IOException if the directory cannot be created
	 */
	public static void mkdir() throws IOException {
		FileSystem fs = getFileSystem();
		fs.mkdirs(new Path("/home/centos/myhadoop"));
	}

	/**
	 * Creates a file in HDFS and writes "hello" into it.
	 *
	 * @throws IOException if the file cannot be created or written
	 */
	public static void putFile() throws IOException {
		FileSystem fs = getFileSystem();
		// create(Path f, boolean overwrite, int bufferSize, short replication, long blockSize)
		// NOTE(review): blockSize 1024 is below the default NameNode minimum
		// (dfs.namenode.fs-limits.min-block-size, 1 MB) — may be rejected; verify cluster config.
		try (FSDataOutputStream out = fs.create(new Path("/home/centos/myhadoop/hello.txt"),
				true, 1024, (short) 2, 1024)) {
			out.write("hello".getBytes(StandardCharsets.UTF_8));
		}
	}

	/**
	 * Recursively deletes a file/path from HDFS.
	 *
	 * @throws IOException if the delete fails
	 */
	public static void removeFile() throws IOException {
		FileSystem fs = getFileSystem();
		Path path = new Path("/home/centos/myhadoop/a.txt");
		fs.delete(path, true);
	}

	public static void main(String args[]) throws Exception {

//		readFile();

//		readFileByApi();

		putFile();

//		removeFile();
	}

}
