package com.ruby.bigtable.hdfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class HdfsApp {

	/**
	 * Default local source file used by {@link #write(String)} when no explicit
	 * local path is supplied (preserved from the original hard-coded value for
	 * backward compatibility).
	 */
	private static final String DEFAULT_LOCAL_FILE = "E:\\1804211523.txt";

	/**
	 * Obtains the default {@link FileSystem} built from the Hadoop configuration
	 * found on the classpath (core-site.xml / hdfs-site.xml).
	 *
	 * @return the configured file system
	 * @throws Exception if the file system cannot be created
	 */
	private static FileSystem getFileSystem() throws Exception {
		Configuration conf = new Configuration();
		return FileSystem.get(conf);
	}

	/**
	 * Copies the default local file to the given HDFS path, replacing any
	 * existing file at that destination.
	 *
	 * @param toFolder destination path in HDFS
	 * @throws Exception on configuration or I/O failure
	 */
	private static void write(String toFolder) throws Exception {
		write(toFolder, DEFAULT_LOCAL_FILE);
	}

	/**
	 * Copies a local file to the given HDFS path, replacing any existing file
	 * at that destination. Unlike the previous version, exceptions propagate to
	 * the caller instead of being swallowed via printStackTrace(), so failures
	 * are visible.
	 *
	 * @param toFolder  destination path in HDFS
	 * @param localFile path of the local source file to upload
	 * @throws Exception on configuration or I/O failure
	 */
	private static void write(String toFolder, String localFile) throws Exception {
		FileSystem fileSystem = getFileSystem();
		// Remove any previous file/directory at the destination before writing.
		fileSystem.delete(new Path(toFolder), true);
		// try-with-resources guarantees both streams are closed even on error,
		// replacing the manual IOUtils.closeStream calls in finally.
		try (FileInputStream in = new FileInputStream(new File(localFile));
				FSDataOutputStream out = fileSystem.create(new Path(toFolder))) {
			IOUtils.copyBytes(in, out, 4096, false);
		}
	}

	/**
	 * Reads the HDFS file at the given path and streams its bytes to stdout.
	 *
	 * @param filePath HDFS path of the file to read
	 * @throws Exception on configuration or I/O failure
	 */
	private static void read(String filePath) throws Exception {
		FileSystem fileSystem = getFileSystem();
		try (FSDataInputStream inStream = fileSystem.open(new Path(filePath))) {
			IOUtils.copyBytes(inStream, System.out, 4096, false);
		}
	}

	/**
	 * Demo entry point: uploads the default local file to a fixed HDFS path.
	 *
	 * @param args unused
	 * @throws Exception on any failure, surfaced to the JVM
	 */
	public static void main(String[] args) throws Exception {
		String fileFolder = "/user/sunyu/input/wordcount/666.out";
		write(fileFolder);
	}

}
