package com.jt.www.hadoopcould.util;

import java.io.InputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

public class HadoopFileUtil {
	/** Default HDFS NameNode URI used by the URI-based methods below. */
	public static final String HDFS_PATH = "hdfs://192.168.1.100:9000";
	static Logger logger = Logger.getLogger(HadoopFileUtil.class);

	/**
	 * Copies a local file into the Hadoop file system configured by the
	 * default {@link Configuration} (i.e. whatever fs.defaultFS resolves to).
	 *
	 * @param localFile  path of the source file on the local file system
	 * @param hadoopFile destination path on HDFS
	 * @return {@code true} on success, {@code false} if the copy failed
	 */
	public boolean createFile(String localFile, String hadoopFile) {
		try {
			Configuration conf = new Configuration();
			FileSystem src = FileSystem.getLocal(conf);
			FileSystem dst = FileSystem.get(conf);
			Path srcpath = new Path(localFile);
			Path dstpath = new Path(hadoopFile);
			// 'false' => keep the local source file after copying
			FileUtil.copy(src, srcpath, dst, dstpath, false, conf);
		} catch (Exception e) {
			logger.error("createFile error, local=" + localFile
					+ ", hdfs=" + hadoopFile, e);
			return false;
		}

		return true;
	}

	/**
	 * Streams the given input into a new file on HDFS (at {@link #HDFS_PATH}).
	 * Both the output and the supplied input stream are always closed,
	 * even when the copy fails part-way.
	 *
	 * @param inStream   source data; consumed and closed by this method
	 * @param hadoopFile destination path on HDFS
	 * @return {@code true} on success, {@code false} if writing failed
	 */
	public boolean createFileByInputStream(InputStream inStream,
			String hadoopFile) {
		try {
			FileSystem fs = FileSystem.get(new URI(HDFS_PATH),
					new Configuration());
			Path dstpath = new Path(hadoopFile);
			// try-with-resources guarantees the HDFS stream is closed
			// even if a read/write throws mid-copy.
			try (FSDataOutputStream oStream = fs.create(dstpath)) {
				byte[] buffer = new byte[4096];
				int length;
				while ((length = inStream.read(buffer)) > 0) {
					oStream.write(buffer, 0, length);
				}
				oStream.flush();
			} finally {
				inStream.close();
			}
		} catch (Exception e) {
			logger.error("createFileByInputStream error, hdfs=" + hadoopFile, e);
			return false;
		}
		return true;
	}

	/**
	 * Recursively deletes a file or directory on HDFS (at {@link #HDFS_PATH}).
	 *
	 * @param hadoopFile path on HDFS to delete
	 * @return {@code true} on success, {@code false} if deletion failed
	 */
	public boolean deleteFile(String hadoopFile) {
		try {
			FileSystem fs = FileSystem.get(new URI(HDFS_PATH),
					new Configuration());
			FileUtil.fullyDelete(fs, new Path(hadoopFile));
		} catch (Exception e) {
			logger.error("deleteFile error, hdfs=" + hadoopFile, e);
			return false;
		}

		return true;
	}

	/**
	 * Opens a file from the file system configured by the default
	 * {@link Configuration} (fs.defaultFS).
	 *
	 * @param hadoopFile path of the file to open
	 * @return an open input stream, or {@code null} if opening failed;
	 *         the caller is responsible for closing it
	 */
	public FSDataInputStream getInputStream(String hadoopFile) {
		FSDataInputStream iStream = null;
		try {
			Configuration conf = new Configuration();
			FileSystem fs = FileSystem.get(conf);
			Path p = new Path(hadoopFile);
			iStream = fs.open(p);
		} catch (Exception e) {
			logger.error("getInputStream error, path=" + hadoopFile, e);
		}
		return iStream;
	}

	/**
	 * Opens a file from the HDFS cluster at {@link #HDFS_PATH}.
	 *
	 * @param hadoopFile path of the file on HDFS
	 * @return an open input stream, or {@code null} if opening failed;
	 *         the caller is responsible for closing it
	 */
	public FSDataInputStream getInputStreams(String hadoopFile) {
		FSDataInputStream iStream = null;
		try {
			FileSystem fs = FileSystem.get(new URI(HDFS_PATH),
					new Configuration());
			Path p = new Path(hadoopFile);
			iStream = fs.open(p);
		} catch (Exception e) {
			logger.error("getInputStreams error, hdfs=" + hadoopFile, e);
		}
		return iStream;
	}

}