package com.zc.hadoop.dfs;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;

/**
 * Utility methods for obtaining Hadoop HDFS and local file system handles.
 */
public class HdfsUtil {

	private final static String DEFAULT_FS = "fs.defaultFS";
	private final static String DEFAULT_FS_VALUE = "hdfs://192.168.100.201:9000";
	// Local Hadoop installation path; on Windows this must contain bin\winutils.exe.
	private final static String HADOOP_HOME_DIR = "D:\\server\\hadoop-2.7.3";

	// Utility class with only static members: prevent instantiation.
	private HdfsUtil() {
	}

	/**
	 * Builds a fresh {@link Configuration} pointing at the configured HDFS
	 * NameNode, setting the {@code hadoop.home.dir} system property first.
	 * Shared by {@link #getFs()} and {@link #getLocalFS()}.
	 *
	 * @return a new Configuration with {@value #DEFAULT_FS} set
	 */
	private static Configuration newConfiguration() {
		System.setProperty("hadoop.home.dir", HADOOP_HOME_DIR);
		Configuration conf = new Configuration();
		conf.set(DEFAULT_FS, DEFAULT_FS_VALUE);
		return conf;
	}

	/**
	 * Connects to the HDFS file system at {@value #DEFAULT_FS_VALUE}.
	 *
	 * @return the connected {@link FileSystem}, or {@code null} if the
	 *         connection failed (the IOException is printed; callers must
	 *         null-check the result)
	 */
	public static FileSystem getFs() {
		FileSystem fs = null;
		try {
			fs = FileSystem.get(newConfiguration());
		} catch (IOException e) {
			e.printStackTrace();
			// XXX replace with proper logging / error reporting
		}
		return fs;
	}

	/**
	 * Creates a new {@link LocalFileSystem} instance backed by the local disk.
	 *
	 * @return a new LocalFileSystem, or {@code null} if creation failed (the
	 *         IOException is printed; callers must null-check the result)
	 */
	public static LocalFileSystem getLocalFS() {
		LocalFileSystem lfs = null;
		try {
			lfs = LocalFileSystem.newInstanceLocal(newConfiguration());
		} catch (IOException e) {
			e.printStackTrace();
			// XXX replace with proper logging / error reporting
		}
		return lfs;
	}

}
