package com.prj.ufdm.demo.tutorial.hadoop.hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

import com.prj.ufdm.demo.tutorial.hadoop.constants.HADOOP_HOST;

/**
 * Minimal HDFS client demo: connects to the cluster configured in
 * {@code HADOOP_HOST.HDFS_URI} (as user "root") and performs basic
 * filesystem operations (list, mkdir, delete, upload).
 *
 * <p>Each public operation opens a fresh {@link FileSystem} via {@link #before()}
 * and closes it in a {@code finally} block via {@link #after()}, so the
 * connection is released even when the HDFS call throws.
 *
 * <p>NOTE: not thread-safe — the shared static {@code fileSystem} field means
 * concurrent calls would clobber each other's connection.
 */
public class HdfsClient {

	static FileSystem fileSystem = null;
	
	/** Local word-count input file to upload in {@link #main(String[])}. */
	public static final String WORD_LOCAL_FILE= "D:\\wcinput.txt";
	
	public static void main(String[] args)  throws Exception { 

		// Upload the local word-count input file to the configured HDFS path.
		copyFromLocalFile(WORD_LOCAL_FILE,HADOOP_HOST.WORD_INPUT_PATH_FILE);
	}
	
	/**
	 * Lists the entries (files and directories) directly under the given path
	 * and prints each entry's full path to stdout.
	 *
	 * @param filePath HDFS path to list
	 * @throws Exception if the connection or the list operation fails
	 */
	public static void getDirs(String filePath) throws Exception  {
		// BUGFIX: original never called before(), so fileSystem was null (NPE)
		// and the connection was never opened/closed.
		before();
		try {
			Path path = new Path(filePath);
			FileStatus[] srcFileStatus = fileSystem.listStatus(path);
			for (FileStatus fs : srcFileStatus) {
				System.out.println("\n 路径："+fs.getPath());
			}
		} finally {
			after();
		}
	}
	
	/**
	 * Creates the given directory (and any missing parents) on HDFS.
	 *
	 * @param filePath HDFS directory path to create
	 * @throws Exception if the connection or the mkdir operation fails
	 */
	public static void createDirs(String filePath) throws Exception  {
		before();
		try {
			Path path = new Path(filePath);
			fileSystem.mkdirs(path);
		} finally {
			after();
		}
	}
	
	/**
	 * Marks the given HDFS path for deletion when the FileSystem is closed;
	 * since {@link #after()} closes it immediately, the path is effectively
	 * deleted right away.
	 *
	 * @param filePath HDFS path to delete
	 * @throws Exception if the connection or the delete operation fails
	 */
	public static void deleteOnExit(String filePath) throws Exception  {
		before();
		try {
			Path path = new Path(filePath);
			fileSystem.deleteOnExit(path);
		} finally {
			after();
		}
	}
	
	/**
	 * Uploads a local file to HDFS, replacing any existing file at the
	 * destination.
	 *
	 * @param srcPath local source file path
	 * @param dstPath HDFS destination path
	 * @throws Exception if the connection, delete, or copy fails
	 */
	public static void copyFromLocalFile(String srcPath,String dstPath) throws Exception  {
		before();
		try {
			Path src = new Path(srcPath);
			Path dst = new Path(dstPath);
			// BUGFIX: original used fileSystem.deleteOnExit(dst), which schedules
			// the destination for deletion when the FileSystem closes — i.e. the
			// freshly uploaded file was deleted by after(). Delete any existing
			// destination immediately instead.
			if (fileSystem.exists(dst)) {
				fileSystem.delete(dst, true);
			}
			fileSystem.copyFromLocalFile(src, dst);
		} finally {
			after();
		}
	}
	
	/** Opens a FileSystem handle to the configured HDFS cluster as user "root". */
	private static void before() throws Exception {
		fileSystem = FileSystem.get(new URI(HADOOP_HOST.HDFS_URI),new Configuration(),"root");
	}
	
	/** Closes the FileSystem handle, if one was opened. */
	private static void after() throws Exception {
		if(fileSystem!=null) fileSystem.close();
	}

}
