package com.hadoop.hdfs;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Thin convenience wrapper around the Hadoop {@link FileSystem} API for a
 * single, statically configured HDFS cluster.
 *
 * <p>The {@code FileSystem} handle is created once at class-load time against
 * {@link #HADOOP_URL}. If the handle cannot be created, class initialization
 * fails with an {@link ExceptionInInitializerError} carrying the root cause,
 * rather than deferring a confusing {@code NullPointerException} to the first
 * method call.
 */
public class Hdfs {
	/** NameNode endpoint this wrapper talks to; adjust for the target cluster. */
	private static final String HADOOP_URL = "hdfs://master:9000";

	/** Shared filesystem handle, initialized exactly once in the static block. */
	private static FileSystem fileSystem;

	static {
		try {
			// Fail fast: every method below dereferences fileSystem, so a
			// swallowed failure here would only surface later as an NPE.
			fileSystem = FileSystem.get(new URI(HADOOP_URL), new Configuration());
		} catch (IOException | URISyntaxException e) {
			throw new ExceptionInInitializerError(e);
		}
	}

	/**
	 * Creates the given directory (and any missing parents) on HDFS.
	 *
	 * @param dir absolute HDFS path of the directory to create
	 * @throws Exception if the filesystem operation fails
	 */
	protected void createDir(String dir) throws Exception {
		fileSystem.mkdirs(new Path(dir));
	}

	/**
	 * Deletes the given HDFS path, recursively if it is a directory.
	 *
	 * <p>Uses {@code delete(Path, boolean)} — the single-argument overload is
	 * deprecated and simply delegates to {@code delete(f, true)}, so passing
	 * {@code true} here preserves the original behavior.
	 *
	 * @param file absolute HDFS path to delete
	 * @throws IOException if the filesystem operation fails
	 */
	protected void delete(String file) throws IOException {
		fileSystem.delete(new Path(file), true);
	}

	/**
	 * Uploads a local file to HDFS.
	 *
	 * @param src local source path
	 * @param dst HDFS destination path
	 * @throws IOException if the copy fails
	 */
	protected void upLoad(String src, String dst) throws IOException {
		fileSystem.copyFromLocalFile(new Path(src), new Path(dst));
	}

	/**
	 * Downloads an HDFS file to the local filesystem.
	 *
	 * @param src HDFS source path
	 * @param dst local destination path
	 * @throws Exception if the copy fails
	 */
	protected void downLoad(String src, String dst) throws Exception {
		fileSystem.copyToLocalFile(new Path(src), new Path(dst));
	}

	/**
	 * Prints the full path of every entry directly under the given HDFS path.
	 *
	 * @param path absolute HDFS path to list
	 * @throws IOException if the listing fails
	 */
	protected void list(String path) throws IOException {
		FileStatus[] status = fileSystem.listStatus(new Path(path));
		for (FileStatus entry : status) {
			System.out.println(entry.getPath().toString());
		}
	}

	/** Ad-hoc demo entry point: downloads {@code /d1} and lists the root. */
	public static void main(String[] args) throws Exception {
		Hdfs hdfs = new Hdfs();
		hdfs.downLoad("/d1", "c:\\");
		hdfs.list("/");
	}
}
