package com.tonysor.hadoop.hdfs;

import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Small collection of HDFS helper operations: list, download, upload,
 * mkdir, and append. Each method obtains its own {@link FileSystem}
 * handle and closes it when done.
 *
 * NOTE(review): the shared mutable static {@code fs} field makes these
 * methods non-thread-safe; do not invoke them concurrently.
 */
public class HdfsUtil {
	private static Configuration conf = new Configuration();
	static FileSystem fs;
	static String userName = "hadoop";
	static String hdfsInput = "hdfs://h1:9000";

	/**
	 * Prints the {@link FileStatus} of every entry directly under /output.
	 *
	 * @throws Exception if the filesystem cannot be reached or listed
	 */
	public static void getFileList() throws Exception {
		//fs = FileSystem.get(new URI(hdfsInput), conf, userName);
		fs = FileSystem.get(conf);
		try {
			FileStatus[] list = fs.listStatus(new Path("/output"));
			for (FileStatus f : list) {
				System.out.println(f);
			}
		} finally {
			fs.close(); // was leaked in the original
		}
	}

	/**
	 * Copies /input/1.txt from HDFS into the local d:/ directory.
	 *
	 * @throws Exception on any filesystem or I/O failure
	 */
	public static void downloadFile() throws Exception {
		fs = FileSystem.get(conf);
		try {
			fs.copyToLocalFile(false, new Path("/input/1.txt"), new Path("d:/"));
		} finally {
			fs.close();
		}
	}

	/**
	 * Downloads /input/1.txt to d:/, then uploads the local spreadsheet
	 * d:/aa报表.xlsx into the HDFS directory /input/aa.
	 *
	 * @throws Exception on any filesystem or I/O failure
	 */
	public static void uploadFile() throws Exception {
		fs = FileSystem.get(new URI(hdfsInput), conf, userName);
		try {
			// NOTE(review): this download looks like a copy/paste leftover from
			// downloadFile(); kept to preserve existing behavior — confirm intent.
			fs.copyToLocalFile(false, new Path("/input/1.txt"), new Path("d:/"));
			fs.copyFromLocalFile(new Path("d:/aa报表.xlsx"), new Path("/input/aa"));
		} finally {
			fs.close();
		}
	}

	/**
	 * Creates the /input/aa directory (including any missing parents).
	 *
	 * @throws Exception on any filesystem failure
	 */
	public static void mkdir() throws Exception {
		fs = FileSystem.get(new URI(hdfsInput), conf, userName);
		try {
			fs.mkdirs(new Path("/input/aa"));
		} finally {
			fs.close(); // was leaked in the original
		}
	}

	/**
	 * Appends a short UTF-8 string to /input/aa/test.txt. The target file
	 * must already exist and the cluster must allow append
	 * (dfs.support.append).
	 *
	 * @throws Exception if the file is missing or append is not supported
	 */
	public static void addContent() throws Exception {
		fs = FileSystem.get(new URI(hdfsInput), conf, userName);
		try {
			String content = " 中文总过";
			String filename = "/input/aa/test.txt";
			// Create the file first if it does not exist yet:
			//fs.create(new Path(filename));
			FSDataOutputStream out = fs.append(new Path(filename));
			try {
				// Explicit UTF-8 instead of the platform default charset, so the
				// Chinese text is encoded identically on every JVM.
				out.write(content.getBytes(StandardCharsets.UTF_8));
			} finally {
				out.close(); // original never closed/flushed the stream
			}
		} finally {
			fs.close(); // was leaked in the original
		}
	}
}
