package com.hadoop.fs;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Small command-line utility for basic HDFS file maintenance:
 * uploading a local file, downloading an HDFS file, and listing a directory.
 *
 * <p>All methods log failures via {@code printStackTrace()} rather than
 * propagating them, matching the original best-effort, demo-style contract.
 */
public class HadoopFileMaintain {

	/**
	 * Entry point. Currently lists the HDFS directory given as the first
	 * argument; the upload/get calls are kept (commented out) as usage examples.
	 *
	 * @param args args[0] is an HDFS directory URI (and, for the commented
	 *             calls, args[1] would be the counterpart local/HDFS path)
	 */
	public static void main(String[] args) {
		try {
			// upload(args[0], args[1]);
			// get(args[0], args[1]);
			List<String> files = listFiles(args[0]);
			for (String f : files) {
				System.out.println(f);
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	/**
	 * Uploads a file from the local filesystem into HDFS.
	 *
	 * @param localSrc path of the source file on the local filesystem
	 * @param dist     destination HDFS URI (also used to resolve the FileSystem)
	 */
	public static void upload(String localSrc, String dist) {
		InputStream in = null;
		OutputStream out = null;
		try {
			// Resolve the FileSystem first so a connection failure cannot
			// leak an already-opened local input stream.
			Configuration conf = new Configuration();
			FileSystem fs = FileSystem.get(URI.create(dist), conf);
			in = new BufferedInputStream(new FileInputStream(localSrc));
			out = fs.create(new Path(dist));
			// close=false: closing is handled exactly once in the finally
			// block below (the original both auto-closed and re-closed).
			IOUtils.copyBytes(in, out, 4096, false);
			System.out.println("Upload a file success.");
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// closeStream is null-safe, so partial initialization is fine.
			IOUtils.closeStream(in);
			IOUtils.closeStream(out);
		}
	}

	/**
	 * Downloads a file from HDFS into the local filesystem.
	 *
	 * @param hdfsSrc   source HDFS URI
	 * @param localDist destination path on the local filesystem
	 */
	public static void get(String hdfsSrc, String localDist) {
		InputStream in = null;
		OutputStream out = null;
		try {
			Configuration conf = new Configuration();
			FileSystem fs = FileSystem.get(URI.create(hdfsSrc), conf);
			in = fs.open(new Path(hdfsSrc));
			out = new FileOutputStream(localDist);
			// close=false: a single, null-safe close happens in finally.
			IOUtils.copyBytes(in, out, 4096, false);
			System.out.println("Get a file success.");
		} catch (Exception e) {
			e.printStackTrace();
		} finally {
			// The original called out.close() here, which threw an NPE
			// (masking the real error) when fs.open() failed before the
			// FileOutputStream was created. closeStream tolerates null.
			IOUtils.closeStream(in);
			IOUtils.closeStream(out);
		}
	}

	/**
	 * Lists the entries of an HDFS directory.
	 *
	 * @param hdfsDir HDFS directory URI to list
	 * @return the full path string of each entry; empty on error or if the
	 *         directory yields no status entries (never {@code null})
	 */
	public static List<String> listFiles(String hdfsDir) {
		List<String> files = new ArrayList<String>();
		try {
			Configuration conf = new Configuration();
			FileSystem fs = FileSystem.get(URI.create(hdfsDir), conf);
			// listStatus accepts a single Path directly; no need for the
			// one-element Path[] the original constructed.
			FileStatus[] fstatus = fs.listStatus(new Path(hdfsDir));
			if (fstatus != null) {
				for (Path p : FileUtil.stat2Paths(fstatus)) {
					files.add(p.toString());
				}
			}
		} catch (Exception e) {
			e.printStackTrace();
		}
		return files;
	}
}
