package com.song.hadoop01;

import java.io.BufferedInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.URI;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class HDFSUtil {

	/** Default HDFS namenode URI used by every helper in this class. */
	public static String hdfsUrl = "hdfs://song:9000";

	static {
		// Point Hadoop at the local installation so it can resolve its home directory.
		System.setProperty("hadoop.home.dir", "/opt/hadoop/hadoop");
	}

	/**
	 * Creates a directory (including any missing parent directories) on HDFS.
	 *
	 * @param url the HDFS path of the directory to create
	 */
	public static void HdfsMkdir(String url) {
		Configuration conf = new Configuration();
		try {
			FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf);
			fs.mkdirs(new Path(url));
		} catch (IOException e) {
			// Best-effort utility: report the failure instead of propagating it.
			e.printStackTrace();
		}
	}

	/**
	 * Recursively lists all files under the given HDFS directory,
	 * printing each file's path and depth to stdout.
	 *
	 * @param url the HDFS directory to list
	 */
	public static void ListDir(String url) {
		Configuration conf = new Configuration();
		try {
			FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf);

			// Second argument 'true' makes the listing recursive.
			RemoteIterator<LocatedFileStatus> files = fs.listFiles(new Path(url), true);

			while (files.hasNext()) {
				LocatedFileStatus status = files.next();
				Path path = status.getPath();
				System.out.println(path.toString() + " depth is :" + path.depth());
			}
		} catch (IOException e) {
			// Best-effort utility: report the failure instead of propagating it.
			e.printStackTrace();
		}
	}

	/**
	 * Reads a file from HDFS and returns its content decoded as UTF-8.
	 * The content is also echoed to stdout.
	 *
	 * @param filepath the HDFS path of the file to read
	 * @return the file content, or {@code null} if the read failed
	 */
	public static String readFile(String filepath) {
		Configuration conf = new Configuration();
		try {
			FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf);
			// try-with-resources guarantees the stream is closed even on failure
			// (the original leaked it and swallowed all exceptions).
			try (FSDataInputStream in = fs.open(new Path(filepath))) {
				// Decode explicitly as UTF-8 rather than the platform default charset.
				String content = IOUtils.toString(in, "UTF-8");
				System.out.println(content);
				return content;
			}
		} catch (IOException e) {
			e.printStackTrace();
		}
		return null;
	}

	/**
	 * Prints the {@link FileStatus} of each direct child of the given HDFS path.
	 *
	 * @param filepath the HDFS path whose children are listed (non-recursive)
	 */
	public static void listFileStatus(String filepath) {
		Configuration conf = new Configuration();
		try {
			FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf);
			FileStatus[] statuss = fs.listStatus(new Path(filepath));

			for (FileStatus status : statuss) {
				System.out.println(status.toString());
			}
		} catch (IOException e) {
			// Best-effort utility: report the failure instead of propagating it.
			e.printStackTrace();
		}
	}

	/**
	 * Deletes a file or directory (recursively) from HDFS immediately.
	 *
	 * <p>Note: the original implementation used {@code fs.deleteOnExit(...)},
	 * which only schedules deletion for when the FileSystem is closed and
	 * therefore never actually deleted anything during normal use.
	 *
	 * @param filepath the HDFS path to delete
	 */
	public static void deleteFile(String filepath) {
		Configuration conf = new Configuration();
		try {
			FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf);
			// delete(path, true) removes the path now, recursing into directories.
			if (!fs.delete(new Path(filepath), true)) {
				System.err.println("delete failed for: " + filepath);
			}
		} catch (IOException e) {
			// Best-effort utility: report the failure instead of propagating it.
			e.printStackTrace();
		}
	}

	/**
	 * Renames (moves) a file or directory on HDFS.
	 *
	 * @param oldname the existing HDFS path
	 * @param newname the destination HDFS path
	 */
	public static void renameFile(String oldname, String newname) {
		Configuration conf = new Configuration();
		try {
			FileSystem fs = FileSystem.get(URI.create(hdfsUrl), conf);
			// rename returns false on failure (e.g. destination exists) without throwing.
			if (!fs.rename(new Path(oldname), new Path(newname))) {
				System.err.println("rename failed: " + oldname + " -> " + newname);
			}
		} catch (IOException e) {
			// Best-effort utility: report the failure instead of propagating it.
			e.printStackTrace();
		}
	}

	public static void main(String[] args) {
		HdfsMkdir("/songyang");
//		ListDir("/");
//		readFile("/join/result/part-r-00000");
//		listFileStatus("/");
	}

}
