package com.sarnath.sardoop.modules.hadoop.service;

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.springframework.context.annotation.Lazy;
import org.springframework.data.hadoop.fs.HdfsResourceLoader;
import org.springframework.stereotype.Service;

import com.sarnath.sardoop.modules.hadoop.entity.HdfsFileInfo;
import com.sarnath.sardoop.modules.hadoop.excption.FileOrDirectoryIsExists;
import com.sarnath.sardoop.modules.hadoop.excption.FileOrDirectoryNotExists;
import com.sarnath.sardoop.modules.hadoop.util.PathUtils;

/**
 * Shell-style HDFS operations (create, delete, mkdir, copy, move, list,
 * disk-usage, download, existence/type checks).
 *
 * <p>All paths are normalized via {@link PathUtils#concat} before use.
 * Checked {@link IOException}s from Hadoop are wrapped in
 * {@link RuntimeException} (cause preserved); the domain exceptions
 * {@link FileOrDirectoryNotExists} / {@link FileOrDirectoryIsExists}
 * propagate unchanged so callers can distinguish them.
 *
 * @author yingc
 */
@Service("hdfsExplorer")
@Lazy(false)
public class HdfsExplorerService {
	/** Copy-buffer size in bytes used for stream transfers. */
	private static final int BUFFER_SIZE = 4096;

	@javax.annotation.Resource
	private HdfsResourceLoader hadoopFsLoader;
	@javax.annotation.Resource
	private FileSystem hadoopFs;
	@javax.annotation.Resource
	private Configuration hadoopConfiguration;

	/**
	 * Creates a new file named {@code name} under directory {@code parent}
	 * and writes the contents of {@code in} to it. Missing parent
	 * directories are created automatically.
	 *
	 * @param parent directory in which to create the file
	 * @param name   name of the new file
	 * @param in     stream supplying the file contents; closed on completion
	 * @throws FileOrDirectoryIsExists  if the target file already exists
	 * @throws FileOrDirectoryNotExists if {@code parent} exists but is not a directory
	 */
	public void create(String parent, String name, InputStream in) {
		try {
			String fullName = PathUtils.concat(parent, name);
			if (exists(fullName)) throw new FileOrDirectoryIsExists(name);
			if (!exists(parent)) {
				hadoopFs.mkdirs(new Path(parent));
			}
			if (!isDirectory(parent)) throw new FileOrDirectoryNotExists(parent);
			// try-with-resources guarantees both streams are closed even if
			// the copy fails part-way (the original leaked fsOut on error).
			try (InputStream buffered = new BufferedInputStream(in);
					FSDataOutputStream fsOut = hadoopFs.create(new Path(fullName), false)) {
				IOUtils.copyBytes(buffered, fsOut, BUFFER_SIZE, false);
			}
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Recursively deletes the file or directory at {@code path}.
	 *
	 * @param path path to delete
	 * @return {@code true} if the deletion succeeded
	 * @throws FileOrDirectoryNotExists if {@code path} does not exist
	 */
	public boolean del(String path) {
		try {
			path = PathUtils.concat(path);
			if (!hadoopFs.exists(new Path(path))) throw new FileOrDirectoryNotExists(path);
			return hadoopFs.delete(new Path(path), true); // true => recursive
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Creates the directory at {@code path} (including missing parents).
	 *
	 * @param path directory path to create
	 * @return {@code true} if creation succeeded
	 * @throws FileOrDirectoryIsExists if {@code path} already exists
	 */
	public boolean mkdir(String path) {
		try {
			path = PathUtils.concat(path);
			if (hadoopFs.exists(new Path(path))) throw new FileOrDirectoryIsExists(path);
			return hadoopFs.mkdirs(new Path(path));
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Copies {@code src} to {@code dest} within the same filesystem,
	 * keeping the source in place.
	 *
	 * @throws FileOrDirectoryNotExists if {@code src} does not exist
	 * @throws FileOrDirectoryIsExists  if {@code dest} already exists
	 */
	public void cp(String src, String dest) {
		try {
			src = PathUtils.concat(src);
			dest = PathUtils.concat(dest);
			if (!hadoopFs.exists(new Path(src))) throw new FileOrDirectoryNotExists(src);
			if (hadoopFs.exists(new Path(dest))) throw new FileOrDirectoryIsExists(dest);
			// deleteSource=false => plain copy
			FileUtil.copy(hadoopFs, new Path(src), hadoopFs, new Path(dest), false, hadoopConfiguration);
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Moves {@code src} to {@code dest} (copy then delete source).
	 *
	 * @throws FileOrDirectoryNotExists if {@code src} does not exist
	 * @throws FileOrDirectoryIsExists  if {@code dest} already exists
	 */
	public void mv(String src, String dest) {
		try {
			src = PathUtils.concat(src);
			dest = PathUtils.concat(dest);
			if (!hadoopFs.exists(new Path(src))) throw new FileOrDirectoryNotExists(src);
			if (hadoopFs.exists(new Path(dest))) throw new FileOrDirectoryIsExists(dest);
			// deleteSource=true => move semantics
			FileUtil.copy(hadoopFs, new Path(src), hadoopFs, new Path(dest), true, hadoopConfiguration);
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Lists {@code path} with aggregated byte sizes for directories
	 * (disk-usage view, like {@code hdfs dfs -du}).
	 */
	public HdfsFileInfo[] du(String path) {
		path = PathUtils.concat(path);
		return listStatus(path, true);
	}

	/**
	 * Lists the direct children of {@code filePath}
	 * (like {@code hdfs dfs -ls}).
	 */
	public HdfsFileInfo[] ls(String filePath) {
		filePath = PathUtils.concat(filePath);
		return listStatus(filePath, false);
	}

	/**
	 * Lists the entries of {@code filePath}.
	 *
	 * @param filePath path to list
	 * @param isDu     when {@code true}, directory entries get their total
	 *                 content length (recursive byte count) filled in
	 * @return one {@link HdfsFileInfo} per entry
	 * @throws FileOrDirectoryNotExists if {@code filePath} does not exist
	 */
	public HdfsFileInfo[] listStatus(String filePath, boolean isDu) {
		try {
			filePath = PathUtils.concat(filePath);
			if (!exists(filePath)) throw new FileOrDirectoryNotExists(filePath);
			FileStatus[] items = hadoopFs.listStatus(new Path(filePath));
			HdfsFileInfo[] infos = new HdfsFileInfo[items.length];
			for (int i = 0; i < items.length; i++) {
				FileStatus status = items[i];
				infos[i] = new HdfsFileInfo(status);
				if (isDu && status.isDirectory()) {
					// du view: replace the directory's own size with the
					// total length of everything under it
					infos[i].setBytes(hadoopFs.getContentSummary(status.getPath()).getLength());
				}
			}
			return infos;
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Reads the whole file at {@code filePath} into memory.
	 *
	 * <p>NOTE(review): loads the entire file into a byte array — suitable
	 * only for small files.
	 *
	 * @param filePath file to read
	 * @return the file contents
	 * @throws FileOrDirectoryNotExists if {@code filePath} is not an existing file
	 */
	public byte[] download(String filePath) {
		try {
			filePath = PathUtils.concat(filePath);
			if (!isFile(filePath)) throw new FileOrDirectoryNotExists(filePath);
			// try-with-resources replaces the old finally block that
			// silently swallowed close() failures
			try (FSDataInputStream inputStream = hadoopFs.open(new Path(filePath), BUFFER_SIZE)) {
				return org.apache.commons.io.IOUtils.toByteArray(inputStream);
			}
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (Exception e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * @return {@code true} if {@code filePath} is a directory
	 * @throws FileOrDirectoryNotExists if {@code filePath} does not exist
	 */
	public boolean isDirectory(String filePath) {
		filePath = PathUtils.concat(filePath);
		Path p = new Path(filePath);
		try {
			if (!hadoopFs.exists(p)) throw new FileOrDirectoryNotExists(filePath);
			return hadoopFs.getFileStatus(p).isDirectory();
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * @return {@code true} if {@code filePath} is a regular file
	 * @throws FileOrDirectoryNotExists if {@code filePath} does not exist
	 */
	public boolean isFile(String filePath) {
		filePath = PathUtils.concat(filePath);
		Path p = new Path(filePath);
		try {
			if (!hadoopFs.exists(p)) throw new FileOrDirectoryNotExists(filePath);
			return hadoopFs.getFileStatus(p).isFile();
		} catch (FileOrDirectoryNotExists | FileOrDirectoryIsExists e) {
			throw e;
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Classifies {@code filePath}.
	 *
	 * @return 0 = does not exist, 1 = directory, 2 = regular file,
	 *         3 = exists but neither (e.g. symlink)
	 */
	public int fileType(String filePath) {
		filePath = PathUtils.concat(filePath);
		if (!exists(filePath)) {
			return 0;
		} else if (isDirectory(filePath)) {
			return 1;
		} else if (isFile(filePath)) {
			return 2;
		}
		return 3;
	}

	/**
	 * @return {@code true} if {@code filePath} exists in HDFS
	 */
	public boolean exists(String filePath) {
		filePath = PathUtils.concat(filePath);
		try {
			return hadoopFs.exists(new Path(filePath));
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
	}

}
