package com.ls.fw.hadoop.client.base;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.List;

import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;

import com.ls.fw.hadoop.client.bean.Datanode;
import com.ls.fw.hadoop.client.bean.FileTreeItem;
import com.ls.fw.hadoop.client.bean.HadoopFileStatus;
import com.ls.fw.hadoop.client.bean.Namenode;
import com.ls.fw.hadoop.client.bean.PageBean;
import com.ls.fw.hadoop.client.constant.StatusType;
import com.ls.fw.hadoop.client.exception.HadoopException;

/**
 * Base DAO contract for HDFS file operations: existence checks, upload and
 * download, rename/delete/copy, directory listing with paging, and access to
 * the underlying Hadoop client objects ({@link FileSystem},
 * {@link FileContext}, {@link DFSClient}).
 *
 * <p>Interface members are implicitly {@code public}, so the redundant
 * modifier is omitted throughout (one style for all declarations; previously
 * the file mixed both forms).
 */
public interface BaseHadoopDao {

	/**
	 * Checks whether a path exists in HDFS.
	 *
	 * @param path the HDFS path to test
	 * @param hdfs the file system instance to query
	 * @return {@code true} if the path exists, {@code false} otherwise
	 * @throws IOException if communication with HDFS fails
	 */
	boolean isExist(String path, FileSystem hdfs) throws IOException;

	/**
	 * Uploads a local file into HDFS.
	 *
	 * <p>"Local" is relative to the JVM running this code: on a Hadoop
	 * server it is that server's file system; on a Windows PC it is the
	 * PC's file system.
	 *
	 * @param srcPath  path of the source file on the local file system
	 * @param destPath destination path in HDFS
	 * @throws HadoopException if the upload fails
	 */
	void uploadFile(String srcPath, String destPath) throws HadoopException;

	/**
	 * Uploads a local file to HDFS.
	 *
	 * @param srcPath  path of the source file on the local file system
	 * @param destPath destination path in HDFS
	 * @throws HadoopException if the upload fails
	 */
	void uploadFileFromLocalFile(String srcPath, String destPath)
			throws HadoopException;

	/**
	 * Downloads an HDFS file to the local file system.
	 *
	 * @param srcPath  source path in HDFS
	 * @param destPath destination path on the local file system
	 * @param fs       the file system instance to read from
	 * @throws HadoopException if the download fails
	 */
	void downFile(String srcPath, String destPath, FileSystem fs)
			throws HadoopException;

	/**
	 * Downloads an HDFS file to the local file system.
	 *
	 * @param srcPath  source path in HDFS
	 * @param destPath destination path on the local file system
	 * @throws HadoopException if the download fails
	 */
	void downFileToLocal(String srcPath, String destPath)
			throws HadoopException;

	/**
	 * Downloads an HDFS file to the local file system using the DAO's own
	 * file system instance.
	 *
	 * @param srcPath  source path in HDFS
	 * @param destPath destination path on the local file system
	 * @throws HadoopException if the download fails
	 */
	void downFile(String srcPath, String destPath) throws HadoopException;

	/**
	 * Opens an output stream for writing to the given HDFS path.
	 *
	 * <p>Callers are responsible for closing the returned stream.
	 *
	 * @param path the HDFS path to write to
	 * @return an open output stream for the path
	 * @throws Exception if the stream cannot be opened
	 */
	OutputStream getOutputStream(String path) throws Exception;

	/**
	 * Lists the statuses of entries under a path, one page at a time.
	 *
	 * @param path     the HDFS directory path
	 * @param page     1-based page index (assumed; confirm with implementation)
	 * @param pageSize maximum number of entries per page
	 * @return the file statuses for the requested page
	 * @throws IOException if the listing fails
	 */
	FileStatus[] listStatus(String path, int page, int pageSize)
			throws IOException;

	/**
	 * Returns the total number of entries under a directory
	 * (presumably direct children only — confirm with implementation).
	 *
	 * @param dirPath the HDFS directory path
	 * @return the number of entries
	 * @throws IOException if the count fails
	 */
	int getTotalNum(String dirPath) throws IOException;

	/**
	 * Streams an HDFS file to an HTTP response as a download.
	 *
	 * @param request  the servlet request
	 * @param response the servlet response to write the file to
	 * @param path     the HDFS path of the file
	 * @param name     the file name presented to the downloading client
	 * @throws HadoopException if the transfer fails
	 */
	void down(HttpServletRequest request, HttpServletResponse response,
			String path, String name) throws HadoopException;

	/**
	 * Renames (moves) a path in HDFS.
	 *
	 * @param srcPath  the existing path
	 * @param destPath the new path
	 * @param fs       the file system instance to operate on
	 * @return {@code true} if the rename succeeded
	 * @throws Exception if the rename fails
	 */
	boolean rename(String srcPath, String destPath, FileSystem fs)
			throws Exception;

	/**
	 * Renames (moves) a path in HDFS using the DAO's own file system.
	 *
	 * @param srcPath  the existing path
	 * @param destPath the new path
	 * @return {@code true} if the rename succeeded
	 * @throws Exception if the rename fails
	 */
	boolean rename(String srcPath, String destPath) throws Exception;

	/**
	 * Returns the status (metadata) of a path.
	 *
	 * @param srcPath the HDFS path
	 * @return the file status
	 * @throws IOException if the lookup fails
	 */
	FileStatus getFileStatus(String srcPath) throws IOException;

	/**
	 * Deletes a file.
	 *
	 * @param srcPath the HDFS path of the file
	 * @return {@code true} if the delete succeeded
	 * @throws IOException if the delete fails
	 */
	boolean deleteFile(String srcPath) throws IOException;

	/**
	 * Deletes a directory.
	 *
	 * @param srcPath   the HDFS path of the directory
	 * @param recursive whether to also delete the directory's contents
	 * @return {@code true} if the delete succeeded
	 * @throws IOException if the delete fails
	 */
	boolean deleteDir(String srcPath, boolean recursive) throws IOException;

	/**
	 * Deletes a path.
	 *
	 * @param srcPath the HDFS path
	 * @param fs      the file system instance to operate on
	 * @return {@code true} if the delete succeeded
	 * @throws IOException if the delete fails
	 */
	boolean delete(String srcPath, FileSystem fs) throws IOException;

	/**
	 * Deletes a path using the DAO's own file system.
	 *
	 * @param srcPath the HDFS path
	 * @return {@code true} if the delete succeeded
	 * @throws IOException if the delete fails
	 */
	boolean delete(String srcPath) throws IOException;

	/**
	 * Copies a file within HDFS.
	 *
	 * @param srcPath  source path
	 * @param destPath destination path
	 * @param fs       the file system instance to operate on
	 * @throws IOException if the copy fails
	 */
	void copyFile(String srcPath, String destPath, FileSystem fs)
			throws IOException;

	/**
	 * Copies a file within HDFS using the DAO's own file system.
	 *
	 * @param srcPath  source path
	 * @param destPath destination path
	 * @throws IOException if the copy fails
	 */
	void copyFile(String srcPath, String destPath) throws IOException;

	/**
	 * Returns the top-level (root) directory entries as tree items.
	 *
	 * @return the root directory entries
	 * @throws IOException if the listing fails
	 */
	List<FileTreeItem> getRootDirectory() throws IOException;

	/**
	 * Returns the top-level (root) directory entries via
	 * {@link FileContext}.
	 *
	 * @return the root directory entries
	 * @throws Exception if the listing fails
	 */
	List<FileTreeItem> getRootDirectoryByFC() throws Exception;

	/**
	 * Returns the top-level (root) directory entries using the given file
	 * system.
	 *
	 * @param fs the file system instance to query
	 * @return the root directory entries
	 * @throws IOException if the listing fails
	 */
	List<FileTreeItem> getRootDirectory(FileSystem fs) throws IOException;

	/**
	 * Returns a page of files under a directory as tree items.
	 *
	 * @param dirPath  the HDFS directory path
	 * @param page     1-based page index (assumed; confirm with implementation)
	 * @param pageSize maximum number of entries per page
	 * @return the files for the requested page
	 * @throws Exception if the listing fails
	 */
	List<FileTreeItem> getFiles(String dirPath, int page, int pageSize)
			throws Exception;

	/**
	 * Returns a page of files under a directory as tree items, using the
	 * given file system.
	 *
	 * @param dirPath  the HDFS directory path
	 * @param page     1-based page index (assumed; confirm with implementation)
	 * @param pageSize maximum number of entries per page
	 * @param fs       the file system instance to query
	 * @return the files for the requested page
	 * @throws Exception if the listing fails
	 */
	List<FileTreeItem> getFiles(String dirPath, int page, int pageSize,
			FileSystem fs) throws Exception;

	/**
	 * Returns all files under a directory as tree items.
	 *
	 * @param dirPath the HDFS directory path
	 * @return the files in the directory
	 * @throws Exception if the listing fails
	 */
	List<FileTreeItem> getFiles(String dirPath) throws Exception;

	/**
	 * Returns the statuses of the children of a directory.
	 *
	 * @param dirPath the HDFS directory path
	 * @return the child file statuses
	 * @throws IOException if the listing fails
	 */
	FileStatus[] getChildFile(String dirPath) throws IOException;

	/**
	 * Returns the statuses of the children of a directory, using the given
	 * file system.
	 *
	 * @param dirPath the HDFS directory path
	 * @param fs      the file system instance to query
	 * @return the child file statuses
	 * @throws IOException if the listing fails
	 */
	FileStatus[] getChildFile(String dirPath, FileSystem fs)
			throws IOException;

	/**
	 * Finds files under a directory matching a filter, one page at a time,
	 * using the given file system.
	 *
	 * @param dirPath   the HDFS directory path
	 * @param filterStr the filter expression (semantics defined by the
	 *                  implementation — presumably a name substring or
	 *                  pattern; confirm with implementation)
	 * @param page      1-based page index (assumed; confirm with implementation)
	 * @param pageSize  maximum number of entries per page
	 * @param fs        the file system instance to query
	 * @return the matching file statuses for the requested page
	 * @throws Exception if the search fails
	 */
	FileStatus[] findFile(String dirPath, String filterStr, int page,
			int pageSize, FileSystem fs) throws Exception;

	/**
	 * Finds files under a directory matching a filter via
	 * {@link FileContext}, one page at a time.
	 *
	 * @param dirPath   the HDFS directory path
	 * @param filterStr the filter expression
	 * @param page      1-based page index (assumed; confirm with implementation)
	 * @param pageSize  maximum number of entries per page
	 * @return an iterator over the matching file statuses
	 * @throws Exception if the search fails
	 */
	RemoteIterator<FileStatus> findFileByFC(String dirPath, String filterStr,
			int page, int pageSize) throws Exception;

	/**
	 * Runs a paged query for files by path and name.
	 *
	 * @param path     the HDFS directory path
	 * @param name     the file name (or name filter) to query for
	 * @param page     1-based page index (assumed; confirm with implementation)
	 * @param pageSize maximum number of entries per page
	 * @return the page of results
	 * @throws Exception if the query fails
	 */
	PageBean query(String path, String name, int page, int pageSize)
			throws Exception;

	/**
	 * Returns a page of file statuses under a directory as
	 * {@link HadoopFileStatus} beans.
	 *
	 * @param dirPath  the HDFS directory path
	 * @param page     1-based page index (assumed; confirm with implementation)
	 * @param pageSize maximum number of entries per page
	 * @return the file statuses for the requested page
	 * @throws Exception if the listing fails
	 */
	List<HadoopFileStatus> findHadoopFileStatus(String dirPath, int page,
			int pageSize) throws Exception;

	/**
	 * Returns a page of entries under a directory as tree items.
	 *
	 * @param dirPath  the HDFS directory path
	 * @param page     1-based page index (assumed; confirm with implementation)
	 * @param pageSize maximum number of entries per page
	 * @return the tree items for the requested page
	 * @throws Exception if the listing fails
	 */
	List<FileTreeItem> findFileTreeItem(String dirPath, int page,
			int pageSize) throws Exception;

	/**
	 * Returns a page of file statuses under a directory via
	 * {@link FileContext}.
	 *
	 * @param dirPath  the HDFS directory path
	 * @param page     1-based page index (assumed; confirm with implementation)
	 * @param pageSize maximum number of entries per page
	 * @return the file statuses for the requested page
	 * @throws Exception if the listing fails
	 */
	List<FileStatus> findFileStatusByFC(String dirPath, int page,
			int pageSize) throws Exception;

	/**
	 * Finds files under a directory matching a filter, one page at a time,
	 * using the DAO's own file system.
	 *
	 * @param dirPath   the HDFS directory path
	 * @param filterStr the filter expression
	 * @param page      1-based page index (assumed; confirm with implementation)
	 * @param pageSize  maximum number of entries per page
	 * @return the matching file statuses for the requested page
	 * @throws Exception if the search fails
	 */
	FileStatus[] findFile(String dirPath, String filterStr, int page,
			int pageSize) throws Exception;

	/**
	 * Returns the files under a directory that do NOT match the filter
	 * (i.e. filters matching files out).
	 *
	 * @param dirPath   the HDFS directory path
	 * @param filterStr the filter expression to exclude by
	 * @return the remaining file statuses
	 * @throws Exception if the operation fails
	 */
	FileStatus[] filterFile(String dirPath, String filterStr)
			throws Exception;

	/**
	 * Uploads the contents of a stream to HDFS.
	 *
	 * @param inStream the source stream (ownership/closing semantics are
	 *                 implementation-defined — confirm with implementation)
	 * @param savePath the destination path in HDFS
	 * @return the saved path or an operation result string
	 *         (implementation-defined — confirm with implementation)
	 */
	String upload(InputStream inStream, String savePath);

	/**
	 * Switches this DAO to a different HDFS endpoint.
	 *
	 * @param hdfsUrl the URL of the HDFS cluster to connect to
	 * @throws HadoopException if the switch fails
	 */
	void switchHDFS(String hdfsUrl) throws HadoopException;

	/**
	 * Returns the {@link FileContext} backing this DAO.
	 *
	 * @throws HadoopException if the context is unavailable
	 */
	FileContext getFC() throws HadoopException;

	/**
	 * Returns the {@link DistributedFileSystem} backing this DAO.
	 *
	 * @throws HadoopException if the file system is unavailable
	 */
	DistributedFileSystem getDFS() throws HadoopException;

	/**
	 * Returns the {@link FileSystem} backing this DAO.
	 *
	 * @throws HadoopException if the file system is unavailable
	 */
	FileSystem getFS() throws HadoopException;

	/**
	 * Returns the low-level {@link DFSClient} backing this DAO.
	 *
	 * @throws HadoopException if the client is unavailable
	 */
	DFSClient getDfsClient() throws HadoopException;

	/**
	 * Returns a {@link FileSystem} instance. How this differs from
	 * {@link #getFS()} is implementation-defined — confirm with
	 * implementation.
	 *
	 * @throws HadoopException if the file system is unavailable
	 */
	FileSystem getFileSystem() throws HadoopException;

	/**
	 * Releases resources held by this DAO.
	 */
	void close();

	/**
	 * Returns information about the namenode.
	 *
	 * @throws IOException if the lookup fails
	 */
	Namenode getNamenodeList() throws IOException;

	/**
	 * Returns the datanodes in the given status.
	 *
	 * @param status the datanode status to filter by
	 * @return the matching datanodes
	 * @throws IOException if the lookup fails
	 */
	List<Datanode> getDatanodeList(StatusType status) throws IOException;

	/**
	 * Switches this DAO to a different HDFS endpoint using an explicit
	 * configuration.
	 *
	 * @param hdfsUrl the URL of the HDFS cluster to connect to
	 * @param conf    the Hadoop configuration to apply
	 * @throws HadoopException if the switch fails
	 */
	void switchConf(String hdfsUrl, Configuration conf) throws HadoopException;
}
