package experiment02;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintStream;
//import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Chapter03 {

	private Configuration conf = null;
	private String localPath = null;
	private Path remotePath = null;

	/**
	 * Creates a helper bound to a local source folder, an HDFS destination
	 * directory and the HDFS namenode URI.
	 *
	 * @param localPath    local folder whose files will be uploaded
	 * @param remotePath   HDFS destination directory
	 * @param remoteserver HDFS URI, e.g. {@code hdfs://localhost:9000/}
	 */
	public Chapter03(String localPath, String remotePath, String remoteserver) {
		this.localPath = localPath;
		this.remotePath = new Path(remotePath);
		init(remoteserver);
	}

	/** Creates a helper bound only to the HDFS namenode URI. */
	public Chapter03(String remoteserver) {
		init(remoteserver);
	}

	/** Initializes the Hadoop configuration, pointing it at the given namenode. */
	public void init(String remoteserver) {
		conf = new Configuration();
		conf.set("fs.defaultFS", remoteserver);
	}

	/** Sets the local source folder used by {@link #copyFiles2Remote()}. */
	public void setInputPath(String localPath) {
		this.localPath = localPath;
	}

	/** Sets the HDFS destination directory used by uploads. */
	public void setOutputPath(String remotePath) {
		this.remotePath = new Path(remotePath);
	}

	/**
	 * Copies every regular file under the local folder (including files in
	 * subdirectories) to the configured HDFS destination directory.
	 * Note the directory structure is flattened: all files land directly in
	 * {@code remotePath}, so same-named files in different subfolders collide.
	 */
	public void copyFiles2Remote() throws IOException {
		File folder = new File(this.localPath);
		listAllFiles(folder);
	}

	/**
	 * Recursively walks {@code folder}, uploading each regular file to HDFS
	 * and descending into each subdirectory.
	 *
	 * @param folder local directory to walk; silently ignored if it does not
	 *               exist or is not a directory
	 */
	public void listAllFiles(File folder) throws IOException {
		if (folder.exists() && folder.isDirectory()) {
			File[] files = folder.listFiles();
			if (files != null) {
				for (File file : files) {
					if (file.isFile()) {
						System.out.println("File: " + file.getAbsolutePath());
						copyFile2Remote(file.getAbsolutePath());
					} else if (file.isDirectory()) {
						System.out.println("Directory: " + file.getAbsolutePath());
						listAllFiles(file); // recurse into subdirectory
					}
				}
			}
		}
	}

	/**
	 * Copies one local file into the configured HDFS destination directory,
	 * keeping its base name.
	 *
	 * @param fileName absolute path of the local source file
	 */
	public void copyFile2Remote(String fileName) throws IOException {
		FileSystem fsDst = FileSystem.get(conf);

		File sourceFile = new File(fileName);
		String remoteFileName = remotePath.toString() + "/" + sourceFile.getName();

		// try-with-resources guarantees both streams are closed even if the
		// copy throws part-way through.
		try (InputStream in = new FileInputStream(sourceFile);
				FSDataOutputStream out = fsDst.create(new Path(remoteFileName))) {
			byte[] buffer = new byte[1024];
			int length;
			while ((length = in.read(buffer)) != -1) {
				out.write(buffer, 0, length);
			}
		}

		System.out.println("Copied ");
		// Deliberately NOT closing fsDst: FileSystem.get(conf) returns a
		// cached, shared instance, and closing it here would break every
		// subsequent HDFS operation in this process.
	}

	/**
	 * Prints the contents of the given HDFS file to standard output.
	 *
	 * @param fileName HDFS path of the file to display
	 */
	public void cat(String fileName) throws IOException {
		FileSystem fs = FileSystem.get(conf);

		try (FSDataInputStream in = fs.open(new Path(fileName))) {
			byte[] data = new byte[1024];
			int read;
			while ((read = in.read(data)) != -1) {
				// Write directly to System.out; wrapping it in a PrintStream
				// and closing that would close System.out for the whole JVM.
				System.out.write(data, 0, read);
			}
			System.out.flush();
		}
	}

	/**
	 * Concatenates two HDFS files into a new HDFS file:
	 * {@code fileDst = file1 + file2}.
	 *
	 * @param file1   HDFS path of the first input
	 * @param file2   HDFS path of the second input
	 * @param fileDst HDFS path of the output (overwritten if it exists)
	 */
	public void concat(String file1, String file2, String fileDst) throws IOException {
		FileSystem fs = FileSystem.get(conf);

		try (FSDataInputStream in1 = fs.open(new Path(file1));
				FSDataInputStream in2 = fs.open(new Path(file2));
				FSDataOutputStream out = fs.create(new Path(fileDst))) {
			byte[] data = new byte[1024];
			int read;
			while ((read = in1.read(data)) != -1) {
				out.write(data, 0, read);
			}
			while ((read = in2.read(data)) != -1) {
				out.write(data, 0, read);
			}
		}
		// Do not close fs: it is the shared cached FileSystem instance.
	}

	/**
	 * Lists the regular files (not directories) directly inside an HDFS folder.
	 *
	 * @param hdfsFolder HDFS directory to list
	 * @return full HDFS paths of the files found
	 */
	public List<String> listHDFSFolder(String hdfsFolder) throws IOException {
		FileSystem fsSourceFolder = FileSystem.get(conf);

		FileStatus[] srcStatus = fsSourceFolder.listStatus(new Path(hdfsFolder));

		List<String> fileNames = new ArrayList<>();

		for (FileStatus sta : srcStatus) {
			if (sta.isFile())
				fileNames.add(sta.getPath().toString());
		}
		return fileNames;
	}

	/** Downloads each listed HDFS file into the given local folder. */
	public void copyFilesFromHDFS(List<String> fileNames, String localFolder) throws IOException {
		for (String filename : fileNames) {
			copyFileFromHDFS(filename, localFolder);
		}
	}

	/**
	 * Copies one file from HDFS to the given local folder, keeping its base name.
	 *
	 * @param fileName    HDFS path of the source file
	 * @param localFolder local destination directory
	 */
	public void copyFileFromHDFS(String fileName, String localFolder) throws IOException {
		FileSystem fs = FileSystem.get(conf);

		String localFileName = localFolder + "/" + new Path(fileName).getName();

		// HDFS stream is the source ("in"), local file is the sink ("out").
		try (FSDataInputStream in = fs.open(new Path(fileName));
				FileOutputStream out = new FileOutputStream(localFileName)) {
			byte[] buffer = new byte[1024];
			int length;
			while ((length = in.read(buffer)) != -1) {
				out.write(buffer, 0, length);
			}
		}

		System.out.println("Copied ");
	}

	/**
	 * Recursively prints every file and directory under the given HDFS path.
	 *
	 * @param path HDFS directory to walk
	 */
	public void dirTree(Path path) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		FileStatus[] fileStatus = fs.listStatus(path);

		for (FileStatus sta : fileStatus) {
			System.out.println(sta.getPath());
			if (sta.isDirectory()) {
				dirTree(sta.getPath());
			}
		}
		// Do not close fs here: FileSystem.get(conf) returns a cached shared
		// instance, so a recursive call closing it would invalidate the
		// FileSystem the caller is still iterating with.
	}

	public static void main(String[] args) throws IOException {
		String localPath = "/home/abc/input";
		String remotePath = "hdfs://localhost:9000/user/abc/input1";

		Chapter03 myput = new Chapter03(localPath, remotePath, "hdfs://localhost:9000/");

//		myput.dirTree(new Path("hdfs://localhost:9000/"));

		List<String> fileNames = myput.listHDFSFolder("input1");
		myput.copyFilesFromHDFS(fileNames, "/home/abc/");

		myput.cat("hdfs://localhost:9000/user/abc/input/hadoop-abc-datanode-u2.log");

//		myput.copyFiles2Remote();
	}
}
