package bigdata.hdfs;
//Merges all files with the .txt extension under a given HDFS path into a single output file.
import java.io.IOException;
import java.io.PrintStream;
import java.net.URI;

import org.apache.hadoop.fs.*;

class MyPathFilter implements PathFilter{
	String reg=null;
	public MyPathFilter(String reg) {
		this.reg=reg;
	}
	
	public boolean accept(Path path) {
		if(path.toString().matches(reg))
			return true;
		return false;
	}
}

/**
 * Merges every file matching {@code .*txt} under an HDFS input directory into
 * one HDFS output file, echoing each file's metadata and contents to stdout.
 *
 * <p>Relies on the project-local {@code Connect2HDFS} helper for the Hadoop
 * {@code Configuration} and connection setup.
 */
public class Merge {
	private Path inputPath = null;
	private Path outputPath = null;

	/**
	 * @param input  HDFS directory whose *.txt files will be merged
	 * @param output HDFS path of the merged output file (created/overwritten)
	 */
	public Merge(String input, String output) {
		this.inputPath = new Path(input);
		this.outputPath = new Path(output);
		Connect2HDFS.connect2hdfs();
	}

	/** No-arg constructor; paths must be supplied via the setters before doMerge(). */
	public Merge() {
		Connect2HDFS.connect2hdfs();
	}

	public void setInputPath(String input) {
		this.inputPath = new Path(input);
	}

	public void setOutputPath(String output) {
		// BUG FIX: this previously assigned to inputPath, so the output path set
		// through this setter was silently discarded (and the input path clobbered).
		this.outputPath = new Path(output);
	}

	/**
	 * Performs the merge: lists *.txt files under inputPath, concatenates their
	 * bytes into outputPath, and mirrors each file's bytes to stdout.
	 *
	 * @throws IOException if any HDFS list/open/read/write operation fails
	 */
	public void doMerge() throws IOException {
		FileSystem fsSrc = FileSystem.get(URI.create(inputPath.toString()), Connect2HDFS.getconf());
		FileSystem fsDst = FileSystem.get(URI.create(outputPath.toString()), Connect2HDFS.getconf());

		try {
			FileStatus[] srcStatus = fsSrc.listStatus(inputPath, new MyPathFilter(".*txt"));

			// try-with-resources guarantees the output stream is closed even if a
			// read/write below throws (the original leaked it on failure).
			try (FSDataOutputStream fsdos = fsDst.create(outputPath)) {
				for (FileStatus sta : srcStatus) {
					System.out.println("Path : " + sta.getPath() + " FileSize : " + sta.getLen()
							+ " permission : " + sta.getPermission() + " Content ");
					try (FSDataInputStream fsdis = fsSrc.open(sta.getPath())) {
						byte[] data = new byte[4096];
						int read;
						// -1 is the only true EOF signal; the original's "> 0" test
						// would stop early on a legal zero-byte read.
						while ((read = fsdis.read(data)) != -1) {
							// Echo to stdout without wrapping/closing System.out:
							// the original closed a PrintStream over System.out,
							// which closes stdout for the rest of the JVM.
							System.out.write(data, 0, read);
							fsdos.write(data, 0, read);
						}
					}
				}
			}
			System.out.flush();
		} finally {
			// NOTE(review): FileSystem.get() may return cached, shared instances;
			// closing them can affect other users of the same Configuration.
			// Kept to match the original behavior — confirm this is intended.
			fsDst.close();
			fsSrc.close();
		}
	}

	public static void main(String[] args) throws IOException {
		String inputPath = "mergeinput";
		String outputPath = "mergeout.txt";

		Merge merge = new Merge(inputPath, outputPath);
		merge.doMerge();
	}
}