package bigdata.hdfs;
//Copies all files with a given extension from an HDFS directory to the local file system.
import java.io.IOException;
import java.net.URI;
import java.io.FileOutputStream;

import org.apache.hadoop.fs.*;

/**
 * Copies every regular file whose name ends with a given extension from an
 * HDFS directory to a local directory, using the Hadoop configuration
 * supplied by the project-local {@code Connect2HDFS} helper.
 */
public class MyGet {

	/** HDFS directory to read from (also used to derive the FileSystem URI). */
	private final String remotePath;
	/** Suffix to match; note this is a raw endsWith check, no dot is implied. */
	private final String file_extension;
	/** Local destination directory (must already exist). */
	private final String localpath;

	/**
	 * @param remotePath     HDFS directory whose files are scanned
	 * @param file_extension suffix that selects which files to copy
	 * @param localpath      local directory the files are written into
	 */
	public MyGet(String remotePath, String file_extension, String localpath) {
		this.remotePath = remotePath;
		this.file_extension = file_extension;
		this.localpath = localpath;
		Connect2HDFS.connect2hdfs();
	}

	/**
	 * Lists the remote directory and copies each matching regular file to
	 * {@code localpath}, overwriting any existing local file of the same name.
	 *
	 * @throws IOException if listing, reading from HDFS, or writing locally fails
	 */
	public void doGet() throws IOException {
		URI uri = URI.create(remotePath);
		// try-with-resources: the FileSystem is closed even if a copy throws
		// (the original only closed it on the success path).
		try (FileSystem fs = FileSystem.get(uri, Connect2HDFS.getconf())) {
			FileStatus[] fileStatuses = fs.listStatus(new Path(remotePath));

			for (FileStatus fileStatus : fileStatuses) {
				// NOTE(review): a bare endsWith means extension "txt" also matches
				// "report.mytxt"; kept as-is to preserve existing behavior.
				if (fileStatus.isFile() && fileStatus.getPath().getName().endsWith(file_extension)) {
					System.out.println(fileStatus.getPath().getName());
					copyToLocal(fs, fileStatus.getPath());
				}
			}
		}
	}

	/** Streams one HDFS file into {@code localpath}, closing both streams even on failure. */
	private void copyToLocal(FileSystem fs, Path hdfsFile) throws IOException {
		// Both streams in try-with-resources: the original leaked `in` when
		// the copy threw, and called a redundant out.close() by hand.
		try (FSDataInputStream in = fs.open(hdfsFile);
				FileOutputStream out = new FileOutputStream(localpath + "/" + hdfsFile.getName())) {
			byte[] buffer = new byte[8192]; // 8 KiB: conventional I/O buffer size
			int read;
			while ((read = in.read(buffer)) > 0) {
				out.write(buffer, 0, read);
			}
		}
	}

	/**
	 * Entry point: expects {@code <remotePath> <extension> <localDir>};
	 * falls back to hard-coded demo values when the argument count differs.
	 */
	public static void main(String[] args) throws IOException {
		String remotePath, file_extension, localpath;

		if (args.length == 3) {
			remotePath = args[0];
			file_extension = args[1];
			localpath = args[2];
		} else {
			remotePath = "hdfspath";
			file_extension = "txt";
			localpath = "/home/abc/Downloads";
		}
		MyGet myget = new MyGet(remotePath, file_extension, localpath);
		myget.doGet();
	}
}