package tul.cot.job.output;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import org.apache.hadoop.fs.Path;
import org.apache.log4j.Logger;

/**
 * Fetches text reduce output ({@code part-r-*} files) from an HDFS directory by
 * shelling out to the {@code hadoop fs -cat} command, then sorts the lines and
 * writes them to a fresh local directory via {@link LineSaver}.
 * <p>
 * Requires the {@code HADOOP_HOME} environment variable to point at a Hadoop
 * installation containing {@code bin/hadoop}. Not thread-safe.
 */
public class HDFSGetter_old {

	private static final Logger LOG = Logger.getLogger(HDFSGetter_old.class);

	private static final String HADOOP_HOME = System.getenv("HADOOP_HOME");
	private static final String DEFAULT_OUT = "out";

	/**
	 * Fallback grouper: every line goes to a single file named
	 * {@link #DEFAULT_OUT} with no header. Currently unreferenced by the public
	 * API but kept for a parameterless fetch variant.
	 */
	private static final LinesGrouper BASIC_GROUPER = new LinesGrouper() {

		@Override
		public String getFileName(String line) {
			return DEFAULT_OUT;
		}

		@Override
		public String getFileHeader(String fileName) {
			return null;
		}
	};

	String hadoopBin;
	Path hdfsDir;
	File localDir;

	/**
	 * @param hdfsDir  HDFS directory containing the {@code part-r-*} reduce output
	 * @param localDir local destination directory; must NOT exist yet (it is created here)
	 * @throws RuntimeException if {@code HADOOP_HOME} is not set or {@code localDir}
	 *                          cannot be created
	 * @throws IllegalArgumentException if {@code localDir} already exists
	 */
	public HDFSGetter_old(Path hdfsDir, File localDir) {
		if (HADOOP_HOME == null)
			throw new RuntimeException("HADOOP_HOME not set");

		File hadoop = new File(HADOOP_HOME, "bin/hadoop");
		// Normalize to forward slashes so the binary path also works when built on Windows.
		hadoopBin = hadoop.getAbsolutePath().replace('\\', '/');
		// Diagnostics go through the class logger, not System.out.
		LOG.debug("hadoop.exists(): " + hadoop.exists());
		LOG.debug("new File(hadoopBin).exists(): " + new File(hadoopBin).exists());

		this.hdfsDir = hdfsDir;
		this.localDir = localDir;

		validate();
		init();
	}

	/** Precondition checks on the constructor arguments. */
	private void validate() {
		// TODO: hdfs dir exists

		// localDir must not exist yet -- refuse to clobber previous output
		if (localDir.exists())
			throw new IllegalArgumentException(String.format("localDir (%s) already exists",
					localDir.getAbsolutePath()));
	}

	/** Creates the local output directory, failing loudly if that is impossible. */
	private void init() {
		// mkdirs (not mkdir) also creates missing parents; check the result
		// instead of silently ignoring a failure.
		if (!localDir.mkdirs())
			throw new RuntimeException(String.format("could not create localDir (%s)",
					localDir.getAbsolutePath()));
	}

	/**
	 * Reads all reduce-output lines from HDFS, sorts them with {@code linesCmp},
	 * and writes them into {@link #localDir}, partitioned into files by
	 * {@code grouper}.
	 *
	 * @param linesCmp ordering applied to the fetched lines before saving
	 * @param grouper  decides the target file (and optional header) for each line
	 * @throws IOException          if reading the child process output fails
	 * @throws InterruptedException if waiting for the child process is interrupted
	 * @throws RuntimeException     if the hadoop command exits non-zero
	 */
	public void getTextReduceOutput(Comparator<String> linesCmp, LinesGrouper grouper) throws IOException, InterruptedException {
		List<String> getCmd = getTextReduceOutputCommand();
		LOG.info("getCmd: " + getCmd);
		List<String> lines = getTextLines(getCmd);

		Collections.sort(lines, linesCmp);
		LineSaver saver = new LineSaver(grouper);
		saver.saveLines(lines, localDir);
	}

	/**
	 * Runs {@code command} and returns its stdout as a list of lines.
	 *
	 * @throws RuntimeException if the command exits with a non-zero status
	 */
	private List<String> getTextLines(List<String> command) throws IOException, InterruptedException {
		// ProcessBuilder with an argument list avoids Runtime.exec(String)'s naive
		// whitespace tokenization, which would break paths containing spaces.
		ProcessBuilder pb = new ProcessBuilder(command);
		// Forward the child's stderr to this JVM's stderr; otherwise a chatty
		// hadoop client can fill the unread pipe buffer and deadlock.
		pb.redirectError(ProcessBuilder.Redirect.INHERIT);
		Process catProcess = pb.start();

		List<String> lines = new ArrayList<String>();
		// Hadoop text output is UTF-8; do not rely on the platform default charset.
		BufferedReader reader = new BufferedReader(
				new InputStreamReader(catProcess.getInputStream(), StandardCharsets.UTF_8));
		try {
			String l;
			while ((l = reader.readLine()) != null) {
				lines.add(l);
			}
		} finally {
			// Always release the stream, even if readLine() throws.
			reader.close();
		}

		int exitVal = catProcess.waitFor();
		if (exitVal != 0) {
			throw new RuntimeException(String.format("Reading output from HDFS exited with error code: %d", exitVal));
		}

		return lines;
	}

	/**
	 * Builds the argument vector for catting all reduce output files. The
	 * {@code part-r-*} glob is expanded by {@code hadoop fs -cat} itself, not by
	 * a shell, so passing it as a literal argument is correct.
	 */
	private List<String> getTextReduceOutputCommand() {
		return Arrays.asList(hadoopBin, "fs", "-cat", hdfsDir.toString() + "/part-r-*");
	}

}