package cgl.hadoopsensorgrid.sensorgrid.eval;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.File;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import cgl.sensorgrid.common.PropertyFile;

import cgl.hadoopsensorgrid.sensorgrid.util.*;

/**
 * 
 * evalController class takes the arguments from the mainController, and it
 * creates a job for saving and evaluating ASCII and POS data. It is started and
 * stopped periodically with a TIMEOUT value set in the rtdMapper.
 * 
 * @author Chao Sun
 * 
 */
public class evalController {
	private Properties properties;
	private Configuration conf;
	private Job job;
	// Default reducer count; overridden by the constructor's num argument when non-zero.
	private int numberOfReducer = 7;
	// Path of the file listing the data addresses/ports to evaluate.
	private String evalList = null;
	// Local directory holding the ASCII/POS temp files (from the properties file).
	private String inputBase = null;
	// Job output directory (from the properties file, may be overridden by outpath).
	private String outputBase = null;

	/**
	 * Constructor.
	 * 
	 * @param dataList
	 *          name of a file which contains the data addresses/ports; when null
	 *          or empty, defaults to {@code inputBase/datalist.txt}
	 * @param num
	 *          number of reducers; 0 keeps the default of 7
	 * @param outpath
	 *          output directory; when null or empty, the value loaded from the
	 *          properties file is kept
	 */
	public evalController(String dataList, int num, String outpath) {
		loadProperties();

		if (dataList == null || dataList.isEmpty()) {
			evalList = inputBase + File.separator + "datalist.txt";
		} else {
			evalList = dataList;
		}
		System.out.println("evalList: " + evalList);

		if (num != 0) {
			numberOfReducer = num;
		}

		if (outpath != null && !outpath.isEmpty()) {
			outputBase = outpath;
		}
		System.out.println("outputBase: " + outputBase);
	}

	/**
	 * Load properties from "hadoopsensorgrid.properties" and read the EVAL
	 * input/output base directories. Exits the process with -1 if the file or
	 * either required property is missing.
	 */
	private void loadProperties() {
		try {
			properties = PropertyFile.loadProperties("hadoopsensorgrid.properties");
			inputBase = properties.getProperty("hadoop.eval.inputbase");
			outputBase = properties.getProperty("hadoop.eval.outputbase");
		} catch (Exception e) {
			e.printStackTrace();
		}

		if (properties == null || inputBase == null || outputBase == null) {
			System.err.println("Error: evalController failed to load properties!");
			System.exit(-1);
		}
	}

	/**
	 * Delete the ASCII/POS temp files and the output directory before starting
	 * another EVAL job. Waits for both delete commands to finish so the next
	 * job cannot start while the old files are still being removed.
	 */
	private void deleteDir() {
		System.out.println("Deleting EVAL temp files and directory");
		// NOTE(review): inputBase/outputBase are interpolated into a shell
		// command line; they come from a local properties file (trusted), but
		// quoting would be required if they ever contained spaces or shell
		// metacharacters.
		String command1 = "rm " + inputBase + "/*";
		String command2 = "rm -r " + outputBase + " --force";
		Runtime runtime = Runtime.getRuntime();
		try {
			String osname = System.getProperty("os.name");
			String[] args1;
			String[] args2;
			if (osname.equals("Linux")) {
				args1 = new String[] { "sh", "-c", command1 };
				args2 = new String[] { "sh", "-c", command2 };
			} else if (osname.startsWith("Win")) {
				// startsWith avoids a StringIndexOutOfBoundsException that
				// substring(0, 3) would throw on OS names shorter than 3 chars.
				args1 = new String[] { "cmd", "/c", command1 };
				args2 = new String[] { "cmd", "/c", command2 };
			} else {
				System.out.println("Cannot delete temp files/directory for EVAL");
				System.exit(0);
				return; // unreachable after exit; satisfies definite assignment
			}
			// The original fired both processes and never waited, which could
			// let deploy() race the deletion on the next cycle.
			runtime.exec(args1).waitFor();
			runtime.exec(args2).waitFor();
		} catch (IOException e) {
			System.out.println("File delete execution error: " + e.getMessage());
			e.printStackTrace();
		} catch (InterruptedException e) {
			// Re-assert the interrupt flag so callers can observe the interrupt.
			Thread.currentThread().interrupt();
		}
	}

	/**
	 * This method creates and submits a job: waits for the eval list file to
	 * appear, splits it into one single-line input file per data type, runs the
	 * MapReduce job to completion, then cleans up the temp files and output
	 * directory.
	 * 
	 * @throws Exception
	 *           on I/O failure, job failure, or interruption while waiting
	 */
	public void deploy() throws Exception {
		File file = new File(evalList);
		// Busy-wait until the list file appears — presumably produced by
		// another component. NOTE(review): no timeout; this loops forever if
		// the file never shows up.
		while (!file.exists()) {
			Thread.sleep(50);
		}

		conf = new Configuration();

		job = new Job(conf, "eval filter");
		job.setJarByClass(evalController.class);
		job.setMapperClass(evalMapper.class);
		// NOTE(review): the reducer is reused as the combiner. A combiner must
		// emit the map output types (serializedText, BytesWritable), yet the
		// reducer output key is Text — confirm evalReducer is really usable in
		// both roles.
		job.setCombinerClass(evalReducer.class);
		job.setReducerClass(evalReducer.class);

		// partition and sorting methods
		job.setPartitionerClass(serializedTextPartitioner.class);
		job.setSortComparatorClass(serializedTextComparator.class);

		// the mapper output is serializedText, BytesWritable
		job.setMapOutputKeyClass(serializedText.class);
		job.setMapOutputValueClass(BytesWritable.class);

		// the reducer output is Text, BytesWritable
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(BytesWritable.class);

		job.setNumReduceTasks(numberOfReducer);

		// Each line of the eval list becomes its own one-line file
		// ("list<N>.txt") registered as a separate job input path.
		// try-with-resources closes the reader/writers even if Hadoop throws
		// (the original leaked them on any exception).
		int typeNum = 1;
		try (BufferedReader in = new BufferedReader(new FileReader(file))) {
			String list;
			while ((list = in.readLine()) != null) {
				String fileName = inputBase + File.separator + "list" + typeNum + ".txt";
				try (FileWriter filewriter = new FileWriter(fileName)) {
					filewriter.write(list);
					filewriter.flush();
				}
				typeNum++;

				FileInputFormat.addInputPath(job, new Path(fileName));
			}
		}

		FileOutputFormat.setOutputPath(job, new Path(outputBase));
		// Block until the job finishes (verbose progress output disabled),
		// then remove the temp files for the next cycle.
		job.waitForCompletion(false);
		deleteDir();
	}

}