package cgl.hadoopsensorgrid.sensorgrid.ryotoeval;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.File;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import cgl.sensorgrid.common.PropertyFile;

import cgl.hadoopsensorgrid.sensorgrid.util.*;

/**
 * 
 * ryoController class takes the arguments from the mainController, and it
 * creates a job for getting and processing the RYO data. It is started and
 * stopped periodically with a TIMEOUT value set in the rtdMapper.
 * 
 * @author Chao Sun
 * 
 */
public class ryoController {
	private Properties properties;
	private Configuration conf;
	private Job job;
	// Number of reduce tasks; stays at 2 when the caller passes 0.
	private int numberOfReducer = 2;
	// File listing the RYO data source addresses/ports, one per line.
	private String ryoSources = null;
	// Directory (from the property file) where per-source input files are written.
	private String inputBase = null;
	// Temp directory receiving the reducer outputs; removed after each run.
	private String outputBase = null;

	/**
	 * Constructor.
	 * 
	 * @param sources
	 *          path of a file which contains all the RYO data addresses/ports;
	 *          when null or empty, defaults to {@code inputBase/ryosources.txt}
	 * @param num
	 *          number of reducers; when 0, the default of 2 is kept
	 * @param outpath
	 *          temp directory for the outputs of reducers; when null or empty,
	 *          the value from the property file is kept
	 */
	public ryoController(String sources, int num, String outpath) {
		loadProperties();

		if (sources == null || sources.equals("")) {
			ryoSources = inputBase + File.separator + "ryosources.txt";
		} else {
			ryoSources = sources;
		}
		System.out.println("ryoSources: " + ryoSources);

		if (num != 0) {
			numberOfReducer = num;
		}

		if (outpath != null && !outpath.equals("")) {
			outputBase = outpath;
		}
		System.out.println("outputBase: " + outputBase);
	}

	/**
	 * Load the input/output base directories from
	 * {@code hadoopsensorgrid.properties}. Exits the JVM when the file or a
	 * required property is missing, since the controller cannot run without
	 * them.
	 */
	private void loadProperties() {
		try {
			properties = PropertyFile.loadProperties("hadoopsensorgrid.properties");
			inputBase = properties.getProperty("hadoop.ryo.inputbase");
			outputBase = properties.getProperty("hadoop.ryo.outputbase");
		} catch (Exception e) {
			e.printStackTrace();
		}

		if (properties == null || inputBase == null || outputBase == null) {
			System.err.println("Error: ryoController failed to load properties!");
			System.exit(-1);
		}
	}

	/**
	 * Delete the output directory before starting another RYO job.
	 * 
	 * Implemented with plain java.io.File recursion instead of shelling out to
	 * "rm -r" / "cmd /c": the old command was platform-dependent, broke on
	 * paths containing spaces or shell metacharacters, and the spawned process
	 * was never waited for or checked.
	 */
	private void deleteDir() {
		System.out.println("Deleting RYO temp directory");
		deleteRecursively(new File(outputBase));
	}

	/**
	 * Recursively delete a file or directory tree. Failures on individual
	 * entries are reported but do not abort the traversal, matching the
	 * best-effort behavior of the old "rm -r --force" command.
	 * 
	 * @param target
	 *          file or directory to remove
	 */
	private static void deleteRecursively(File target) {
		File[] children = target.listFiles();
		if (children != null) { // null for plain files and unreadable directories
			for (File child : children) {
				deleteRecursively(child);
			}
		}
		if (target.exists() && !target.delete()) {
			System.out.println("Could not delete: " + target.getPath());
		}
	}

	/**
	 * This method creates and submits a job. It waits for the source-list file
	 * to appear, writes one small input file per RYO source line (so each
	 * source becomes its own input split and map task), runs the job, and
	 * finally removes the temp output directory.
	 * 
	 * @throws Exception
	 *           if job setup, file I/O, or job execution fails
	 */
	public void deploy() throws Exception {
		File file = new File(ryoSources);
		// The source list is produced by another component; poll until it exists.
		while (!file.exists()) {
			Thread.sleep(50);
		}

		conf = new Configuration();

		job = new Job(conf, "ryo filter");
		job.setJarByClass(ryoController.class);
		job.setMapperClass(ryoMapper.class);
		// NOTE(review): the map output key is serializedText but the reducer
		// emits Text keys — confirm ryoReducer's signature really allows it to
		// act as a combiner here.
		job.setCombinerClass(ryoReducer.class);
		job.setReducerClass(ryoReducer.class);

		// partition and sorting methods
		job.setPartitionerClass(serializedTextPartitioner.class);
		job.setSortComparatorClass(serializedTextComparator.class);

		// the mapper output is (serializedText, BytesWritable)
		job.setMapOutputKeyClass(serializedText.class);
		job.setMapOutputValueClass(BytesWritable.class);

		// the reducer output is (Text, BytesWritable)
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(BytesWritable.class);

		job.setNumReduceTasks(numberOfReducer);

		// Opened only after job setup so a setup failure cannot leak the reader.
		BufferedReader in = new BufferedReader(new FileReader(file));
		try {
			String source;
			int sourceNum = 1;
			while ((source = in.readLine()) != null) {
				String fileName = inputBase + File.separator + "source" + sourceNum
						+ ".txt";
				FileWriter filewriter = new FileWriter(fileName);
				try {
					filewriter.write(source);
					filewriter.flush();
				} finally {
					// Close even when write() throws, so no descriptor is leaked.
					filewriter.close();
				}
				sourceNum++;

				FileInputFormat.addInputPath(job, new Path(fileName));
			}
		} finally {
			// The original leaked the reader whenever an exception escaped above.
			in.close();
		}

		FileOutputFormat.setOutputPath(job, new Path(outputBase));
		job.waitForCompletion(false);
		deleteDir();
	}

	/**
	 * main method for testing.
	 * 
	 * @param args
	 *          command line string array:
	 *          &lt;ryoSourceList&gt; &lt;ryoReducerNum&gt; &lt;ryoOutPath&gt;
	 */
	public static void main(String args[]) {
		if (args.length < 3) {
			System.err.println("Arguments missing. Usage: "
					+ "<ryoSourceList> <ryoReducerNum> <ryoOutPath>");
			System.exit(1);
		}

		// read the args
		String ryoSourceList = args[0];
		int ryoReducerNum = Integer.parseInt(args[1]);
		String ryoOutPath = args[2];

		// start the ryo job
		try {
			ryoController ryo = new ryoController(ryoSourceList, ryoReducerNum,
					ryoOutPath);
			ryo.deploy();
		} catch (Exception ryoE) {
			System.err.println("RYO controller deployment error.");
			ryoE.printStackTrace();
		}
	}

}