package cgl.hadoopsensorgrid.sensorgrid.rtdtohadoop;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import cgl.hadoopsensorgrid.sensorgrid.util.executeCommand;
import cgl.hadoopsensorgrid.sensorgrid.util.PropertyFile;

/**
 * 
 * rtdController class takes the arguments from the mainController, and it
 * creates a job for getting and passing the RYO data.
 * 
 * @author Chao Sun
 * 
 */
public class rtdController {
	private Properties properties;
	private Configuration conf;
	private Job job;
	// Path of the file listing network addresses/ports, one network per line.
	private String networkList = null;
	// Local directory holding input files to be copied into HDFS.
	private String fileBase = null;
	// HDFS directory the per-network input files are uploaded to.
	private String inputBase = null;
	// HDFS directory receiving the reducers' output.
	private String outputBase = null;
	// Path of the "hadoop" launcher script used for HDFS shell commands.
	private String hdfsScript = null;
	// HDFS input directory for the downstream RYO job; recreated each run.
	private String ryoInputBase = null;

	/**
	 * Constructor. Loads the job properties, resolves the network list file,
	 * then clears the previous run's HDFS output and recreates the RYO input
	 * directory. Exits the JVM if the network list file does not exist.
	 * 
	 * @param networks
	 *          name of a file which contains all the network addresses and
	 *          ports; if null or empty, defaults to "networks.txt" under the
	 *          configured local input base
	 * @param outpath
	 *          temp directory for the outputs of reducers; if null or empty,
	 *          the value from the properties file is kept
	 * 
	 * @throws IOException
	 *           if an HDFS shell command fails
	 */
	public rtdController(String networks, String outpath) throws IOException {
		loadProperties();

		if (networks == null || networks.equals("")) {
			networkList = fileBase + File.separator + "networks.txt";
		} else {
			networkList = networks;
		}
		File fileDir = new File(networkList);
		if (!fileDir.exists()) {
			System.err.println("Error: No network list");
			System.exit(-1);
		}
		System.out.println("networkList: " + networkList);

		if (outpath != null && !outpath.equals("")) {
			outputBase = outpath;
		}
		System.out.println("outputBase: " + outputBase);

		deleteDir();
		makeRyoInputDir();
	}

	/**
	 * Load properties from "hadoopsensorgrid.properties". Exits the JVM if the
	 * file cannot be read or any required property is missing, so the fields
	 * set here are guaranteed non-null afterwards.
	 */
	private void loadProperties() {
		try {
			properties = PropertyFile.loadProperties("hadoopsensorgrid.properties");
			fileBase = properties.getProperty("hadoop.rtd.inputbase");
			inputBase = properties.getProperty("hdfs.rtd.inputbase");
			outputBase = properties.getProperty("hdfs.rtd.outputbase");
			hdfsScript = properties.getProperty("hdfs.hadoop.script");
			ryoInputBase = properties.getProperty("hdfs.ryo.inputbase");
		} catch (Exception e) {
			// Fall through to the null check below, which reports and exits.
			e.printStackTrace();
		}

		if (properties == null || inputBase == null || outputBase == null
				|| fileBase == null || hdfsScript == null || ryoInputBase == null) {
			System.err.println("Error: rtdController failed to load properties!");
			System.exit(-1);
		}
	}

	/**
	 * Delete the RTD output directory and the RYO input directory left by the
	 * last run, via the HDFS shell ("-rmr").
	 * 
	 * @throws IOException
	 *           if the HDFS shell command fails
	 */
	private void deleteDir() throws IOException {
		System.out.println("Deleting " + outputBase + " and " + ryoInputBase);
		String command = hdfsScript + " -rmr " + outputBase + " " + ryoInputBase;
		executeCommand.execute(command);
	}

	/**
	 * Create the RYO input directory ({@code ryoInputBase}) in HDFS via the
	 * HDFS shell ("-mkdir").
	 * 
	 * @throws IOException
	 *           if the HDFS shell command fails
	 */
	private void makeRyoInputDir()
			throws IOException {
		System.out.println("Creating " + ryoInputBase);
		String command = hdfsScript + " -mkdir " + ryoInputBase;
		executeCommand.execute(command);
	}

	/**
	 * Create and run the RTD MapReduce job. Each line of the network list is
	 * written to its own local file, uploaded to HDFS, and registered as a job
	 * input path; the job is then run to completion.
	 * 
	 * @throws Exception
	 *           if file I/O, an HDFS command, or the job itself fails
	 */
	public void deploy() throws Exception {
		conf = new Configuration();

		job = new Job(conf, "rtd filter");
		job.setJarByClass(rtdController.class);
		job.setMapperClass(rtdMapper.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(BytesWritable.class);

		// try-with-resources: the reader is closed even if an upload fails.
		try (BufferedReader in = new BufferedReader(new FileReader(networkList))) {
			String network;
			int networkNum = 1;

			while ((network = in.readLine()) != null) {
				String fileName = "network" + networkNum + ".txt";
				String filePath = fileBase + File.separator + fileName;
				// One network definition per local file, then pushed to HDFS.
				try (FileWriter filewriter = new FileWriter(filePath)) {
					filewriter.write(network);
				}
				networkNum++;

				String command = hdfsScript + " -put " + filePath + " " + inputBase
						+ fileName;
				executeCommand.execute(command);
				System.out.println("Putting " + filePath + " to " + inputBase);
				// NOTE(review): fixed wait for the HDFS upload to settle —
				// presumably compensates for the async shell command; confirm.
				Thread.sleep(30000);
				FileInputFormat.addInputPath(job, new Path(inputBase + fileName));
				System.out.println("Adding input " + inputBase + fileName);
			}
		}

		FileOutputFormat.setOutputPath(job, new Path(outputBase));
		// waitForCompletion blocks until the job finishes, so report completion
		// (the old message claimed mere submission after the job had ended).
		boolean success = job.waitForCompletion(false);
		System.out.println("RTD job completed, success = " + success);
	}

	/**
	 * main method for testing.
	 * 
	 * @param args
	 *          command line string array: &lt;rtdNetworkList&gt; &lt;rtdOutPath&gt;
	 */
	public static void main(String args[]) {
		if (args.length < 2) {
			System.err.println("Arguments not enough. Usage: "
					+ "<rtdNetworkList> <rtdOutPath>");
			System.exit(1);
		}

		// read the args
		String rtdNetworkList = args[0];
		String rtdOutPath = args[1];

		// start the rtd job
		try {
			rtdController rtd = new rtdController(rtdNetworkList, rtdOutPath);
			rtd.deploy();
		} catch (Exception rtdE) {
			System.err.println("RTD controller error.");
			rtdE.printStackTrace();
		}
	}

}