package cgl.hadoopsensorgrid.sensorgrid.eval;

import java.io.File;
import java.io.FileWriter;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import cgl.sensorgrid.common.PropertyFile;

/**
 * 
 * evalRunner class controls the running of evaluation programs.
 * 
 * @author Chao Sun
 * 
 */
public class evalRunner {
	// Properties loaded from hadoopsensorgrid.properties; null if loading failed.
	private Properties properties;
	// Directory whose entries are the station names (rdahmm.data.directory).
	private String dataDir = null;
	// HDFS output base path for the job (hadoop.rdahmm.outputbase).
	private String outPath = null;
	// Station names, read as the file listing of dataDir.
	private String[] stationList;
	private int stationNum;
	// Local directory where one marker file per station is written
	// (hadoop.station.directory); those files become the job inputs.
	private String stationListFileBase;
	private Configuration conf;
	private Job job;

	/**
	 * Constructor. Loads the configuration properties, then blocks
	 * (polling every 5 seconds) until a non-empty station list can be
	 * read from the data directory.
	 */
	public evalRunner() {
		loadProperties();

		try {
			readStationList();
			// File.list() can return null (I/O error) or an empty array
			// while the data directory is still being populated, so keep
			// polling until stations actually appear.
			while (stationList == null || stationList.length == 0) {
				Thread.sleep(5000);
				System.out.println(" waiting for station list to be read");
				readStationList();
			}
			stationNum = stationList.length;
		} catch (InterruptedException e) {
			// Restore the interrupt status so callers further up the stack
			// can still observe that the thread was interrupted.
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}

	/**
	 * Load properties from hadoopsensorgrid.properties and create the
	 * station-list directory. Exits the JVM with status -1 if any of the
	 * required properties is missing.
	 */
	private void loadProperties() {
		try {
			properties = PropertyFile.loadProperties("hadoopsensorgrid.properties");
			dataDir = properties.getProperty("rdahmm.data.directory");
			outPath = properties.getProperty("hadoop.rdahmm.outputbase");
			stationListFileBase = properties.getProperty("hadoop.station.directory");
		} catch (Exception e) {
			e.printStackTrace();
		}

		// Validate BEFORE touching stationListFileBase: the original code
		// built new File(stationListFileBase) first, which threw a
		// NullPointerException on a failed load instead of printing the
		// intended fatal-error message below.
		if (properties == null || dataDir == null || outPath == null
				|| stationListFileBase == null) {
			System.err.println("Fatal Error: failed to load properties!");
			System.exit(-1);
		}

		File stationDir = new File(stationListFileBase);
		if (!stationDir.exists()) {
			stationDir.mkdirs();
		}
	}

	/**
	 * Read the station names into {@link #stationList}, blocking (polling
	 * every 5 seconds) until the data directory exists. The resulting
	 * array may still be null or empty; the caller is expected to retry.
	 *
	 * @throws InterruptedException if the polling sleep is interrupted
	 */
	private void readStationList() throws InterruptedException {
		File dir = new File(dataDir);
		while (!dir.exists()) {
			Thread.sleep(5000);
			System.out.println(" waiting for data directory to be created");
		}
		stationList = dir.list();
	}

	/**
	 * Create and submit the evaluation job: one small input file per
	 * station (containing just the station name) is written under
	 * {@link #stationListFileBase} and added as a job input path.
	 *
	 * @throws Exception if writing an input file or submitting the job fails
	 */
	public void deploy() throws Exception {
		conf = new Configuration();
		job = new Job(conf, "eval runner");
		job.setJarByClass(evalRunner.class);
		job.setMapperClass(posEval.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(BytesWritable.class);

		for (int i = 0; i < stationNum; i++) {
			String siteName = stationList[i];
			String fileName = stationListFileBase + File.separator + siteName;
			// try-with-resources guarantees the writer is closed even if
			// write() throws (the original leaked it on exception); close()
			// flushes, so the explicit flush() was redundant.
			try (FileWriter filewriter = new FileWriter(fileName)) {
				filewriter.write(siteName);
			}
			FileInputFormat.addInputPath(job, new Path(fileName));
		}
		FileOutputFormat.setOutputPath(job, new Path(outPath));
		job.submit();
	}

	/**
	 * Entry point: construct the runner and submit the evaluation job.
	 */
	public static void main(String args[]) {
		try {
			evalRunner runner = new evalRunner();
			runner.deploy();
		} catch (Exception e) {
			System.err.println("Eval runner error.");
			e.printStackTrace();
		}
	}

}