package cgl.hadoopsensorgrid.sensorgrid.eval;

import java.lang.InterruptedException;
import java.io.IOException;
import java.io.File;
import java.io.FileWriter;
import java.io.InputStream;
import java.util.Hashtable;
import java.util.Enumeration;
import java.util.Properties;
import java.util.Calendar;

import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;

import cgl.sensorgrid.common.PropertyFile;

import cgl.hadoopsensorgrid.sensorgrid.util.*;

/***
 * 
 * The evalReducer extends the hadoop Reducer class. It is deployed by the
 * ryoController. It receives data from the mapper, categorizes it under the
 * name of its station, and saves it into files.
 * 
 * @author Chao Sun
 * 
 */
public class evalReducer extends
		Reducer<serializedText, BytesWritable, Text, BytesWritable> {
	private Hashtable sitesTable;
	private Properties properties;
	private String dataDir = null;
	private String histDir = null;
	private String evalDir = null;
	private boolean isHistSaved = false;

	/***
	 * 
	 * Save ASCII data in a file under a directory named after the site name.
	 * 
	 * @author Chao Sun
	 * 
	 */
	class asciiProcessor {
		private String siteName;
		private String modelFilePrefix;
		private String siteModelDir;
		private FileWriter fwModel; // lazily opened on first addLine()

		/**
		 * Constructor. Initializes the file-name prefix and creates the
		 * per-site output directory if it does not exist yet.
		 * 
		 * @param site
		 *          station name; also used as the output sub-directory name
		 * @param modelParentDir
		 *          parent directory under which the site directory is created
		 */
		public asciiProcessor(String site, String modelParentDir) {
			this.siteName = site;
			this.modelFilePrefix = siteName + "_ASCII";
			this.fwModel = null;
			this.siteModelDir = modelParentDir + File.separator + siteName
					+ File.separator;

			File fDir = new File(siteModelDir);
			if (!fDir.exists()) {
				fDir.mkdirs();
			}
		}

		/**
		 * Append a line of data to the site's ".xyz" file, opening the file in
		 * append mode on first use. I/O errors are logged and swallowed so one
		 * bad write does not abort the whole reduce task.
		 * 
		 * @param line
		 *          data line to append; expected to carry its own newline
		 */
		public void addLine(String line) {
			try {
				if (fwModel == null) {
					fwModel = new FileWriter(siteModelDir + modelFilePrefix + ".xyz",
							true);
				}
				fwModel.write(line);
			} catch (Exception e) {
				e.printStackTrace();
			}
		}

		/**
		 * Flush and close the output file. The writer is reset to null so a
		 * later addLine() reopens the file instead of writing to a closed
		 * stream (the original kept the closed writer, silently dropping any
		 * data appended after writeFile()).
		 * 
		 * @throws IOException
		 *           if flushing or closing the file fails
		 */
		public void writeFile() throws IOException {
			if (fwModel != null) {
				fwModel.flush();
				fwModel.close();
				fwModel = null;
			}
		}

	}

	/***
	 * 
	 * Save POS data in a file under a directory named after the site name.
	 * Evaluate data periodically.
	 * 
	 * @author Chao Sun
	 * 
	 */
	class posProcessor {
		private String siteName;
		private String modelFilePrefix;
		private String siteModelDir;
		private String siteHistDir;
		private String siteEvalDir;
		private FileWriter fwModel; // model output, lazily opened on first addLine()
		private FileWriter fwHist; // history output, rotated daily
		private FileWriter fwEval; // evaluation output, lazily opened
		private Calendar calLastHistData; // time of the previous history write

		/**
		 * Constructor. Builds the per-site model/history/eval directory paths
		 * and creates any that are missing. The history directory is only
		 * created when history saving is enabled (outer isHistSaved flag).
		 * 
		 * @param site
		 *          station name; also used as the sub-directory name
		 * @param modelParentDir
		 *          parent directory for model output
		 * @param histParentDir
		 *          parent directory for history output
		 * @param evalParentDir
		 *          parent directory for evaluation output
		 */
		public posProcessor(String site, String modelParentDir,
				String histParentDir, String evalParentDir) {
			this.siteName = site;
			this.modelFilePrefix = siteName + "_POS_model";
			this.fwModel = null;
			this.siteModelDir = modelParentDir + File.separator + siteName
					+ File.separator;
			this.siteHistDir = histParentDir + File.separator + siteName
					+ File.separator;
			this.siteEvalDir = evalParentDir + File.separator + siteName
					+ File.separator;

			File fDir = new File(siteModelDir);
			if (!fDir.exists()) {
				fDir.mkdirs();
			}
			if (isHistSaved) {
				fDir = new File(siteHistDir);
				if (!fDir.exists()) {
					fDir.mkdirs();
				}
			}
			fDir = new File(siteEvalDir);
			if (!fDir.exists()) {
				fDir.mkdirs();
			}

		}

		/**
		 * Append a line of data to the model and eval files (and, when
		 * enabled, the history file). When the model file is created for the
		 * first time, the wall-clock start time in milliseconds is recorded
		 * next to it. I/O errors are logged and swallowed so one bad write
		 * does not abort the whole reduce task.
		 * 
		 * @param line
		 *          data line to append; expected to carry its own newline
		 */
		public void addLine(String line) {
			// save history data
			if (isHistSaved) {
				addHistData(line);
			}
			// save model and eval data
			try {
				if (fwModel == null) {
					// record the starting time, once per site, before the model
					// file first comes into existence
					String modelFileName = siteModelDir + modelFilePrefix + ".xyz";
					File xyzFile = new File(modelFileName);
					if (!xyzFile.exists()) {
						FileWriter timeWriter = new FileWriter(siteModelDir + siteName
								+ "_input_start_milli", false);
						long modelInputStartMilli = System.currentTimeMillis();
						timeWriter.write(Long.toString(modelInputStartMilli));
						timeWriter.flush();
						timeWriter.close();
					}
					fwModel = new FileWriter(modelFileName, true);
				}
				if (fwEval == null) {
					fwEval = new FileWriter(siteEvalDir + siteName + ".xyz", true);
				}
				fwModel.write(line);
				fwEval.write(line);
			} catch (Exception e) {
				e.printStackTrace();
			}
		}

		/**
		 * Flush and close every open output file. Each writer is handled
		 * independently: the original only touched fwEval when fwModel was
		 * open, and never closed fwHist at all, which could lose buffered
		 * history data. Writers are reset to null so a later addLine() call
		 * reopens them instead of writing to a closed stream.
		 * 
		 * @throws IOException
		 *           if flushing or closing a file fails
		 */
		public void writeFile() throws IOException {
			if (fwModel != null) {
				fwModel.flush();
				fwModel.close();
				fwModel = null;
			}
			if (fwEval != null) {
				fwEval.flush();
				fwEval.close();
				fwEval = null;
			}
			if (fwHist != null) {
				fwHist.flush();
				fwHist.close();
				fwHist = null;
			}
		}

		/**
		 * Append a line of data to the daily history file, rotating to a new
		 * file under siteHistDir/year/month/ whenever the calendar day has
		 * advanced since the previous write (time only moves forward here, so
		 * comparing year/month/day with ">" is sufficient).
		 * 
		 * @param line
		 *          data line to append; expected to carry its own newline
		 */
		protected void addHistData(String line) {
			Calendar now = Calendar.getInstance();
			if (calLastHistData == null
					|| fwHist == null
					|| now.get(Calendar.YEAR) > calLastHistData.get(Calendar.YEAR)
					|| now.get(Calendar.MONTH) > calLastHistData.get(Calendar.MONTH)
					|| now.get(Calendar.DAY_OF_MONTH) > calLastHistData
							.get(Calendar.DAY_OF_MONTH)) {
				try {
					if (fwHist != null) {
						fwHist.flush();
						fwHist.close();
					}
					// Calendar.MONTH is zero-based, hence the +1 in the path.
					String histFileDir = siteHistDir + now.get(Calendar.YEAR)
							+ File.separator + (now.get(Calendar.MONTH) + 1) + File.separator;
					File fDir = new File(histFileDir);
					if (!fDir.exists())
						fDir.mkdirs();
					String histFilePath = histFileDir + siteName + "_"
							+ getDateString(now, false) + ".dat";
					fwHist = new FileWriter(histFilePath, true);
				} catch (Exception e) {
					e.printStackTrace();
				}
			}

			try {
				// fwHist can still be null if opening the file failed above;
				// skip the write instead of raising (and swallowing) an NPE.
				if (fwHist != null) {
					fwHist.write(line);
				}
			} catch (Exception e) {
				e.printStackTrace();
			}
			calLastHistData = now;
		}

		/**
		 * Convert a calendar date to "yyyy-M-d", optionally followed by
		 * " H:m:s" when timeWanted is true. Fields are not zero-padded.
		 * 
		 * @param date
		 *          the date to format
		 * @param timeWanted
		 *          whether to append the time-of-day part
		 * @return the formatted date string
		 */
		private String getDateString(Calendar date, boolean timeWanted) {
			// StringBuilder: no synchronization needed in this single-use scope.
			StringBuilder ret = new StringBuilder();
			ret.append(date.get(Calendar.YEAR)).append('-').append(
					date.get(Calendar.MONTH) + 1);
			ret.append('-').append(date.get(Calendar.DAY_OF_MONTH));
			if (timeWanted) {
				ret.append(' ').append(date.get(Calendar.HOUR_OF_DAY)).append(':');
				ret.append(date.get(Calendar.MINUTE)).append(':').append(
						date.get(Calendar.SECOND));
			}
			return ret.toString();
		}

	}

	/***
	 * 
	 * Create a processor for every site, each processor containing processors for
	 * multiple data types.
	 * 
	 * @author Chao Sun
	 * 
	 */
	class Processor {
		private asciiProcessor asciiPro; // handles "ASCII" data lines
		private posProcessor posPro; // handles "POS" data lines

		/**
		 * Constructor. Creates the per-type sub-processors for one site.
		 * 
		 * @param site
		 *          station name
		 * @param dataDir
		 *          parent directory for model output
		 * @param histDir
		 *          parent directory for history output
		 * @param evalDir
		 *          parent directory for evaluation output
		 */
		public Processor(String site, String dataDir, String histDir, String evalDir) {
			this.asciiPro = new asciiProcessor(site, dataDir);
			this.posPro = new posProcessor(site, dataDir, histDir, evalDir);
		}

		/**
		 * Route one data line to the processor for its type. An unknown type
		 * is treated as a fatal error: it is reported on stderr and the JVM
		 * exits with a non-zero status. (The original printed to stdout and
		 * called System.exit(0), which reads as success to the caller.)
		 * 
		 * @param type
		 *          data type, either "ASCII" or "POS"
		 * @param data
		 *          data line to store
		 */
		public void process(String type, String data) {
			if (type.equals("ASCII")) {
				asciiPro.addLine(data);
			} else if (type.equals("POS")) {
				posPro.addLine(data);
			} else {
				System.err.println("Data type error. Exit");
				System.exit(1);
			}
		}

		/**
		 * Flush and close the files of both sub-processors.
		 * 
		 * @throws IOException
		 *           if closing either sub-processor's files fails
		 */
		public void writeFile() throws IOException {
			asciiPro.writeFile();
			posPro.writeFile();
		}

	}

	/**
	 * Load the reducer configuration from "hadoopsensorgrid.properties" into
	 * the instance fields. Idempotent: returns immediately when the properties
	 * were loaded on an earlier call. Terminates the JVM when any required
	 * directory property is missing, since the reducer cannot run without them.
	 */
	private void loadProperties() {
		if (properties != null) {
			// Configuration was already loaded; nothing to do.
			return;
		}

		try {
			properties = PropertyFile.loadProperties("hadoopsensorgrid.properties");
			dataDir = properties.getProperty("rdahmm.data.directory");
			histDir = properties.getProperty("rdahmm.hist.directory");
			evalDir = properties.getProperty("rdahmm.eval.directory");
			isHistSaved = Boolean.parseBoolean(properties
					.getProperty("rdahmm.save.hist"));
		} catch (Exception e) {
			e.printStackTrace();
		}

		// isHistSaved needs no check: parseBoolean never yields null.
		boolean configComplete = properties != null && dataDir != null
				&& evalDir != null && histDir != null;
		if (!configComplete) {
			System.err.println("Fatal Error: failed to load properties!");
			System.exit(-1);
		}
	}

	/**
	 * Override the setup method. Initializes the per-site processor table and
	 * loads the reducer configuration before any reduce() call runs.
	 * 
	 * @param context
	 *          context of this evalReducer object
	 */
	@Override
	protected void setup(Context context) {
		sitesTable = new Hashtable<String, Processor>();
		loadProperties();
		System.out.println("Eval reducer started");
	}

	/**
	 * This method overrides the reduce() method of class Reducer. It gets data
	 * passed from evalMapper and routes every value to the Processor for its
	 * station, creating the Processor on first sight of a station.
	 * 
	 * @param key
	 *          contains the station name (toString) and data type (secToString)
	 * @param values
	 *          data contents
	 * @param context
	 *          context of this evalReducer object
	 * 
	 * @throws InterruptedException
	 * 
	 * @throws IOException
	 */
	@Override
	protected void reduce(serializedText key, Iterable<BytesWritable> values,
			Context context) throws IOException, InterruptedException {
		// The key is constant for one reduce() call, so decode it once
		// instead of once per value as the original did.
		String site = key.toString();
		String type = key.secToString();

		for (BytesWritable val : values) {
			// NOTE(review): decodes with the platform default charset; if the
			// mapper emits UTF-8 bytes this should pass an explicit charset —
			// left unchanged to preserve existing behavior.
			String data = new String(val.getBytes(), 0, val.getLength()) + "\n";

			// Single get() instead of containsKey() + get(); create the
			// processor lazily the first time this station is seen.
			Processor processor = (Processor) sitesTable.get(site);
			if (processor == null) {
				processor = new Processor(site, dataDir, histDir, evalDir);
				sitesTable.put(site, processor);
			}
			processor.process(type, data);
		}
	}

	/**
	 * Override the cleanup method. Flushes and closes the output files of
	 * every site processor. A failure on one site no longer aborts the loop
	 * (the original let the first IOException skip all remaining sites,
	 * leaving their buffered data unflushed); the first error is rethrown
	 * after every processor has been attempted.
	 * 
	 * @param context
	 *          context of this evalReducer object
	 * 
	 * @throws InterruptedException
	 * 
	 * @throws IOException
	 *           the first error encountered while closing any site's files
	 */
	@Override
	protected void cleanup(Context context) throws IOException,
			InterruptedException {
		IOException firstError = null;
		Enumeration e = sitesTable.elements();
		while (e.hasMoreElements()) {
			Processor processor = (Processor) e.nextElement();
			try {
				processor.writeFile();
			} catch (IOException ioe) {
				if (firstError == null) {
					firstError = ioe;
				} else {
					// Only the first error propagates; log the rest.
					ioe.printStackTrace();
				}
			}
		}
		if (firstError != null) {
			throw firstError;
		}
	}

}