package cgl.hadoopsensorgrid.sensorgrid.ryotoeval;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.lang.InterruptedException;
import java.net.InetAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.util.StringTokenizer;

import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import cgl.sensorgrid.common.PropertyFile;

import cgl.hadoopsensorgrid.sensorgrid.util.*;

/***
 * 
 * The ryoReducer extends the Hadoop Reducer class and is deployed by the
 * ryoController. It receives data from the ryoMapper, categorizes it by
 * type (ASCII, POS, etc.), and appends each category to its own temp file.
 * 
 * @author Chao Sun
 * 
 */
public class ryoReducer extends
		Reducer<serializedText, BytesWritable, Text, BytesWritable> {
	private Properties properties;
	private String asciiFile;
	private String posFile;

	/**
	 * Override the setup method.
	 * 
	 * @param context
	 *          context of this ryoReducer object
	 */
	protected void setup(Context context) {
		loadProperties();
		System.out.println("RYO reducer started");
	}

	/**
	 * This method overrides the reduce() method of class Reducer. It gets data
	 * passed from ryoMapper, categorize it and saves them into files.
	 * 
	 * @param key
	 *          contains the data type
	 * @param values
	 *          contents of the data
	 * @param context
	 *          context of this ryoReducer object
	 * 
	 * @throws InterruptedException
	 * 
	 * @throws IOException
	 */
	protected void reduce(serializedText key, Iterable<BytesWritable> values,
			Context context) throws IOException, InterruptedException {
		for (BytesWritable val : values) {
			String type = key.toString();
			String data = new String(val.getBytes(), 0, val.getLength());

			// write data to temp files
			if (type.equals("ASCII")) {
				FileWriter asciiFilewriter = new FileWriter(asciiFile, true);
				asciiFilewriter.write(data);
				asciiFilewriter.flush();
				asciiFilewriter.close();
			} else if (type.equals("POS")) {
				FileWriter posFilewriter = new FileWriter(posFile, true);
				posFilewriter.write(data);
				posFilewriter.flush();
				posFilewriter.close();
			} else {
				System.out.println("Data type error. Exit");
				System.exit(0);
			}
		}
	}

	/**
	 * Load properties from a given file.
	 */
	private void loadProperties() {
		try {
			properties = PropertyFile.loadProperties("hadoopsensorgrid.properties");
			String evalInputBase = properties.getProperty("hadoop.eval.inputbase");
			if (properties == null || evalInputBase == null) {
				System.err.println("Error: evalReducer failed to load properties!");
				System.exit(-1);
			}
			asciiFile = evalInputBase + File.separator + "ascii.txt";
			posFile = evalInputBase + File.separator + "pos.txt";
		} catch (Exception e) {
			e.printStackTrace();
		}
	}
	
}