/**
 * 
 */
package org.apache.hadoop.mapreduce;

import java.io.IOException;

import mapreduce4j.MapContext;
import mapreduce4j.RecordReader;
import mapreduce4j.RecordWriter;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.WritableComparable;

/**
 * The base Mapper class which does an Identity function by default.
 * @author tim
 */
public class Mapper<KEY_IN extends WritableComparable,VALUE_IN extends WritableComparable,KEY_OUT extends WritableComparable,VALUE_OUT extends WritableComparable> {
	
	/**
	 * Simple pass through (e.g. Identity function): emits the input key/value
	 * unchanged. Subclasses override this to implement the real map logic.
	 * @param key The input key
	 * @param value The input value
	 * @param context The job context used to emit output and report status
	 * @throws IOException Should the context fail to write or bad key/value types are observed
	 * @throws InterruptedException If the write is interrupted
	 */
	@SuppressWarnings("unchecked")
	protected void map(KEY_IN key, VALUE_IN value, Context context) throws IOException, InterruptedException {
		context.setStatus("Key[" + key + "], Value[" + value + "]");
		// Identity mapping only type-checks when KEY_IN/VALUE_IN are the output
		// types as well; a mismatch surfaces as a ClassCastException downstream.
		context.write((KEY_OUT)key, (VALUE_OUT)value);
	}		
	
	/**
	 * Called once before any input records are processed. Does nothing by
	 * default; subclasses may override to acquire resources.
	 * @param context The job context
	 * @throws IOException Should resource acquisition fail
	 * @throws InterruptedException If setup is interrupted
	 */
	protected void setup(Context context) throws IOException, InterruptedException {
	}
	
	/**
	 * Called once after all input records have been processed successfully.
	 * Does nothing by default; subclasses may override to release resources
	 * acquired in {@link #setup(Context)}. Mirrors the standard Hadoop
	 * Mapper lifecycle (setup, map*, cleanup).
	 * @param context The job context
	 * @throws IOException Should resource release fail
	 * @throws InterruptedException If cleanup is interrupted
	 */
	protected void cleanup(Context context) throws IOException, InterruptedException {
	}
	
	/**
	 * The execution context handed to each lifecycle method. Declared as a
	 * non-static inner class so it can reference the enclosing Mapper's type
	 * parameters, matching the Hadoop Mapper.Context convention.
	 * @author tim
	 */
	public class Context extends MapContext<KEY_IN, VALUE_IN, KEY_OUT, VALUE_OUT> {
		 public Context(Configuration conf, RecordReader<KEY_IN,VALUE_IN> reader, RecordWriter<KEY_OUT,VALUE_OUT> writer) {
			 super(conf, reader, writer);
		 }
	}
	
	/**
	 * Runs the map over the input: invokes {@link #setup(Context)}, then
	 * {@link #map} once per input record, then {@link #cleanup(Context)}.
	 * The context is closed in a finally block so the underlying reader and
	 * writer are released even when a record fails to process.
	 * @param context The job context supplying input records and receiving output
	 * @throws IOException Should reading, mapping, or closing fail
	 * @throws InterruptedException If processing is interrupted
	 */
	public void run(Context context) throws IOException, InterruptedException {
		setup(context);
		try {
			while (context.nextKeyValue()) {
				map(context.getCurrentKey(), context.getCurrentValue(), context);
			}
			// Only reached when every record mapped cleanly, matching the
			// Hadoop contract that cleanup follows successful processing.
			cleanup(context);
		} finally {
			// Guarantee resource release even if map() or nextKeyValue() threw.
			context.close();
		}
	}
}