package MREngine;

import x10.util.*;

public class MapReduceEnginePar[K, V] {

	/** Nanoseconds-to-milliseconds divisor for the timing printout. */
	static val Meg = 1000*1000;
	
	/** the raw data to be processed */
	val data:MREData;
	
	/** number of asyncs */
	val numAsync:Int;
	
	/** function pointers of split, map, reduce, and print functions */
	private var split_func: (data:MREData) => void;
	private var map_func: (data:MREData) => void;
	private var reduce_func: (key:K, value:ArrayList[MREEntry[K, V]]) => void;
	
	/** the list of split data blocks */
	private var split_list: ArrayList[MREData];
	
	/** 
	 * Intermediate array to save the emit_intermediate mapping results.
	 * Each element in the array is a list that saves the result from one
	 * mapping async. 
	 * Array Size: numAsync.  
	 */
	private var intermediate_array:Array[ArrayList[MREEntry[K, V]]];
	
	/** 
	 * The array to save the shuffled results for each async.
	 * Each element in the array is a hash map that saves the result from one
	 * shuffling async. 
	 * Array Size: numAsync.  
	 */
	private var shuffled_intermediate:Array[HashMap[K, ArrayList[MREEntry[K, V]]]];
	
	/**
	 * The hashmap to save the result of shuffle.
	 * HashMap Size: number of K.
	 */
	private var shuffled_hashmap:HashMap[K,ArrayList[MREEntry[K, V]]];
	
	/** The array to hold the list of each K for reducing.
	 * Array Size: number of K.
	 * init before use.
	 */
	private var shuffled_array:Array[ArrayList[MREEntry[K, V]]];
	
	/** the list of final results
	 * ArrayList Size: number of K.
	 * Intentionally NOT nulled by clear(): GetResults()/NumberOfKey()
	 * are expected to be called after the scheduler finishes.
	 */
	private var final_list: ArrayList[MREEntry[K, V]];
	
	/** Per-async reduce results, merged into final_list by final_merge(). */
	private var final_array: Array[HashMap[K, MREEntry[K, V]]];
	
	/** Maps async index -> runtime worker id, so Emit* callbacks can find
	 * which per-async bucket the current worker should write into. */
	private var workerId_array: Array[Int];
	
	/** True once initialize() has built all the working structures. */
	private var ready:Boolean;
	
	/**
	 * The constructor function
	 * init_data: the initial data block to do the map-reduce
	 * _numAsync: the number of asyncs available
	 */
	public def this (_init_data:MREData, _numAsync: Int)
	{
		data = _init_data;
		numAsync = _numAsync;
		ready = false;
	}
	
	/**
	 * Allocate all per-run working structures (one slot per async) and
	 * mark the engine ready. Idempotent in effect: always leaves the
	 * engine in a freshly-initialized state.
	 */
	private def initialize(): void
	{	
		// initialize the split_list
		split_list = new ArrayList[MREData]();
		
		// initialize the map intermediates
		intermediate_array = new Array[ArrayList[MREEntry[K,V]]](numAsync);
		
		// init the shuffled intermediates
		shuffled_intermediate = new Array[HashMap[K, ArrayList[MREEntry[K,V]]]](numAsync);
		
		// init the shuffled hashmap
		shuffled_hashmap = new HashMap[K, ArrayList[MREEntry[K, V]]]();
		
		final_array = new Array[HashMap[K, MREEntry[K, V]]](numAsync);
		
		for (i in 0..(numAsync-1))
		{
			intermediate_array(i) = new ArrayList[MREEntry[K,V]]();
			shuffled_intermediate(i) = new HashMap[K, ArrayList[MREEntry[K,V]]]();
			final_array(i) = new HashMap[K, MREEntry[K, V]]();
		}
		
		// init the final_list
		final_list = new ArrayList[MREEntry[K, V]]();
		
		// array of worker ids. Used to map the task to a worker.
		// MIN_VALUE marks "no worker assigned yet".
		workerId_array = new Array[Int](numAsync, Int.MIN_VALUE);
		
		ready = true;	
	}

	/**
	 * Drop all working structures so they can be garbage collected.
	 * final_list is deliberately kept alive for GetResults().
	 */
	private def clear(): void
	{
		split_list = null;
		intermediate_array = null;
		shuffled_intermediate = null;
		shuffled_hashmap = null;
		shuffled_array = null;
		// FIX: final_array was previously not released here, leaking
		// numAsync hash maps after every run. Null it like the others.
		final_array = null;
		workerId_array = null;
		ready = false;
	}
	
	/**
	 * Place one mapped (K, V) entry into the per-async shuffle hash map.
	 * _entry: the entry to shuffle.
	 * _label: index of the async (and hence the target hash map).
	 */
	private def shuffle (_entry:MREEntry[K, V], _label:Int): void
	{
		//Console.OUT.println("DEBUG: _label = " + _label);
		//Console.OUT.println("DEBUG: _entry.key = " + _entry.getKey());
		var tmpBox:Box[ArrayList[MREEntry[K, V]]];
		tmpBox = shuffled_intermediate(_label).get(_entry.getKey());
		var tmpArrList:ArrayList[MREEntry[K, V]];
		
		if (tmpBox == null)		// if not found in hash map, create a new ArrayList
		{
			tmpArrList = new ArrayList[MREEntry[K, V]]();
			shuffled_intermediate(_label).put(_entry.getKey(), tmpArrList);
		}
		else
			tmpArrList = tmpBox.value;
		
		tmpArrList.add(_entry);
		
	}

	/**
	 * Parallel tree-merge of the numAsync shuffle hash maps into slot 0.
	 * In each round, async worker_i merges map[cmt_point + gap] into
	 * map[cmt_point]; rounds double the gap until one map remains.
	 * The pairs in a round are disjoint, so the asyncs do not conflict.
	 * Returns the fully-merged map (shuffled_intermediate(0)).
	 */
	private def shuffle_merge_helper(): HashMap[K,ArrayList[MREEntry[K,V]]]
	{
		val total = shuffled_intermediate.size;
		var gap:Int = 1;
		var factor:Int = 2*gap;
		var numWorker:Int = (total+1) / 2;
		
		while (gap < total)
		{
			finish for (worker_i in (0..(numWorker-1)))
			async{
				val cmt_point = worker_i * factor;
				val add_point = cmt_point + gap;
				
				if (add_point < total)
				{
					var set:Set[Map.Entry[K,ArrayList[MREEntry[K,V]]]] = shuffled_intermediate(add_point).entries();
					var i:Iterator[Map.Entry[K,ArrayList[MREEntry[K,V]]]] = set.iterator();
					while(i.hasNext())
					{
						var tmpEnt:Map.Entry[K,ArrayList[MREEntry[K,V]]] = i.next();
						var key:K = tmpEnt.getKey();
						var list:ArrayList[MREEntry[K, V]]= tmpEnt.getValue();
						
						// FIX: look the key up once and reuse the Box,
						// instead of two hash lookups per existing key.
						val tmpBox:Box[ArrayList[MREEntry[K,V]]] = shuffled_intermediate(cmt_point).get(key);
						if (tmpBox != null)
						{
							tmpBox.value.addAll(list);
						}
						else
						{
							shuffled_intermediate(cmt_point).put(key, list);
						}
					}
				}
			}
			
			gap = (gap << 1);
			factor = 2 * gap;
			numWorker = (numWorker+1) / 2;
		}
		
		return shuffled_intermediate(0);
	}
	
	/** 
	 * Combine the results 
	 * from shuffled_intermediate to shuffled_hashmap;
	 * then traverse the shuffled_hashmap and put entries
	 * to shuffled_array.
	 * 
	 * These two things should have been initialized 
	 * before calling shuffle_merge()
	 */
	private def shuffle_merge()
	{
		shuffled_hashmap = shuffle_merge_helper();
		
		/* (The sequential merge this replaced:)
		var size:Int = shuffled_intermediate.size;
		for(index in 0..(size-1))
		{
			 var set:Set[Map.Entry[K,ArrayList[MREEntry[K,V]]]] = shuffled_intermediate(index).entries();
			 var i:Iterator[Map.Entry[K,ArrayList[MREEntry[K,V]]]] = set.iterator();
			 while(i.hasNext())
			 {
				 var tmpEnt:Map.Entry[K,ArrayList[MREEntry[K,V]]]=i.next();
				 var key:K = tmpEnt.getKey();
				 var value:ArrayList[MREEntry[K, V]]= tmpEnt.getValue();
				 
				 if (shuffled_hashmap.get(key) != null)
				 {
					 var tmpArr:ArrayList[MREEntry[K, V]]= shuffled_hashmap.get(key).value;
					 tmpArr.addAll(value);
				 }
				 else
				 {
					 shuffled_hashmap.put(key,value);
				 }
			 }
		}
		*/
		
		/* traverse the shuffled_hashmap and put entries
		 * to shuffled_array (one per-key list per slot). */
		var set:Set[Map.Entry[K,ArrayList[MREEntry[K,V]]]] = shuffled_hashmap.entries();
		i:Iterator[Map.Entry[K,ArrayList[MREEntry[K,V]]]] = set.iterator();
		shuffled_array = new Array[ArrayList[MREEntry[K,V]]](set.size());
		
		var count:Int = 0;
		while(i.hasNext())
		{
			var value:ArrayList[MREEntry[K, V]]= i.next().getValue();
			shuffled_array(count) = value;
			count++;
		}
	}
	
	/**
	 * Merge the per-async reduce results (final_array) into final_list.
	 * Runs sequentially after the reduce phase, so no synchronization
	 * is needed.
	 */
	private def final_merge()
	{
		/* Merge the final result 
		 * from final_hashmap to final_list.
		 */
		var set:Set[Map.Entry[K, MREEntry[K,V]]];
		
		for (j in (0..(final_array.size-1)))
		{
			set = final_array(j).entries();
			val i:Iterator[Map.Entry[K, MREEntry[K,V]]] = set.iterator();
			while(i.hasNext())
			{
				var value:MREEntry[K, V]= i.next().getValue();
				final_list.add(value);
			}
		}
		
	}
	
	/**
	 * Called by the user's split function.
	 * Add a split of data to the data split list.
	 */
	public def EmitSplit (_data: MREData): void
	{
		split_list.add (_data);
	}
	
	/**
	 * Called by the user's map function.
	 * Add a mapped (K V) pair to the intermediate list.
	 * The target bucket is found by matching the current runtime worker
	 * id against workerId_array (set by each mapper async before mapping).
	 * NOTE(review): if the current worker id is not present in
	 * workerId_array (e.g. the runtime moves an async to a worker that
	 * never registered), the pair is silently dropped — confirm the
	 * runtime pins each async to one worker for its whole body.
	 */
	public def EmitIntermediate (_key:K, _value:V)//, _label:Int): void
	{
		val new_pair = new MREEntry[K,V](_key, _value);
		for (i in (0..(numAsync-1)))
		{
			if (workerId_array(i) == Runtime.workerId())
			{
				intermediate_array(i).add(new_pair);
				break;
			}
		}
		//val ret = intermediate_array(_label).add(new_pair);
		//Console.OUT.println("DEBUG _key= "+_key +"; value= " + _value + "; add = " + ret);
	}
	
	/**
	 * Called by the user's reduce function.
	 * Add a final (K V) pair to the final list.
	 * Same worker-id-based bucket lookup (and same hazard) as
	 * EmitIntermediate.
	 */
	public def EmitFinal (_key:K, _value:V): void
	{
		// Using atomic
		//val new_final = new MREEntry[K,V](_key, _value);
		//atomic final_hashmap.put(_key, new_final);
		
		// Without atomic
		for (i in (0..(numAsync-1)))
		{
			if (workerId_array(i) == Runtime.workerId())
			{
				final_array(i).put(_key, new MREEntry[K,V](_key, _value));
				break;
			}
		}
	}
	
	/**
	 * Return the ArrayList of the final result
	 * of (K, V) pairs.
	 */
	public def GetResults (): ArrayList[MREEntry[K,V]]
	{
		return final_list;
	}
	
	/**
	 * The set functions for three user-defined functions, 
	 * split, map, and reduce.
	 */
	public def SetSplitFunc (_func: (data:MREData) => void): void
	{
		split_func = _func;
	}
	
	public def SetMapFunc (_func: (data:MREData) => void): void
	{
		map_func = _func;
	}
	
	public def SetReduceFunc (_func: (key:K, value:ArrayList[MREEntry[K, V]]) => void): void
	{
		reduce_func = _func;
	}
	
	/**
	 * Return the number of Keys in 
	 * the final result.
	 */
	public def NumberOfKey (): Int
	{
		return final_list.size();
	}
	
	/** 
	 * The parallel version of the actual scheduler.
	 * Phases: split -> parallel map+shuffle -> parallel shuffle merge ->
	 * parallel reduce -> final merge; prints per-phase timings.
	 */
	public def MapReduceSchedulerPar()//{V haszero}: void
	{
		var lastStop:Long = System.nanoTime();
		val start_time = lastStop;
		
		// FIX: was `while (!ready) initialize();` — initialize() always
		// sets ready, so this is a plain conditional, not a spin-wait.
		if (!ready)
			initialize();
		
		if (data == null)
			return;
		
		// split the raw data
		split_func (data);
		
		val split_time = System.nanoTime() - lastStop;
		lastStop += split_time;
		//Console.OUT.println("DEBUG: split time: " + split_time/Meg);
		
		//Console.OUT.println("DEBUG split_list size: " + split_list.size());

		/* map the data in split_list to (K, V) pairs,
		 * and shuffle the (K, V) pairs to shuffled_intermediate array */
		
		// compute chunks: each mapper gets numTask splits; the last
		// mapper also takes the remainder.
		var numTask:Int = split_list.size() / numAsync;
		
		//Console.OUT.println("DEBUG numTask = " + numTask);
		
		lastStop = System.nanoTime();
		
		finish for (i_mapper in (0..(numAsync-1)))
		{
			val start:Int = numTask * i_mapper;
			val end:Int = (i_mapper == numAsync-1) ? (split_list.size()-1) : start + numTask -1;
			// distribute to mapper
			async 
			{
				// register this async's worker so EmitIntermediate can
				// route pairs back to bucket i_mapper
				workerId_array(i_mapper) = Runtime.workerId();
				// mapping
				for (i in (start..end))
				{
					//Console.OUT.println("DEBUG:mapper[" + i_mapper + "]: i = ["+ i +"]");
					val db:MREData = split_list.get(i);
					//db.setLabel(i_mapper);
					map_func (db);
				}
				//Console.OUT.println("DEBUG:mapper[" + i_mapper + "]:\tintermediate_array("+ i_mapper + ")\tsize: " + intermediate_array(i_mapper).size());
				
				// shuffling
				// shuffle the (K V) pairs
				val mapping_result:ArrayList[MREEntry[K, V]] = intermediate_array(i_mapper);
				for (j in (0..(mapping_result.size()-1)))
				{
					shuffle (mapping_result.get(j), i_mapper);
				}
				
				//Console.OUT.println("DEBUG:mapper[" + i_mapper + "]:\tshuffled_intermediate\tsize: " + shuffled_intermediate(i_mapper).size());
			}
		}
		
		val map_shuf_time = System.nanoTime() - lastStop;
		lastStop += map_shuf_time;
		//Console.OUT.println("DEBUG: map and shuffle time: " + map_shuf_time/Meg);
		
		
		/* Merge the shuffle results.
		 */
		shuffle_merge();
		
		val shuf_reorg_time = System.nanoTime() - lastStop;
		lastStop += shuf_reorg_time;
		//Console.OUT.println("DEBUG: shuffle reorganizing time: " + shuf_reorg_time/Meg);
		
		//Console.OUT.println("DEBUG: shuffled_array size: " + shuffled_array.size);
		
		/* Do the reducing.
		 */
		
		// reset the worker map so reducer asyncs re-register
		workerId_array.fill(Int.MIN_VALUE);
		
		numTask = shuffled_array.size / numAsync;
		finish for (i_reducer in (0..(numAsync-1)))
		{
			val start:Int = numTask * i_reducer;
			val end:Int = (i_reducer == numAsync-1) ? (shuffled_array.size-1) : start + numTask - 1;
			
			// distribute to reducer
			async 
			{
				workerId_array(i_reducer) = Runtime.workerId();
				
				for (i:Int in (start..end))
				{
					// Console.OUT.println("DEBUG:reducer[" + i_reducer + "]: iteration: " + i);
					// every per-key list is non-empty by construction
					// (shuffle only creates a list when adding an entry)
					val key:K = shuffled_array(i).get(0).getKey();
					reduce_func (key, shuffled_array(i));
				}
			}
		}

		val reduce_time = System.nanoTime() - lastStop;
		lastStop += reduce_time;
		//Console.OUT.println("DEBUG: reducing time: " + reduce_time/Meg);
		
		final_merge();
		
		val total_time = lastStop - start_time;
		
		// output the time statistics
		Console.OUT.println("MRE Par: spliting time: " + split_time/Meg + " [" 
				+ String.format("%.2f", new Array[Any](1, (100 *split_time as Float /total_time))) + "%]");
		Console.OUT.println("MRE Par: map and shuffle time: " + map_shuf_time/Meg + " [" 
				+ String.format("%.2f", new Array[Any](1, (100*map_shuf_time as Float /total_time))) + "%]");
		Console.OUT.println("MRE Par: merge time: " + shuf_reorg_time/Meg + " [" 
				+ String.format("%.2f", new Array[Any](1, (100*shuf_reorg_time as Float /total_time))) + "%]");
		Console.OUT.println("MRE Par: reducing time: " + reduce_time/Meg + " [" 
				+ String.format("%.2f", new Array[Any](1, (100*reduce_time as Float /total_time))) + "%]");
		Console.OUT.println("MRE Par: TOTAL time: " + total_time/Meg);
		
		clear();
		
		//Console.OUT.println("DEBUG final_list size: " + final_list.size());
	}
}
