import MREngine.*;
import x10.util.*;
import x10.io.File;
import x10.io.FileReader;

public class InvertDoc {
	
	/**
	 * In-memory data block handed to the MapReduce engines by InvertDoc.
	 * Holds a chunk of the concatenated document text plus an index list
	 * that maps block-local character positions back to the document
	 * (input file) they came from.
	 */
	static class IDDataBlock extends MREData{
		
		/** length of the stored content (set by the String constructor only) */
		private var size:Int;
		/** the document content in memory */
		private var content:String;
		
		/** (end position, document id) pairs, appended in ascending end-position order */
		private var indexList:ArrayList[index];
		/** one mapping entry: document docID ends at character position index */
		static class index{
			 public var index:Int;
			 public var docID:Int;
		};

		/** Create an empty block with no content. */
		public def this()
		{
			indexList = new ArrayList[index]();
		}
		
		/** Create a block holding input; size records its length. */
		public def this(var input:String)
		{
			indexList = new ArrayList[index]();
			this.size= input.length();
			this.content= input;
		}
		
		/**
		 * Replace the stored content.
		 * Note: size is intentionally left untouched (matches the
		 * original behavior; getSize() keeps reporting the old length).
		 */
		public def setContent(input:String)
		{
			this.content= input;
		}
		
		/** @return the stored text */
		public def getContent()
		{
			return this.content;
		}
		
		/** Record that document docID ends at character position index. */
		public def addIndex(index:Int, docID:Int)
		{
			var idoc:index= new index();
			idoc.index = index;
			idoc.docID = docID;
			indexList.add(idoc);
		}
		
		/**
		 * Map a character position to the id of the document it belongs to.
		 * Because entries are appended in ascending end-position order
		 * (see splitString), the owning document is the FIRST entry whose
		 * end position is >= index, so we return on the first match.
		 * (The previous version kept scanning and overwrote the result on
		 * every later entry, which always returned the LAST document's id.)
		 * Returns 0 when the position lies past every recorded end.
		 */
		public def getDocID(index:Int):Int
		{
			for(i in 0..(indexList.size()-1))
			{
				if(index <= indexList.get(i).index)
					return indexList.get(i).docID;
			}
			
			return 0;
		}
		
		/** @return the content length captured at construction time */
		public def getSize():Int = size;
	}
	
	/** Per-document frequency record: how often one word occurs in one document. */
	static class DocValue{
		/** identifier of the document */
		public var docID:Int;
		/** occurrence count of the word within that document */
		public var freq:Int;
		
		/** Build a record for document _docId with an initial count of _freq. */
		def this(_docId:Int, _freq:Int)
		{
			this.docID = _docId;
			this.freq = _freq;
		}
	}
	
	/**
	 * Aggregated value for one word: the total frequency across all
	 * documents plus the per-document breakdown.
	 */
	static class Value{
		/** sum of freq over every entry in docFreq */
		public var totalFreq:Int;
		/** one DocValue per document the word appears in */
		public var docFreq:ArrayList[DocValue];
		
		/** Start with an empty breakdown and a zero total. */
		def this()
		{
			this.totalFreq = 0;
			this.docFreq = new ArrayList[DocValue]();
		}
		
		/**
		 * Add delta occurrences for document docId.
		 * If the document already has an entry its counter is bumped,
		 * otherwise a fresh DocValue is appended; the running total is
		 * updated in either case.
		 */
		def add(docId:Int, delta:Int)
		{
			var pos:Int = 0;
			while (pos < docFreq.size())
			{
				val entry = docFreq.get(pos);
				if (entry.docID == docId)
				{
					entry.freq += delta;
					totalFreq += delta;
					return;
				}
				++pos;
			}
			docFreq.add(new DocValue(docId, delta));
			totalFreq += delta;
		}
		
	}
	
	/** The raw block data (entire concatenated input). */
	public var data:IDDataBlock;
	/** number of parts to break the raw data into. */
	public var numParts:Int;
	/** number of asyncs used by the parallel engine */
	public var numAsync:Int;
	/** records the ending index of each input file within the concatenated input */
	public var inputIndex:Array[Int];
	/** num of input files */
	private val numOfFile:Int;
	
	/** true: binary-search the dictionary; false: linear scan (see check) */
	private val useBinarySearch:Boolean;
	
	/** sequential MapReduce engine */
	private val mres: MapReduceEngineSeq[String, Value];
	/** parallel MapReduce engine */
	private val mrep: MapReduceEnginePar[String, Value];
	
	
	/**
	 * Constructor.
	 * _input: concatenated text of all input files (see makeInput)
	 * _np: number of parts to split the input into
	 * _numAsync: number of asyncs for the parallel engine
	 * _inputIndex: ending index of each input file within _input
	 * _numOfInput: number of input files
	 * _binSearch: true to use binary search against the dictionary
	 */
	public def this(_input:String, _np:Int, _numAsync:Int, 
			_inputIndex:Array[Int], _numOfInput:Int, _binSearch:Boolean)
	{
		data = new IDDataBlock(_input);
		numParts = _np;
		this.inputIndex = _inputIndex;
		this.numOfFile = _numOfInput;
		this.numAsync = _numAsync;
		this.useBinarySearch = _binSearch;
		// both engines are built up front; set_smr_func_ptr installs their callbacks
		mres = new MapReduceEngineSeq[String, Value](data);
		mrep = new MapReduceEnginePar[String, Value](data, _numAsync);
	}
	
	/**
	 * Install the split, map and reduce closures on the engine selected by
	 * engType: "s" targets the sequential engine mres, anything else the
	 * parallel engine mrep.
	 */
	private def set_smr_func_ptr(engType:String)
	{	
		/** 
		 * Implementation of the split function.
		 * data: the raw DataBlock
		 * output: void (chunks are handed over via EmitSplit inside splitString)
		 */
		val _wcSplit=(data:MREData)=>
		{
			var wcdb:IDDataBlock = data as IDDataBlock;
			var content:String = wcdb.getContent();
			splitString (content, engType, inputIndex);
			return;
		};
		
		/** 
		 * Implementation of the map function: scan the block's text and,
		 * for every dictionary word found, emit a (word, Value) pair whose
		 * Value holds a single per-document count of 1.
		 * data: the DataBlock to map
		 * output: void
		 */
		val _wcMap = (data:MREData) => {	
			var wcdb:IDDataBlock = data as IDDataBlock;
			var content:String = wcdb.getContent();
			//Console.OUT.println("DEBUG: content length = " + content.length());
			var last_i:Int = 0;
			var i:Int = 0;
			// NOTE(review): length is content.length()-1, so the final character
			// is only ever consumed through the i == length branches below.
			var length:Int = content.length()-1;
			
			
			while (true)
			{
				var commit:Value = new Value();

				while ((i < length)			// find the start of a word
						&& !content.charAt(i).isLetter())
					++i;
				if (i == length)		// no letter at all
					return;
				
				last_i = i;		// mark the start of a word
				
				while ((i < length)			// find end of the word
						&& (content.charAt(i).isLetter()))
					++i;
				
				
				if (i == length)		// reach the end
				{
					// word runs to the last character; i+1 includes it in the slice
					val word = content.substring(last_i, i+1);
					
					if (check(word))
					{
						// NOTE(review): a word ending at the block boundary is
						// attributed to the LAST recorded document instead of
						// being looked up via getDocID — confirm intentional.
						commit.add(wcdb.indexList.getLast().docID, 1);
						if(engType.equals("s"))
							mres.EmitIntermediate(word, commit);
						else
							mrep.EmitIntermediate(word, commit);
					}
					
					return;
				}
				else		// scan a word
				{
					val word = content.substring(last_i, i);
					if (check(word))
					{
						// i-1 is the word's last character; map it to its document
						commit.add(wcdb.getDocID(i-1), 1);
						if(engType.equals("s"))
							mres.EmitIntermediate(word, commit);
						else
							mrep.EmitIntermediate(word, commit);
					}
					
					++i;
					
				}
				
			}
		};
		
		/** 
		 * Implementation of the reduce function: merge all intermediate
		 * Values for one word into a single Value carrying the total
		 * frequency and the per-document breakdown.
		 * key: the key of the list of values to be reduced
		 * valueList: the list of values to be reduced
		 */
		val _wcReduce = (key:String, valueList:ArrayList[MREngine.MREEntry[String, Value]])=>
		{
			var value:Int=0;	// NOTE(review): unused
			val length = valueList.size();
			//Console.OUT.println("DEBUG: length = : " + length);
			var cur:Value;
			var cur_id:Int;
			// accumulate counts per document id; each intermediate Value from
			// _wcMap carries exactly one DocValue, hence the getFirst() reads
			var hash:HashMap[Int, Value] = new HashMap[Int, Value]();
			for(var i:Int = 0; i < length; ++i)
			{
				//Console.OUT.println("DEBUG: i = " + i);
				cur = valueList.get(i).getValue();
				cur_id = cur.docFreq.getFirst().docID;
				var tmpBox:Box[Value];
				//atomic tmpBox = shuffled_hashmap.get(_entry.getKey());
				tmpBox = hash.get(cur_id);
				var temp:Value;
				if (tmpBox == null)
					hash.put(cur_id, cur);
				else
				{
					temp = tmpBox.value;
					temp.add(cur_id, cur.docFreq.getFirst().freq);
				}
			}
			//Console.OUT.println("DEBUG: one reduce done.");
			
			// flatten the per-document map into one Value for the final emit
			val commit = new Value();
			var set:Set[Map.Entry[Int, Value]] = hash.entries();
			// implicit-val declaration (X10 shorthand for "val i:Iterator...")
			i:Iterator[Map.Entry[Int, Value]] = set.iterator();
			
			while(i.hasNext())
			{
				var tmp:Value = i.next().getValue();
				commit.totalFreq += tmp.totalFreq;
				commit.docFreq.add(tmp.docFreq.getFirst());
			}
			
			
			if(engType.equals("s"))
				mres.EmitFinal (key, commit);
			else
				mrep.EmitFinal (key, commit);
			return;
		};
		
		// register the three closures with the selected engine
		if(engType.equals("s"))
		{
			mres.SetSplitFunc(_wcSplit);
			mres.SetMapFunc(_wcMap);
			mres.SetReduceFunc(_wcReduce);
		}
		else{
			mrep.SetSplitFunc(_wcSplit);
			mrep.SetMapFunc(_wcMap);
			mrep.SetReduceFunc(_wcReduce);
		}
	}
	
	
	/**
	 * Split the input string into roughly numParts chunks, cutting only at
	 * whitespace so no word is split, and emit each chunk as an IDDataBlock.
	 * Every emitted block carries index entries translating the global
	 * file-end positions (inputIndex) into block-local positions, which is
	 * what getDocID later consumes.
	 */ 
	public def splitString(input:String, engType:String, inputIndex:Array[Int])
	{
		var block_size:Int = data.getSize()/numParts;
		var last_i:Int = 0;
		var i:Int = 0;
		var ch:Char;
		// next inputIndex entry (file end) still to be assigned to a block
		var index:Int=0;
		var length:Int = data.getSize()-1;

		//Console.OUT.println("DEBUG: block_size = ["+block_size+"]");
		while (true)
		{
			last_i = i;
			i += block_size;
			
			if (i >= length)
			{
				// less than one block remains: emit everything that is left
				val subs = new IDDataBlock(input.substring(last_i, input.length()));
				
				// every remaining file end belongs to this final block
				while(index < numOfFile)
				{
					subs.addIndex(inputIndex(index) - last_i, index);
					index++;
				}

				//Console.OUT.println("DEBUG: subs = ["+ subs.getContent()+"]");
				if(engType.equals("s"))
					mres.EmitSplit(subs);
				else
					mrep.EmitSplit(subs);
				
				return;
			}
			
			ch = input.charAt(i);
			//Console.OUT.println("DEBUG: ch = ["+ ch +"]");
			// advance to the next whitespace so the cut never lands inside a word
			while ((i < input.length()-1)
					&& (!ch.isWhitespace()))
			{
				//Console.OUT.println("DEBUG: ch = ["+ ch +"]");
				++i;
				ch = input.charAt(i);
			}
			
			//Console.OUT.println("DEBUG: i = ["+ i +"]");
			//Console.OUT.println("DEBUG: last_i = ["+ last_i +"]");
			//Console.OUT.println("DEBUG: input.length = ["+ input.length() +"]");
			
			if (i == length)		// reached the end
			{
				val subs = new IDDataBlock(input.substring(last_i, i+1));
				//Console.OUT.println("DEBUG: subs = ["+ subs.getContent()+"]");
				
				while(index < numOfFile)
				{
					subs.addIndex(inputIndex(index) - last_i, index);
					index++;
				}
				
				if(engType.equals("s"))
					mres.EmitSplit (subs);
				else 
					mrep.EmitSplit(subs);
				return;
			}
			else				// found a whitespace
			{
				ch = input.charAt(i);
				while ((i < length)		// find the start of the next non-whitespace
						&& (ch.isWhitespace()))
				{
					++i;
					ch = input.charAt(i);
				}
				
				//Console.OUT.println("DEBUG: i = ["+ i +"]");
				if (i == length)
				{	
					// only trailing whitespace was left: emit the final block
					val subs = new IDDataBlock(input.substring(last_i, i));
					while(index < numOfFile)
					{
						subs.addIndex(inputIndex(index) - last_i,index);
						index++;
					}
					if(engType.equals("s"))
						mres.EmitSplit (subs);
					else
						mrep.EmitSplit(subs);
					
					return;
				}
				//Console.OUT.println("DEBUG: before subs = []");
				// normal case: emit [last_i, i) as one block
				val subs = new IDDataBlock(input.substring(last_i, i));
				
				// attach every file whose global end position falls inside this block
				while(i > inputIndex(index))
				{
					subs.addIndex(inputIndex(index) - last_i, index);
					index++;
				}
				// NOTE(review): this closing entry uses i - last_i - 1 while the
				// end-of-input branches above use i - last_i; confirm the
				// off-by-one difference is intentional.
				subs.addIndex(i - last_i - 1, index);
				
				//Console.OUT.println("DEBUG: subs = ["+ subs.getContent()+"]");
				if(engType.equals("s"))
					mres.EmitSplit (subs);
				else
					mrep.EmitSplit(subs);
				//++i;
			}
		}
	}
	
	/**
	 * Build the input text by concatenating numOfFile files named
	 * "<dirname>0.txt", "<dirname>1.txt", ... A single space is appended
	 * after each file so words never merge across file boundaries.
	 * inputIndex(k) receives the position of that appended space, i.e.
	 * the last index belonging to file k in the concatenated string.
	 * Returns the concatenated text, or null if any file is missing.
	 */
	public static def makeInput(var dirname:String, var inputIndex:Array[Int], numOfFile:Int) 
	{
		var filename:String;
		var input:String="";
		var index:Int = 0;
		
		Console.OUT.println( "Reading " + numOfFile + " input files from: "+ dirname + " ... " );
		
		for(k in 0..(numOfFile-1))
		{
			filename = dirname + k.toString()+ ".txt";
			try { 
				val I  = new File(filename);
				// one extra slot for the separating space appended below
				var inputArr:Array[Byte] = new Array[Byte]((I.size() as Int)+1);
				
				var i:Int = 0;
				for (byte in I.bytes())
				{
					inputArr(i) = byte;
					++i;
				}
				
				inputArr(i) = ((' ').ord() as Byte);
				
				// NOTE(review): repeated string concatenation is O(n^2) over the
				// total input size; a StringBuilder would be linear if inputs grow.
				input += new String(inputArr, 0, inputArr.size);
				index += inputArr.size;
				inputIndex(k) = index - 1;
				//Console.OUT.println("Size of the file " + filename + ": " + inputArr.size/1024 + " KB." + " Index: " + index);
			} 
			catch ( e2 : x10.io.FileNotFoundException ) { 
				Console.OUT.println("ERROR - File reading failed : " + filename);
				
				return null;
			}
			
		}
		Console.OUT.println("Total size : " 
				+ String.format("%.2f", new Array[Any](1, (input.length() as Float)/(1024*1024))) + " MB.");
 
		return input;
	}
	
	
	/**
	 * Look up word in the sorted dictionary between positions start and
	 * end (inclusive); returns true when present, false otherwise.
	 * Iterative replacement for the earlier recursive search — same
	 * comparisons, same results. dict entries are lowercase and sorted
	 * (see makeDict), so callers should pass a lowercased word.
	 */
	public def binarysearch(word:String, start:Int, end:Int)
	{
		var lo:Int = start;
		var hi:Int = end;
		while (lo <= hi)
		{
			val mid = (lo+hi)/2;
			val cmp = word.compareTo(dict(mid));
			if (cmp == 0)
				return true;
			if (cmp > 0)
				lo = mid+1;		// word sorts after the midpoint
			else
				hi = mid-1;		// word sorts before the midpoint
		}
		return false;
	}
	
	/**
	 * Membership test against the dictionary: binary search when enabled,
	 * otherwise a linear scan. The word is lowercased once up front since
	 * dict holds lowercase entries.
	 */
	public def check(word:String):Boolean 
	{
		val lowered = word.toLowerCase();
		if (useBinarySearch)
			return binarysearch(lowered, 0, dict.size-1);
		
		var pos:Int = 0;
		while (pos < dict.size)
		{
			if (dict(pos).equals(lowered))
				return true;
			++pos;
		}
		return false;
	}
	
	/** local dict used to check words; filled by makeDict, entries lowercased and sorted in file order */
	public var dict:Rail[String];
	
	
	/**
	 * Load the dictionary file into dict, one lowercased word per line.
	 * The file is opened and iterated twice: a first pass counts the
	 * lines so the Rail can be sized exactly, a second pass fills it.
	 * Lowercasing keeps the entries comparable in binarysearch/check.
	 * On FileNotFoundException an error is printed and dict is left unset.
	 */
	public def makeDict(filename:String) 
	{	
		Console.OUT.println("Reading dictionary from: " + filename + " ...");	
		try { 
			val I  = new File(filename);
			var numwords:Int = 0;
			
			// first pass: count lines to size the Rail
			for (line in I.lines()) 
				numwords++;
			
			Console.OUT.println(numwords + " words in dictionary.");
			
			val dict = new Rail[String](numwords,"");
			var i:Int = 0;
			// second pass: fill the Rail
			for (line in I.lines()) {
				// Note that we are converting all words to lowercase to simplify binary searching
				dict(i) = line.trim().toLowerCase(); 
				i++;
			}
			this.dict=dict;
		} 
		catch (  e2 : x10.io.FileNotFoundException ) { 
			Console.OUT.println("ERROR - File not found : " + filename);
		}
		
	}
	
	/**
	 * Cross-check the parallel results against the sequential ones.
	 * Fails when the result counts differ, when a parallel key cannot be
	 * found in the sequential output, or when the total frequency for a
	 * key disagrees. Returns true when everything matches.
	 * (Only totalFreq is compared, not the per-document breakdown.)
	 */
	private def validate():Boolean
	{
		val seqResults:ArrayList[MREngine.MREEntry[String, Value]] = mres.GetResults();
		val parResults:ArrayList[MREngine.MREEntry[String, Value]] = mrep.GetResults();
		
		if (seqResults.size() != parResults.size())
		{
			Console.OUT.println("Seq Size:"+seqResults.size() +" Par Size:" +  parResults.size());
			return false;
		}
		
		for(i in 0..(seqResults.size()-1))
		{
			val parEntry = parResults.get(i);
			val seqEntry = mres.FindByKey(parEntry.getKey());
			
			// guard: key missing from the sequential side
			if (seqEntry == null)
			{
				Console.OUT.println("Inconsist: Cannot find in serial PVal: " 
						+ parEntry.getValue().totalFreq +"\tPKey:" + parEntry.getKey());
				return false;
			}
			
			if(parEntry.getValue().totalFreq != seqEntry.getValue().totalFreq)
			{
				Console.OUT.println("Inconsist: SFreq: " + seqEntry.getValue().totalFreq +"\tSKey:" + seqEntry.getKey());
				return false;
			}
		}
		return true;
	}
	
	/**
	 * Dump both result sets to the console: for every word, its total
	 * frequency followed by the per-document breakdown.
	 */
	public def print_results ():void
	{
		val seqList:ArrayList[MREngine.MREEntry[String, Value]] = mres.GetResults();
		val parList:ArrayList[MREngine.MREEntry[String, Value]] = mrep.GetResults();
		
		Console.OUT.println("Sequential WordCount Results:");
		for (var pos:Int = 0; pos < seqList.size(); ++pos)
		{
			val entry = seqList.get(pos);
			val v = entry.getValue();
			Console.OUT.println( v.totalFreq+ "\t"+entry.getKey());
			for (var d:Int = 0 ; d < v.docFreq.size(); ++d){
				Console.OUT.println ("\tdocID=" + v.docFreq.get(d).docID + "\tfreq=" + v.docFreq.get(d).freq);
			}
		}
		
		Console.OUT.println("Parallel WordCount Results:");
		for (var pos:Int = 0; pos < parList.size(); ++pos)
		{
			val entry = parList.get(pos);
			val v = entry.getValue();
			Console.OUT.println(entry.getValue().totalFreq+ "\t" +entry.getKey());

			for (var d:Int = 0 ; d < v.docFreq.size(); ++d)
				Console.OUT.println ("\tdocID=" + v.docFreq.get(d).docID + "\tfreq=" + v.docFreq.get(d).freq);
		}
		
		Console.OUT.println("Total Number of Words: " + seqList.size());
	}
	
	// for timing
	/** nanosecond-to-millisecond divisor used on System.nanoTime() deltas */
	static val Meg = 1000*1000;
	/** parts per async: numParts = PARTITION_FACTOR * numAsync (see main) */
	static val PARTITION_FACTOR = 10;
	
	/**
	 * Entry point. Parses the command line, reads the input files and the
	 * dictionary, then runs either a functionality test ("func": one
	 * sequential + one parallel run, printed and validated) or a
	 * performance test ("perf": numTrials timed runs with average times
	 * and speedup reported).
	 */
	public static def main(args:Array[String]) 
	{	
		if (args.size < 6) 
		{
			Console.OUT.println("Usage: x10 InvertDoc <NumAsync> <NumTrials>"
					+ " <Type> <DictPath> <InputDir> <NumInputFile>"
					+ " [BinarySearchOption] ");
			Console.OUT.println("\n"
					+ "    NumAsync:       Number of Asyncs to use\n"
					+ "    NumTrials:      Number of trials to run\n"
					+ "    Type:           Type of this run.\n"
					+ "                    'func' for functionality run\n"
					+ "                    'perf' for performance run\n"
					+ "    DictPath:       Path to the dictionary\n"
					+ "    InputDir:       Input file directory\n"
					+ "    NumInputFile:   Number of input files\n"
					+ "    BinarySearch:   optional.\n"
					+ "                    'b' for using binary searching\n"
					+ "                    leave none for default linear searching\n");
			return;  
		}
		
		// args(2) is the THIRD positional argument (Type); the previous
		// message mislabeled it as the fourth.
		if (!args(2).equals("perf") && !args(2).equals("func"))
		{
			Console.OUT.println("Third Argument should be 'perf' or 'func'.");
			return;
		}
		
		// args(6) is the optional SEVENTH argument (BinarySearch); the
		// previous message mislabeled it as the sixth.
		if (args.size == 7 && !args(6).equals("b"))
		{
			Console.OUT.println("Seventh Argument should be 'b' if to use binary search, "
					+ "or none if to use linear search.");
			return;
		}
			
		val numAsync:Int= Int.parseInt(args(0));
		// over-partition so the parallel engine has work to balance
		val part:Int = PARTITION_FACTOR * numAsync;	
		val numTrials = Int.parseInt(args(1));
		val option:String = args(2);
		val dictDir = args(3);
		val inputfile:String = args(4);
		val numInputFile = Int.parseInt(args(5));
		// the condition already is the Boolean; no ?: needed
		val isBinSearch = args.size == 7 && args(6).equals("b");
		
		var serialTime:Array[Long] = new Array[Long](numTrials);
		var parallelTime:Array[Long] = new Array[Long](numTrials);
		var speedup:Float = 0;
		var inputIndex:Array[Int]= new Array[Int](numInputFile);
		
		val input = InvertDoc.makeInput(inputfile, inputIndex, numInputFile);
		if (input == null)
			return;

		val inv = new InvertDoc(input, part, numAsync, inputIndex, numInputFile, isBinSearch);
		// install callbacks on both the sequential and the parallel engine
		inv.set_smr_func_ptr("s");
		inv.set_smr_func_ptr("p");

		
		inv.makeDict(dictDir);
		// functional run 
		if (option.equals("func"))
		{
			Console.OUT.println( "Functionality Test Run Seq, ... " );
			inv.mres.MapReduceSchedulerSerial();
			Console.OUT.println( "Functionality Test Run Par, ... " );
			inv.mrep.MapReduceSchedulerPar();
			Console.OUT.println( "Functionality Test Done. " );
			
			inv.print_results();
			
			Console.OUT.println( "Validating results... " );
			if(inv.validate())
				Console.OUT.println("Test pass!");
			else
			{
				Console.OUT.println("************************ Test FAILED! ************************");
				return;
			}
			
		}
		
		// performance runs
		if (option.equals("perf"))
		{
			var timeSeq:Long;
			var timePar:Long;
			
			Console.OUT.println( "Performance Runs ... " );
			for ( t in 0..(numTrials-1) ) { 
				Console.OUT.println("Trial " + t + "...");
				
				timeSeq = System.nanoTime();
				inv.mres.MapReduceSchedulerSerial();
				serialTime(t) = (System.nanoTime()-timeSeq)/Meg;			
				
				timePar = System.nanoTime();
				inv.mrep.MapReduceSchedulerPar();
				parallelTime(t) = (System.nanoTime()-timePar)/Meg;
				
				// validate once, on the first trial only
				if (t == 0)
				{
					Console.OUT.println( "Validating results... " );
					if(inv.validate())
						Console.OUT.println("Test pass!");
					else
					{
						Console.OUT.println("************************ Test FAILED! ************************");
						return;
					}
				}
			}
			
			var avgSerialTime:Long = 0;
			var avgParallelTime:Long = 0;
			
			for (var i:Int = 0; i < serialTime.size; ++i)
			{
				avgSerialTime += serialTime(i);
				avgParallelTime += parallelTime(i);
			}
			
			avgSerialTime /= numTrials;
			avgParallelTime /= numTrials;
			
			// guard against division by zero when the parallel run rounds to 0 ms
			speedup = (avgParallelTime > 0) ? (avgSerialTime as Float)/avgParallelTime : Float.MAX_VALUE;
			
			Console.OUT.println("[Done.] Over " + numTrials + " trials, average time" 
					+ " to compute serially is " + avgSerialTime+",\n average time to compute parallel is "
					+ avgParallelTime+",\n speedup is "+ String.format("%.2f", new Array[Any](1, speedup))+".");
		}
		
		Console.OUT.println("Inverting Index Completed Here.");
	}
	
}