package markov;

import java.lang.reflect.Array;
import java.util.ArrayList;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import java.util.Map.Entry;

import java.util.HashMap;

import main.Word;


/**
 * Bigram POS tagger. Tags the word rows of a dev file sentence by sentence,
 * either greedily (Viterbi mode keeps one best node per word) or exhaustively
 * (noise-channel mode expands every tag combination and picks the best full path).
 *
 * <p>Only sentences shorter than {@link #N_limit} words are tagged; longer
 * sentences are left untouched.
 */
public class SmartTagger {

    /** Rows being tagged; each row is a String[] whose [0] is the word and [2] receives the tag. */
    private ArrayList<String[]> dwords;
    ArrayList<String[]> data;
    /** Sentence boundaries: indexes into {@link #dwords} where each new sentence starts. */
    private ArrayList<Integer> sentenceIndexes;
    /** Tag-transition table: previous tag -> (next tag -> probability). */
    HashMap<String, HashMap<String, Double>> posBigram;
    /** Emission table: word -> (candidate tag -> probability). */
    HashMap<String, HashMap<String, Double>> wordBigramWordIndex;
    /** When true, dumps every created search node to stdout. */
    public boolean print_debug_info;
    /**
     * Defines the word limit of a sentence for noise channel calculation
     */
    public int N_limit;

    /**
     * @param indexes                  sentence boundary indexes into {@code devfile}
     * @param devfile                  word rows to tag (mutated in place by {@link #core})
     * @param posProbability           tag-transition probabilities
     * @param wordProbabilityWordIndex word emission probabilities
     */
    public SmartTagger(ArrayList<Integer> indexes, ArrayList<String[]> devfile,
            HashMap<String, HashMap<String, Double>> posProbability,
            HashMap<String, HashMap<String, Double>> wordProbabilityWordIndex) {
        this.sentenceIndexes = indexes;
        this.posBigram = posProbability;
        this.wordBigramWordIndex = wordProbabilityWordIndex;
        print_debug_info = false;
        dwords = devfile;
    }

    /** Tags all eligible sentences with the greedy Viterbi search. */
    public ArrayList<String[]> viterbi() {
        return core(true);
    }

    /** Tags all eligible sentences with the exhaustive noise-channel search. */
    public ArrayList<String[]> noise_channel() {
        return core(false);
    }

    /**
     * Tags every sentence shorter than {@link #N_limit} words and writes each
     * predicted tag into column 2 of its row ("No type" when no tag is found).
     *
     * @param viterbi_enabled true for greedy Viterbi, false for exhaustive search
     * @return the (mutated) row list
     */
    public ArrayList<String[]> core(boolean viterbi_enabled) {
        int n = N_limit;

        // First sentence: rows [0, sentenceIndexes[0]).
        if (sentenceIndexes.get(0) < n) {
            tagSpan(0, sentenceIndexes.get(0), viterbi_enabled);
        }

        // Middle sentences: rows [sentenceIndexes[i], sentenceIndexes[i+1]).
        for (int i = 0; i < sentenceIndexes.size() - 1; i++) {
            if (sentenceIndexes.get(i + 1) - sentenceIndexes.get(i) < n) {
                tagSpan(sentenceIndexes.get(i), sentenceIndexes.get(i + 1), viterbi_enabled);
            }
        }

        // Trailing sentence: rows [last boundary, end of file).
        // BUGFIX: the original computed lastStart - dwords.size() (negative whenever
        // trailing rows exist), so the length guard always passed and the array
        // allocation threw NegativeArraySizeException; it also indexed sentence[]
        // and tags[] with absolute row numbers instead of span-relative offsets.
        int lastStart = sentenceIndexes.get(sentenceIndexes.size() - 1);
        if (dwords.size() - lastStart < n) {
            tagSpan(lastStart, dwords.size(), viterbi_enabled);
        }
        return dwords;
    }

    /**
     * Tags the words in rows [from, to) of {@link #dwords}, writing each tag into
     * column 2 of its row, or "No type" when no tag could be derived.
     */
    private void tagSpan(int from, int to, boolean viterbiEnabled) {
        String[] sentence = new String[to - from];
        for (int j = from; j < to; j++) {
            sentence[j - from] = dwords.get(j)[0];
        }
        String[] tags = tagSentence(sentence, viterbiEnabled);
        for (int j = from; j < to; j++) {
            String tag = (tags == null) ? null : tags[j - from];
            dwords.get(j)[2] = (tag != null) ? tag : "No type";
        }
    }

    /**
     * Runs the tag search over one sentence.
     *
     * @param sentence        the words, in order (non-empty)
     * @param VITERBI_ENABLED viterbi switch: greedy (one node per level) vs exhaustive
     * @return a list of tags for the given sentence; null when the sentence is
     *         empty, contains an unknown word, or no search node could be built
     */
    private String[] tagSentence(final String[] sentence, boolean VITERBI_ENABLED) {
        if (sentence.length == 0) {
            return null;
        }

        String[] tags = new String[sentence.length]; // result, filled right-to-left below

        // Upper bound on the number of search-tree nodes.
        int maxTreeSize = 1;
        if (VITERBI_ENABLED) {
            // Greedy mode keeps at most one node per word.
            maxTreeSize = sentence.length + 5;
        } else {
            // Exhaustive mode: level k can hold (nodes at k-1) * (candidate tags of word k).
            for (String word : sentence) {
                int previousSize = maxTreeSize;
                Map<String, Double> candidates = wordBigramWordIndex.get(word);
                if (candidates == null) {
                    return null; // unknown word: no path can be built
                }
                maxTreeSize *= candidates.size();
                if (maxTreeSize == 0) {
                    return tags; // a word with zero candidate tags kills every path
                }
                maxTreeSize += previousSize;
                if (maxTreeSize > 100000 || maxTreeSize < 0) { // < 0 catches int overflow
                    maxTreeSize = 200000; // hard cap on array size
                    break;
                }
            }
        }

        // Flat arrays encoding the search tree; the array index is the node id.
        int[] parents = new int[maxTreeSize]; // parent node id
        Arrays.fill(parents, -1); // -1 = "no parent yet"
        String[] types = new String[maxTreeSize]; // tag of each node
        int[] depthMatrix = new int[maxTreeSize]; // word position of each node
        Arrays.fill(depthMatrix, -1); // -1 = "slot unused"
        double[] probs = new double[maxTreeSize]; // cumulative path probability per node
        int i = 0; // next free node id
        int depth = 0; // current word position
        int sentenceLength = sentence.length;

        // Best-path monitor.
        double currentHighestProb = 0;
        int bestIndex = 0; // node id of the most probable (partial) path seen so far

        for (String s : sentence) { // one tree level per word
            Map<String, Double> candidates = wordBigramWordIndex.get(s);
            if (candidates == null) {
                // Unknown word. The exhaustive mode pre-checked this above; in Viterbi
                // mode the original dereferenced null here — report "no result" instead.
                return null;
            }

            if (VITERBI_ENABLED) {
                currentHighestProb = 0; // reset per level: keep only the best node
            }

            if (depth == 0) {
                // Root level: no predecessor, so the emission probability stands alone.
                for (Entry<String, Double> currentPos : candidates.entrySet()) {
                    if (VITERBI_ENABLED) {
                        if (currentPos.getValue() > currentHighestProb) {
                            if (depthMatrix[0] == depth) {
                                i--; // overwrite the previous (worse) level-0 node
                            }
                            bestIndex = i;
                            currentHighestProb = currentPos.getValue();
                        } else {
                            continue;
                        }
                    }
                    probs[i] = currentPos.getValue();
                    parents[i] = i; // root nodes point to themselves
                    depthMatrix[i] = depth;
                    types[i] = currentPos.getKey();
                    if (print_debug_info) {
                        System.out.println("depth: " + depthMatrix[i] + " parent: " + parents[i]
                                + " type: " + types[i] + " i: " + i + " prob: " + probs[i]);
                    }
                    i++;
                }
            } else {
                for (Entry<String, Double> currentPos : candidates.entrySet()) {
                    // Walk back to the last node of the previous level...
                    int lastParent = i;
                    while (lastParent >= 0 && depthMatrix[lastParent] != depth - 1) {
                        lastParent--;
                    }
                    // ...then visit every node of that level.
                    for (int j = lastParent; j >= 0 && depthMatrix[j] == (depth - 1); --j) {
                        if (posBigram.get(
                                types[j] // parent tag
                                ).containsKey(currentPos.getKey())) { // transition exists?

                            // path prob = parent prob * P(word|tag) * P(tag|parent tag)
                            double tmpProb = probs[j] * currentPos.getValue()
                                    * posBigram.get(types[j]).get(currentPos.getKey());

                            // Track the best path: on every level in Viterbi mode,
                            // only at the final word in exhaustive mode.
                            if (depth == (sentenceLength - 1) || VITERBI_ENABLED) {
                                if (tmpProb > currentHighestProb) {
                                    if (depthMatrix[i - 1] == depth) {
                                        i--; // overwrite the previous (worse) node of this level
                                    }
                                    bestIndex = i;
                                    currentHighestProb = tmpProb;
                                } else {
                                    continue;
                                }
                            }
                            probs[i] = tmpProb;
                            parents[i] = j;
                            types[i] = currentPos.getKey();
                            depthMatrix[i] = depth;

                            if (print_debug_info) {
                                System.out.println("depth: " + depthMatrix[i] + " parent: "
                                        + parents[i] + " type: " + types[i] + " i: " + i
                                        + " prob: " + probs[i]);
                            }
                            ++i;
                        }
                    }
                }
            }
            depth++;
        }

        if (depthMatrix[bestIndex] < 0) {
            // No node was ever created (e.g. a word whose candidate map is empty);
            // the original walked uninitialized arrays here.
            return null;
        }

        // Path reconstructor: follow parent links from the best leaf back to the
        // root, filling the tags array right-to-left.
        int j = bestIndex;
        int tagsIndex = 1;
        while (true) {
            if (print_debug_info) {
                System.out.println(types[j]);
            }
            tags[tags.length - tagsIndex] = types[j];
            if (depthMatrix[j] == 0) {
                break; // reached the root
            }
            j = parents[j];
            tagsIndex++;
        }
        return tags;
    }
}
