package geppetto.phraseHMM;


import geppetto.cat.alignments.Alignment;
import geppetto.cat.alignments.AlignmentEvaluator;
import geppetto.cat.alignments.AlignmentStats;
import geppetto.cat.alignments.AlignmentsSet;
import geppetto.cat.alignments.AlignmentEvaluator.Evaluation;
import geppetto.cat.common.MyArrays;
import geppetto.cat.common.StaticTools;
import geppetto.cat.constrains.ConstrainedProjectionStats;
import geppetto.cat.constrains.SentenceConstrainedProjectionStats;
import geppetto.cat.corpus.BilingualCorpus;
import geppetto.cat.models.AbstractDoubleDistortionTable;
import geppetto.cat.models.AbstractSparseTranslationTable;
import geppetto.cat.models.M1;
import geppetto.cat.models.SparseCountTable;
import geppetto.cat.models.SparseTranslationTable;

import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;





public class RegularHMM extends IBMM1{

	
	
	
	// Forward Backward tables (scaled alpha/beta trellises, sized in the
	// constructor / initializeStructures to the corpus maxima; the state
	// dimension is 2 * maxSourceLen: real positions plus their NULL copies).
	public Trellis forward;
	public Trellis backward;
	
	//Contains for each position t the 
	// sum over all N of forward*backward
	// This is used when computing posteriors
	//Kind of likelihood but not quite since now
	//We have the likelihood scalled.
	
	
	// Distortion (transition) model and its fractional-count accumulator.
	public AbstractDoubleDistortionTable _distortion;
	public AbstractDoubleDistortionTable _countDist;
	// Number of distance bins used by the distortion tables.
	public int distortionSize = 11;
	
	

//	SparseTranslationTable _tb;
//	SparseCountTable _count;
	
	// Cache for probabilities
	// transitionCache row `curr` holds normalized p(next | curr) for the
	// current sentence; rebuilt by makeTransitionCache when _sSize changes.
	public Trellis transitionCache;
	public boolean transitionCacheValid = false;
	public boolean initCacheValid = false;
	// initCache[i]: normalized initial-state probability pi(i), built by makeInitCache.
	public double[] initCache;

	// _transitionPosteriors[t]: per-time-step epsilon table (Rabiner's xi),
	// filled in updateTransitionCounts.
	public Trellis[] _transitionPosteriors;
	
	
	/**
	 * Builds an HMM with freshly created translation and distortion tables.
	 *
	 * @param corpus    bilingual training corpus
	 * @param smoothing add-k smoothing forwarded to the IBM Model 1 base class
	 */
	public RegularHMM(BilingualCorpus corpus, double smoothing) {
		this(corpus, null, smoothing);
	}

	/**
	 * Builds an HMM reusing an existing translation table; the distortion
	 * table is created internally.
	 *
	 * @param corpus    bilingual training corpus
	 * @param tt        translation table to reuse, or null to build a fresh one
	 * @param smoothing add-k smoothing forwarded to the IBM Model 1 base class
	 */
	public RegularHMM(BilingualCorpus corpus, AbstractSparseTranslationTable tt, double smoothing) {
		this(corpus, tt, null,smoothing);
	}

	/**
	 * Full constructor: sizes every trellis and cache from the corpus maxima
	 * and installs (or creates) the translation and distortion tables.
	 *
	 * @param corpus     bilingual training corpus
	 * @param tt         translation table to reuse, or null to build a fresh one
	 * @param distortion distortion table to reuse, or null to build a fresh one
	 * @param smoothing  add-k smoothing forwarded to the IBM Model 1 base class
	 */
	public RegularHMM(BilingualCorpus corpus, AbstractSparseTranslationTable tt,
			AbstractDoubleDistortionTable distortion, double smoothing) {
		super(corpus, smoothing);
		int maxsSize = corpus.getMaxSourceLen();
		int maxfSize = corpus.getMaxForeignLen();
		// Forward/backward trellises; the state dimension is doubled to hold
		// each position's NULL copy alongside the real source positions.
		forward = new Trellis(maxfSize, maxsSize * 2, true);
		backward = new Trellis(maxfSize, maxsSize * 2, true);

		// Note posteriors[fSize][sourceIndex] contains the row sum which is
		// the expected number of times in state i.
		_statePosteriors = new Trellis(maxfSize + 1, maxsSize * 2, true);
		_transitionPosteriors = new Trellis[maxfSize];
		for (int i = 0; i < maxfSize; i++) {
			_transitionPosteriors[i] = new Trellis(maxsSize * 2, maxsSize * 2 + 1); // +1 keeps the sums
		}
		// Make caches.
		_probCache = new Trellis(maxfSize, maxsSize + 1);
		transitionCache = new Trellis(maxsSize, maxsSize * 2);
		initCache = new double[maxsSize * 2];
		java.util.Arrays.fill(initCache, -1);
		// Reuse the maxima fetched above instead of re-querying the corpus.
		int biggestSent = Math.max(maxsSize, maxfSize) * 2;
		if (distortion != null) {
			_distortion = distortion;
		} else {
			_distortion = new DoubleDistortionTable(distortionSize, 1, biggestSent);
		}
		_countDist = new DoubleDistortionTable(distortionSize, biggestSent);
		if (tt != null) {
			_tb = tt;
		} else {
			_tb = new SparseTranslationTable(corpus);
			_tb.initializeCoOcurrences();
		}
		_count = new SparseCountTable(_corpus);
	}

	
	/**
	 * No-arg constructor used by {@link #loadModel}; the caller must set the
	 * corpus and tables and then call {@link #initializeStructures()}.
	 */
	public RegularHMM(){
			
	}
	
	/** @return human-readable model name used in logs and model stamps */
	public String getName() {
		return "Regular HMM";
	}

	/**
	 * (Re)allocates trellises and caches sized from the corpus maxima. Called
	 * after the tables have been loaded from disk, since the no-arg
	 * constructor allocates nothing.
	 */
	public void initializeStructures(){
		final int srcMax = _corpus.getMaxSourceLen();
		final int forMax = _corpus.getMaxForeignLen();
		// Forward/backward tables: one row per foreign position, 2*srcMax
		// states (real source positions plus their NULL copies).
		forward = new Trellis(forMax, srcMax * 2, true);
		backward = new Trellis(forMax, srcMax * 2, true);
		// The extra row (index forMax) accumulates the per-state sums, i.e.
		// the expected number of times each state is occupied.
		_statePosteriors = new Trellis(forMax + 1, srcMax * 2, true);

		// Probability caches.
		_probCache = new Trellis(forMax, srcMax + 1);
		transitionCache = new Trellis(srcMax, srcMax * 2);
		initCache = new double[srcMax * 2];
		java.util.Arrays.fill(initCache, -1);

		// TODO this should live in IBMM1's structure initialization.
		_inverseLikelihoodScalors = new double[forMax];
	}
	
//	 ///// Save and load models from file
	/**
	 * Persists the model tables under the given directory, in addition to
	 * whatever the IBM Model 1 base class saves.
	 *
	 * @param directory output directory
	 */
	public void saveModel(String directory) {
		super.saveModel(directory);

		// Guard required for descendant classes that have no word translation
		// table but instead a phrase table.
		if(_tb != null){
			((SparseTranslationTable)_tb).saveTable(directory);
		}
		// Mirror the _tb guard: a descendant without a distortion table would
		// otherwise NPE here.
		if(_distortion != null){
			((DoubleDistortionTable)_distortion).saveTable(directory);
		}
	}

	/**
	 * Writes a description of this model's configuration to the given stream,
	 * appending the distortion bin count to the base-class stamp.
	 */
	public void printStamp(PrintStream file){
		super.printStamp(file);
		String binsLine = "Distortion Nr Bins: " + distortionSize;
		file.println(binsLine);
	}
	
	/**
	 * Reconstructs a trained RegularHMM from the tables previously written by
	 * {@link #saveModel(String)}. Exits the JVM if the stored corpus
	 * description does not match the given corpus.
	 *
	 * @param corpus    corpus the model was trained on
	 * @param directory directory holding the saved tables
	 * @return a ready-to-decode model (marked as trained)
	 */
	public static RegularHMM loadModel(BilingualCorpus corpus, String directory) {
		System.out.println(corpus.getName());
		System.out.println(directory);
		RegularHMM hmm = new RegularHMM();
		hmm._corpus = corpus;
		if (!corpus.checkDescription(directory)) {
			System.out.println("Corpus is not the same");
			System.exit(1);
		}
		hmm._tb = SparseTranslationTable.LoadTranslationTable(corpus, directory);
		hmm._distortion = new DoubleDistortionTable(directory);
		hmm._trained = true;
		hmm.initializeStructures();
		return hmm;
	}
	
	



	/**
	 * Sum the forward table at time t for all possible states to get the likelihood of a ginve sentence
	 * A debug check can be made by comparing agains all other position by suming forward*backward
	 * @param sSize
	 * @param fSize
	 * @return
	 */
	/**
	 * Currently a no-op: with the scaled forward recursion the sentence
	 * likelihood is accumulated from the per-position scaling factors inside
	 * makeForwardTables, so the direct summation below is disabled. The
	 * commented debug code compares the likelihood computed at every position
	 * (sum over states of forward*backward) as a consistency check.
	 *
	 * @param sSize source sentence length
	 * @param fSize foreign sentence length
	 */
	public void makeLikelihood(int sSize, int fSize) {
//		_scaledLikelihood = 0;
//		for (int i = 0; i < sSize * 2; i++) {
//			_scaledLikelihood += forward.getProb(fSize - 1, i);
//		}
		//checkNumberErrors(_scaledLikelihood, "", "Making likelihood ");

		/*
		  System.out.println("Printing likelihood for positions"); 
		  double[] likelihoodA = new double[fSize]; 
		  for(int f = 0; f < fSize-1; f++){
			  for(int i = 0; i < sSize*2; i++){
				  likelihoodA[f]+=forward.getProb(f,i)*backward.getProb(f, i); 
				  } 
			  }
		  for(int i = 0; i < sSize*2; i++){
			  likelihoodA[fSize-1]+=forward.getProb(fSize-1,i); 
			  } 
		  	for(int f = 0; f <fSize; f++){ 
		  		System.out.println("Position " + f + " = " +likelihoodA[f]); 
			 }
		*/
	}

	//TODO Should do same protocol as m1 or dissapear
	//TODO Should do same protocol as m1 or disappear
	/** Prepares training: resets base-class state and allocates a fresh
	 * distortion count table sized for the longest sentence in the corpus. */
	public void initializeTrain() {
		super.initializeTrain();
		final int longestSent =
				2 * Math.max(_corpus.getMaxSourceLen(), _corpus.getMaxForeignLen());
		_countDist = new DoubleDistortionTable(distortionSize, longestSent);
	}
	//TODO Should do same protocol as m1 or dissapear
	public void finalizeTrain() {
		_trained = true;
	}
	
	
	/**
	 * Compute all posteriors required
	 *
	 */
	/**
	 * Runs the E-step quantities for the current sentence: the scaled forward
	 * and backward passes, the likelihood bookkeeping, and the state-occupancy
	 * posteriors. Transition posteriors are computed later, inside
	 * updateTransitionCounts.
	 *
	 * @param sSize source sentence length
	 * @param fSize foreign sentence length
	 */
	public void makePosteriors(int sSize, int fSize){
		//Here we need extra information required to compute the posteriors
	
		makeForwardTables(sSize, fSize);
		makeBacwardTables(sSize, fSize);
		makeLikelihood(sSize, fSize);
		makeStatePosterior(sSize, fSize);
//		makeTransitionPosteriors();
//		System.out.println("Sentence Number " + _sentenceNumber);
//		_statePosteriors.print(fSize, sSize*2);
	}
	
	
	/*
	 * Epsilon - Probability of being in state i at time t and at state j at time t+1
	 * Rabineri 37
	 */
//	public void makeTransitionPosteriors(){
////		System.out.println(" Transition cahe");
////		printTransitionCache(_sSize, _fSize);
//		_transitionPosteriors.clear();
//		for (int currentState = 0; currentState < _sSize * 2; currentState++) {
//			for (int nextState = 0; nextState < _sSize * 2; nextState++) {
//				double transition = getTransitionProbability(currentState,
//						nextState, _sSize, _fSize);
//				double epsilon = 0;
//				for (int pos = 0; pos < _fSize - 1; pos++) {
//					double observation = getObservationProbability(pos + 1,
//							nextState);
//					double alpha = forward.getProb(pos, currentState);
//					double beta = backward.getProb(pos + 1, nextState);
//					// System.out.println("observation" + observation + "aplha"
//					// + alpha + " beta " + beta + " transition " + transition);
//					epsilon += alpha * transition * observation * beta;
//				}
//				epsilon = epsilon / _scaledLikelihood; // Numerator
//				double prob = epsilon
//						/ _statePosteriors.getProb(_fSize, currentState);
//				_transitionPosteriors.setProb(currentState, nextState, prob);
//			}
//		}
//		System.out.println("Make transition posteriors");
//		_transitionPosteriors.printWithSum(_sSize*2, _sSize*2);
		
		
		//Sanity check. They have to sum to one over the next state
//		for (int currentState = 0; currentState < _sSize * 2; currentState++) {
//			double sum = 0;
//			for (int nextState = 0; nextState < _sSize * 2; nextState++) {
//				sum += _transitionPosteriors.getProb(currentState, nextState);
//			}
//			if (Math.abs(1 - sum) > 1.0E-1) {
//				//printParametersTables();
//				//printForwardBackwardTables();
//				System.out.println("transition posterior for position is far from one" +  sum);
//				System.exit(-1);
//			}
//
//		}
//		
//	}
	
	/**
	 * Update the counts for each sentence. 
	 *
	 */
	/**
	 * Accumulates this sentence's expected (fractional) counts into the count
	 * tables: observation (translation), transition (distortion), and
	 * initial-state counts.
	 */
	public void updateFractionalCounts() {
		updateObservationCounts();
		updateTransitionCounts();
		updateInitCounts();
	}

	/**
	 * Refreshes the per-sentence probability caches. The transition and init
	 * caches are rebuilt only when the source length changed or the transition
	 * cache was explicitly invalidated; the init cache needs no separate check
	 * because both caches are invalidated together.
	 */
	public void makeCaches(){
		makeWordTranslationProbCache();
		// No need to check if initchace is valid since they are invalidated
		// at the same time
		if (_previousSSize != _sSize || !transitionCacheValid) { 
        //No need to invalidate if sSize is the same
			makeTransitionCache();
			makeInitCache();
		}
	}
	
	/** Zeroes all count tables (base-class counts plus the distortion counts)
	 * at the start of an EM iteration. */
	public void initializeCounts(){
		super.initializeCounts();
		_countDist.clear();
	}
	/**
	 * M-step: re-estimates translation, transition (distortion) and
	 * initial-state probabilities from the accumulated fractional counts.
	 * The commented calls are optional sanity checks / debug dumps.
	 */
	public void mStep() {
		updateTranslationProbabilities((SparseTranslationTable)_tb, _count);
		//sanityCheckTranslationProbs();
		updateTransitionProbs();
	//	((DoubleDistortionTable)_distortion).printTable();
	//	sanityCheckTransitionProbs();
		updateInitProbs();
	//	sanityCheckInitProbs();
	}

	/** Debug dump of the model parameters: init cache, distortion table, and
	 * (when uncommented) the translation table. */
	public void printParametersTables() {
		System.out.println("Init");
		printInitCache();
		// _initProbs.printTable();
		System.out.println();
		// System.out.println("distortion");
		//printTransitionCache(sourceSentenceIDS.length,foreignSentenceIDS.length);
		// _distortion.print();
		 ((DoubleDistortionTable)_distortion).printTable();
		System.out.println();
		System.out.println("translation");
	//	printWordTranslationProbChache();
	//	 ((SparseTranslationTable)_tb).printTable(System.out);
		System.out.println();
	}

	/** Print all posteriors tables */
	public void printPosteriors() {
		System.out.println("posteriors");
		_statePosteriors.print(_foreignSentenceIDS.length,
				_sourceSentenceIDS.length * 2);
		
	}

	
	
	
	/** Debug dump: prints the current sentence pair and the scaled forward and
	 * backward trellises. */
	public void printForwardBackwardTables() {
		// Use the imported MyArrays consistently (was fully qualified once).
		MyArrays.printIntArray(_foreignSentenceIDS, "Foreign Sentence");
		MyArrays.printIntArray(_sourceSentenceIDS, "Source Sentence");
		System.out.println();
		System.out.println("forward"); // fixed label typo ("forwardr")
		forward.print(_foreignSentenceIDS.length, _sourceSentenceIDS.length * 2);
		System.out.println();
		System.out.println("backward");
		backward.print(_foreignSentenceIDS.length, _sourceSentenceIDS.length * 2);
		System.out.println();
	}
	
// Shared formatter for formatTime. NOTE(review): DecimalFormat is not
// thread-safe and its pattern is mutated below — fine single-threaded.
java.text.DecimalFormat fmt = new java.text.DecimalFormat();
	
	/**
	 * Formats a duration in milliseconds as "HH:MM:SS.s".
	 *
	 * @param duration elapsed time in milliseconds
	 * @return zero-padded hours:minutes:seconds string
	 */
	private String formatTime(long duration) {
		StringBuilder sb = new StringBuilder();
		// Floating-point division so fractions of a second survive; the
		// previous integer division truncated them, making "00.0" always ".0".
		double d = duration / 1000.0;
		fmt.applyPattern("00");
		sb.append(fmt.format((int) (d / (60 * 60))) + ":");
		d -= ((int) d / (60 * 60)) * 60 * 60;
		sb.append(fmt.format((int) (d / 60)) + ":");
		d -= ((int) d / 60) * 60;
		fmt.applyPattern("00.0");
		sb.append(fmt.format(d));
		return sb.toString();
	}
	

	/**
	 * Rebuilds the per-sentence transition cache: for each current state, the
	 * distortion probabilities to every one of the 2*_sSize successor states,
	 * normalized so each row sums to one (or zeroed when degenerate). The
	 * foreign length is not used here but matters in subclasses.
	 */
	public void makeTransitionCache() {
		for (int from = 0; from < _sSize; from++) {
			double rowSum = 0;
			// First pass: raw distortion probabilities plus the row sum.
			for (int to = 0; to < _sSize * 2; to++) {
				final double p;
				if (to < _sSize) {
					p = _distortion.getDistProb(to, from, _sSize);
				} else {
					// States >= _sSize are the NULL copies of each position.
					p = _distortion.getNullDistProb(to - _sSize, from, _sSize);
				}
				checkNumberErrors(p, "", "Prob on making dist cache ", false);
				checkNumberErrors(rowSum, "", "Norm on making dist cache ", false);
				transitionCache.setProb(from, to, p);
				rowSum += p;
			}
			// Second pass: normalize the row, guarding against a ~zero sum.
			for (int to = 0; to < _sSize * 2; to++) {
				if (rowSum < 1.0E-200) {
					transitionCache.setProb(from, to, 0);
				} else {
					double normalized = transitionCache.getProb(from, to) / rowSum;
					transitionCache.setProb(from, to, normalized);
				}
			}
		}
		transitionCacheValid = true;
	}

	/**
	 * Debug dump of the transition cache as a matrix: rows are current states,
	 * columns are successor states ("s" for real positions, "ns" for NULL
	 * copies).
	 *
	 * @param sourceLen  source sentence length
	 * @param foreingLen foreign sentence length (unused in the printout)
	 */
	void printTransitionCache(int sourceLen, int foreingLen) {
		System.out.print("\t");
		for (int next = 0; next < sourceLen * 2; next++) {
			if (next < sourceLen)
				System.out.print("      s" + next + "     \t");
			else
				System.out.print("     ns" + (next - sourceLen) + "    \t");
		}
		System.out.println();
		for (int curr = 0; curr < sourceLen; curr++) { // current state (state
														// at time t)
			System.out.print("s" + curr + "     \t");
			for (int next = 0; next < sourceLen * 2; next++) { // next state
																// (state at
																// t+1)
				System.out.print(StaticTools.prettyPrint(transitionCache
						.getProb(curr, next), "00.00E00", 6)
						+ "\t");
				// System.out.print(StaticTools.prettyPrint(transitionCache[curr][next],
				// "00.00E00", 6) + "\t");
			}
			System.out.println();
		}

	}

	/**
	 * 
	 * @param currentPos
	 * @param nextPosition
	 * @return
	 */
	/**
	 * Cached transition probability p(nextPosition | currentPos). A NULL state
	 * (index >= sourceLen) shares the cache row of its underlying source
	 * position, hence the {@code currentPos % sourceLen}.
	 *
	 * @param currentPos   state at time t (0 .. 2*sourceLen-1)
	 * @param nextPosition state at time t+1 (0 .. 2*sourceLen-1)
	 * @param sourceLen    source sentence length
	 * @param foreignLen   unused here; kept for subclass overrides
	 * @return normalized transition probability from the cache
	 */
	public final double getTransitionProbability(int currentPos, int nextPosition,
			int sourceLen, int foreignLen) {

		
		double prob = transitionCache.getProb(currentPos % sourceLen,
				nextPosition);
//		 FIXME debug code
		/*
		if (Double.isNaN(prob)) {
			printTransitionCache(sourceLen, foreignLen);
			System.out.println("Prev " + currentPos + "next " + nextPosition
					+ " sourceLen " + sourceLen + " new prve "
					+ (currentPos % sourceLen));
			System.exit(-1);
		}
		*/
		return prob;
	}

	/**
	 * Rabiner 40b and scaled version of 98
	 * Epsilon is the probability of being in state i at time t and state j at time t+1
	 */
	/**
	 * Accumulates transition (distortion) fractional counts.
	 * Rabiner 40b and the scaled version of 98: epsilon (xi) is the
	 * probability of being in state i at time t and state j at time t+1.
	 * Phase 1 fills _transitionPosteriors[t] with unnormalized epsilons from
	 * the scaled forward/backward tables; phase 2 sums them over time and adds
	 * the totals to _countDist (NULL states map back to their base position
	 * via {@code % _sSize}). Non-finite sums are replaced by zero with a
	 * warning instead of aborting.
	 */
	public void updateTransitionCounts() {
//		NOTE javg 15-5-2009 removed normalizer since it was not correct. results previous to this date
		// are not correct
		// Sentences of one foreign word have no transitions.
		if (_fSize == 1)
			return;
		
		for (int pos = 0; pos < _fSize - 1; pos++) {
			for (int currentState = 0; currentState < _sSize * 2; currentState++) {
				for (int nextState = 0; nextState < _sSize * 2; nextState++) {
					double transition = getTransitionProbability(currentState,nextState, _sSize, _fSize);                                                                                                        					
					 double observation = getObservationProbability(pos + 1,  nextState);                                                                                                                
					 double alpha = forward.getProb(pos, currentState);                                                                                         
					 double beta = backward.getProb(pos + 1, nextState);                                                                                        
					 double epsilon = alpha * transition * observation * beta;						  				                                
				 _transitionPosteriors[pos].setProb(currentState, nextState, epsilon);
			
				}	 
			}
//			System.out.println("Epsilon at time t " + pos);
//			_transitionPosteriors[pos].printWithSum(_sSize*2, _sSize*2);
		}
		
		
		// Phase 2: sum epsilon over all time steps and add to the count table.
		for (int currentState = 0; currentState < _sSize * 2; currentState++) {
			for (int nextState = 0; nextState < _sSize * 2; nextState++) {
				double epsilonSum = 0;
				for (int pos = 0; pos < _fSize - 1; pos++) {
					epsilonSum += _transitionPosteriors[pos].getProb(currentState, nextState);
				}
				double prob = epsilonSum;
				if(Double.isNaN(prob) || Double.isInfinite(prob)){
					System.out.println("Updating counts for transition prob not a number");
					prob =0;
				}
				if (nextState < _sSize) {
					_countDist.addToCount(prob, currentState % _sSize,
					nextState);
				} else {
					_countDist.addToNullCount(prob, currentState % _sSize,nextState - _sSize);
				}
			}
		}
		
		
		//Sanity check for a given current source position they have to sum to one.
//		for (int currentState = 0; currentState < _sSize * 2; currentState++) {
//			double sum = 0;
//			for (int nextState = 0; nextState < _sSize * 2; nextState++) {
//				double epsilonSum = 0;
//				double gammaSum = 0;
//				for (int pos = 0; pos < _fSize - 1; pos++) {
//					epsilonSum += _transitionPosteriors[pos].getProb(currentState, nextState);
//					gammaSum += _statePosteriors.getProb(pos, currentState);
//				}
//				sum+=epsilonSum/gammaSum;
//			}
//			if (Math.abs(1 - sum) > 1.0E-1) {
//				System.out.println("update of transition counts is not summing to one "  + " " + sum);
//				System.exit(-1);
//			}
//		}
		
	}

	/**
	 * Rebuilds the initial-state probability cache pi(i) from the distortion
	 * table's first-position probabilities, normalized to sum to one over all
	 * 2*_sSize states. Validity tracks the transition cache since both are
	 * rebuilt together in makeCaches.
	 */
	public void makeInitCache() {
		double sum = 0;
		for (int i = 0; i < _sSize * 2; i++) {
			double prob = _distortion.getFirstProb(i, _sSize);
			initCache[i] = prob;
			sum += prob;
		}
		// checkNumberErrors(sum, "", "Init prob cache normailizer " );
		for (int i = 0; i < _sSize * 2; i++) {
			initCache[i] /= sum;
		}
		initCacheValid = transitionCacheValid;
	}

	// This represtes the probability table pi(j)

	/** Debug dump of the pi(j) initial-state probability cache. */
	void printInitCache() {
		System.out.println("Init Cache");
		StringBuilder line = new StringBuilder();
		for (double p : initCache) {
			line.append(p).append(' ');
		}
		System.out.print(line);
		System.out.println();
	}

	/**
	 * Initial-state probability pi(sourceIndex) from the cache prebuilt by
	 * {@link #makeInitCache()} for the current sentence.
	 *
	 * @param sourceIndex state index (0 .. 2*sourcelen-1)
	 * @param sourcelen   source sentence length; unused here, kept for
	 *                    subclass overrides
	 * @return cached, normalized initial probability
	 */
	public final double getInitProb(int sourceIndex, int sourcelen) {
		return initCache[sourceIndex];
	}

	// Rabiner 40(a)
	public void updateInitCounts() {
		for (int sourceIndex = 0; sourceIndex < _sourceSentenceIDS.length * 2; sourceIndex++) {
			double prob =  _statePosteriors.getProb(0, sourceIndex);
			if(Double.isInfinite(prob) || Double.isNaN(prob)){
				System.out.println("Update init counts not a number");
				prob=0;
			}
			_countDist.addToFirst(prob,
					sourceIndex);
		}
		// Debug code
		/*
		 * double sum = 0; for(int sourceIndex=0; sourceIndex<
		 * sourceSentenceIDS.length*2; sourceIndex++){ sum +=
		 * posteriors.getProb(0, sourceIndex); } if(!closeToOne(sum)){
		 * System.out.println("Update Init Counts do not sum to one " + sum);
		 * System.exit(-1); }
		 */
	}


	/**
	 * Observation (emission) probability of the foreign word at foreignIndex
	 * given the state sourceIndex. All NULL states share a single cache column
	 * at index _sourceSentenceIDS.length. The result is floored at 1.0E-50 so
	 * unseen words never produce an exact zero.
	 */
	public final double getObservationProbability(int foreignIndex, int sourceIndex) {
		final int column = (sourceIndex < _sourceSentenceIDS.length)
				? sourceIndex
				: _sourceSentenceIDS.length; // shared NULL column
		final double prob = _probCache.getProb(foreignIndex, column);
		// Floor for the case of unseen words.
		return (prob < 1.0E-50) ? 1.0E-50 : prob;
	}

	// Rabiner 40c
	public void updateObservationCounts() {
		double smoothValue = _smoothing;
		for (int sourceIndex = 0; sourceIndex < _sSize * 2; sourceIndex++) {
			for (int pos = 0; pos < _fSize; pos++) {
				double prob = _statePosteriors.getProb(pos, sourceIndex);
				if(Double.isInfinite(prob) || Double.isNaN(prob)){
					System.out.println("Updating counts for translation prob not a number");
					prob=0;
				}
				if (sourceIndex < _sSize) {
					_count.addToCount(_sourceSentenceIDS[sourceIndex],
							_foreignSentenceIDS[pos], prob+smoothValue);
				} else {
					_count.addToNullCount(_foreignSentenceIDS[pos], prob+smoothValue);
				}
			}
		}
	}

	/**
	 * Aborts the JVM (after dumping debug tables) when prob is NaN, or when it
	 * is effectively zero and checkZero is set.
	 *
	 * @param prob       value to validate
	 * @param quantities description of the quantities involved, for the dump
	 * @param place      description of where the check happens
	 * @param checkZero  whether a near-zero value (&lt; 1.0E-200) is an error
	 */
	public final void checkNumberErrors(double prob, String quantities,
			String place, boolean checkZero) {
		if (Double.isNaN(prob) || (prob < 1.0E-200 && checkZero)) {
			printForwardBackwardTables();
			printParametersTables();
			System.out.println(quantities);
			System.out.println(place + prob);
			System.exit(-1);
		}

	}

	/**
	 * Strict variant: aborts the JVM (after dumping caches and trellises) when
	 * prob is NaN or near zero (&lt; 1.0E-200).
	 *
	 * @param prob       value to validate
	 * @param quantities description of the quantities involved, for the dump
	 * @param place      description of where the check happens
	 */
	public final void checkNumberErrors(double prob, String quantities,
			String place) {
		if (Double.isNaN(prob) || prob < 1.0E-200) {
			System.out.println("Sentence Number " + _sentenceNumber + " fsize " + _fSize + " sSize " + _sSize);
			printCaches();
			printForwardBackwardTables();
			//printParametersTables();
			System.out.println(quantities);
			System.out.println(place + prob);
			System.exit(-1);
		}
	}

	
	//Return the unscaled likelihood 
	//Rabineri 102
	// Rabiner eq. 102.
	// NOTE(review): despite the name suggesting the unscaled likelihood, this
	// returns _scaledLikelihood — the product of the per-position scaling
	// factors accumulated in makeForwardTables. Confirm callers expect that.
	public double getRealLikelihood(){
		return _scaledLikelihood;
	}



	/** Debug dump of all per-sentence caches: init, transition, and word
	 * translation probabilities. */
	public void printCaches(){
		printInitCache();
		printTransitionCache(_sSize, _fSize);
		printWordTranslationProbChache();
	}
	
	/**
	 * Scaled forward table.
	 * Rabineri92
	 */
	/**
	 * Scaled forward pass (Rabiner 92). Each row of the forward trellis is
	 * normalized by its sum (the scaling factor), and the per-position factors
	 * are stored in _inverseLikelihoodScalors; their product gives
	 * _scaledLikelihood and the sum of their logs gives _logLikelihood.
	 *
	 * @param sSize source sentence length
	 * @param fSize foreign sentence length
	 */
	public void makeForwardTables(int sSize, int fSize) {
		java.util.Arrays.fill(_inverseLikelihoodScalors, 0);
		_scaledLikelihood =1.0;	
		_logLikelihood = 0;
		double inverseLikelihoodScalor = 0;
		// Initialization: alpha_0(i) = pi(i) * b_i(o_0), then normalize.
		for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
			double prob = getInitProb(sourceIndex, sSize)
					* getObservationProbability(0, sourceIndex);
			forward.setProb(0, sourceIndex, prob);
			inverseLikelihoodScalor+=prob;
		}
		for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
			forward.setProb(0, sourceIndex, forward.getProb(0, sourceIndex)/inverseLikelihoodScalor);
			
		}
		_inverseLikelihoodScalors[0] = inverseLikelihoodScalor;
		_scaledLikelihood *= inverseLikelihoodScalor;
		_logLikelihood += Math.log(inverseLikelihoodScalor);
		
		// Induction: alpha_t(j) = [sum_i alpha_{t-1}(i) a_ij] * b_j(o_t),
		// normalized per position.
		for (int pos = 1; pos < fSize; pos++) {
			inverseLikelihoodScalor =0;
			for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
				double observation = getObservationProbability(pos, sourceIndex);
				double prob = 0;
				for (int prevSourceIndex = 0; prevSourceIndex < sSize * 2; prevSourceIndex++) {
					double alpha = forward.getProb(pos - 1, prevSourceIndex);
					if(alpha == 0) continue; // skip unreachable predecessors
					double dist = getTransitionProbability(prevSourceIndex,sourceIndex, sSize, fSize);
					//if(dist == 0) continue;
					prob += alpha * dist;
				}
				prob = prob * observation;
				forward.setProb(pos, sourceIndex, prob);
				inverseLikelihoodScalor+=prob;
			}
			for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
				forward.setProb(pos, sourceIndex, forward.getProb(pos, sourceIndex)/inverseLikelihoodScalor);
			}
			_inverseLikelihoodScalors[pos]=inverseLikelihoodScalor;
			_scaledLikelihood *= inverseLikelihoodScalor;
			_logLikelihood += Math.log(inverseLikelihoodScalor);
		}
//		System.out.println("Scaled Likelihood" + _scaledLikelihood);
//		forward.printWithSum(fSize, sSize*2);
		
	}

	
	/**
	 * Scaled backwarr tables. Rabineri 97
	 */
	/**
	 * Scaled backward pass (Rabiner 97), reusing the scaling factors computed
	 * by the forward pass so that forward*backward yields valid posteriors.
	 * Must run after {@link #makeForwardTables(int, int)}.
	 *
	 * @param sSize source sentence length
	 * @param fSize foreign sentence length
	 */
	public void makeBacwardTables(int sSize, int fSize) {
		// Initialization: beta_{T-1}(i) = 1, scaled by the last factor.
		double inverseLikelihoodScalor = _inverseLikelihoodScalors[fSize-1];
		for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
			backward.setProb(fSize-1, sourceIndex,1/inverseLikelihoodScalor);			
		}
		// Induction, walking positions backwards:
		// beta_t(i) = sum_j a_ij * b_j(o_{t+1}) * beta_{t+1}(j), scaled.
		for (int pos = fSize - 2; pos >= 0; pos--) {
			inverseLikelihoodScalor = _inverseLikelihoodScalors[pos];
			for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
				double prob = 0;
				for (int nextSourceIndex = 0; nextSourceIndex < sSize * 2; nextSourceIndex++) {
					double back = backward.getProb(pos + 1, nextSourceIndex);
					if(back == 0) continue; // skip unreachable successors
					double transition = getTransitionProbability(sourceIndex,
							nextSourceIndex, sSize, fSize);
					//if(transition == 0) continue;
					double observation = getObservationProbability(pos + 1,
							nextSourceIndex);
					prob += transition * observation * back;
					//System.out.println(" trans " + transition + " obs " + observation + " back " + back + " prob " + prob);
				}
				//System.out.println(" accumm prob " + prob);
				backward.setProb(pos, sourceIndex, prob/inverseLikelihoodScalor);				
			}
		}
		
		
		//Debug code should go to checkBackward models
		//Scale likelihood
		
//		double _likelihoodPerPosition[] = new double[fSize];
//		java.util.Arrays.fill(_likelihoodPerPosition,0);
//		for (int pos = 0; pos < fSize; pos++) {
//			for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
//				//Since the scallor is 1/c
//				_likelihoodPerPosition[pos]+=backward.getProb(pos, sourceIndex)*forward.getProb(pos, sourceIndex)*_scaledLikelihood/_likelihoodScalors[pos];
//			}
//		}

		//Sanity check 
		//Likelihood per position should all be one
//		for (int pos = 0; pos < fSize; pos++) {
//			if(Math.abs(1 - _likelihoodPerPosition[pos]) > 1.0E-1){
//				System.out.println("Sentence Nr" + _sentenceNumber + "Likelihood per position is not close to 1 " + _likelihoodPerPosition[pos]);
//				System.out.println("Forward");
//				forward.printWithSum(_fSize, _sSize*2);
//				System.out.println("Backward");
//				forward.printWithSum(_fSize, _sSize*2);
//				MyArrays.printDoubleArray(_likelihoodScalors,"Likelihood scallors");
//				MyArrays.printDoubleArray(_likelihoodPerPosition,"Likelihood per position");
//				System.out.println("Sclaed likelihood" + _scaledLikelihood);
//				System.exit(-1);
//			}
//		}
		
	}

	/**
	 * State occupation posterior.
	 * Gama is the probability of being in state si at time t. 
	 * Rabineri formula 47
	 * 
	 * @param sSize
	 * @param fSize
	 * @return
	 */
	/**
	 * State-occupancy posterior (gamma, Rabiner 47): the probability of being
	 * in state i at time t, computed as forward*backward rescaled by the
	 * position's scaling factor. Row fSize of the trellis accumulates the
	 * per-state sums (expected occupancy counts). Negative values (numeric
	 * noise) are clamped to zero in the per-position entry but still enter the
	 * accumulated sum, as in the original code.
	 *
	 * @param sSize source sentence length
	 * @param fSize foreign sentence length
	 */
	public void makeStatePosterior(int sSize, int fSize) {
		_statePosteriors.clear(0);		
		//Calculating the gamma
		for (int pos = 0; pos < fSize; pos++) {
			
			for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
				//Must multiply by the likelihood scallor since its overcounting it.
				double prob = forward.getProb(pos, sourceIndex)*backward.getProb(pos, sourceIndex)*_inverseLikelihoodScalors[pos];
				if (prob < 0) {
					_statePosteriors.setProb(pos, sourceIndex, 0);
				} else {
					_statePosteriors.setProb(pos, sourceIndex, prob);
				}
				_statePosteriors.addProb(fSize, sourceIndex, prob);
			}
		}
		

		// Sanity check

		// 1 posteriors should sum to one if we sum all observatios. Rabineri 43c
		
//		 for (int pos = 0; pos < fSize; pos++) {
//			 double sum = 0;
//			 for (int sourceIndex = 0; sourceIndex < sSize * 2; sourceIndex++) {
//				 sum += _statePosteriors.getProb(pos, sourceIndex);
//			}
//			if (Math.abs(1 - sum) > 1.0E-1) {
//				//printParametersTables();
//				//printForwardBackwardTables();
//				System.out.println(" sentence nr " + _sentenceNumber + " posterior for position is " + pos + " is far from one " + sum);
//				_statePosteriors.printWithSum(_fSize, _sSize);
//				System.exit(-1);
//			}
//		}
	}

	/** M-step for the distortion model: replaces the distortion table's
	 * contents with the (normalized-by-copy) accumulated counts. */
	public void updateTransitionProbs() {
		((DoubleDistortionTable)_distortion).copy((DoubleDistortionTable)_countDist);
	}

	/**
	 * Debug check over the distortion table: verifies no transition
	 * probability is negative (aborts if one is). The sum-to-one assertion is
	 * disabled because, per the inline note, the table does not pass it.
	 */
	public void sanityCheckTransitionProbs() {
		System.out.println("Performing sanity chek for Distortion");
		for (int prevState = 0; prevState < _corpus.getMaxSourceLen(); prevState++) {
			double sum = 0;
			for (int currentState = 0; currentState < _corpus.getMaxSourceLen(); currentState++) {
				double prob = getTransitionProbability(prevState, currentState,
						_corpus.getMaxSourceLen(), _corpus.getMaxForeignLen());
				if (prob < 0) {
					System.out.println("Dist probs negative prob");
					System.exit(-1);
				}
				// sum+=_distortion.getProbability(sourceId,
				// prev,_corpus.getMaxSourceLen());
				sum += _distortion.getDistProb(currentState, prevState, _corpus
						.getMaxSourceLen());
			}
			// TODO should be closer
			/*
			 * Fails this test if(Math.abs(1-sum) > 1.0E-2){
			 * System.out.println("Distorition probs do not sum to one " + sum); //
			 * _distortion.print(); _distortion.printTable(); System.exit(-1); }
			 */
		}

		System.out.println("Passed sanity chek for Distortion");
		System.out.println();
	}

	/** Intentionally empty: per the original note, the init probabilities are
	 * updated as part of the translation-prob update step. */
	public void updateInitProbs() {
		// Done in update translation probs (original note; fixed "Dune" typo).
	}

	// Init probs should sum to one
	public void sanityCheckInitProbs() {
		System.out.println("Performing sanity chek for Init");
		double sum = 0;
		for (int sourceId = 0; sourceId < _corpus.getMaxSourceLen(); sourceId++) {
			// double prob = _initProbs.getProbability(sourceId);
			double prob = _distortion.getFirstProb(sourceId, _corpus
					.getMaxSourceLen());
			if (prob < 0) {
				System.out.println("Init probs negative prob");
				System.exit(-1);
			}
			sum += prob;
		}
		/*
		 * if(Math.abs(1-sum) > 1.0E-8){ System.out.println("Init probs do not
		 * sum to one " + sum); System.exit(-1); }
		 */
		System.out.println("Passed sanity chek for Init");
		System.out.println();
	}


	/// GET CODE FROM OLD HMM
	
	/**
	 * Posterior of aligning the foreign span to NULL. Not implemented for this
	 * model.
	 *
	 * @throws UnsupportedOperationException always; throwing lets callers
	 *         recover or report, unlike the previous {@code System.exit(-1)}
	 */
	public double getNullPhrasePosterior(int sentenceNumber, byte sentenceSource, int[] foreingSentence, int[] sourceSentence, int startForeignIndex, int endForeignIndex) {
		throw new UnsupportedOperationException("GetNullPhrasePosterior not implemented");
	}

	
	/**
	 * Posterior of aligning the given source span to the given foreign span.
	 * Not implemented for this model.
	 *
	 * @throws UnsupportedOperationException always; throwing lets callers
	 *         recover or report, unlike the previous {@code System.exit(-1)}
	 */
	public double getPhrasePosterior(int sentenceNumber, byte sentenceSource, int[] foreingSentence, int[] sourceSentence, int startSourceIndex, int endSourceIndex, int startForeignIndex, int endForeignIndex) {
		throw new UnsupportedOperationException("GetPhrasePosterior not implemented");
	}


		
	// TODO the make forward backward likelihoddo etc could just be called once
	// both for viterbi and posteiror
	/**
	 * Viterbi decoding for one sentence pair: fills a max-probability trellis
	 * with backpointers, follows the best final state backwards to recover the
	 * most likely state path, and converts it into an Alignment (NULL states,
	 * i.e. indices >= _sSize, produce no alignment link). State-occupancy
	 * posteriors for every (source, foreign) pair are also attached to the
	 * returned alignment. Optionally projects posteriors into the provided
	 * stats first.
	 *
	 * @param sentenceNumber    index of the sentence pair in the corpus
	 * @param sentenceSource    which sub-corpus the sentence comes from
	 * @param projectPosteriors whether to run constrained posterior projection
	 * @param stats             accumulator for projection statistics
	 * @return the Viterbi alignment with posteriors attached
	 */
	public Alignment viterbiAlignment(int sentenceNumber, byte sentenceSource, boolean projectPosteriors, ConstrainedProjectionStats stats) {
		
//		initSentence(sentenceNumber, sentenceSource);		
//		makeCaches();
//		makePosteriors(_sSize, _fSize);
	
		initDecoding(sentenceNumber, sentenceSource);
		if(projectPosteriors){
			SentenceConstrainedProjectionStats ss = projectPosteriors();
			if(ss != null){
				stats.add(ss);
			}
		}
		
		//Viterbi trellis keeps the maximum of getting to a state at each position
		Trellis viterbi = new Trellis(_fSize, _sSize * 2, true);
		// Backtrack pointers for each position, for each
		//state where was the state where it came before
		int[][] states = new int[_fSize][_sSize * 2];
		
		//Fill the backpointers to -1 to signal a error
		for (int i = 1; i < _fSize; i++) {
			java.util.Arrays.fill(states[i], -1);
		}
		
		// Initialization: best score at position 0 is pi(i) * b_i(o_0).
		for (int sourceIndex = 0; sourceIndex < _sSize * 2; sourceIndex++) {
			double prob = getInitProb(sourceIndex, _sSize)
					* getObservationProbability(0, sourceIndex);
			viterbi.setProb(0, sourceIndex, prob);
			states[0][sourceIndex] = 0;
		}
		
		// Recursion: keep, for each state, the best-scoring predecessor.
		for (int pos = 1; pos < _fSize; pos++) {
			for (int sourceIndex = 0; sourceIndex < _sSize * 2; sourceIndex++) {
				double observation = getObservationProbability(pos, sourceIndex);
				double prob = 0;
				double max = -1;
				int maxState = -2;
				for (int prevSourceIndex = 0; prevSourceIndex < _sSize * 2; prevSourceIndex++) {
					double viter = viterbi.getProb(pos - 1, prevSourceIndex);
					double dist = getTransitionProbability(prevSourceIndex,
							sourceIndex, _sSize, _fSize);
					prob = viter * dist;
					if (prob > max) {
						max = prob;
						maxState = prevSourceIndex;
					}

				}
				// Debug dump: no predecessor was found, which should never
				// happen since scores are >= 0 and max starts at -1.
				if(maxState == -2){
					System.out.println("Sentence  " + _sentenceNumber + " sentence source" + _sentenceSource );
					System.out.println("Fsize " + _fSize + " ssize " + _sSize );
					System.out.println("Corpus max Fsize " + _corpus.getMaxForeignLen() + " ssize " + _corpus.getMaxSourceLen());
					for (int prevSourceIndex = 0; prevSourceIndex < _sSize * 2; prevSourceIndex++) {
						double viter = viterbi.getProb(pos - 1, prevSourceIndex);
						double dist = getTransitionProbability(prevSourceIndex,
								sourceIndex, _sSize, _fSize);
						System.out.println("viter " + viter + " dist " + dist);					
					}
					
					System.out.println("Get Observation prob " +observation );
					System.out.println("Viterbi ");
					viterbi.print(_fSize, _sSize*2);
					printForwardBackwardTables();
					printPosteriors();
					printCaches();
					System.out.println("states");
					 MyArrays.printIntArray(states, "states", _fSize, _sSize*2);
					//System.out.println("viterbi");
					//viterbi.print(_fSize, _sSize*2);
					throw new RuntimeException("Viterbi decoding, adding a state of -2");
				}
				viterbi.setProb(pos, sourceIndex, max * observation);
				states[pos][sourceIndex] = maxState;
			}
		}
//		 viterbi.print(_fSize,_sSize);
//		 cat.common.StaticTools.printMatrix(states, _fSize, _sSize, "states", System.out);

		// Termination: pick the best final state ...
		int[] viterbiPath = new int[_fSize];
		java.util.Arrays.fill(viterbiPath, -2);

		double max = -1;
		for (int sourceIndex = 0; sourceIndex < _sSize * 2; sourceIndex++) {
			double prob = viterbi.getProb(_fSize - 1, sourceIndex);
			if (prob > max) {
				max = prob;
				viterbiPath[_fSize - 1] = sourceIndex;
			}
		}

		// ... and follow the backpointers to recover the full path.
		for (int pos = _fSize - 2; pos >= 0; pos--) {
			int currentState = viterbiPath[pos + 1];
			//Debug code
			if(currentState == -2){
				System.out.println("Sentence  " + _sentenceNumber + " sentence source" + _sentenceSource );
				System.out.println("Fsize " + _fSize + " ssize " + _sSize );
				System.out.println("Corpus max Fsize " + _corpus.getMaxForeignLen() + " ssize " + _corpus.getMaxSourceLen());
				printForwardBackwardTables();
				printPosteriors();
				System.out.println("states");
				 MyArrays.printIntArray(states, "states", _fSize, _sSize*2);
				//System.out.println("viterbi");
				//viterbi.print(_fSize, _sSize*2);
				throw new RuntimeException("Viterbi decoding, path as a -2 value");
			}
			viterbiPath[pos] = states[pos + 1][currentState];
		}

		 //MyArrays.printIntArray(viterbiPath,"ViterbiPath");

		// Convert the state path into alignment links; NULL states add none.
		Alignment a = new Alignment(sentenceNumber, sentenceSource, _sSize,
				_fSize);
		for (int i = 0; i < viterbiPath.length; i++) {
			if (viterbiPath[i] < _sSize) {
				a.add(viterbiPath[i], i); // non-NULL case
			}
		}
		
		//Add posteriors for all positions
		for(int i = 0; i < _sSize; i++){
			for(int j = 0; j < _fSize; j++){
				a.addPosterior(i, j, _statePosteriors.getProb(j,i));
			}
		}
		
		return a;
	}

	//TODO This method is the same as m1
	public Alignment posteriorDecodingAlignment(int sentenceNumber, byte sentenceSource,
			float tresh,boolean projectPosteriors, ConstrainedProjectionStats stats) {

		initDecoding(sentenceNumber, sentenceSource);
		if(projectPosteriors && stats != null){
			SentenceConstrainedProjectionStats ss = projectPosteriors();
			if(ss != null){
				stats.add(ss);
			}
		}
		Alignment a = new Alignment(sentenceNumber, sentenceSource, _sSize,
				_fSize);
		for (int fIndex = 0; fIndex < _fSize; fIndex++) {
			for (int sIndex = 0; sIndex < _sSize; sIndex++) {
				if (_statePosteriors.getProb(fIndex, sIndex) > tresh) {
					a.add(sIndex, fIndex);
				}
				a.addPosterior(sIndex, fIndex, _statePosteriors.getProb(
						fIndex, sIndex));
			}
		}
		return a;

	}

//	public AlignmentsSet viterbiAlignments(byte sentenceSource) {
//		AlignmentsSet set = new AlignmentsSet();
//		// System.out.println(_corpus.getNumSentences(sentenceSource));
//		for (int i = 0; i < _corpus.getNumSentences(sentenceSource); i++) {
//			// System.out.println("Doing viterbi for sentence " + i);
//			set.addAlignment(viterbiAlignment(i, sentenceSource));
//		}
//		return set;
//	}
//
//	public AlignmentsSet posteriorAlignments(byte sentenceSource, float treshold) {
//		System.out.println("Starting posterior decoding \n\n");
//		AlignmentsSet set = new AlignmentsSet();
//		for (int i = 0; i < _corpus.getNumSentences(sentenceSource); i++) {
//			// System.out.println("Doing poteriros for sentence " + i);
//			set.addAlignment(posteriorDecodingAlignment(i, sentenceSource, treshold));
//		}
//		return set;
//	}
//
//	public AlignmentsSet posteriorAlignments(int[] sentences,
//			byte sentenceSource, float treshold) {
//		AlignmentsSet set = new AlignmentsSet();
//		for (int i = 0; i < sentences.length; i++) {
//			// System.out.println("Doing poteriros for sentence " + i);
//			set.addAlignment(posteriorDecodingAlignment(sentences[i], sentenceSource,
//					treshold));
//		}
//		return set;
//	}





//	public float tuneTreshholdAER(byte sentenceSource) {
//		int[] sentences = new int[_corpus.getNumSentences(sentenceSource)];
//		for (int i = 0; i < sentences.length; i++) {
//
//			sentences[i] = i;
//		}
//		return tuneTresholdAERAux(sentences, sentenceSource);
//	}
//
//	public float tuneTreshholdF1(byte sentenceSource, float alpha) {
//		int[] sentences = new int[_corpus.getNumSentences(sentenceSource)];
//		for (int i = 0; i < sentences.length; i++) {
//			sentences[i] = i;
//		}
//		return tuneTreshholdBalancedF1Aux(sentences, sentenceSource, alpha);
//	}
//
//	public float tuneTresholdAERAux(int[] sentences, byte sentenceSource) {
//		System.out.print("\t\tTunning T: ");
//		System.out.flush();
//		float treshhold = 0f;
//		int divisions = 100;
//		float aer = 1;
//		for (int i = 1; i <= divisions; i++) {
//		//	System.out.println("Doing tunning iter " + i);
//			// System.out.println(i);
//			AlignmentsSet predicted = posteriorAlignments(sentences,
//					sentenceSource, 1f * i / divisions);
//			float[] results = AlignmentEvaluator.calculateMeasures(predicted,
//					_corpus.getAlignments(sentences, sentenceSource));
//			// System.out.println(" " + results[2] + " "+ (1f*i/divisions) );
//			if (results[2] < aer) {
//				aer = results[2];
//				treshhold = 1f * i / divisions;
//			}
//			// System.out.println(treshhold+", "+aer);
//		}
//		System.out.println(treshhold + ", " + aer);
//		return treshhold;
//	}
//
//	public float tuneTreshholdBalancedF1Aux(int[] sentences,
//			byte sentenceSource, float alpha) {
//		System.out.print("\t\tTunning T: ");
//		System.out.flush();
//		float treshhold = 0f;
//		int divisions = 100;
//		float current = 0;
//		float precisionS = 0;
//		float recallS = 0;
//		for (int i = 1; i <= divisions; i++) {
//			// System.out.println(i);
//			AlignmentsSet predicted = posteriorAlignments(sentences,
//					sentenceSource, 1f * i / divisions);
//			float[] results = AlignmentEvaluator.calculateMeasures(predicted,
//					_corpus.getAlignments(sentences, sentenceSource));
//			// System.out.println(" " + results[2] + " "+ (1f*i/divisions) );
//			float precision = results[0];
//			float recall = results[1];
//			float f1 = 1 / (alpha / precision + (1 - alpha) / recall);
//			if (f1 > current) {
//				current = f1;
//				precisionS = precision;
//				recallS = recall;
//				treshhold = 1f * i / divisions;
//			}
//		}
//		System.out.println(treshhold + ", " + current + "Precision "
//				+ precisionS + " Recall " + recallS);
//		return treshhold;
//	}

	


	
	/**
	 * Command-line entry point: trains an M1 model, initializes a RegularHMM
	 * from its translation table, trains the HMM, then evaluates Viterbi and
	 * posterior decoding (with and without posterior projection) on the test
	 * corpus, tuning the posterior threshold on the dev corpus.
	 *
	 * Expected arguments:
	 *   0: corpus description file
	 *   1: corpus size (e.g. 100k)
	 *   2: max sentence length (e.g. 40)
	 *   3: number of training iterations (e.g. 5)
	 *   4: smoothing value
	 *   5: train with per-iteration results (boolean)
	 *   6: number of iterations when training with results
	 *   7: save model (boolean)
	 *   8: directory to save the model to
	 *
	 * @throws IOException if the corpus or model files cannot be read/written
	 */
	public static void main(String[] args) throws IOException {
		// Fail fast with a usage message instead of an
		// ArrayIndexOutOfBoundsException on a partial argument list.
		if (args.length < 9) {
			System.err.println("Usage: RegularHMM <corpusDescription> <size> <maxSentenceSize> "
					+ "<numberIterations> <smoothing> <trainWithResults> "
					+ "<numberIterationsWithResults> <saveModel> <saveModelDir>");
			return;
		}
		String corpusDescription = args[0];
		int size = Integer.parseInt(args[1]); // 100k
		int maxSentenceSize = Integer.parseInt(args[2]); // 40
		int numberIterations = Integer.parseInt(args[3]); // 5
		double smoothing = Double.parseDouble(args[4]);
		boolean trainWithResults = Boolean.parseBoolean(args[5]);
		int numberIterationsWithResults = Integer.parseInt(args[6]);
		boolean saveModel = Boolean.parseBoolean(args[7]);
		String saveModelDir = args[8];
		
		// System.out.println("Corpus "+corpusName);
		System.out.println("Size " + size);
		System.out.println("Max Sentence size " + maxSentenceSize);
		System.out.println("Number of iterations " + numberIterations);
		System.out.println("Smoothing " + smoothing);
		System.out.println("Train with results " + trainWithResults);
		System.out.println("Number of Iterations with results "
				+ numberIterationsWithResults);
		System.out.println("Saving model " + saveModel + " to " + saveModelDir);

		BilingualCorpus corpus = BilingualCorpus.getCorpusFromFileDescription(
				corpusDescription, size, maxSentenceSize);

		// Bootstrap: train IBM Model 1 first and seed the HMM with its
		// translation table.
		M1 m1 = new M1(corpus);
		m1.train(numberIterations,false,"");
		String baseDir = saveModelDir + "/regularHMM/" + corpus.getName() + "/" + size + "/";
		if(saveModel){
			StaticTools.createDir(baseDir);
		}
		RegularHMM mhmm = new RegularHMM(corpus, m1._tb,smoothing);
		
		//RegularHMM mhmm = new RegularHMM(corpus);
		if (!trainWithResults) {
			mhmm.train(numberIterations,saveModel,baseDir);
		} else {
			// evalsList holds six parallel evaluation series; print each with
			// its section header via the shared helper.
			ArrayList<Evaluation[]> evalsList = mhmm.trainWithResults(
					numberIterationsWithResults, BilingualCorpus.DEV_CORPUS,saveModel,baseDir);
			printEvaluations("Viterbi all", evalsList.get(0));
			printEvaluations("Viterbi Rare", evalsList.get(1));
			printEvaluations("Viterbi Common", evalsList.get(2));
			printEvaluations("Precision all", evalsList.get(3));
			printEvaluations("Precision Rare", evalsList.get(4));
			printEvaluations("Precision Common", evalsList.get(5));
		}

		System.out.println("Done with training");



		if(saveModel){
			mhmm.saveModel(baseDir);
		}
		
		AlignmentsSet sa = mhmm.viterbiAlignments(BilingualCorpus.TEST_CORPUS);
		AlignmentsSet gold = corpus.getGold();
		System.out.println("Gold size" + gold.size() + " viterbi size "
				+ sa.size());
		Evaluation eval2 = AlignmentEvaluator.evaluate(sa, corpus.getGold());
		AlignmentStats.printPhrases(sa, true, false);
		System.out.println("Segment viterbi " + eval2);
//		mhmm.outputAlignments(sa, gold, corpus, new PrintStream(baseOutput
//				+ "/latex/" + outputFile + "-viter.tex"));
		
		
		// Viterbi with projection
		AlignmentsSet saP = mhmm.viterbiAlignments(BilingualCorpus.TEST_CORPUS,true);
		Evaluation eval2P = AlignmentEvaluator.evaluate(saP, corpus.getGold());
		AlignmentStats.printPhrases(saP, true, false);
		System.out.println("Segment viterbi with projection " + eval2P);
//		mhmm.outputAlignments(sa, gold, corpus, new PrintStream(baseOutput
//				+ "/latex/" + outputFile + "-viter.tex"));

		// Posterior decoding: threshold is tuned for AER on the dev corpus,
		// then applied to the test corpus.
		float treshold = mhmm.tuneTreshholdAER(BilingualCorpus.DEV_CORPUS,false);
		
		AlignmentsSet sa2 = mhmm.posteriorAlignments(
				BilingualCorpus.TEST_CORPUS, treshold,false,false);
		Evaluation eval22 = AlignmentEvaluator.evaluate(sa2, corpus.getGold());
		// sa2.outputWithStatistics(corpus, System.out);
		AlignmentStats.printPhrases(sa2, true, false);
		System.out.println("Posterioir decoding " + eval22);
//		mhmm.outputAlignments(sa2, gold, corpus, new PrintStream(baseOutput
//				+ "/latex/" + outputFile + "-post.tex"));
		
		// Same posterior decoding, but with projected posteriors.
		float tresholdP = mhmm.tuneTreshholdAER(BilingualCorpus.DEV_CORPUS,true);
		AlignmentsSet sa2P = mhmm.posteriorAlignments(
				BilingualCorpus.TEST_CORPUS, tresholdP,true,false);
		Evaluation eval22P = AlignmentEvaluator.evaluate(sa2P, corpus.getGold());
		// sa2.outputWithStatistics(corpus, System.out);
		AlignmentStats.printPhrases(sa2P, true, false);
		System.out.println("Posterioir decoding projection" + eval22P);
//		mhmm.outputAlignments(sa2, gold, corpus, new PrintStream(baseOutput
//				+ "/latex/" + outputFile + "-post.tex"));

	
		System.out.println("Recall curves");
		mhmm.printPosteriorCurves(corpus, BilingualCorpus.TEST_CORPUS, true, "Forward-P",100);
	
		////////////////////////////////
		/// Test load and save model
		///////////////////////////////
		
		//Load model
//		RegularHMM lhmm = RegularHMM.loadModel(corpus,baseDir);
//		System.out.println("Models after loading");
//		AlignmentsSet lsa = lhmm.viterbiAlignments(BilingualCorpus.TEST_CORPUS);
//		Evaluation leval2 = AlignmentEvaluator.evaluate(lsa, corpus.getGold());
//		//AlignmentStats.printPhrases(lsa, true, false);
//		System.out.println("Segment viterbi " + leval2);
//		//mhmm.outputAlignments(sa, gold, corpus, new PrintStream(baseOutput
//		//		+ "/latex/" + outputFile + "-viter.tex"));
//		
//		if(!lhmm._tb.equals(mhmm._tb)){
//			System.out.println("Translation tables are not the same");
//			System.exit(-1);
//		}
//		if(!lhmm._distortion.equals(mhmm._distortion)){
//			System.out.println("Distortion tables are not the same");
//			System.exit(-1);
//		}
	}

	/**
	 * Prints a section header, one line per iteration evaluation, and a
	 * trailing separator — the repeated reporting pattern used by main.
	 *
	 * @param header section title printed before the per-iteration lines
	 * @param evals  one evaluation per training iteration
	 */
	private static void printEvaluations(String header, Evaluation[] evals) {
		System.out.println(header);
		for (int i = 0; i < evals.length; i++) {
			System.out.println("Iter " + i + evals[i]);
		}
		System.out.println(" ----- ");
	}

	
}
