package geppetto.cat.models;


import geppetto.cat.alignments.Alignment;
import geppetto.cat.alignments.AlignmentEvaluator;
import geppetto.cat.alignments.AlignmentsSet;
import geppetto.cat.alignments.AlignmentEvaluator.Evaluation;
import geppetto.cat.corpus.BilingualCorpus;
import geppetto.cat.models.stats.EStepStats;

import java.io.IOException;
import java.io.PrintStream;



/**
 * Implements HMM alignment Model
 * @author javg
 *
 */
public class HMM extends M1 {
	
	// Expected-count accumulator for distortion (jump) events, filled during the E-step.
	public DistortionTable _countDist;
	// Normalized distortion probabilities, re-estimated from _countDist in each M-step.
	public DistortionTable _distortion;
	// Number of bins used by the distortion tables to bucket jump distances.
	public int distortionSize = 11;


	/**
	 * Creates an HMM alignment model over the given corpus with no smoothing.
	 *
	 * @param corpus parallel training corpus
	 */
	public HMM(BilingualCorpus corpus) {
		this(corpus, 0.0);
	}
	
	/**
	 * Creates an HMM alignment model over the given corpus.
	 *
	 * @param corpus parallel training corpus
	 * @param smooth smoothing constant passed on to the underlying M1 model
	 */
	public HMM(BilingualCorpus corpus, double smooth) {
		super(corpus, smooth);
		// Size the distortion tables for the longest sentence (doubled to
		// leave room for the null-word state copies).
		final int longestSentence = 2 * Math.max(corpus.getMaxSourceLen(),
				corpus.getMaxForeignLen());
		_countDist = new DistortionTable(distortionSize, longestSentence);
		_distortion = new DistortionTable(distortionSize, 1f, longestSentence);
	}

	/**
	 * Creates an HMM with no smoothing, reusing an existing translation table
	 * (e.g. one produced by a trained M1 model).
	 *
	 * @param corpus parallel training corpus
	 * @param tb translation table used to initialize this model
	 */
	public HMM(BilingualCorpus corpus, AbstractSparseTranslationTable tb) {
		this(corpus, tb, 0.0);
	}

	/**
	 * Creates an HMM that starts from an existing translation table.
	 *
	 * @param corpus parallel training corpus
	 * @param tb translation table used to initialize this model
	 * @param smooth smoothing constant passed on to the underlying M1 model
	 */
	public HMM(BilingualCorpus corpus, AbstractSparseTranslationTable tb, double smooth) {
		super(corpus, tb, smooth);
		// Size the distortion tables for the longest sentence (doubled to
		// leave room for the null-word state copies).
		final int longestSentence = 2 * Math.max(corpus.getMaxSourceLen(),
				corpus.getMaxForeignLen());
		_countDist = new DistortionTable(distortionSize, longestSentence);
		_distortion = new DistortionTable(distortionSize, 1f, longestSentence);
	}

	/** Bare package-private constructor used by {@link #loadModel}; fields are filled in by the caller. */
	HMM() {

	}

	// ///// Save and load models from file
	/**
	 * Saves the model to the given directory: the translation table via the
	 * superclass, plus this model's distortion table.
	 *
	 * @param directory output directory
	 */
	@Override
	public void saveModel(String directory) {
		super.saveModel(directory);
		_distortion.saveTable(directory);
	}

	/**
	 * Writes the model description stamp: the superclass stamp plus the
	 * number of distortion bins.
	 *
	 * @param file stream to write the stamp to
	 */
	@Override
	public void printStamp(PrintStream file){
		super.printStamp(file);
		file.println("Distortion Nr Bins: " + distortionSize);
	}
	
	/**
	 * Loads a previously saved HMM (translation table + distortion table)
	 * for the given corpus. Exits the JVM if the corpus does not match the
	 * description stored alongside the model.
	 *
	 * NOTE(review): _countDist is left null here, so the returned model can
	 * decode but presumably cannot be trained further without
	 * reinitialization — confirm before calling eStep on a loaded model.
	 *
	 * @param corpus corpus the model was trained on
	 * @param directory directory containing the saved tables
	 * @return the loaded model, marked as trained
	 */
	public static HMM loadModel(BilingualCorpus corpus, String directory) {
		System.out.println(corpus.getName());
		System.out.println(directory);
		HMM model = new HMM();
		model._corpus = corpus;
		if (!corpus.checkDescription(directory)) {
			System.out.println("Corpus is not the same");
			System.exit(1);
		}
		model._tb = SparseTranslationTable.LoadTranslationTable(corpus,
				directory);
		model._distortion = new DistortionTable(directory);
		model._trained = true;
		return model;
	}

	/** @return human-readable name of this model */
	public String getName() {
		return "HMM Model";
	}

	/**
	 * M-step: re-estimates the distortion probabilities from the accumulated
	 * distortion counts, and the translation probabilities from the word
	 * counts.
	 */
	public void mStep() {
		_distortion.copy(_countDist);
		// Removed leftover debug call that dumped the entire distortion
		// table to stdout on every EM iteration (_distortion.printTable()).
		updateTranslationProbabilities(_tb, _count);
	}

	/**
	 * Forward (alpha) pass of the HMM. forward[fi][si] is the probability of
	 * emitting the foreign prefix f_0..f_fi with f_fi generated by source
	 * state si; states sSize..2*sSize-1 are the null-word copies of states
	 * 0..sSize-1.
	 *
	 * @param sSize number of source words
	 * @param fSize number of foreign words
	 * @param probCache translation probabilities t(f_fi|s_si), see makeProbCache
	 * @return forward table indexed [foreign position][source state]
	 */
	public final double[][] makeForward(int sSize, int fSize,
			double[][] probCache) {
		double[][] forward = new double[fSize][sSize * 2]; 
		
		/* index sSize..2*sSize corresponds to null word probability */
		// Initialization: emission at position 0 times the initial-state probability.
		for (int si = 0; si < sSize * 2; si++) {	
			forward[0][si] = probCache[0][si]
					* _distortion.getFirstProb(si, sSize);
			assert forward[0][si] >= 0 : forward[0][si]
					+ " is not a probability at si= " + si + " sSize=" + sSize;
		}

		// Recursion: forward[fi][si] = t(f_fi|s_si) * sum over sprev of
		// forward[fi-1][sprev] * p(si | sprev).
		for (int fi = 1; fi < fSize; fi++) {
			/* sSize..2*sSize is null word */
			for (int si = 0; si < sSize * 2; si++) {
				double probFiGivenSi = probCache[fi][si];
				assert probFiGivenSi >= 0 : probFiGivenSi
						+ " is not a probability";
				for (int sprev = 0; sprev < sSize * 2; sprev++) {
					double distProb;
					/* sSize..2*sSize is null word; sprev % sSize maps a null
					 * state back to the source position it shadows */
					if (si < sSize)
						distProb = _distortion.getDistProb(si, sprev % sSize,
								sSize);
					else
						distProb = _distortion.getNullDistProb(si - sSize,
								sprev % sSize, sSize);
					assert distProb >= 0 : distProb + " is not a probability";
					forward[fi][si] += forward[fi - 1][sprev] * distProb;
				}
				forward[fi][si] *= probFiGivenSi;
				assert forward[fi][si] >= 0 : forward[fi][si]
						+ " is not a probability ";
			}
		}
		return forward;
	}

	/**
	 * Backward (beta) pass of the HMM. backward[fi][si] is the probability of
	 * emitting the foreign suffix after position fi (including the
	 * end-of-sentence transition) given that f_fi was generated by source
	 * state si; states sSize..2*sSize-1 are the null-word copies.
	 *
	 * @param sSize number of source words
	 * @param fSize number of foreign words
	 * @param probCache translation probabilities, see makeProbCache
	 * @return backward table indexed [foreign position][source state]
	 */
	public final double[][] makeBackward(int sSize, int fSize,
			double[][] probCache) {
		/* index sSize..2*sSize corresponds to null word probability */
		double[][] backward = new double[fSize][sSize * 2]; // beta in HMM
															// literature
		/* index sSize..2*sSize corresponds to null word probability */
		// Initialization: probability of ending the sentence from each state.
		for (int si = 0; si < sSize * 2; si++) {
			backward[fSize - 1][si] = _distortion.getLastProb(si, sSize);
			assert backward[fSize - 1][si] >= 0
					&& backward[fSize - 1][si] < 1.0 : backward[fSize - 1][si]
					+ " not a probability";
		}
		// Recursion: backward[fi][si] = sum over snext of
		// p(snext | si) * t(f_{fi+1}|s_snext) * backward[fi+1][snext].
		for (int fi = fSize - 2; fi >= 0; fi--) {
			/* sSize..2*sSize is null word */
			for (int si = 0; si < sSize * 2; si++) {
				double bsum = 0;
				// Transitions into real source states.
				for (int snext = 0; snext < sSize; snext++) {
					double distProb = _distortion.getDistProb(snext,
							si % sSize, sSize);
					bsum += distProb * backward[fi + 1][snext]
							* probCache[fi + 1][snext];
				}
				/* sSize..2*sSize is null word */
				// Transitions into null-word states.
				for (int snext = sSize; snext < sSize * 2; snext++) {
					double distProb = _distortion.getNullDistProb(
							snext - sSize, si % sSize, sSize);
					bsum += distProb * backward[fi + 1][snext]
							* probCache[fi + 1][snext];
				}
				backward[fi][si] = bsum;
				assert backward[fi][si] >= 0 : backward[fi][si]
						+ " is not a probability ";

			}
		}
		return backward;
	}

	/**
	 * Precomputes the translation probabilities t(f_fi | s_si) for every word
	 * pair in the sentence, since table lookups are expensive. Columns
	 * [sSize, 2*sSize) all carry the null-word probability for f_fi.
	 *
	 * @param f foreign sentence (word ids)
	 * @param s source sentence (word ids)
	 * @return cache indexed [foreign position][source state]
	 */
	public double[][] makeProbCache(int[] f, int[] s) {
		final int nSource = s.length;
		final int nForeign = f.length;
		final double[][] cache = new double[nForeign][nSource * 2];
		for (int fi = 0; fi < nForeign; fi++) {
			for (int si = 0; si < nSource; si++) {
				cache[fi][si] = _tb.getProbability(s[si], f[fi]);
			}
			/* index sSize..2*sSize corresponds to null word probability */
			final double nullProb = _tb.getNullProbability(f[fi]);
			java.util.Arrays.fill(cache[fi], nSource, nSource * 2, nullProb);
		}
		return cache;
	}

	/**
	 * Computes the sentence likelihood as the dot product of matching
	 * forward and backward columns.
	 *
	 * @param forward one column of the forward table
	 * @param backward the matching column of the backward table
	 * @return the sentence likelihood
	 */
	public double makeLikelihood(double[] forward, double[] backward) {
		double total = 0;
		for (int si = 0; si < forward.length; si++) {
			final double term = backward[si] * forward[si];
			assert term >= 0 : term + " is not a probability \n";
			total += term;
		}
		return total;
	}

	/**
	 * Resets translation counts to the smoothing value and distortion counts
	 * to zero, ready for a fresh E-step.
	 */
	public void clearCounts() {
		_count.initializeToSmoothingValue();
		_countDist.clear();
	}

	/**
	 * Computes state posteriors from the forward/backward tables:
	 * p(f_fi generated by s_si) = forward[fi][si] * backward[fi][si] / likelihood.
	 * Indexed [fi][si]; columns sSize..2*sSize hold the null-word posteriors.
	 */
	public double[][] makePosterior(double[][] forward, double[][] backward,
			double likelihood) {
		assert likelihood >= 1e-300 : "Numerical underflow on likelihood";
		final int fSize = forward.length;
		final int sSize = forward[0].length / 2;
		final double[][] posterior = new double[fSize][sSize * 2];
		for (int fi = 0; fi < fSize; fi++) {
			/* columns sSize..2*sSize are the null-word states */
			for (int si = 0; si < sSize * 2; si++) {
				posterior[fi][si] = (forward[fi][si] * backward[fi][si])
						/ likelihood;
			}
		}
		return posterior;
	}

	/**
	 * Accumulates expected translation counts from the posteriors, plus the
	 * first/last-position distortion counts. The mass in columns
	 * sSize..2*sSize is summed into the null-word count for f_fi.
	 */
	public void addToCounts(int[] s, int[] f, double[][] posteriors) {
		final int sSize = s.length;
		final int fSize = f.length;
		for (int fi = 0; fi < fSize; fi++) {
			final boolean isFirst = (fi == 0);
			final boolean isLast = (fi == fSize - 1);
			for (int si = 0; si < sSize; si++) {
				final float p = (float) posteriors[fi][si];
				_count.addToCount(s[si], f[fi], p);
				if (isFirst)
					_countDist.addToFirst(p, si);
				if (isLast)
					_countDist.addToLast(p, si, sSize);
			}
			// Columns sSize..2*sSize hold the null-word posteriors.
			double nullMass = 0;
			for (int si = sSize; si < sSize * 2; si++) {
				nullMass += posteriors[fi][si];
			}
			assert (nullMass < 1.1);
			_count.addToNullCount(f[fi], (float) nullMass);
		}

	}

	/**
	 * Accumulates expected transition (distortion) counts. For every adjacent
	 * foreign pair (fi, fi+1), the posterior of moving from state sprev to
	 * state si is forward * transition * emission * backward / likelihood.
	 */
	public void addToTransitions(double[][] probCache, double[][] forward,
			double[][] backward, double likelihood) {
		int fSize = forward.length;
		int sSize = forward[0].length / 2;
		for (int fi = 0; fi < fSize - 1; fi++) {
			for (int sprev = 0; sprev < sSize * 2; sprev++) {
				// Transitions into real source states.
				for (int si = 0; si < sSize; si++) {
					double trans = _distortion.getDistProb(si, sprev % sSize,
							sSize);
					double prob = (forward[fi][sprev] * trans
							* probCache[fi + 1][si] * backward[fi + 1][si])
							/ likelihood;
					_countDist.addToCount((float) prob, sprev % sSize, si);
					assert (prob < 1.0001) : prob + " is not a probability";
				}
				// sSize..2*sSize is null word
				for (int si = sSize; si < sSize * 2; si++) {
					double trans = _distortion.getNullDistProb(si - sSize,
							sprev % sSize, sSize);
					double prob = (forward[fi][sprev] * trans
							* probCache[fi + 1][si] * backward[fi + 1][si])
							/ likelihood;
					assert (prob < 1.0001);
					_countDist.addToNullCount((float) prob, sprev % sSize, si
							- sSize);
				}
			}
		}
	}

	/**
	 * E-step: runs forward-backward over every training sentence pair and
	 * accumulates expected translation and transition counts.
	 *
	 * @return statistics holding the total log-likelihood and sentence count
	 */
	public EStepStats eStep() {
		clearCounts();
		double totalLikelihood = 0;
		for (int i = 0; i < _nSentences; i++) {
			int[] s = _corpus
					.getSourceSentence(i, BilingualCorpus.TRAIN_CORPUS);
			final int sSize = s.length;
			int[] f = _corpus.getForeignSentence(i,
					BilingualCorpus.TRAIN_CORPUS);
			final int fSize = f.length;
			double[][] probCache = makeProbCache(f, s);
			double[][] forward = makeForward(sSize, fSize, probCache);
			double[][] backward = makeBackward(sSize, fSize, probCache);
			double likelihood = makeLikelihood(forward[0], backward[0]);
			// Skip sentence pairs whose likelihood underflowed to (near) zero.
			if (likelihood < 1.0e-200) {
				_numericUnderFlow++;
				continue;
			}
			 totalLikelihood += Math.log(likelihood);

			double[][] posteriors = makePosterior(forward, backward, likelihood);
			addToCounts(s, f, posteriors);
			addToTransitions(probCache, forward, backward, likelihood);
		}
		EStepStats d = new EStepStats();
		d.logLikelihood = totalLikelihood;
		d.numSents = _nSentences;
		return d;
	}

	
	/**
	 * Prints the posterior alignment matrix for a sentence pair to the given
	 * stream. The header row lists source positions, then the same positions
	 * suffixed with "N" for the corresponding null-word posteriors; each
	 * following row is one foreign position.
	 *
	 * Fixed: the method accepted a {@code PrintStream out} parameter but
	 * wrote everything to {@code System.out}, ignoring it.
	 *
	 * @param sentenceNumber sentence index (currently unused, kept for interface compatibility)
	 * @param sentenceSource corpus partition id (currently unused)
	 * @param sourceSentence source sentence word ids
	 * @param foreignSentence foreign sentence word ids
	 * @param out stream the matrix is written to
	 */
	public void printPosteriors(int sentenceNumber, byte sentenceSource,
			int[] sourceSentence, int[] foreignSentence, PrintStream out) {

		double[][] probCache = makeProbCache(foreignSentence, sourceSentence);
		int sSize = sourceSentence.length;
		int fSize = foreignSentence.length;
		double[][] forward = makeForward(sSize, fSize, probCache);
		double[][] backward = makeBackward(sSize, fSize, probCache);
		double likelihood = makeLikelihood(forward[0], backward[0]);
		double[][] posteriors = makePosterior(forward, backward, likelihood);

		// Print headers: real source positions, then null-word columns.
		out.print("\t");
		for (int sourcePos = 0; sourcePos < sSize; sourcePos++) {
			out.print(sourcePos + "\t");
		}
		for (int sourcePos = 0; sourcePos < sSize; sourcePos++) {
			out.print(sourcePos + "N\t");
		}
		out.println();
		for (int foreignPos = 0; foreignPos < fSize; foreignPos++) {
			out.print(foreignPos + "\t");
			for (int sourcePos = 0; sourcePos < sSize; sourcePos++) {
				out.printf("%2.2f\t", posteriors[foreignPos][sourcePos]);
			}
			for (int sourcePos = 0; sourcePos < sSize; sourcePos++) {
				out.printf("%2.2f\t", posteriors[foreignPos][sourcePos
						+ sSize]);
			}
			out.println();
		}
	}

	
//	 / DECODING ////////////////

	/**
	 * Posterior decoding: aligns source position si to foreign position fi
	 * whenever the state posterior exceeds the threshold.
	 *
	 * Fixed: the {@code likelihood == 0} guard used to run AFTER
	 * {@code makePosterior}, which divides by the likelihood (producing NaNs,
	 * or an assertion failure with -ea) — so the guard was dead. It now runs
	 * before the posteriors are computed.
	 *
	 * @param sentenceNumber sentence index in the corpus
	 * @param sentenceSource corpus partition id
	 * @param treshhold minimum posterior for adding an alignment link
	 * @return the alignment (empty if the likelihood is zero)
	 */
	public Alignment posteriorDecodingAlignment(int sentenceNumber,
			byte sentenceSource, float treshhold) {
		int[] s = _corpus.getSourceSentence(sentenceNumber, sentenceSource);
		int[] f = _corpus.getForeignSentence(sentenceNumber, sentenceSource);
		final int sSize = s.length;
		final int fSize = f.length;
		double[][] probCache = makeProbCache(f, s);
		double[][] forward = makeForward(sSize, fSize, probCache);
		double[][] backward = makeBackward(sSize, fSize, probCache);
		double likelihood = makeLikelihood(forward[0], backward[0]);

		Alignment a = new Alignment(sentenceNumber, sentenceSource, sSize,
				fSize);
		if (likelihood == 0) {
			return a;
		}

		double[][] posteriors = makePosterior(forward, backward, likelihood);
		for (int fi = 0; fi < fSize; fi++) {
			for (int si = 0; si < sSize; si++) {
				a.addPosterior(si, fi, posteriors[fi][si]);
				if (posteriors[fi][si] > treshhold) {
					a.add(si, fi);
				}
			}
		}
		return a;
	}

	
	/**
	 * Viterbi decoding: finds the most likely state sequence by dynamic
	 * programming with back-pointers, adding a link for every foreign word
	 * whose best state is a real (non-null) source position. Forward-backward
	 * posteriors are then attached to the alignment.
	 *
	 * @param sentenceNumber sentence index in the corpus
	 * @param sentenceSource corpus partition id
	 * @param projectPosterior unused in this implementation — presumably kept
	 *        for interface compatibility; confirm against callers
	 * @return the Viterbi alignment with per-link posteriors
	 */
	public Alignment viterbiAlignment(int sentenceNumber, byte sentenceSource, boolean projectPosterior) {
		int[] s = _corpus.getSourceSentence(sentenceNumber, sentenceSource);
		int[] f = _corpus.getForeignSentence(sentenceNumber, sentenceSource);
		/* s.length..2*s.length is null word */
		double[][] totalScores = new double[s.length * 2][f.length];
		int[][] backPointers = new int[s.length * 2][f.length];

		// Initialization: emission scores for foreign position zero.
		for (int i = 0; i < s.length; i++) {
			totalScores[i][0] = _tb.getProbability(s[i], f[0]);
		}
		/* s.length..2*s.length is null word */
		for (int i = s.length; i < s.length * 2; i++) {
			totalScores[i][0] = _tb.getNullProbability(f[0]);
		}

		// Recursion: for each state keep the best-scoring predecessor.
		for (int foreignPos = 1; foreignPos < f.length; foreignPos++) {
			/* s.length..2*s.length is null word */
			for (int nextSourceState = 0; nextSourceState < s.length * 2; nextSourceState++) {
				double maxScore = 0;
				int maxPosition = 0;
				/* s.length..2*s.length is null word */
				for (int currentSourceState = 0; currentSourceState < s.length * 2; currentSourceState++) {
					double currScore;
					if (nextSourceState < s.length) {
						currScore = totalScores[currentSourceState][foreignPos - 1]
								* _distortion
										.getDistProb(nextSourceState,
												currentSourceState % s.length,
												s.length);
					} else {
						/* s.length..2*s.length is null word */
						currScore = totalScores[currentSourceState][foreignPos - 1]
								* _distortion.getNullDistProb(nextSourceState
										- s.length, currentSourceState
										% s.length, s.length);
					}

					if (maxScore < currScore) {
						maxScore = currScore;
						maxPosition = currentSourceState;
					}
				}
				// Multiply in the emission probability of the current word.
				if (nextSourceState < s.length) {
					totalScores[nextSourceState][foreignPos] = _tb
							.getProbability(s[nextSourceState], f[foreignPos])
							* maxScore;
				} else {
					/* s.length..2*s.length is null word */
					totalScores[nextSourceState][foreignPos] = _tb
							.getNullProbability(f[foreignPos])
							* maxScore;
				}
				backPointers[nextSourceState][foreignPos] = maxPosition;
			}
		}
		// Pick the best final state.
		double max = 0;
		int max_ind = 0;
		/* s.length..2*s.length is null word */
		for (int ind = 0; ind < s.length * 2; ind++) {
			double curr = totalScores[ind][f.length - 1];
			if (max < curr) {
				max = curr;
				max_ind = ind;
			}
		}
		// Follow the back-pointers to recover the full state sequence.
		int[] alignments = new int[f.length];
		Alignment a = new Alignment(sentenceNumber, sentenceSource, s.length,
				f.length);
		alignments[f.length - 1] = max_ind;
		for (int j = f.length - 2; j >= 0; j--) {
			alignments[j] = backPointers[alignments[j + 1]][j + 1];
		}
		for (int j = 0; j < f.length; j++) {
			/* s.length..2*s.length is null word: those positions stay unaligned */
			if (alignments[j] < s.length)
				a.add(alignments[j], j);
		}

		// Attach forward-backward posteriors to the alignment.
		// NOTE(review): a zero likelihood would make these NaN (or trip the
		// assertion in makePosterior) — confirm callers avoid that case.
		double[][] probCache = makeProbCache(f, s);
		int sSize = s.length;
		int fSize = f.length;
		double[][] forward = makeForward(sSize, fSize, probCache);
		double[][] backward = makeBackward(sSize, fSize, probCache);
		double likelihood = makeLikelihood(forward[0], backward[0]);
		double[][] posteriors = makePosterior(forward, backward, likelihood);
		for (int fi = 0; fi < f.length; fi++) {
			for (int si = 0; si < s.length; si++) {
				a.addPosterior(si, fi, posteriors[fi][si]);
			}
		}

		return a;
	}

	
	/** 
	 * Cache shared by getPhrasePosterior and getNullPhrasePosterior so that
	 * the forward/backward tables are computed only once per sentence pair.
	 */
	private int currentSentenceNumber = -1;
	private byte currentSentenceSource = -1;
	// Forward (alpha) table of the cached sentence pair.
	double[][] currentForward;
	// Backward (beta) table of the cached sentence pair.
	double[][] currentBackward;
	// Cached translation probabilities, see makeProbCache.
	double[][] currentProbCache;
	// Likelihood of the cached sentence pair.
	double likelihood;
	/**
	 * Computes and caches forward/backward tables and the likelihood for the
	 * given sentence pair; a no-op when the pair is already cached.
	 */
	private void initializeTablesForPhrase(int sentenceNumber,
			byte sentenceSource, int[] foreingSentence, int[] sourceSentence) {
		// Avoid running forward-backward more than once for the same sentence.
		if (currentSentenceNumber == sentenceNumber
				&& currentSentenceSource == sentenceSource)
			return;
		currentSentenceNumber = sentenceNumber;
		currentSentenceSource = sentenceSource;
		final int sSize = sourceSentence.length;
		final int fSize = foreingSentence.length;
		currentProbCache = makeProbCache(foreingSentence, sourceSentence);
		currentForward = makeForward(sSize, fSize, currentProbCache);
		currentBackward = makeBackward(sSize, fSize, currentProbCache);
		likelihood = makeLikelihood(currentForward[0], currentBackward[0]);
	}

	/**
	 * Posterior probability that the foreign span
	 * [startForeignIndex, endForeignIndex] is generated entirely by null-word
	 * states: a constrained forward pass restricted to the null states over
	 * the span, closed with the cached backward table and normalized by the
	 * sentence likelihood.
	 */
	public double getNullPhrasePosterior(int sentenceNumber,
			byte sentenceSource, int[] foreingSentence, int[] sourceSentence,
			int startForeignIndex, int endForeignIndex) {
		initializeTablesForPhrase(sentenceNumber, sentenceSource,
				foreingSentence, sourceSentence);
		int fPhraseSize = endForeignIndex - startForeignIndex + 1;
		int sPhraseSize = sourceSentence.length;
		double[][] phraseForward = new double[fPhraseSize][sPhraseSize]; 
		// (Java zero-initializes arrays; the fill just makes the intent explicit.)
		for (int i = 0; i < fPhraseSize; i++) {
			java.util.Arrays.fill(phraseForward[i], 0);
		}
		// Seed with the global forward values of the null states.
		// NOTE(review): currentForward already includes the emission at
		// startForeignIndex, and currentProbCache is multiplied in again here
		// — confirm this factor is intended by the derivation.
		for (int si = sPhraseSize; si < sPhraseSize * 2; si++) {
			phraseForward[0][si - sPhraseSize] = currentForward[startForeignIndex][si]
					* currentProbCache[startForeignIndex][si];
		}
		// Constrained recursion: only null-state-to-null-state transitions.
		for (int fi = startForeignIndex + 1; fi <= endForeignIndex; fi++) {
			for (int si = sPhraseSize; si < sPhraseSize * 2; si++) {
				double probFiGivenSi = currentProbCache[fi][si];
				for (int sprev = sPhraseSize; sprev < sPhraseSize * 2; sprev++) {
					double distProb;
					/* sSize..2*sSize is null word */
					distProb = _distortion.getNullDistProb(si - sPhraseSize,
							sprev % sPhraseSize, sPhraseSize);
					phraseForward[fi - startForeignIndex][si - sPhraseSize] += phraseForward[fi
							- 1 - startForeignIndex][sprev - sPhraseSize]
							* distProb;
				}
				phraseForward[fi - startForeignIndex][si - sPhraseSize] *= probFiGivenSi;
			}
		}
		// Close the span with the backward values of the null states.
		double prob = 0;
		for (int si = 0; si < sPhraseSize; si++) {
			double phraseF = phraseForward[fPhraseSize - 1][si];
			double back = currentBackward[endForeignIndex][si + sPhraseSize];
			prob += phraseF * back;
		}
		return prob / likelihood;
	}

	/**
	 * Posterior probability that the foreign span
	 * [startForeignIndex, endForeignIndex] is generated entirely by source
	 * positions in [startSourceIndex, endSourceIndex]: a constrained forward
	 * pass restricted to that source window, closed with the cached backward
	 * table and normalized by the sentence likelihood.
	 *
	 * NOTE(review): the extra divider sPhraseSize^fPhraseSize looks like a
	 * normalization over the number of possible in-span alignments
	 * (i.e. an average rather than a sum) — confirm against the derivation.
	 */
	public double getPhrasePosterior(int sentenceNumber, byte sentenceSource,
			int[] foreingSentence, int[] sourceSentence, int startSourceIndex,
			int endSourceIndex, int startForeignIndex, int endForeignIndex) {
		initializeTablesForPhrase(sentenceNumber, sentenceSource,
				foreingSentence, sourceSentence);

		int fPhraseSize = endForeignIndex - startForeignIndex + 1;
		int sPhraseSize = endSourceIndex - startSourceIndex + 1;
		double[][] phraseForward = new double[fPhraseSize][sPhraseSize];// 
		// (Java zero-initializes arrays; the fill just makes the intent explicit.)
		for (int i = 0; i < fPhraseSize; i++) {
			java.util.Arrays.fill(phraseForward[i], 0);
		}
		// Seed with the global forward values of the in-window source states.
		// NOTE(review): currentForward already includes the emission at
		// startForeignIndex, and currentProbCache is multiplied in again here
		// — confirm this factor is intended by the derivation.
		for (int si = startSourceIndex; si <= endSourceIndex; si++) {
			phraseForward[0][si - startSourceIndex] = currentForward[startForeignIndex][si]
					* currentProbCache[startForeignIndex][si];
		}
		// Constrained recursion: only transitions inside the source window.
		for (int fi = startForeignIndex + 1; fi <= endForeignIndex; fi++) {
			for (int si = startSourceIndex; si <= endSourceIndex; si++) {
				double probFiGivenSi = currentProbCache[fi][si];
				for (int sprev = startSourceIndex; sprev <= endSourceIndex; sprev++) {
					double distProb;
					/* sSize..2*sSize is null word */
					distProb = _distortion.getDistProb(si, sprev
							% sourceSentence.length, sourceSentence.length);
					phraseForward[fi - startForeignIndex][si - startSourceIndex] += phraseForward[fi
							- startForeignIndex - 1][sprev - startSourceIndex]
							* distProb;
				}
				phraseForward[fi - startForeignIndex][si - startSourceIndex] *= probFiGivenSi;
			}
		}
		// Close the span with the backward values of the window states.
		double prob = 0;
		for (int si = startSourceIndex; si <= endSourceIndex; si++) {
			double phraseF = phraseForward[fPhraseSize - 1][si
					- startSourceIndex];
			double back = currentBackward[endForeignIndex][si];
			prob += phraseF * back;
		}
		double divider = Math.pow((endSourceIndex - startSourceIndex + 1),
				(endForeignIndex - startForeignIndex + 1));
		return prob / (likelihood * divider); 
	}


	
	/**
	 * Trains an M1 model, uses its translation table to initialize and train
	 * an HMM, then evaluates Viterbi and posterior decoding against the gold
	 * alignments.
	 *
	 * Fixed: removed a ~50-line block of commented-out dead experiment code
	 * and the "Posterioir" typo in the evaluation output line.
	 *
	 * Args: corpus description file, corpus size, max sentence length,
	 * number of EM iterations.
	 *
	 * @throws IOException if the corpus cannot be read
	 */
	public static void main(String[] args) throws IOException {
		String corpusDescription = args[0];
		int size = Integer.parseInt(args[1]); // e.g. 100k
		int maxSentenceSize = Integer.parseInt(args[2]); // e.g. 40
		int numberIterations = Integer.parseInt(args[3]); // e.g. 5
		System.out.println("Size " + size);
		System.out.println("Max Sentence size " + maxSentenceSize);
		System.out.println("Number of iterations " + numberIterations);

		BilingualCorpus corpus = BilingualCorpus.getCorpusFromFileDescription(
				corpusDescription, size, maxSentenceSize);

		// M1 training provides the initial translation table for the HMM.
		M1 m1 = new M1(corpus);
		m1.train(numberIterations, false, "");

		HMM mhmm = new HMM(corpus, m1._tb);
		mhmm.train(numberIterations, false, "");

		System.out.println("Done with training");

		AlignmentsSet sa = mhmm.viterbiAlignments(BilingualCorpus.TEST_CORPUS);
		AlignmentsSet gold = corpus.getGold();
		System.out.println("Gold size" + gold.size() + " viterbi size "
				+ sa.size());
		Evaluation eval2 = AlignmentEvaluator.evaluate(sa, corpus.getGold());
		System.out.println("Segment viterbi " + eval2);

		// Tune the posterior threshold on the dev set, then decode the test set.
		float tresh = m1.tuneTreshholdAER(BilingualCorpus.DEV_CORPUS, false);
		AlignmentsSet sa2 = mhmm.posteriorAlignments(
				BilingualCorpus.TEST_CORPUS, tresh, false, false);
		Evaluation eval22 = AlignmentEvaluator.evaluate(sa2, corpus.getGold());
		System.out.println("Posterior decoding " + eval22);
	}

}
