package geppetto.cat.models;


import geppetto.cat.alignments.AlignmentEvaluator;
import geppetto.cat.alignments.AlignmentsSet;
import geppetto.cat.alignments.AlignmentEvaluator.Evaluation;
import geppetto.cat.common.Common;
import geppetto.cat.corpus.BilingualCorpus;
import geppetto.cat.models.stats.EStepStats;

import java.io.IOException;
import java.io.PrintStream;



/**
 * HMM word-alignment model trained with a substochastic constraint on the
 * alignment posteriors: after the usual forward-backward pass, the posteriors
 * are projected (via a few projected-gradient steps over the Lagrange
 * multipliers {@code phi}) so that each source word receives total posterior
 * mass of at most one. Everything else (translation table, distortion table,
 * training loop) is inherited from {@code HMM}.
 */
public class SubstochasticHMM extends HMM {

	// Number of projected-gradient iterations performed per sentence when
	// projecting the posteriors onto the substochastic set.
	int _projectionIterations = 5;

	/** Builds the model with no smoothing and a fresh translation table. */
	public SubstochasticHMM(BilingualCorpus corpus, int projectionIterations) {
		this(corpus, 0.0, projectionIterations);
	}

	/** Builds the model with the given smoothing and a fresh translation table. */
	public SubstochasticHMM(BilingualCorpus corpus, double smooth, int projectionIterations) {
		this(corpus, null, smooth, projectionIterations);
	}

	/** Builds the model with no smoothing, reusing an existing translation table. */
	public SubstochasticHMM(BilingualCorpus corpus, AbstractSparseTranslationTable tb, int projectionIterations) {
		this(corpus, tb, 0.0, projectionIterations);
	}

	/**
	 * Main constructor.
	 *
	 * @param corpus               parallel training corpus
	 * @param tb                   translation table to start from (may be null
	 *                             to create a fresh one in the superclass)
	 * @param smooth               translation-probability smoothing passed to HMM
	 * @param projectionIterations number of projected-gradient steps per sentence
	 */
	public SubstochasticHMM(BilingualCorpus corpus, AbstractSparseTranslationTable tb,
			double smooth, int projectionIterations) {
		super(corpus, tb, smooth);
		// Distortion tables are sized for the doubled sentence length because
		// positions sSize..2*sSize-1 represent the null-word states.
		int biggestSent = Math.max(corpus.getMaxSourceLen(), corpus
				.getMaxForeignLen()) * 2;
		_countDist = new DistortionTable(distortionSize, biggestSent);
		_distortion = new DistortionTable(distortionSize, 1f, biggestSent);
		_projectionIterations = projectionIterations;
	}

	// Bare constructor used only by loadModel(), which fills the fields itself.
	SubstochasticHMM() {

	}

	/** Writes a one-line description of this model's configuration. */
	public void printStamp(PrintStream file) {
		file.println("Projection Iterations: " + _projectionIterations);
	}

	/**
	 * Restores a previously saved model from {@code directory}.
	 * Exits the JVM if the saved corpus description does not match
	 * {@code corpus} (the tables would be meaningless otherwise).
	 */
	public static SubstochasticHMM loadModel(BilingualCorpus corpus, String directory) {
		System.out.println(corpus.getName());
		System.out.println(directory);
		SubstochasticHMM model = new SubstochasticHMM();
		model._corpus = corpus;
		if (!corpus.checkDescription(directory)) {
			System.out.println("Corpus is not the same");
			System.exit(1);
		}
		model._tb = SparseTranslationTable.LoadTranslationTable(corpus,
				directory);
		model._distortion = new DistortionTable(directory);
		model._trained = true;
		return model;
	}

	/**
	 * One EM E-step over the training corpus: forward-backward per sentence,
	 * posterior projection, then accumulation of translation and transition
	 * counts.
	 *
	 * @return per-iteration statistics (total log-likelihood, sentence count)
	 */
	public EStepStats eStep() {
		clearCounts();
		double totalLikelihood = 0;
		for (int i = 0; i < _nSentences; i++) {
			int[] s = _corpus
					.getSourceSentence(i, BilingualCorpus.TRAIN_CORPUS);
			final int sSize = s.length;
			int[] f = _corpus.getForeignSentence(i,
					BilingualCorpus.TRAIN_CORPUS);
			final int fSize = f.length;
			// Skip pathologically long sentences; they dominate runtime and
			// are prone to numerical underflow.
			if (fSize > 200 || sSize > 200)
				continue;

			/*
			 * indexed by [fi][si]; columns sSize..2*sSize-1 hold the null-word
			 * probabilities
			 */
			double[][] probCache = makeProbCache(f, s);
			double[][] forward = makeForward(sSize, fSize, probCache);
			double[][] backward = makeBackward(sSize, fSize, probCache);

			double likelihood = makeLikelihood(forward[0], backward[0]);
			if (likelihood < 1.0e-200) {
				// Numerical underflow: drop this sentence from the counts.
				_numericUnderFlow++;
				continue;
			}
			totalLikelihood += Math.log(likelihood);
			double[][] posteriors = makePosterior(forward, backward, likelihood);
			// Project the posteriors in place onto the substochastic set
			// before they are accumulated into the counts.
			processPosteriors(posteriors, s, f, probCache);
			addToCounts(s, f, posteriors);
			// NOTE(review): transitions are accumulated from the unprojected
			// forward/backward tables — confirm this asymmetry is intended.
			addToTransitions(probCache, forward, backward, likelihood);
		}
		EStepStats d = new EStepStats();
		d.logLikelihood = totalLikelihood;
		d.numSents = _nSentences;

		return d;
	}

	/**
	 * Projects {@code posteriors} (in place) so that each source position
	 * receives at most one unit of total posterior mass, by running
	 * {@code _projectionIterations} projected-gradient steps on the dual
	 * variables {@code phi} and re-running forward-backward each step.
	 * Null-word columns (sSize..2*sSize-1) are never reweighted.
	 *
	 * @return KL divergence between the original and projected posteriors,
	 *         or 0 if the projection underflowed and was abandoned
	 */
	public double processPosteriors(double[][] posteriors, int[] s, int[] f,
			double[][] origProbCache) {
		int sSize = s.length;
		int fSize = f.length;

		// we start with a phi of zero, meaning that we start at the
		// distribution defined by the untampered HMM model
		double[] phi = new double[sSize];
		// Alias onto the caller's array: the final (or last valid) projected
		// posteriors are copied back through this reference.
		double[][] oldPosteriors = posteriors;

		for (int k = 0; k < _projectionIterations; k++) {
			updatePhi(s, f, posteriors, phi);
			double[][] probCache = new double[fSize][sSize * 2];
			for (int si = 0; si < sSize; si++) {
				double expPhi = Math.exp(phi[si]);
				for (int fi = 0; fi < fSize; fi++) {
					probCache[fi][si] = origProbCache[fi][si] * expPhi;
				}
			}

			// fill null positions (unchanged by the projection)
			for (int si = sSize; si < sSize * 2; si++) {
				for (int fi = 0; fi < fSize; fi++) {
					probCache[fi][si] = origProbCache[fi][si];
				}
			}
			double[][] forward = makeForward(sSize, fSize, probCache);
			double[][] backward = makeBackward(sSize, fSize, probCache);
			double likelihood = makeLikelihood(forward[0], backward[0]);

			if (likelihood <= 1.0e-300) {
				System.out
						.println("Oh no! likelihood became too small! -- failing this instance");
				// Keep the last numerically valid posteriors (no-op on the
				// first iteration, where posteriors still aliases the input).
				for (int si = 0; si < sSize * 2; si++) {
					for (int fi = 0; fi < fSize; fi++) {
						oldPosteriors[fi][si] = posteriors[fi][si];
					}
				}
				return 0;
			}

			posteriors = makePosterior(forward, backward, likelihood);
		}
		double kl = Common.KLDistancePrime(oldPosteriors, posteriors, sSize,
				fSize);
		// Write the projected posteriors back into the caller's array.
		for (int si = 0; si < sSize * 2; si++) {
			for (int fi = 0; fi < fSize; fi++) {
				oldPosteriors[fi][si] = posteriors[fi][si];
			}
		}
		return kl;
	}

	/**
	 * One projected-gradient step on the dual variables: for each source
	 * position the gradient is (1 - total posterior mass), and phi is
	 * clipped to be non-positive so the constraint is one-sided (mass &le; 1).
	 */
	public void updatePhi(int[] s, int[] f, double[][] post, double[] phi) {
		int sSize = s.length;
		int fSize = f.length;
		// compute the gradient.
		double[] gradient = new double[sSize];
		for (int si = 0; si < sSize; si++) {
			double sum = 0;
			for (int fi = 0; fi < fSize; fi++)
				sum += post[fi][si];
			assert !Double.isNaN(sum) : "Hmm updatePhi: Sum is NaN";
			gradient[si] = 1 - sum;
		}
		for (int si = 0; si < sSize; si++) {
			// arbitrary step-size. Really we should be doing a line-search.
			phi[si] = phi[si] + gradient[si];
			if (phi[si] > 0)
				phi[si] = 0;
		}
	}

	/**
	 * Trains a substochastic Model 1 followed by this substochastic HMM and
	 * evaluates Viterbi and posterior-decoding alignments on the test set.
	 *
	 * Usage: corpusDescription size maxSentenceSize numberIterations
	 *        [projectionIterations]
	 */
	public static void main(String[] args) throws IOException {
		String corpusDescription = args[0];
		int size = Integer.parseInt(args[1]); // e.g. 100k
		int maxSentenceSize = Integer.parseInt(args[2]); // e.g. 40
		int numberIterations = Integer.parseInt(args[3]); // e.g. 5
		// Optional 5th argument; defaults to 5 for backward compatibility
		// with the previously hard-coded value.
		int projectionIterations = args.length > 4 ? Integer.parseInt(args[4]) : 5;
		System.out.println("Size " + size);
		System.out.println("Max Sentence size " + maxSentenceSize);
		System.out.println("Number of iterations " + numberIterations);

		BilingualCorpus corpus = BilingualCorpus.getCorpusFromFileDescription(
				corpusDescription, size, maxSentenceSize);

		SubstochasticM1 m1 = new SubstochasticM1(corpus, projectionIterations);
		m1.train(numberIterations, false, "");

		// Initialize the HMM's translation table from the trained Model 1.
		SubstochasticHMM mhmm = new SubstochasticHMM(corpus, m1._tb, projectionIterations);
		mhmm.train(numberIterations, false, "");

		System.out.println("Done with training");

		AlignmentsSet sa = mhmm.viterbiAlignments(BilingualCorpus.TEST_CORPUS);
		AlignmentsSet gold = corpus.getGold();
		System.out.println("Gold size " + gold.size() + " viterbi size "
				+ sa.size());
		Evaluation eval2 = AlignmentEvaluator.evaluate(sa, corpus.getGold());
		System.out.println("Segment viterbi " + eval2);

		// The posterior-decoding threshold is tuned on the dev set.
		float tresh = m1.tuneTreshholdAER(BilingualCorpus.DEV_CORPUS, false);
		AlignmentsSet sa2 = mhmm.posteriorAlignments(
				BilingualCorpus.TEST_CORPUS, tresh, false, false);
		Evaluation eval22 = AlignmentEvaluator.evaluate(sa2, corpus.getGold());
		System.out.println("Posterior decoding " + eval22);
	}

}
