package geppetto.cat.models;


import geppetto.cat.alignments.AlignmentEvaluator;
import geppetto.cat.alignments.AlignmentsSet;
import geppetto.cat.common.Common;
import geppetto.cat.corpus.BilingualCorpus;
import geppetto.cat.models.stats.EStepStats;

import java.io.IOException;
import java.io.PrintStream;



/**
 * Variant of IBM Model 1 whose E-step posteriors are post-processed by a
 * fixed number of gradient "projection" steps: source positions whose total
 * posterior mass exceeds 1 are exponentially down-weighted (via
 * {@code exp(phi) <= 1}) and the posteriors are recomputed, pushing the
 * alignment posteriors towards a substochastic solution.
 */
public class SubstochasticM1 extends M1 {

	/** Number of gradient/projection steps applied to the posteriors in each E-step. */
	public int _projectionIterations;

	/** @return the corpus this model was built on. */
	public BilingualCorpus getCorpus() {
		return _corpus;
	}

	/** @return human-readable model name (trailing space kept for compatibility). */
	public String getName() {
		return "Substochastic M1 ";
	}

	/** No-arg constructor used by {@link #loadModel}; fields are populated afterwards. */
	public SubstochasticM1() {

	}

	/** Builds the model with no translation table and zero smoothing. */
	public SubstochasticM1(BilingualCorpus corpus, int projectionIterations) {
		this(corpus, 0, projectionIterations);
	}

	/** Builds the model with an existing translation table and zero smoothing. */
	public SubstochasticM1(BilingualCorpus corpus, SparseTranslationTable tb, int projectionIterations) {
		this(corpus, tb, 0, projectionIterations);
	}

	/** Builds the model with the given smoothing and no pre-built translation table. */
	public SubstochasticM1(BilingualCorpus corpus, double smoothing, int projectionIterations) {
		// BUG FIX: this previously delegated with a hard-coded 0, silently
		// discarding the caller's smoothing value.
		this(corpus, null, smoothing, projectionIterations);
	}

	/**
	 * Full constructor.
	 *
	 * @param corpus               bilingual training corpus
	 * @param tb                   translation table to start from (may be null)
	 * @param smoothing            smoothing value forwarded to the M1 superclass
	 * @param projectionIterations number of projection steps per E-step
	 */
	public SubstochasticM1(BilingualCorpus corpus, SparseTranslationTable tb,
			double smoothing, int projectionIterations) {
		super(corpus, tb, smoothing);
		_projectionIterations = projectionIterations;
	}

	// ///// Save and load models from file

	/** Writes this model's configuration line to the given stream. */
	public void printStamp(PrintStream file) {
		file.println("Projection Iterations: " + _projectionIterations);
	}

	/**
	 * Loads a previously saved model from {@code directory}.
	 * Exits the JVM if the saved corpus description does not match
	 * {@code corpus} (preserved from the original code).
	 * NOTE(review): the loaded model's _projectionIterations is left at 0 —
	 * confirm whether it should be restored from the stamp file.
	 */
	public static SubstochasticM1 loadModel(BilingualCorpus corpus,
			String directory) {
		SubstochasticM1 model = new SubstochasticM1();
		model._corpus = corpus;
		if (!corpus.checkDescription(directory)) {
			System.out.println("Corpus is not the same");
			System.exit(1);
		}
		model._tb = SparseTranslationTable.LoadTranslationTable(corpus,
				directory);
		// _tb.printSmallTable(System.out);
		model._trained = true;
		return model;
	}

	/**
	 * Runs one E-step over the training corpus: computes Model-1 posteriors
	 * per sentence pair, projects them ({@link #processPosteriors}), and
	 * accumulates the expected counts.
	 *
	 * @return per-iteration statistics (log-likelihood, sentence count)
	 */
	public EStepStats eStep() {
		double totalLikelihood = 0.0;
		clearCounts();
		for (int i = 0; i < _nSentences; i++) {
			// Calculate posteriors for this sentence pair.
			int[] s = _corpus
					.getSourceSentence(i, BilingualCorpus.TRAIN_CORPUS);
			int[] f = _corpus.getForeignSentence(i,
					BilingualCorpus.TRAIN_CORPUS);
			double[][] probCache = makeProbCache(f, s);
			// Row sSize is presumably the null-word row — TODO confirm.
			double[][] posteriors = new double[s.length + 1][f.length];
			totalLikelihood += calculatePosteriors(s, f, posteriors, probCache);
			processPosteriors(posteriors, s, f, probCache);
			addCounts(s, f, posteriors);
		}
		EStepStats d = new EStepStats();
		d.logLikelihood = totalLikelihood;
		d.numSents = _nSentences;
		return d;
	}

	/**
	 * Applies {@code _projectionIterations} gradient steps to {@code posteriors}
	 * in place, scaling each source word's translation probabilities by
	 * {@code exp(phi[si]) <= 1} and recomputing the posteriors.
	 *
	 * @return the KL distance between the posteriors before and after projection
	 */
	public double processPosteriors(double[][] posteriors, int[] s, int[] f,
			double[][] origProbCache) {
		int sSize = s.length;
		int fSize = f.length;

		// BUG FIX: snapshot the incoming posteriors so the KL distance below
		// compares the projected distribution with the starting one. The
		// original code aliased oldPosteriors to posteriors, so the returned
		// KL was always computed between an array and itself, and the final
		// copy loop copied the array onto itself.
		double[][] oldPosteriors = new double[posteriors.length][];
		for (int i = 0; i < posteriors.length; i++) {
			oldPosteriors[i] = posteriors[i].clone();
		}

		// We start with a phi of zero, meaning that we start at the
		// distribution defined by the untampered model.
		double[] phi = new double[sSize];

		for (int k = 0; k < _projectionIterations; k++) {
			updatePhi(s, f, posteriors, phi);
			// NOTE(review): sSize * 2 columns mirrors the original allocation;
			// only columns [0..sSize] are written here — confirm whether
			// calculatePosteriors reads beyond column sSize before shrinking.
			double[][] probCache = new double[fSize][sSize * 2];
			for (int si = 0; si < sSize; si++) {
				double expPhi = Math.exp(phi[si]);
				for (int fi = 0; fi < fSize; fi++) {
					probCache[fi][si] = origProbCache[fi][si] * expPhi;
				}
			}
			// Column sSize (presumably the null word) is copied unscaled.
			for (int fi = 0; fi < fSize; fi++) {
				probCache[fi][sSize] = origProbCache[fi][sSize];
			}

			calculatePosteriors(s, f, posteriors, probCache);
		}
		// posteriors has been updated in place by calculatePosteriors, and
		// oldPosteriors still holds the pre-projection values, so no
		// copy-back is needed.
		return Common.KLDistancePrime(oldPosteriors, posteriors, fSize, sSize);
	}

	/**
	 * Takes one gradient step on {@code phi} (modified in place): for each
	 * source position, the gradient is 1 minus that row's total posterior
	 * mass, and phi is clamped to be non-positive so exp(phi) never
	 * up-weights a word.
	 */
	public void updatePhi(int[] s, int[] f, double[][] post, double[] phi) {
		int sSize = s.length;
		int fSize = f.length;

		// Compute the gradient: 1 - (posterior mass assigned to source word si).
		double[] gradient = new double[sSize];
		for (int si = 0; si < sSize; si++) {
			double sum = 0;
			for (int fi = 0; fi < fSize; fi++)
				sum += post[si][fi];
			assert !Double.isNaN(sum) : "Hmm updatePhie: Sum is NaN";
			gradient[si] = 1 - sum;
		}
		for (int si = 0; si < sSize; si++) {
			// Arbitrary step-size of 1. Really we should be doing a line-search.
			phi[si] = phi[si] + gradient[si];
			if (phi[si] > 0)
				phi[si] = 0;
		}
	}

	/**
	 * Command-line entry point: trains a substochastic Model 1 (5 projection
	 * iterations) and reports Viterbi and posterior-decoding alignment scores.
	 *
	 * Args: corpusDescription size maxSentenceSize numberIterations
	 */
	public static void main(String[] args) throws IOException {
		String corpusDescription = args[0];
		int size = Integer.parseInt(args[1]); // e.g. 100k
		int maxSentenceSize = Integer.parseInt(args[2]); // e.g. 40
		int numberIterations = Integer.parseInt(args[3]); // e.g. 5
		// System.out.println("Corpus "+corpusName);
		System.out.println("Size " + size);
		System.out.println("Max Sentence size " + maxSentenceSize);
		System.out.println("Number of iterations " + numberIterations);

		BilingualCorpus corpus = BilingualCorpus.getCorpusFromFileDescription(
				corpusDescription, size, maxSentenceSize);
		SubstochasticM1 m1 = new SubstochasticM1(corpus, 5);
		m1.train(numberIterations, false, "");

		AlignmentsSet viter = m1.viterbiAlignments(BilingualCorpus.TEST_CORPUS);
		System.out.println("Viterbi Decoding"
				+ AlignmentEvaluator.evaluate(viter, corpus.getGold()));
		// Tune the posterior-decoding threshold on the dev set, evaluate on test.
		float tresh = m1.tuneTreshholdAER(BilingualCorpus.DEV_CORPUS, false);
		AlignmentsSet post = m1.posteriorAlignments(
				BilingualCorpus.TEST_CORPUS, tresh, false, false);
		System.out.println("Posterior Decoding "
				+ AlignmentEvaluator.evaluate(post, corpus.getGold()));
	}


}
