package geppetto.cat.models;


import geppetto.cat.alignments.Alignment;
import geppetto.cat.alignments.AlignmentEvaluator;
import geppetto.cat.alignments.AlignmentsSet;
import geppetto.cat.alignments.AlignmentEvaluator.Evaluation;
import geppetto.cat.common.Common;
import geppetto.cat.common.StaticTools;
import geppetto.cat.constrains.ConstrainedProjectionStats;
import geppetto.cat.corpus.BilingualCorpus;
import geppetto.cat.models.stats.AgreeEStepStats;
import geppetto.cat.models.stats.ProjectionStats;

import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;




/**
 * IBM Model 1 trained with an "alignment by agreement" objective: a forward
 * (source-to-foreign) and a backward (foreign-to-source) Model 1 are trained
 * jointly, and after every E-step the two posterior matrices are projected
 * towards each other so that both directions (approximately) agree on each
 * alignment link.
 */
public class AgreementM1 extends AbstractModel {

	/** Source-to-foreign component model. */
	public M1 forward;

	/** Foreign-to-source component model (trained on the reversed corpus). */
	public M1 backward;

	// Slack allowed between the forward and backward posteriors during the
	// projection step (subtracted, scaled by the learning rate, from |phi|).
	double epsilon;

	// Number of projected-(sub)gradient iterations run per sentence pair.
	int projectionIterations;

	boolean _trained = false;

	/** Convenience constructor: no smoothing (smooth = 0). */
	public AgreementM1(BilingualCorpus forwardCorpus,
			BilingualCorpus backwardCorpus, double epsilon, int projectionIterations) {
		this(forwardCorpus, backwardCorpus, epsilon, 0, projectionIterations);
	}

	/**
	 * @param forwardCorpus  source-to-foreign training corpus; also stored as
	 *                       this model's {@code _corpus}
	 * @param backwardCorpus the reversed (foreign-to-source) corpus
	 * @param epsilon        agreement slack used by {@link #projectPosteriors}
	 * @param smooth         smoothing value forwarded to both component models
	 * @param projectionIterations gradient steps per sentence when projecting
	 */
	public AgreementM1(BilingualCorpus forwardCorpus,
			BilingualCorpus backwardCorpus, double epsilon, double smooth, int projectionIterations) {
		this.epsilon = epsilon;
		this.projectionIterations = projectionIterations;
		this._corpus = forwardCorpus;
		forward = new M1(forwardCorpus, smooth);
		backward = new M1(backwardCorpus, smooth);
	}

	// Package-private no-arg constructor used only by loadModel(), which
	// fills in all fields afterwards.
	AgreementM1() {
	}

	/**
	 * Saves both component models plus a small text file named
	 * {@code directory + "epsilon"} holding {@code projectionIterations} on the
	 * first line and {@code epsilon} on the second (read back by
	 * {@link #loadModel}). Exits the JVM on failure, matching the component
	 * models' error handling.
	 */
	public void saveModel(String directory) {
		forward.saveModel(directory + "forward");
		backward.saveModel(directory + "backward");
		// try-with-resources: the original leaked the PrintStream, so the
		// "epsilon" file was never closed (and possibly never flushed).
		try (PrintStream file = new PrintStream(new FileOutputStream(directory + "epsilon"))) {
			file.println(projectionIterations);
			file.println(epsilon);
		} catch (FileNotFoundException e) {
			System.out.println(getName() + ": Could not save model");
			System.exit(-1);
		}
	}

	/**
	 * Loads a model previously written by {@link #saveModel}: reads
	 * {@code projectionIterations} and {@code epsilon} from the "epsilon" file,
	 * then the forward model (on {@code corpus}) and the backward model (on the
	 * reversed corpus). Exits the JVM on any read/parse failure.
	 */
	public static AgreementM1 loadModel(BilingualCorpus corpus, String directory) {
		AgreementM1 model = new AgreementM1();
		// try-with-resources: the original never closed this reader.
		try (BufferedReader file = new BufferedReader(new FileReader(directory + "epsilon"))) {
			model.projectionIterations = Integer.parseInt(file.readLine());
			model.epsilon = Double.parseDouble(file.readLine());
			System.out.println("Reading Forward: " + directory + "forward");
			model.forward = M1.loadModel(corpus, directory + "forward");
			// Fixed log message: the original said "Reading Forward" here too.
			System.out.println("Reading Backward: " + directory + "backward");
			model.backward = M1.loadModel(corpus.reverse(), directory
					+ "backward");
		} catch (FileNotFoundException e) {
			System.out
					.println(model.getName()+ ": Could not read model file not found "
							+ e.toString());
			System.exit(-1);
		} catch (NumberFormatException e) {
			System.out
					.println(model.getName()+": Could not read model number conversion exception"
							+ e.toString());
			System.exit(-1);
		} catch (IOException e) {
			System.out
					.println(model.getName()+": Could not read model"
							+ e.toString());
			System.exit(-1);
		}
		return model;
	}

	public String getName() {
		return "Agreement Model 1";
	}

	/**
	 * Calculates the projection statistics for just one sentence pair, and
	 * projects the two posterior matrices towards agreement IN PLACE.
	 *
	 * @param posteriors {@code posteriors[0]} is the forward matrix, indexed
	 *                   [si][fi] with dimensions [sSize+1][fSize] (row sSize is
	 *                   the null word); {@code posteriors[1]} is the backward
	 *                   matrix, indexed [fi][si] with dimensions [fSize+1][sSize].
	 *                   On return both slots hold the projected, renormalized
	 *                   matrices. Note the null-word rows are never constrained
	 *                   (the loops below stop at sSize/fSize exclusive).
	 * @param sSize      source sentence length (without the null word)
	 * @param fSize      foreign sentence length (without the null word)
	 * @return per-iteration violation and KL statistics
	 */
	public ProjectionStats projectPosteriors(double[][][] posteriors, int sSize, int fSize) {
		ProjectionStats stats = new ProjectionStats(projectionIterations);

		double[][] posteriorsF = posteriors[0];
		double[][] posteriorsB = posteriors[1];

		// Keep the unprojected posteriors: each update re-exponentiates from
		// these originals (and they are the reference for the KL stats).
		double[][] posteriorFOriginal = StaticTools.copyMatrix(posteriorsF, sSize + 1, fSize);
		double[][] posteriorBOriginal = StaticTools.copyMatrix(posteriorsB, fSize + 1, sSize);

		// phi[si][fi] is the dual variable tying forward and backward
		// posteriors for link (si, fi).
		double phi[][] = new double[sSize][fSize];
		for (int grI = 0; grI < projectionIterations; grI++) {
			double lRate = 10.0 / (10.0 + grI); // decaying step size
			double totalViolation = 0;
			for (int fi = 0; fi < fSize; fi++) {
				for (int si = 0; si < sSize; si++) {
					// Update phi and lambda; ePsi is the current disagreement
					// between the two directions on this link.
					double ePsi = posteriorsF[si][fi] - posteriorsB[fi][si];
					double phiUpdate = lRate * (-ePsi);
					totalViolation += Math.abs(ePsi);
					double newlambda = Math.abs(phi[si][fi]) - lRate * epsilon;
					double newPhi = phiUpdate + phi[si][fi];

					// Project (phi, lambda) back onto the feasible set
					// |phi| <= lambda; otherwise clamp both to zero.
					if (-newPhi <= newlambda && newlambda <= newPhi) {
						newPhi = (newPhi + newlambda) / 2;
						newlambda = newPhi;
					} else if (newPhi <= newlambda && newlambda <= -newPhi) {
						newPhi = (newPhi - newlambda) / 2;
						newlambda = -newPhi;
					} else {
						newPhi = 0;
						newlambda = 0;
					}
					// Reweight the original posteriors in opposite directions
					// so the two matrices move towards each other.
					posteriorsF[si][fi] = posteriorFOriginal[si][fi]
							* Math.exp(newPhi);
					posteriorsB[fi][si] = posteriorBOriginal[fi][si]
							* Math.exp(-newPhi);
					phi[si][fi] = newPhi;
				}
			}

			// Renormalizing so each column is again a distribution.
			posteriorsF = Common.normalize(posteriorsF, fSize, sSize + 1);
			posteriorsB = Common.normalize(posteriorsB, sSize, fSize + 1);

			// Adding Stats
			stats.addViolation(totalViolation, grI);
			// Calculate the KL(q|p) which is what we are minimizing
			stats.addForwardKL(geppetto.cat.common.StaticTools.KLDistance(posteriorsF,
					posteriorFOriginal, sSize + 1, fSize), grI);
			stats.addBackwardKL(geppetto.cat.common.StaticTools.KLDistance(posteriorsB,
					posteriorBOriginal, fSize + 1, sSize), grI);
		}
		// Common.normalize may return fresh matrices, so write them back.
		posteriors[0] = posteriorsF;
		posteriors[1] = posteriorsB;
		return stats;
	}

	/**
	 * One joint E-step over the training corpus: computes forward and backward
	 * posteriors per sentence pair, projects them towards agreement, and
	 * accumulates the (projected) expected counts in both component models.
	 *
	 * @return log-likelihoods of both directions plus aggregated projection stats
	 */
	public AgreeEStepStats eStep() {
		double totalLikelihoodF = 0;
		double totalLikelihoodB = 0;
		forward.clearCounts();
		backward.clearCounts();
		AgreeEStepStats d = new AgreeEStepStats();
		ProjectionStats totalPStats = new ProjectionStats(projectionIterations);
		for (int i = 0; i < forward._corpus.getNumberOfTrainingSentences(); i++) {
			int[] s = forward._corpus.getSourceSentence(i,
					BilingualCorpus.TRAIN_CORPUS);
			final int sSize = s.length;
			int[] f = forward._corpus.getForeignSentence(i,
					BilingualCorpus.TRAIN_CORPUS);
			final int fSize = f.length;
			double[][] forwardProbCache = forward.makeProbCache(f, s);
			double[][] backwardProbCache = backward.makeProbCache(s, f);
			// indexed by [si][fi]; row sSize corresponds to null word probability
			double[][] posteriorsF = new double[sSize + 1][fSize];
			totalLikelihoodF += forward.calculatePosteriors(s, f, posteriorsF, forwardProbCache);
			// indexed by [fi][si]; row fSize corresponds to null word probability
			double[][] posteriorsB = new double[fSize + 1][sSize];
			totalLikelihoodB += backward.calculatePosteriors(f, s, posteriorsB, backwardProbCache);
			// Projection mutates/replaces the matrices inside this holder.
			double[][][] posteriors = { posteriorsF, posteriorsB };
			totalPStats.add(projectPosteriors(posteriors, sSize, fSize));
			posteriorsF = posteriors[0];
			posteriorsB = posteriors[1];
			forward.addCounts(s, f, posteriorsF);
			backward.addCounts(f, s, posteriorsB);
		}
		d.logLikelihoodF = totalLikelihoodF;
		d.logLikelihoodB = totalLikelihoodB;
		d.numSents = forward._corpus.getNumberOfTrainingSentences();
		d.pStats = totalPStats;
		return d;
	}

	// It's here just because of the stats. Should be removed.
	/** Runs {@code iterations} rounds of joint EM, printing stats per round. */
	public void train(int iterations) {
		_numberTrainingIterations = iterations;
		System.out.println("Starting " + getName() + " Training");
		initializeTrain();
		AgreeEStepStats d = new AgreeEStepStats(), old = new AgreeEStepStats();
		for (int k = 0; k < iterations; k++) {
			// /DEBUG CODE
			System.out.println("Iteration " + (k + 1));
			System.out.flush();
			// E-Step
			System.out.println(" e Step");
			d.startTime();
			d.add(eStep());
			d.stopTime();
			System.out.println(d.makeVerbose(old));
			// M-Step
			old = d;
			d = new AgreeEStepStats();
			mStep();
			System.out.println("m Step");
			System.out.flush();
		}
		setTrained();
		finalizeTrain();
		System.out.println();
	}

	/** Marks this model and both component models as trained. */
	public void setTrained() {
		_trained = true;
		forward.setTrained();
		backward.setTrained();
	}

	/** M-step: renormalize the counts of both component models. */
	public void mStep() {
		forward.mStep();
		backward.mStep();
	}

	// Decoding delegates to the forward model only.
	public Alignment posteriorDecodingAlignment(int sentenceNumber,
			byte sentenceSource, float treshhold, boolean projectPosteriors, ConstrainedProjectionStats stats) {
		return forward.posteriorDecodingAlignment(sentenceNumber,
				sentenceSource, treshhold, projectPosteriors, stats);
	}

	// Viterbi decoding delegates to the forward model; posterior projection is
	// not supported here (the flag is passed through but stats are dropped).
	public Alignment viterbiAlignment(int sentenceNumber, byte sentenceSource, boolean projectPosteriors, ConstrainedProjectionStats stats) {
		if (projectPosteriors) {
			System.out.println("Ignoring project posteriors");
		}
		return forward.viterbiAlignment(sentenceNumber, sentenceSource, projectPosteriors, null);
	}

	public void initializeTrain() {
		forward.initializeTrain();
		backward.initializeTrain();
	}

	public void finalizeTrain() {
		forward.finalizeTrain();
		backward.finalizeTrain();
	}

	// TODO for now just use the forward model
	public double getNullPhrasePosterior(int phraseNumber, byte phraseSource,
			int[] foreingSentence, int[] sourceSentence, int startForeignIndex,
			int endForeignIndex) {
		return forward.getNullPhrasePosterior(phraseNumber, phraseSource,
				foreingSentence, sourceSentence, startForeignIndex,
				endForeignIndex);
	}

	// TODO for now just use the forward model
	public double getPhrasePosterior(int phraseNumber, byte phraseSource,
			int[] foreingSentence, int[] sourceSentence, int startSourceIndex,
			int endSourceIndex, int startForeignIndex, int endForeignIndex) {
		return forward.getPhrasePosterior(phraseNumber, phraseSource,
				foreingSentence, sourceSentence, startSourceIndex,
				endSourceIndex, startForeignIndex, endForeignIndex);
	}

	/**
	 * Trains an agreement model on a corpus description and evaluates Viterbi
	 * and posterior-decoding alignments against the gold standard.
	 *
	 * args: corpusDescription size maxSentenceSize numberIterations
	 */
	public static void main(String[] args) throws IOException {
		String corpusDescription = args[0];
		int size = Integer.parseInt(args[1]); // e.g. 100k
		int maxSentenceSize = Integer.parseInt(args[2]); // e.g. 40
		int numberIterations = Integer.parseInt(args[3]); // e.g. 5
		System.out.println("Size " + size);
		System.out.println("Max Sentence size " + maxSentenceSize);
		System.out.println("Number of iterations " + numberIterations);

		BilingualCorpus corpus = BilingualCorpus.getCorpusFromFileDescription(
				corpusDescription, size, maxSentenceSize);
		BilingualCorpus backwardCorpus = corpus.reverse();

		// epsilon = 0 (exact agreement), 5 projection iterations.
		AgreementM1 m1 = new AgreementM1(corpus, backwardCorpus, 0, 5);
		m1.train(numberIterations);

		System.out.println("Done with training");

		AlignmentsSet sa = m1.viterbiAlignments(BilingualCorpus.TEST_CORPUS);
		AlignmentsSet gold = corpus.getGold();
		System.out.println("Gold size" + gold.size() + " viterbi size "
				+ sa.size());
		Evaluation eval2 = AlignmentEvaluator.evaluate(sa, corpus.getGold());
		System.out.println("Segment viterbi " + eval2);

		// Tune the posterior-decoding threshold on dev, evaluate on test.
		float tresh = m1.tuneTreshholdAER(BilingualCorpus.DEV_CORPUS, false);
		AlignmentsSet sa2 = m1.posteriorAlignments(
				BilingualCorpus.TEST_CORPUS, tresh, false, true);
		Evaluation eval22 = AlignmentEvaluator.evaluate(sa2, corpus.getGold());
		System.out.println("Posterior decoding " + eval22);
	}
}
