package geppetto.phraseHMM;


import geppetto.cat.alignments.AlignmentEvaluator;
import geppetto.cat.alignments.AlignmentStats;
import geppetto.cat.alignments.AlignmentsSet;
import geppetto.cat.alignments.AlignmentEvaluator.Evaluation;
import geppetto.cat.common.Common;
import geppetto.cat.common.ContinuosDistributions;
import geppetto.cat.common.GaussianDistribution;
import geppetto.cat.common.MyArrays;
import geppetto.cat.common.StaticTools;
import geppetto.cat.constrains.BijectivityConstrains;
import geppetto.cat.constrains.ConstrainedProjectionStats;
import geppetto.cat.constrains.EqualityStochasticFertilityConstrains;
import geppetto.cat.constrains.SentenceConstrainedProjectionStats;
import geppetto.cat.corpus.BilingualCorpus;
import geppetto.cat.models.AbstractSparseTranslationTable;
import geppetto.cat.models.SparseTranslationTable;
import geppetto.cat.models.stats.EStepStats;

import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;




/**
 * HMM alignment model whose E-step posteriors are projected onto a set of
 * stochastic (equality) fertility constraints: each source word's expected
 * fertility is pulled towards a per-word Gaussian prior. The Gaussian
 * parameters can optionally be re-estimated from fractional counts during
 * the M-step.
 */
public class EqualityStochasticFertilityHMM extends RegularHMM{

	// Per-sentence constraint object rebuilt in projectPosteriors().
	EqualityStochasticFertilityConstrains constrains;
	/** Convergence tolerance for the constrained projection. */
	public double _epsilon;
	/** Slack allowed on the fertility equality constraints. */
	public double _slack;
	/** Maximum step size taken by the projection optimizer. */
	public int _maxStepSize;
	/** Maximum number of projection iterations. */
	public int _maxNumberIterations;
	// One Gaussian fertility prior per source word type.
	ContinuosDistributions[] dists;
	// Sufficient statistics per source word type, accumulated across the
	// E-step and used to re-estimate the Gaussians in the M-step.
	double[] sumFracFertilities;
	double[] squareSumFracFertilities;
	double[] numberFracFertilities;
	double smoothing=1;
	// Raw empirical fertilities/variances from the corpus; -1 marks a word
	// with no observed fertility.
	double fertility[];
	double fertilityVariance[];
	//Remove -1 and zeros (if !useZeros)
	// Same tables with -1 (and, when !useZeros, zero) entries replaced by a
	// default mean of 1 and defaultVariance, so every word is usable.
	double processedFertilities[];
	double processedFertilityVariance[];
	boolean useFertility;
	boolean useZeros;
	boolean learnMean;
	boolean learnVariance;
	// Gaussian parameters are only re-estimated for words occurring fewer
	// than this many times; frequent words keep their empirical values.
	int updatableFreq = 200;
	double defaultVariance;
	
	
	/** Convenience constructor: no initial translation table. */
	public EqualityStochasticFertilityHMM(BilingualCorpus corpus, double smoothing, 
			double epsilon, double slack, 
			int maxStepSize, int maxNumberProjectionIterations,
			boolean useFertility, boolean testCorpus, boolean useZeros,
			boolean learnMean, boolean learnVariance, int updateFreq,double defaultVariance) {
		this(corpus, null, smoothing,epsilon,slack,maxStepSize,maxNumberProjectionIterations,
				useFertility,testCorpus,useZeros,learnMean,learnVariance,updateFreq,defaultVariance);
	}

	/** Convenience constructor: no initial distortion table. */
	public EqualityStochasticFertilityHMM(BilingualCorpus corpus, AbstractSparseTranslationTable tt, double smoothing,
			double epsilon, double slack, int maxStepSize, int maxNumberProjectionIterations,
			boolean useFertility, boolean testCorpus, boolean useZeros,
			boolean learnMean, boolean learnVariance, int updateFreq,double defaultVariance) {
		this(corpus, tt, null, smoothing,epsilon,slack,maxStepSize,maxNumberProjectionIterations,
				useFertility,testCorpus,useZeros,learnMean,learnVariance,updateFreq,defaultVariance);
	}

	/**
	 * Full constructor.
	 *
	 * @param corpus bilingual training corpus
	 * @param tt optional initial translation table (may be null)
	 * @param distortion optional initial distortion table (may be null)
	 * @param smoothing additive smoothing passed to the base HMM
	 * @param epsilon projection convergence tolerance
	 * @param slack slack on the fertility equality constraints
	 * @param maxStepSize maximum projection step size
	 * @param maxNumberProjectionIterations cap on projection iterations
	 * @param useFertility if false, all priors default to mean 1
	 * @param testCorpus read empirical fertilities from the test corpus
	 *        instead of the dev corpus
	 * @param useZeros allow zero empirical fertilities (otherwise mapped to 1)
	 * @param learnMean re-estimate Gaussian means in the M-step
	 * @param learnVariance re-estimate Gaussian variances in the M-step
	 * @param updateFreq only re-estimate words with count below this
	 * @param defaultVariance variance used when none is observed
	 */
	public EqualityStochasticFertilityHMM(BilingualCorpus corpus, AbstractSparseTranslationTable tt,
			DoubleDistortionTable distortion, 
			double smoothing,
			double epsilon, double slack, int maxStepSize, int maxNumberProjectionIterations,
			boolean useFertility, boolean testCorpus, boolean useZeros,
			boolean learnMean, boolean learnVariance, int updateFreq,double defaultVariance) {
		super(corpus,tt,distortion,smoothing);
		_epsilon = epsilon;
		_slack = slack;
		_maxStepSize = maxStepSize;
		_maxNumberIterations = maxNumberProjectionIterations;
		dists = new ContinuosDistributions[corpus.getSourceSize()];
		
		sumFracFertilities = new double[_corpus.getSourceSize()];
		squareSumFracFertilities = new double[_corpus.getSourceSize()];
		numberFracFertilities = new double[_corpus.getSourceSize()];
		// Empirical fertilities come from whichever annotated corpus split
		// was requested; row 0 holds means, row 1 variances.
		double[][] fertilityaux;
		if(testCorpus){
			fertilityaux= corpus.getFertileSource(BilingualCorpus.TEST_CORPUS);
			
		}else{
			fertilityaux = corpus.getFertileSource(BilingualCorpus.DEV_CORPUS);
		}
		fertility = fertilityaux[0];
		fertilityVariance = fertilityaux[1];
		this.useZeros = useZeros;
		this.useFertility = useFertility;
		this.learnMean = learnMean;
		this.learnVariance = learnVariance;
		processedFertilities = new double[_corpus.getSourceSize()];
		processedFertilityVariance = new double[_corpus.getSourceSize()];
		// Sanitize the empirical tables: unseen (-1) and, optionally, zero
		// fertilities fall back to mean 1 / defaultVariance; zero variances
		// fall back to defaultVariance so the Gaussians are well-defined.
		for(int i = 0; i < _corpus.getSourceSize(); i++){
			if(useFertility && fertility[i] !=-1){
				if(!useZeros && fertility[i] ==0){
					processedFertilities[i]=1;
					processedFertilityVariance[i]=defaultVariance;
				}else{
					processedFertilities[i]=fertility[i];
					if(fertilityVariance[i] != 0){
						processedFertilityVariance[i]=fertilityVariance[i];
					}else{
						processedFertilityVariance[i]=defaultVariance;
					}
				}
			}else{
				processedFertilities[i]=1;
				processedFertilityVariance[i]=defaultVariance;
			}
			// A zero or NaN variance would make the Gaussian degenerate.
			if(Double.isNaN(processedFertilityVariance[i]) || processedFertilityVariance[i]==0){
				System.out.println("variance is zero or NAN " + processedFertilityVariance[i]);
				System.exit(-1);
			}
		}

		updatableFreq = updateFreq;
		initializeDists(processedFertilities,processedFertilityVariance);
		this.defaultVariance = defaultVariance;
	}
	
	/**
	 * (Re)creates one Gaussian fertility prior per source word type from the
	 * given mean/variance tables.
	 *
	 * Fixed: previously this method ignored both parameters and always read
	 * the processedFertilities/processedFertilityVariance fields (behavior
	 * was identical at the only call site, which passes those fields).
	 *
	 * @param fertility per-word means, indexed by source word id
	 * @param fertilityVariance per-word variances (must be non-zero)
	 */
	public void initializeDists(double[] fertility, double[] fertilityVariance){
		for(int i = 0; i < _corpus.getSourceSize(); i++){
			dists[i] = new GaussianDistribution(fertility[i],fertilityVariance[i]);
		}
	}
	
	/** Writes this model's configuration (on top of the base HMM's) to {@code file}. */
	public void printStamp(PrintStream file){
		super.printStamp(file);
		file.println("Epsilon: " + _epsilon);
		file.println("Slack: " + _slack);
		file.println("Max Step Size: " + _maxStepSize);
		file.println("Max Number of Projection : " + _maxNumberIterations);
	}

	
	// No-arg constructor used only by loadModel(), which fills the fields in.
	EqualityStochasticFertilityHMM(){
			
	}

	/** @return human-readable model name */
	public String getName() {
		return "Stochastic Fertility Constrain HMM";
	}

	/**
	 * Accumulates, on top of the base HMM counts, the per-word fractional
	 * fertility statistics (count, sum and sum of squares of the expected
	 * fertility of each source position in the current sentence).
	 */
	public void updateFractionalCounts() {
		super.updateFractionalCounts();
		for(int i =0; i < _sSize; i++){
			double fertilityi=0;
			// Expected fertility of source position i = sum of its state
			// posteriors over all target positions.
			for(int j =0; j < _fSize; j++){
				fertilityi += _statePosteriors.getProb(j, i);
			}
			numberFracFertilities[_sourceSentenceIDS[i]]++;
			sumFracFertilities[_sourceSentenceIDS[i]]+=fertilityi;
			squareSumFracFertilities[_sourceSentenceIDS[i]]+=fertilityi*fertilityi;
		}
	}

	
	/** M-step: base HMM re-estimation plus the fertility Gaussian update. */
	public void mStep() {
		super.mStep();
		//Update the constraint distribution
		updateConstraintDistribution();
	}
	
	/**
	 * Re-estimates the per-word fertility Gaussians from the accumulated
	 * fractional statistics (subject to the learnMean/learnVariance flags and
	 * the updatableFreq frequency cutoff), prints diagnostics, and resets the
	 * accumulators for the next EM iteration.
	 */
	public void updateConstraintDistribution(){
		double average = 0;
		double average2 = 0;
		double counts2 = 0;
		for(int i =0; i < _corpus.getSourceSize(); i++){
			// Additive smoothing on all three sufficient statistics.
			double n = numberFracFertilities[i] + smoothing;
			double counts = sumFracFertilities[i] + smoothing;
			double squareCounts = squareSumFracFertilities[i] + smoothing;
			double mean = counts/n;
			// Unbiased sample variance: (sum(x^2) - n*mean^2) / (n-1).
			double variance = squareCounts/(n-1) - mean*mean*n/(n-1);
			average+=mean;
			average2+=counts;
			counts2+=n;
			// Only rare words get their priors re-estimated; frequent words
			// keep the empirical (or default) parameters.
			if(_corpus.getSourceWordCounts(i)<updatableFreq){
				if(learnMean && learnVariance){
					((GaussianDistribution)dists[i]).update(mean, variance);
				}else if(learnMean){
					((GaussianDistribution)dists[i]).update(mean, processedFertilityVariance[i]);
				}else if(learnVariance){
					((GaussianDistribution)dists[i]).update(processedFertilities[i], variance);
				}else{
					((GaussianDistribution)dists[i]).update(processedFertilities[i], processedFertilityVariance[i]);
				}
			}
			// Diagnostic: words whose empirical fertility differs from 1 and
			// were observed often enough to be interesting.
			if(fertility[i] != 1 &&  fertility[i] != -1 && numberFracFertilities[i] > 5){
				System.out.println(_corpus.getSourceWordById(i) + "("+ (numberFracFertilities[i]) +")"+ " emp " + fertility[i] + " mean " + mean + " var " + variance);
			}
		}
		System.out.println("Average " + average/_corpus.getSourceSize());
		System.out.println("Average2 " + average2/counts2);
		// Reset accumulators for the next iteration.
		java.util.Arrays.fill(numberFracFertilities, 0);
		java.util.Arrays.fill(sumFracFertilities, 0);
		java.util.Arrays.fill(squareSumFracFertilities, 0);
		
	}
	
	
	
	/** @return E-step statistics carrying constrained-projection bookkeeping */
	public EStepStats createModelStats(){
		ConstrainedProjectionStats pstats = new ConstrainedProjectionStats();
		EStepStats stats = new EStepStats();
		stats.pstats = pstats;
		return stats;
	}
	
	/**
	 * Projects the current sentence's posteriors onto the fertility
	 * constraints, using each source position's word-level Gaussian prior.
	 *
	 * @return projection statistics for the current sentence
	 */
	public SentenceConstrainedProjectionStats projectPosteriors(){
		// Gather the Gaussian prior of each source position in this sentence.
		GaussianDistribution[] sentenceDists = new GaussianDistribution[_sSize];
		for(int i =0; i < _sSize; i++){
			sentenceDists[i] = (GaussianDistribution) dists[_sourceSentenceIDS[i]];
		}
		constrains = new EqualityStochasticFertilityConstrains(this,_epsilon,_slack,_maxStepSize,_maxNumberIterations,sentenceDists);
		
	//	SentenceConstrainedProjectionStats stats = constrains.steepestAscentProjection();
		SentenceConstrainedProjectionStats stats = constrains.conjugateGradientProjection2();
		return stats;
	}
	
	/**
	 * Loads a previously saved model (translation and distortion tables) from
	 * {@code directory}; projection parameters are supplied by the caller.
	 * Exits the JVM if the corpus does not match the saved description.
	 */
	public static EqualityStochasticFertilityHMM loadModel(BilingualCorpus corpus, String directory, double epsilon, double slack, int maxSteps, int maxIterations) {
		System.out.println(corpus.getName());
		System.out.println(directory);
		EqualityStochasticFertilityHMM model = new EqualityStochasticFertilityHMM();
		model._corpus = corpus;
		if (!corpus.checkDescription(directory)) {
			System.out.println("Corpus is not the same");
			System.exit(1);
		}
		model._tb = SparseTranslationTable.LoadTranslationTable(corpus,
				directory);
		model._distortion = new DoubleDistortionTable(directory);
		model._trained = true;
		model._epsilon=epsilon;
		model._slack=slack;
		model._maxStepSize = maxSteps;
		model._maxNumberIterations = maxIterations;
		model.initializeStructures();
		return model;
	}
	
	/**
	 * Command-line entry point: parses 21 positional arguments, initializes
	 * with IBM Model 1 (optionally bijective), trains this model, and
	 * evaluates Viterbi and posterior decoding against the gold alignments.
	 */
	public static void main(String[] args) throws IOException {
		String corpusDescription = args[0];
		int size = Integer.parseInt(args[1]); 
		int maxSentenceSize = Integer.parseInt(args[2]); 
		int numberIterations = Integer.parseInt(args[3]); 
		double smoothing = Double.parseDouble(args[4]); 
		double slack = Double.parseDouble(args[5]); 
		double epsilon = Double.parseDouble(args[6]); 
		int maxStepSize = Integer.parseInt(args[7]);
		int maxNumberOfProjectionIterations = Integer.parseInt(args[8]);
		boolean trainWithResults = Boolean.parseBoolean(args[9]);
		int numberIterationsWithResults = Integer.parseInt(args[10]);
		boolean saveModel = Boolean.parseBoolean(args[11]);
		String saveModelDir = args[12];
		boolean useBistochasticM1 = Boolean.parseBoolean(args[13]);
		boolean useFertility = Boolean.parseBoolean(args[14]);
		boolean useTestSetFertilities = Boolean.parseBoolean(args[15]);
		boolean useZeros = Boolean.parseBoolean(args[16]);
		boolean learnMean = Boolean.parseBoolean(args[17]);
		boolean learnVariance = Boolean.parseBoolean(args[18]);
		int updateFreq = Integer.parseInt(args[19]);
		// Fixed: was Integer.parseInt, which threw NumberFormatException on
		// any fractional argument (e.g. "0.5") and could never yield a
		// non-integer default variance.
		double defaultVariance = Double.parseDouble(args[20]);
		System.out.println("Size " + size);
		System.out.println("Max Sentence size " + maxSentenceSize);
		System.out.println("Number of iterations " + numberIterations);
		System.out.println("smoothing " + smoothing);
		System.out.println("Slack " + slack);
		System.out.println("Epsilon " + epsilon);
		System.out.println("Max Step Size" + maxStepSize);
		System.out.println("Max Number of Iterations" + maxNumberOfProjectionIterations);
		System.out.println("Train with results " + trainWithResults);
		System.out.println("Number of Iterations with results "
				+ numberIterationsWithResults);
		System.out.println("Saving model " + saveModel + " to " + saveModelDir);
		System.out.println("Use Symmetric M1 " + useBistochasticM1);
		System.out.println("Use Fertilities " + useFertility);
		System.out.println("Use Test Corpus " + useTestSetFertilities);
		System.out.println("Allow zero fertilities " + useZeros);
		System.out.println("Learn Mean " + learnMean);
		System.out.println("Learn Variance " + learnVariance);
		System.out.println("Update distr parameteres for words that occur more than " + updateFreq);
		System.out.println("DefaultVariance " + defaultVariance);
		BilingualCorpus corpus = BilingualCorpus.getCorpusFromFileDescription(
				corpusDescription, size, maxSentenceSize);
		
		
		// Initialize with IBM Model 1 to get a reasonable translation table.
		IBMM1 m1;
		if(useBistochasticM1){
			m1 = new BijectiveM1(corpus,smoothing,epsilon,slack,maxStepSize,maxNumberOfProjectionIterations);
		}else{
			m1 = new IBMM1(corpus,smoothing);
		}
		m1.train(numberIterations,false,"");
		String baseDir = saveModelDir + "equalityStochasticHMM/" + corpus.getName() + "/" + size + "/";
		if(saveModel){
			StaticTools.createDir(baseDir);
		}
		EqualityStochasticFertilityHMM mhmm = 
			new EqualityStochasticFertilityHMM(corpus, m1._tb,smoothing,epsilon,slack,
					maxStepSize,maxNumberOfProjectionIterations,
					useFertility,useTestSetFertilities,useZeros, learnMean, learnVariance,updateFreq,defaultVariance);
		if (!trainWithResults) {
			mhmm.train(numberIterations,saveModel,baseDir);
		} else {
			// trainWithResults returns six evaluation series, printed in the
			// fixed order expected below.
			ArrayList<Evaluation[]> evalsList = mhmm.trainWithResults(
					numberIterationsWithResults, BilingualCorpus.DEV_CORPUS,saveModel,baseDir);
			System.out.println("Viterbi all");
			Evaluation[] evals = evalsList.get(0);
			for (int i = 0; i < evals.length; i++) {
				System.out.println("Iter " + i + evals[i]);
			}
			System.out.println(" ----- ");
			System.out.println("Viterbi Rare");
			evals = evalsList.get(1);
			for (int i = 0; i < evals.length; i++) {
				System.out.println("Iter " + i + evals[i]);
			}
			System.out.println(" ----- ");
			System.out.println("Viterbi Common");
			evals = evalsList.get(2);
			for (int i = 0; i < evals.length; i++) {
				System.out.println("Iter " + i + evals[i]);
			}
			System.out.println(" ----- ");
			
			System.out.println("Precision all");
			 evals = evalsList.get(3);
			for (int i = 0; i < evals.length; i++) {
				System.out.println("Iter " + i + evals[i]);
			}
			System.out.println(" ----- ");
			System.out.println("Precision Rare");
			evals = evalsList.get(4);
			for (int i = 0; i < evals.length; i++) {
				System.out.println("Iter " + i + evals[i]);
			}
			System.out.println(" ----- ");
			System.out.println("Precision Common");
			evals = evalsList.get(5);
			for (int i = 0; i < evals.length; i++) {
				System.out.println("Iter " + i + evals[i]);
			}
			System.out.println(" ----- ");
		}

		System.out.println("Done with training");
		if(saveModel){
			StaticTools.createDir(baseDir);
			mhmm.saveModel(baseDir);
		}
		
		// Viterbi decoding without projection.
		AlignmentsSet sa = mhmm.viterbiAlignments(BilingualCorpus.TEST_CORPUS);
		AlignmentsSet gold = corpus.getGold();
		System.out.println("Gold size" + gold.size() + " viterbi size "
				+ sa.size());
		Evaluation eval2 = AlignmentEvaluator.evaluate(sa, corpus.getGold());
		AlignmentStats.printPhrases(sa, true, false);
		System.out.println("Segment viterbi " + eval2);
		
		
		// Viterbi with projection
		AlignmentsSet saP = mhmm.viterbiAlignments(BilingualCorpus.TEST_CORPUS,true);
		Evaluation eval2P = AlignmentEvaluator.evaluate(saP, corpus.getGold());
		AlignmentStats.printPhrases(saP, true, false);
		System.out.println("Segment viterbi with projection " + eval2P);

		// Posterior decoding: tune the threshold on dev, decode on test.
		float treshold = mhmm.tuneTreshholdAER(BilingualCorpus.DEV_CORPUS,false);
		
		AlignmentsSet sa2 = mhmm.posteriorAlignments(
				BilingualCorpus.TEST_CORPUS, treshold,false,false);
		Evaluation eval22 = AlignmentEvaluator.evaluate(sa2, corpus.getGold());
		AlignmentStats.printPhrases(sa2, true, false);
		System.out.println("Posterioir decoding " + eval22);
		
		// Posterior decoding with projection.
		float tresholdP = mhmm.tuneTreshholdAER(BilingualCorpus.DEV_CORPUS,true);
		AlignmentsSet sa2P = mhmm.posteriorAlignments(
				BilingualCorpus.TEST_CORPUS, tresholdP,true,false);
		Evaluation eval22P = AlignmentEvaluator.evaluate(sa2P, corpus.getGold());
		AlignmentStats.printPhrases(sa2P, true, false);
		System.out.println("Posterioir decoding " + eval22P);

	
	}

	
}
