package edu.kit.csl.pisa.training;

/*
This file is part of the PISA Alignment Tool.

Copyright (C) 2013
Karlsruhe Institute of Technology
Cognitive Systems Lab (CSL)
Felix Stahlberg

PISA is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

PISA is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with PISA. If not, see <http://www.gnu.org/licenses/>.
*/

import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;

import edu.kit.csl.pisa.datatypes.Alignment;
import edu.kit.csl.pisa.datatypes.BadParameterValueException;
import edu.kit.csl.pisa.datatypes.Corpus;
import edu.kit.csl.pisa.datatypes.Sentence;
import edu.kit.csl.pisa.datatypes.SentenceAlignment;
import edu.kit.csl.pisa.datatypes.SentencePair;
import edu.kit.csl.pisa.io.Logger;
import edu.kit.csl.pisa.models.AlignmentModel;
import edu.kit.csl.pisa.ui.Configuration;

/**
 * This class deals with the EM training. It provides a high-level
 * implementation of the EM algorithm and uses the GoF strategy design
 * pattern for particular E-step and M-step implementations.
 * 
 * @see MaximizationStrategy
 * @see SuccessorStrategy
 */
public class EMTrainer extends Trainer {
	
	/*
	 * Natural logarithm of 2, used to convert natural logarithms into
	 * base-2 logarithms (logarithm dualis) for the perplexity calculation.
	 */
	private static final double LOG2 = Math.log(2);
	
	/*
	 * Running sum of base-2 log likelihoods over all sentence pairs of the
	 * current iteration. The corpus perplexity reported by iterate() is
	 * 2^(-perpExp / corpusSize). Guarded by perpLock, since worker threads
	 * accumulate into it concurrently.
	 */
	private double perpExp;
	
	/*
	 * Dedicated lock guarding perpExp. A private lock object is used instead
	 * of synchronizing on the iterator field 'it', which is mutable and set
	 * to null between iterations.
	 */
	private final Object perpLock = new Object();
	
	/*
	 * Shared iterator handing out sentence pairs to worker threads.
	 * Non-null only while iterate() is running.
	 */
	private Iterator<SentencePair> it;
	
	/*
	 * Number of worker threads, read once from the "nThreads" configuration.
	 */
	private final int nThreads;
	
	/*
	 * Worker task for one thread of the E-step: repeatedly fetches a sentence
	 * pair, maximizes it via the configured strategy, builds the neighborhood
	 * of its Viterbi alignment, scores every neighbor with the model, and
	 * aggregates the resulting fractional counts. Terminates when the shared
	 * corpus iterator is exhausted.
	 */
	private class EMTrainerWorker implements Runnable {
		@Override
		public void run() {
			SentencePair pair = getNextSentencePair();
			while (pair != null) {
				// Maximize this pair with the configured strategy
				maxStrategy.maximize(pair);
				Sentence srcSen = pair.getSourceSentence();
				Sentence trgtSen = pair.getTargetSentence();
				// Build the neighborhood around the Viterbi alignment
				Set<Alignment> neighbors = getNeighbors(
						pair.getViterbiAlignment().getAlignment(),
						pair.getAlignments(),
						srcSen.getLength(), trgtSen.getLength());
				pair.removeAllAlignments();
				double[] scores = new double[neighbors.size()];
				int i = 0;
				// Calculate the (log) score for each neighbor and re-attach
				// it to the pair
				for (Alignment a : neighbors) {
					SentenceAlignment sa = new SentenceAlignment(a, srcSen,
							trgtSen, 0.0);
					double score = model.calculateAlignmentProbability(sa);
					scores[i++] = score;
					pair.addAlignment(sa.getAlignment(), score);
				}
				neighbors = null; // drop reference to the (large) set early
				// Collect fractional counts, normalized over the neighborhood
				// via log-sum-exp
				double logSum = logSum(scores);
				for (SentenceAlignment sa : pair) {
					model.aggregateFractionalCount(sa, Math.exp(
							sa.getScore() - logSum));
				}
				// Accumulate this pair's contribution to the perplexity
				// exponent (converted to base 2)
				synchronized (perpLock) {
					perpExp += logSum / LOG2;
				}
				succStrategy.findSuccessors(pair);
				pair = getNextSentencePair();
			}
		}
	}
	
	/*
	 * Calculates \log(\sum(a_i)) with a recursive D&C algorithm to avoid 
	 * underflows. The a_i are stored in the argument with their log values.
	 * Returns the sentinel 999 for an empty array (this should not occur in
	 * practice, since the neighborhood always contains at least the Viterbi
	 * alignment).
	 */
	private double logSum(double[] a) {
		if (a.length == 0) {
			return 999;
		}
		return logSum(a, 0, a.length);
	}
	
	/*
	 * Helper function for logSum(double[]). Subsum from start (inclusive) to
	 * end (exclusive). Uses the identity
	 * log(x+y) = log(x) + log(1+exp(log(y)-log(x))), anchored at the larger
	 * operand so the exp() argument is non-positive and cannot overflow.
	 */
	private double logSum(double[] a, int start, int end) {
		if (end - start == 1) {
			return a[start];
		}
		int med = start + (end - start) / 2;
		double logX = logSum(a, start, med);
		double logY = logSum(a, med, end);
		return (logY < logX)
			? logX + Math.log(1.0 + Math.exp(logY - logX))
			: logY + Math.log(1.0 + Math.exp(logX - logY));
	}
	
	/*
	 * Worker threads call this method in order to build up the neighborhood
	 * to a sentence pair, which then is used to calculate the perplexity and
	 * collect fractional counts. nTrgtWords and nSrcWords are used to guess
	 * the initial size of the neighborhood set.
	 */
	private Set<Alignment> getNeighbors(Alignment viterbi, 
			Collection<Alignment> alignments, int nSrcWords, int nTrgtWords) {
		HashSet<Alignment> roots = new HashSet<Alignment>(alignments);
		// Presize: each root contributes roughly one neighbor per word
		HashSet<Alignment> neighborhood = new HashSet<Alignment>(roots.size()
				* (nSrcWords + nTrgtWords));
		neighborhood.addAll(roots);
		viterbi.collectNeighbors(neighborhood);
		for (Alignment a : roots) {
			// Reference comparison suffices: the Viterbi alignment object
			// itself is a member of the roots collection
			if (a != viterbi) {
				a.collectNeighbors(neighborhood);
			}
		}
		return neighborhood;
	} 
	
	/**
	 * Sole constructor. Initializes the internal strategies according to which
	 * model should be trained.
	 * 
	 * @param model Which model should be trained?
	 * @throws BadParameterValueException if one of the configured strategies
	 * 			are unknown.
	 */
	public EMTrainer(AlignmentModel model) throws BadParameterValueException {
		super(model);
		nThreads = Configuration.getSingleton().getInteger("nThreads");
	}
	
	/* (non-Javadoc)
	 * Worker threads call this method to obtain a new sentence pair to
	 * optimize. If all pairs are done, return null.
	 * 
	 * @throws NullPointerException if the trainer does not iterate right now
	 */
	synchronized private SentencePair getNextSentencePair() {
		if (it.hasNext()) {
			return it.next();
		}
		return null;
	}

	/**
	 * This method carries out a single iteration of the EM algorithm - i.e.
	 * one expectation and one maximization step.
	 * {@inheritDoc}
	 */
	@Override
	protected void iterate(Corpus corpus) {
		Logger log = Logger.getSingleton();
		log.debug("Allocate memory for fractional counts...");
		perpExp = 0.0;
		model.initializeFractionalCounts();
		log.notice("Start maximization with " + nThreads + " thread(s)...");
		// Maximization: workers pull pairs from this shared iterator
		it = corpus.iterator();
		// Start worker threads
		Thread[] workers = new Thread[nThreads];
		for (int i = 0; i < nThreads; i++) {
			Thread t = new Thread(new EMTrainerWorker());
			t.start();
			workers[i] = t;
		}
		// Wait for all workers to finish before touching shared state
		for (int i = 0; i < nThreads; i++) {
			try {
				workers[i].join();
			} catch (InterruptedException e) {
				log.error("Join of worker threads in EM "
						+ "training interrupted: " + e.getMessage());
				// Restore the interrupt status so callers can observe it
				Thread.currentThread().interrupt();
			}
		}
		log.debug("Normalize fractional counts...");
		model.writeBackFractionalCounts();
		it = null;
		log.debug(model.getName() + " perplexity is now 2^"
				+ (-perpExp / corpus.size()));
	}
}
