/**
 * 
 */
package de.uni_leipzig.informatik.asv.unsupervised_pos_tagger.learner;
import java.util.Date;
import cern.jet.random.tdouble.DoubleUniform;
import cern.jet.random.tdouble.engine.MersenneTwister64;
import de.uni_leipzig.informatik.asv.unsupervised_pos_tagger.data.Word;
import de.uni_leipzig.informatik.asv.unsupervised_pos_tagger.model.Model;
import de.uni_leipzig.informatik.asv.util.MixingIterator;
/**
 * 
 * @author Christoph Teichmann
 * created May 29, 2014 10:12:55 PM
 * @version 0.1
 */
public class IncrementalViterbiTraining
{
	/**
	 * Trains the model by incremental Viterbi (hard-assignment) updates:
	 * every word is repeatedly re-tagged with its currently best-scoring
	 * label until the log-likelihood improvement between two evaluation
	 * points drops to {@code minChange} or below.
	 *
	 * @param model the model whose tag assignments are optimized in place
	 * @param minChange convergence threshold; training stops once the
	 *        change in log-likelihood between two checks is no more than this
	 * @param checkSteps number of full passes over the data between two
	 *        log-likelihood evaluations
	 */
	public void train(Model model,double minChange, int checkSteps)
	{
		model.initialize();
		// Randomly tag every word that the model has not initialized itself.
		MixingIterator<Word> mi = model.mIterator();
		DoubleUniform du = new DoubleUniform(new MersenneTwister64(new Date()));
		while(mi.hasNext())
		{
			Word w = mi.next();
			if(!model.isInitialized(w))
			{
				// Colt's nextIntFromTo(from, to) samples from the CLOSED
				// interval [from, to]; valid labels are 0 .. numberOfLabels-1,
				// so the upper bound must be numberOfLabels-1 (the original
				// code had an off-by-one and could assign an invalid tag).
				model.setTag(w, du.nextIntFromTo(0, model.getNumberOfLabels(w) - 1));
			}
		}
		System.out.println("initialized");
		double prev = 0;
		double curr = Double.NEGATIVE_INFINITY;
		int c = 0;
		boolean check = false;
		do
		{
			// One full Viterbi pass: greedily re-tag each word with its
			// best label under the current model state.
			mi = model.mIterator();
			while(mi.hasNext())
			{
				Word w = mi.next();
				makeWord(model, w);
			}
			++c;
			// Evaluate the log-likelihood only every checkSteps passes;
			// the assignment is kept out of the condition for clarity.
			check = (checkSteps <= c);
			if(check)
			{
				c = 0;
				prev = curr;
				curr = model.makeLogLikelihood();
				System.out.println("current value of inverse loglikelihood "+Double.toString(curr));
			}
			System.out.println("finished one round");
			// Keep iterating until a check has happened AND the improvement
			// since the previous check is at most minChange.
		}while((!check) || curr-prev > minChange);
	}
	/**
	 * Re-tags a single word with the label that maximizes
	 * {@link #makeWeight(Model, Word, int)}.
	 *
	 * @param model the model providing label scores and receiving the new tag
	 * @param w the word to re-tag
	 */
	private void makeWord(Model model, Word w)
	{
		int nOL = model.getNumberOfLabels(w);
		double max = Double.NEGATIVE_INFINITY;
		int best = -1;
		for(int i=0;i<nOL;++i)
		{
			double d = makeWeight(model, w, i);
			if(d > max)
			{
				best = i;
				max = d;
			}
		}
		processChoice(model, w, best);
	}
	/**
	 * Exponentiates a log-weight relative to a maximum (log-sum-exp style
	 * shift for numerical stability); hook for subclasses.
	 *
	 * @param max the reference (maximum) log value
	 * @param d the log value to exponentiate
	 * @return {@code exp(d - max)}
	 */
	protected double expNumber(double max, double d)
	{return Math.exp(d-max);}
	/**
	 * Computes the score of assigning the given label to the given word;
	 * hook for subclasses to change the objective.
	 *
	 * @param model the model that scores the assignment
	 * @param w the word to score
	 * @param choice the candidate label index
	 * @return the log-probability of the assignment
	 */
	protected double makeWeight(Model model, Word w, int choice)
	{return model.makeLogProbability(w,choice);}
	/**
	 * Commits the chosen label to the model.
	 *
	 * @param model the model receiving the tag
	 * @param w the word being tagged
	 * @param choice the selected label index
	 * @throws IllegalStateException if no label scored above negative
	 *         infinity (i.e. {@code choice} is still the sentinel -1)
	 */
	private void processChoice(Model model, Word w, int choice)
	{
		if(choice < 0)
		{throw new IllegalStateException("no log probability above negative infinity");}
		model.setTag(w,choice);
	}
}