//065959264	Limor Bagizada	040199770	Sarit Chicotay

package Models;

import java.io.IOException;
import java.util.ArrayList;

import Base.NGram;
import General.Perplexity;

/**
 * 
 * Class implementing the Lidstone discounting model.
 *
 */
public class LidstoneModel extends BaseModel {

	// Lidstone smoothing parameter: the pseudo-count added to every event
	private double lambda;

	/**
	 * Constructor.
	 * Initializes the model from the given corpus: the first 90% of the words
	 * are used for training and the remaining 10% as a held-out test set.
	 * The smoothing parameter lambda is then evaluated (or set to a
	 * pre-computed value for unigram/bigram models).
	 *
	 * @param content the corpus content, one token per entry
	 * @param nGrams the n-gram order of the model (1 = unigram, 2 = bigram)
	 * @throws Exception if base-model initialization or parameter evaluation fails
	 */
	public LidstoneModel(ArrayList<String> content, int nGrams) throws Exception {

		// initialize model with first 90% of words as training, the rest will be used for test
		super(content, 0.9, nGrams);

		// Evaluate model's parameter
		EvaluateParams();
	}

	/**
	 * @return the Lidstone smoothing parameter lambda
	 */
	public double getLambda() {
		return lambda;
	}

	/**
	 * P_lid(x) - calculate the probability of a word (event) that appears r times
	 * in the training corpus, according to Lidstone discounting.
	 * For a fixed lambda the unigram probability depends only on r, so results
	 * are memoized in {@code r_probabilities} (cleared whenever lambda changes).
	 */
	@Override
	public double CalculateProbability(int r)
	{
		// check if the probability for r was already calculated -
		// if so - return this probability
		if (r_probabilities.containsKey(r))
			return r_probabilities.get(r);

		int corpusSize = training.getSize();
		double result = CalculateProbability(r, corpusSize);

		// FIX: populate the cache - previously it was checked but never filled,
		// so every lookup missed. Safe to memoize here because corpusSize and
		// lambda are fixed between cache clears.
		r_probabilities.put(r, result);
		return result;
	}

	/**
	 * P_lid(x) - calculate the probability of an event according to Lidstone
	 * discounting. For n-grams above unigram the event count is normalized by
	 * the number of appearances of the conditioning word rather than by the
	 * whole corpus size.
	 */
	@Override
	public double CalculateProbability(NGram event){
		int r = training.getCount(event);
		if (event.getNGram() == 1)
			return CalculateProbability(r);

		int corpusSize = training.num_appearances(event.getConditioningWord());
		return CalculateProbability(r, corpusSize);
	}

	/**
	 * Core Lidstone formula: (r + lambda) / (N + lambda * |V|)
	 * where r is the event count, N the normalization count and |V| the
	 * vocabulary size.
	 *
	 * @param r_appearances number of times the event appears in training
	 * @param corpusSize the normalization count (corpus size or conditioning-word count)
	 * @return the smoothed probability, or 0 when the denominator is 0
	 */
	private double CalculateProbability(int r_appearances, int corpusSize)
	{
		double counter = r_appearances + this.lambda;
		double denominator = corpusSize + this.lambda * training.getVocabularySize();

		// guard against an empty/unseen normalization context
		if (denominator == 0)
			return 0;

		return counter / denominator;
	}

	/**
	 * Evaluate model's parameter: lambda.
	 * The evaluation is done by comparing the perplexity of the model with
	 * different values of lambda; the lambda providing the lowest perplexity
	 * on the held-out set is chosen.
	 * For unigram and bigram models the search was already performed offline
	 * (coarse-to-fine grid search, e.g. unigram: [0.01,1] step 0.01, then
	 * [0.05,0.07] step 0.0001, then [0.0562,0.0564] step 1e-6; bigram
	 * analogously), so the pre-computed optimum is used directly.
	 *
	 * @throws Exception if perplexity calculation fails
	 */
	private void EvaluateParams() throws Exception
	{
		double minPerplexity = 0;
		double minPerplexityLambda = 0;

		// pre-computed optimum for unigram models
		if (training.getNGram() == 1)
		{
			this.lambda = 0.056272;
			return;
		}

		// pre-computed optimum for bigram models
		if (training.getNGram() == 2)
		{
			this.lambda = 0.000289;
			return;
		}

		// grid search over candidate lambda values (range tuned for bigram;
		// NOTE(review): only reached for n-gram orders > 2 - confirm the range
		// is appropriate for those models)
		for (double testLambda = 0.00028; testLambda <= 0.0003; testLambda += 0.000001)
		{

			// set model's lambda to the test value
			this.lambda = testLambda;

			// Initialize the perplexity with the current model
			Perplexity p = new Perplexity(this);

			// calculate perplexity for test corpus
			double tempPerplexity = p.calculatePerplexity(trainingTest);

			// save the lower perplexity and the appropriate lambda
			// (minPerplexity == 0 means "not yet initialized")
			if (tempPerplexity < minPerplexity || minPerplexity == 0)
			{
				minPerplexity = tempPerplexity;
				minPerplexityLambda = testLambda;
			}

			// clear the memoized probabilities - they are only valid for a
			// single lambda value
			r_probabilities.clear();
			System.out.println("lambda: " + testLambda + " perplexity: " + tempPerplexity);// +" current lambda: " + minPerplexityLambda);

		}

		// set lambda which provided the lowest perplexity value
		this.lambda = minPerplexityLambda;

	}

}
