package edu.cornell.cs4740.sentencegenerator;

import java.util.List;

/**
 * Computes the perplexity of a word sequence under either a unigram or a
 * bigram language model, with a choice of smoothing. Lower perplexity means
 * the sequence fits the model better.
 *
 * <p>Exactly one of the two tables is active at a time; {@code isBigramTable}
 * records which one the next {@link #getPerplexity} call will use.
 */
public class Perplexity {
  // Active when isBigramTable == true.
  BigramTable bigramTable;
  // Active when isBigramTable == false.
  UnigramTable unigramTable;
  // Selects which model getPerplexity dispatches to.
  boolean isBigramTable = false;

  /** Creates a calculator backed by the given bigram model. */
  public Perplexity(BigramTable bigramTable) {
    this.bigramTable = bigramTable;
    isBigramTable = true;
  }

  /** Creates a calculator backed by the given unigram model. */
  public Perplexity(UnigramTable unigramTable) {
    this.unigramTable = unigramTable;
    isBigramTable = false;
  }

  /** Switches this calculator to the given bigram model. */
  public void changeTable(BigramTable newTable) {
    bigramTable = newTable;
    isBigramTable = true;
  }

  /** Switches this calculator to the given unigram model. */
  public void changeTable(UnigramTable newTable) {
    unigramTable = newTable;
    isBigramTable = false;
  }

  /**
   * Log-probability of the bigram (word1, word2) under the requested smoothing.
   *
   * <p>NOTE(review): with {@code NONE} smoothing, an unseen bigram (probability
   * 0) contributes log-prob 0 — i.e. it is silently skipped rather than driving
   * perplexity to infinity. This matches the original code but is inconsistent
   * with the unigram path, where log(0) = -Infinity propagates; confirm which
   * behavior is intended.
   *
   * @throws IllegalArgumentException if the smoothing type is unrecognized
   */
  private double bigramLogProb(String word1, String word2, Utilities.Smoothing smoothingType) {
    switch (smoothingType) {
    case NONE:
      double p = bigramTable.getUnsmoothedProb(word1, word2);
      return (p == 0) ? 0. : Math.log(p);
    case LAPLACIAN:
      return Math.log(bigramTable.getLaplacianProb(word1, word2));
    case GOODTURING:
      return Math.log(bigramTable.getGoodTuringProb(word1, word2));
    default:
      throw new IllegalArgumentException("Unknown smoothing type: " + smoothingType);
    }
  }

  /**
   * Log-probability of a single word under the requested smoothing.
   *
   * @throws IllegalArgumentException if the smoothing type is unrecognized
   */
  private double unigramLogProb(String word, Utilities.Smoothing smoothingType) {
    switch (smoothingType) {
    case NONE:
      return Math.log(unigramTable.getUnsmoothedProb(word));
    case LAPLACIAN:
      return Math.log(unigramTable.getLaplacianProb(word));
    case GOODTURING:
      return Math.log(unigramTable.getGoodTuringProb(word));
    default:
      throw new IllegalArgumentException("Unknown smoothing type: " + smoothingType);
    }
  }

  /** Perplexity of {@code words} under the bigram model: exp(-sum(log P) / N). */
  private double bigramPerplexity(List<String> words, Utilities.Smoothing smoothingType) {
    double logProbSum = 0.;
    for (int i = 1; i < words.size(); i++) {
      logProbSum += bigramLogProb(words.get(i - 1), words.get(i), smoothingType);
    }
    // NOTE(review): the sum has words.size() - 1 terms but is normalized by
    // words.size(); preserved from the original — confirm the intended
    // denominator for the bigram case.
    return Math.exp(-logProbSum / words.size());
  }

  /** Perplexity of {@code words} under the unigram model: exp(-sum(log P) / N). */
  private double unigramPerplexity(List<String> words, Utilities.Smoothing smoothingType) {
    double logProbSum = 0.;
    for (String word : words) {
      logProbSum += unigramLogProb(word, smoothingType);
    }
    return Math.exp(-logProbSum / words.size());
  }

  /**
   * Calculates the perplexity of the given words under the current model
   * (unigram or bigram, whichever was set last). Lower perplexity means a
   * better fit to the model.
   *
   * @param words the word sequence to score; must be non-null and non-empty
   * @param smoothingType the smoothing scheme to apply to model probabilities
   * @return the perplexity (may be {@code Double.POSITIVE_INFINITY} when an
   *     unsmoothed probability of zero is encountered on the unigram path)
   * @throws IllegalArgumentException if {@code words} is null or empty
   */
  public double getPerplexity(List<String> words, Utilities.Smoothing smoothingType) {
    if (words == null || words.isEmpty()) {
      throw new IllegalArgumentException("words must not be null or empty");
    }
    return isBigramTable
        ? bigramPerplexity(words, smoothingType)
        : unigramPerplexity(words, smoothingType);
  }
}
