package edu.cornell.cs4740.sentencegenerator;

import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

public class BigramTable {

  /** word1 -> (word2 -> number of times the bigram (word1, word2) was observed). */
  private Map<String, Map<String, Long>> bigramCountTable;
  /** word1 -> (word2 -> unsmoothed P(word2 | word1)); built lazily. */
  private Map<String, Map<String, Double>> unsmoothedProbTable;
  /** Cache of total bigram counts per word1, used by Laplacian smoothing. */
  private Map<String, Double> wordCounts = new HashMap<String, Double>();
  /** bigram frequency -> Good-Turing discounted probability; built lazily. */
  private Map<Integer, Double> gtProbTable;
  private int numberOfBigrams = 0;
  private int vocabSize = 0;
  /** Largest count of any single bigram; sizes the Good-Turing table. */
  private int maxFreq = 0;

  /**
   * Builds the bigram count table (word1 -> (word2 -> bigram count)) from a
   * tokenized corpus.
   *
   * @param words corpus tokens in order; sentence boundaries are expected to be
   *     marked with {@code Utilities.BREAK} by the tokenizer
   * @param unknownWords when true, the first occurrence of every word type is
   *     replaced by {@code Utilities.UNKNOWN}, reserving probability mass for
   *     out-of-vocabulary words at query time
   */
  public BigramTable(List<String> words, boolean unknownWords) {
    bigramCountTable = new HashMap<String, Map<String, Long>>();
    HashSet<String> wordsSeen = new HashSet<String>();

    String prevWord = Utilities.BREAK;
    for (String wrd : words) {
      String w = wrd;
      // First sighting of each word type is counted as UNKNOWN so that unseen
      // words at query time have non-zero counts to fall back on.
      // (Set.add returns true exactly when the word was not seen before.)
      if (unknownWords && wordsSeen.add(w)) {
        w = Utilities.UNKNOWN;
      }

      Map<String, Long> internalMap = bigramCountTable.get(prevWord);
      if (internalMap == null) {
        internalMap = new HashMap<String, Long>();
        bigramCountTable.put(prevWord, internalMap);
      }

      Long oldCount = internalMap.get(w);
      long newCount = (oldCount == null) ? 1L : oldCount.longValue() + 1L;
      internalMap.put(w, Long.valueOf(newCount));
      maxFreq = Math.max(maxFreq, (int) newCount);

      prevWord = w;
      numberOfBigrams++;
    }

    // No closing (lastWord, BREAK) bigram is added here: the Utilities
    // tokenization code guarantees the corpus already ends with BREAK.
    vocabSize = bigramCountTable.size();
  }

  /**
   * Computes the unsmoothed conditional probability table:
   * word1 -> (word2 -> P(word2 | word1)).
   *
   * <p>The conditional probability P(A|B) = P(A and B) / P(B) reduces to
   * count(word1, word2) / count(word1, *), so the corpus-wide bigram total
   * cancels out and a single division per entry suffices.
   *
   * @return the probability table (also cached in {@code unsmoothedProbTable})
   */
  public Map<String, Map<String, Double>> generateProbabilityTable() {
    Map<String, Map<String, Double>> probabilityTable =
        new HashMap<String, Map<String, Double>>();

    for (Map.Entry<String, Map<String, Long>> row : bigramCountTable.entrySet()) {
      Map<String, Long> internalCountMap = row.getValue();

      // Total number of bigrams that start with this word: count(word1, *).
      long keyBigramCount = 0;
      for (Long count : internalCountMap.values()) {
        keyBigramCount += count.longValue();
      }

      Map<String, Double> internalProbMap = new HashMap<String, Double>();
      for (Map.Entry<String, Long> entry : internalCountMap.entrySet()) {
        double condProb = entry.getValue().doubleValue() / (double) keyBigramCount;
        internalProbMap.put(entry.getKey(), Double.valueOf(condProb));
      }
      probabilityTable.put(row.getKey(), internalProbMap);
    }

    unsmoothedProbTable = probabilityTable;
    return probabilityTable;
  }

  /**
   * The unsmoothed probability of word2 given word1 - P(word2 | word1).
   * Builds the probability table on first use. Out-of-vocabulary words are
   * mapped to {@code Utilities.UNKNOWN}; vocabulary membership is approximated
   * by the outer table's keys (words observed as the first element of a
   * bigram), matching the other lookup methods in this class.
   *
   * @return P(word2 | word1), or 0 if the bigram was never observed
   */
  public double getUnsmoothedProb(String word1, String word2) {
    if (unsmoothedProbTable == null) {
      generateProbabilityTable();
    }

    String w1 = (unsmoothedProbTable.containsKey(word1)) ? word1
        : Utilities.UNKNOWN;
    String w2 = (unsmoothedProbTable.containsKey(word2)) ? word2
        : Utilities.UNKNOWN;

    Map<String, Double> internalProbMap = unsmoothedProbTable.get(w1);
    if (internalProbMap == null) {
      // No row for word1 and no UNKNOWN row (unknownWords was false).
      return 0;
    }

    Double prob = internalProbMap.get(w2);
    return (prob == null) ? 0 : prob.doubleValue();
  }

  /**
   * The Laplacian (add-one) smoothed probability of word2 given word1 -
   * P(word2 | word1) = (count(word1, word2) + 1) / (count(word1, *) + V).
   *
   * @return the add-one smoothed conditional probability; never 0
   */
  public double getLaplacianProb(String word1, String word2) {
    Map<String, Long> internalCountMap = bigramCountTable.get(word1);
    if (internalCountMap == null) {
      // word1 never started a bigram; fall back to the UNKNOWN row.
      internalCountMap = bigramCountTable.get(Utilities.UNKNOWN);
    }
    if (internalCountMap == null) {
      // No UNKNOWN row either (model built with unknownWords == false):
      // add-one smoothing with a zero count and a zero context total.
      // (The original code dereferenced null here and threw an NPE.)
      return 1. / vocabSize;
    }

    // Map out-of-vocabulary word2 to UNKNOWN. As elsewhere in this class,
    // vocabulary membership is approximated by the outer table's keys.
    Long count = bigramCountTable.containsKey(word2)
        ? internalCountMap.get(word2)
        : internalCountMap.get(Utilities.UNKNOWN);
    double bigramCount = (count == null) ? 0. : count.doubleValue();

    // count(word1, *), cached per word1 because it is an O(row-size) sum.
    Double cached = wordCounts.get(word1);
    double word1Count;
    if (cached != null) {
      word1Count = cached.doubleValue();
    } else {
      word1Count = 0.;
      for (Long c : internalCountMap.values()) {
        word1Count += c.longValue();
      }
      wordCounts.put(word1, Double.valueOf(word1Count));
    }

    return (bigramCount + 1.) / (word1Count + vocabSize);
  }

  /**
   * Calculates and stores the Good-Turing probabilities of different bigram
   * frequencies. The frequency-of-frequency counts N_c are smoothed by a
   * least-squares fit of N_c against log(c) so no N_c used in the discounting
   * formula is zero (simple Good-Turing style).
   */
  private void makeGoodTuringTable() {
    // freqOfFreq[c] = number of distinct bigrams observed exactly c times.
    // (Java zero-initializes the array.)
    int[] freqOfFreq = new int[maxFreq + 1];
    for (Map<String, Long> row : bigramCountTable.values()) {
      for (Long count : row.values()) {
        freqOfFreq[count.intValue()] += 1;
      }
    }

    // Least-squares fit of y = alpha + beta * x over the points
    // (x, y) = (log(c), N_c) for c = 1..maxFreq.
    double sumXes = 0; // sum(x_1,...,x_n)
    double sumYs = 0; // sum(y_1,...,y_n)
    double sumXYs = 0; // sum(x_1*y_1,...,x_n*y_n)
    double sumSquareXes = 0; // sum(x^2)

    for (int c = 1; c <= maxFreq; c++) {
      double x = Math.log(c);
      double y = freqOfFreq[c];
      sumXes += x;
      sumYs += y;
      sumXYs += x * y;
      sumSquareXes += x * x;
    }

    // The regression has maxFreq points (c starts at 1), not maxFreq + 1;
    // the original divided by the array length, skewing both coefficients.
    double n = maxFreq;
    double beta = (sumXYs - sumXes * sumYs / n)
        / (sumSquareXes - sumXes * sumXes / n);
    double alpha = (sumYs - beta * sumXes) / n;

    gtProbTable = new HashMap<Integer, Double>();
    for (int k = 0; k <= maxFreq; k++) {
      // Smoothed N_k from the fit; k == 0 is evaluated at log(1) = 0 so that
      // unseen bigrams share the fitted mass of frequency-one events.
      double nc = Math.exp(alpha + beta * ((k == 0) ? 0. : Math.log(k)));
      double nc1 = Math.exp(alpha + beta * Math.log(k + 1));
      // Good-Turing reestimated count c* = (k + 1) * N_{k+1} / N_k,
      // converted to a probability by dividing by the total bigram count.
      double cStar = (k + 1) * nc1 / nc;
      gtProbTable.put(Integer.valueOf(k), Double.valueOf(cStar / numberOfBigrams));
    }
  }

  /**
   * The Good-Turing discounting probability of word2 given word1 - P(word2 |
   * word1). Builds the frequency table on first use; unseen words map to
   * {@code Utilities.UNKNOWN} and unseen bigrams receive the zero-frequency
   * probability.
   */
  public double getGoodTuringProb(String word1, String word2) {
    if (gtProbTable == null) {
      makeGoodTuringTable();
    }

    String w1 = (bigramCountTable.containsKey(word1)) ? word1
        : Utilities.UNKNOWN;
    String w2 = (bigramCountTable.containsKey(word2)) ? word2
        : Utilities.UNKNOWN;

    Map<String, Long> internalCountMap = bigramCountTable.get(w1);
    if (internalCountMap == null) {
      // No row for word1 and no UNKNOWN row: treat as an unseen bigram.
      return gtProbTable.get(0);
    }

    Long count = internalCountMap.get(w2);
    if (count == null) {
      return gtProbTable.get(0);
    }

    return gtProbTable.get(count.intValue());
  }
}
