# Ofri Keidar Inbal Wiesel 302933981 305331878

from UniLidstone import UniLidstone
from BiLidstone import BiLidstone
import math

class Backoff:
    """
    Implements the Backoff smoothing method for a bigram language model.

    Bigrams seen in training get their discounted probability from the
    bigram Lidstone model; unseen bigrams back off to the unigram Lidstone
    probability, scaled by a per-conditioning-word normalizer (alpha) so
    the conditional distribution still sums to 1.
    """

    def __init__(self, uniLidstone, biLidstone, condWords, uniLambda, biLambda):
        """
        Creates a new object implementing the Backoff method for a bigram model.

        uniLidstone -- object implementing Lidstone smoothing for the unigram model
        biLidstone  -- object implementing Lidstone smoothing for the bigram model
        condWords   -- maps each conditioning word to its observed following words
        uniLambda   -- lambda value to set on the unigram Lidstone model
        biLambda    -- lambda value to set on the bigram Lidstone model
        """
        # set members
        self.uniLidstone = uniLidstone
        self.biLidstone = biLidstone
        self.condWords = condWords
        self.BEGIN_ARTICLE = "begin-article-event"  # virtual event at article's beginning

        # set smoothing parameters
        self.uniLidstone.lambdaVal = uniLambda
        self.biLidstone.lambdaVal = biLambda

        # calculate normalizing value for each conditioning word
        self.alphas = {}
        self.calcAlphas()

    def _calcAlpha(self, conditioning):
        """
        Returns the normalizing value (alpha) for a single conditioning word:
        (1 - sum of discounted bigram probs) / (1 - sum of discounted unigram probs),
        summed over all words observed after the conditioning word.
        """
        biSumDiscProb = 0.0  # sum of Pd(w'|w) over observed followers
        uniSumDiscProb = 0.0  # sum of Pd(w') over observed followers
        for currWord in self.condWords[conditioning]:
            biSumDiscProb += self.biLidstone.getDiscProb((conditioning, currWord))  # += Pd(w'|w)
            uniSumDiscProb += self.uniLidstone.getDiscProb(currWord)  # += Pd(w')
        return (1.0 - biSumDiscProb) / (1.0 - uniSumDiscProb)

    def calcAlphas(self):
        """
        For each observed conditioning word, calculates its matching
        normalizing value (alpha). Also computes the alpha of the virtual
        "begin-article" event, which is not part of the observed vocabulary.
        """
        for conditioning in self.uniLidstone.obsVoc.keys():
            self.alphas[conditioning] = self._calcAlpha(conditioning)

        # the virtual begin-article event is handled separately because it
        # never appears in the observed unigram vocabulary
        self.alphas[self.BEGIN_ARTICLE] = self._calcAlpha(self.BEGIN_ARTICLE)

    def setUniLamdba(self, newLambda):
        """
        Sets lambda value for unigram Lidstone smoothing and recalculates
        the normalizers (alphas) according to the new lambda value.
        (Method name kept as-is, typo included, for backward compatibility.)
        """
        self.uniLidstone.lambdaVal = newLambda
        self.calcAlphas()

    def setBiLamdba(self, newLambda):
        """
        Sets lambda value for bigram Lidstone smoothing and recalculates
        the normalizers (alphas) according to the new lambda value.
        (Method name kept as-is, typo included, for backward compatibility.)
        """
        self.biLidstone.lambdaVal = newLambda
        self.calcAlphas()

    def backoffProb(self, bigram):
        """
        Returns the discounted probability of the given bigram (w, w'):
        - if the bigram appears in the train set: Pd(w'|w) from the bigram model
        - else, if w itself was never observed:   Pd(w') from the unigram model
        - otherwise:                              alpha(w) * Pd(w')
        """
        # check if bigram appears in train set
        if self.biLidstone.getFreq(bigram) > 0:  # if C(w w') > 0
            return self.biLidstone.getDiscProb(bigram)  # return Pd(w'|w)

        # handle unseen conditioning word (no alpha exists for it)
        if self.uniLidstone.getFreq(bigram[0]) == 0:  # if C(w) = 0
            return self.uniLidstone.getDiscProb(bigram[1])  # return Pd(w')

        # normalized probability according to Lidstone smoothing for unigram model
        return self.alphas[bigram[0]] * self.uniLidstone.getDiscProb(bigram[1])  # alpha(w)*Pd(w')

    def backoffProbUnseenEvent(self, condWord):
        """
        Returns the discounted probability of the bigram (condWord, x*) where
        x* is an unseen word: alpha(condWord) * Pd(x*), where Pd is the
        discounted probability of Lidstone smoothing for the unigram model.
        An unseen conditioning word gets alpha = 1.
        """
        alpha = self.alphas.get(condWord, 1)  # 1 for unseen conditioning word
        return alpha * self.uniLidstone.getDiscProb()  # alpha(condWord)*Pd(x*)

    def perplexity(self, testFileName, headerPrefix, wordDelim):
        """
        Returns the model's perplexity on the given test file.

        testFileName -- path of the test file
        headerPrefix -- lines starting with this prefix are skipped
        wordDelim    -- delimiter separating words within a line

        Returns float("inf") when some bigram has zero probability, or when
        the file contains no bigrams at all.
        """
        numBigrams = 0
        sumLogProb = 0.0

        # 'with' guarantees the file is closed even on the early returns
        # below (the original implementation leaked the handle in that case)
        with open(testFileName, "r") as testFile:
            for line in testFile:
                stripped = line.strip()

                # skip header or empty line
                if stripped.startswith(headerPrefix) or stripped == "":
                    continue

                words = stripped.split(wordDelim)
                currWord = words[0].strip()  # line's first word

                # discounted probability of first bigram (with virtual event)
                firstProb = self.backoffProb((self.BEGIN_ARTICLE, currWord))
                if firstProb == 0:  # log2(0) is -inf -> perplexity is inf
                    return float("inf")
                sumLogProb += math.log(firstProb, 2)
                numBigrams += 1

                for i in range(1, len(words)):
                    nextWord = words[i].strip()
                    prob = self.backoffProb((currWord, nextWord))
                    if prob == 0:  # handle case of infinite power of 2
                        return float("inf")
                    sumLogProb += math.log(prob, 2)

                    # move to next word and count the bigram
                    currWord = nextWord
                    numBigrams += 1

        # guard against an empty test set (would otherwise divide by zero)
        if numBigrams == 0:
            return float("inf")

        # perplexity = 2 ^ (-1/N * sum of log2 probabilities)
        return math.pow(2.0, -sumLogProb / float(numBigrams))

    def debug(self, condWord):
        """
        Prints the sum of discounted probabilities of all bigrams conditioned
        on condWord: the mass of observed following words plus the mass
        assigned to unseen words. Should be close to 1 if normalizers are
        correct.
        """
        # number of unseen events
        numUnseen = self.uniLidstone.VOCAB_SIZE - len(self.uniLidstone.obsVoc)

        # probability mass of unseen bigrams: n0 * alpha(condWord) * Pd(unseen-word)
        alpha = self.alphas.get(condWord, 1)
        unseenProbSum = numUnseen * alpha * self.uniLidstone.getDiscProb()

        # probability mass of observed following words
        obsEventsProbSum = 0.0
        for currWord in self.uniLidstone.obsVoc.keys():
            obsEventsProbSum += self.backoffProb((condWord, currWord))  # Pb(currWord | condWord)

        # print probabilities sum of all events
        print(unseenProbSum + obsEventsProbSum)
    