'''
Created on Jan 5, 2010

@author: matan
'''

import math

from Lidstone import Lidstone
from BigramLidstone import BigramLidstone
from ModelUtils import * 
import pickle

debug_print = True  # module-wide switch: set to False to silence all DEBUG() trace output


def DEBUG(msg):
    '''
    Prints a debug trace line when the module-level debug_print flag is on.
    msg - the message to print (any object; rendered via %s)
    '''
    # parameter renamed from `str` so the builtin is not shadowed;
    # single-argument print(...) produces the same output as the original
    # `print "DEBUG:", msg` statement
    if debug_print:
        print("DEBUG: %s" % (msg,))

V = 300000  # assumed total vocabulary size; used in debug() to count unseen words as V - observed vocabulary -- TODO confirm against assignment spec

class Backoff(object):
    '''
    Represents a Backoff Lidstone smoothing model.

    A seen bigram (w, w') is scored by a bigram Lidstone model; an unseen
    bigram backs off to the unigram Lidstone estimate of w, scaled by a
    per-conditioner weight alpha(w') so the conditional distribution still
    sums to one.
    '''

    def __init__(self, bigramCorpus, unigramCorpus, condDic):
        '''
        Constructor.

        bigramCorpus  - corpus exposing C((w, w')) bigram counts
        unigramCorpus - corpus exposing getEntries() / X() for unigrams
        condDic       - maps each conditioning word to the list of words
                        that follow it with a positive bigram count
        '''
        DEBUG("CTOR backoff started")
        # alpha(w') weights, filled by calcAllAlphas()
        self.alphaDic = {}

        # initialization of unigram lidstone
        DEBUG("building unigram lidstone")
        self.uniCorpus = unigramCorpus
        self.uniLidstone = Lidstone(self.uniCorpus)
        self.uniLidstone.setLambda(0.14)  # empirically chosen unigram lambda

        # initialization of bigram lidstone
        DEBUG("building bigram lidstone")
        self.biCorpus = bigramCorpus
        self.biLidstone = BigramLidstone(self.uniCorpus, self.biCorpus)
        self.biLidstone.setLambda(0.0003)  # empirically chosen bigram lambda

        # map each word index and its conditioning words indices
        self.condDic = condDic
        DEBUG("calc alpha list")
        self.calcAllAlphas()
        DEBUG("done CTOR backoff")

    def calcProbability(self, words):
        '''
        Calculates the backoff probability of the given bigram.
        words - tuple (w, w') where words[1] is the conditioning word
        '''
        # seen bigram: use the bigram Lidstone estimate directly
        if self.biCorpus.C(words) > 0:
            return self.biLidstone.calcProbability(words)  # P(w|w')

        # unseen bigram: alpha(w') * P(w); alpha defaults to 1 when the
        # conditioning word has no precomputed weight
        alpha = self.alphaDic.get(words[1], 1)
        return alpha * self.uniLidstone.calcProbability(words[0])

    def calcAllAlphas(self):
        '''
        Calculates all the alpha values (smoothing parameters) from the
        training data, one per word in the unigram corpus.
        '''
        for word in self.uniCorpus.getEntries():
            self.alphaDic[word] = self.calcAlpha(word)

    def calcAlpha(self, w_cndr):
        '''
        Calculates the alpha value of the given conditioning word:

            alpha(w) = (1 - sum P(w'|w)) / (1 - sum P(w'))

        with both sums over every w' such that c(w, w') > 0.
        w_cndr - w in the lecture formula (conditioner word)
        '''
        seenFollowers = self.condDic[w_cndr]  # all w' with c(w, w') > 0

        # accumulate both probability masses in a single pass; each
        # accumulator adds its terms in the same order as before
        bigramMass = 0.0
        unigramMass = 0.0
        for w_tag in seenFollowers:
            bigramMass += self.biLidstone.calcProbability((w_tag, w_cndr))  # P(w'|w)
            unigramMass += self.uniLidstone.calcProbability(w_tag)          # P(w')

        # NOTE(review): raises ZeroDivisionError if the seen followers
        # carry the entire unigram mass -- original behavior kept
        return (1 - bigramMass) / (1 - unigramMass)

    def calcPerplexityFromDictionary(self, condDic):
        '''
        Calculates perplexity 2^(-(1/N) * sum(log2 P(w|w'))) over every
        (w, w') pair described by the given conditioning dictionary.
        '''
        pairCount = 0
        logProbSum = 0.0

        for w_cnd in condDic.keys():
            for w in condDic[w_cnd]:
                logProbSum += math.log(self.calcProbability((w, w_cnd)), 2)
                pairCount += 1

        # NOTE(review): an empty dictionary raises ZeroDivisionError here
        return math.pow(2, -1.0 / pairCount * logProbSum)

    def _readCondDic(self, testFile):
        '''
        Builds a conditioning dictionary {w1: [w2, ...]} from a test file
        holding one word per line: each word conditions its successor.
        (Extracted: this logic was duplicated in calcPerplexityFromTestFile
        and calcBestLambda.)
        '''
        condDic = {}
        f = open(testFile, "r")
        try:
            word1 = f.readline().strip()
            for line in f:
                word2 = line.strip()
                condDic.setdefault(word1, []).append(word2)
                word1 = word2
        finally:
            # close even if reading fails (original leaked the handle)
            f.close()
        return condDic

    def calcPerplexityFromTestFile(self, testFile):
        '''
        Calculates the perplexity of the model over the given test file
        (one word per line).
        '''
        # calculate the perplexity from the conditioning dictionary
        return self.calcPerplexityFromDictionary(self._readCondDic(testFile))

    def calcBestLambda(self, testFile):
        '''
        Calculates the optimal lambda parameter value of the bigram
        Lidstone smoothing model by grid search over [0.0001, 0.001) in
        steps of 0.0001, minimizing perplexity on the given test file.
        Leaves the model configured with the winning lambda.
        Returns (bestLambda, bestPerplexity).
        '''
        step = 0.0001
        limit = 0.001

        minPerplexity = float("infinity")
        minLambda = 0

        # calculate conditioning dictionary once; it is lambda-independent
        condDic = self._readCondDic(testFile)

        # multiply instead of repeatedly adding `step` so the candidate
        # lambdas do not accumulate floating point drift
        stepIdx = 1
        while step * stepIdx < limit:
            currLambda = step * stepIdx
            self.biLidstone.setLambda(currLambda)
            self.calcAllAlphas()

            currPerplexity = self.calcPerplexityFromDictionary(condDic)
            print("curr=%f, perplexity=%f, best=%f, perplexity=%f" % (currLambda, currPerplexity, minLambda, minPerplexity))
            if currPerplexity < minPerplexity:
                minPerplexity = currPerplexity
                minLambda = currLambda

            stepIdx += 1

        # reconfigure the model with the best lambda found
        self.biLidstone.setLambda(minLambda)
        self.calcAllAlphas()

        return minLambda, minPerplexity

    def debug(self, x_tag):
        '''
        Debug current model probabilities: dumps P(x|x_tag) for every known
        x to "<x_tag>.txt" and returns the accumulated probability mass
        (should be close to 1 for a well-formed model).
        '''
        output = open(x_tag + ".txt", "w")
        output.write("066500992\tMatan Keidar\t040854705\tEliahu Khalastchi\n")

        totalMass = 0.0
        for x in self.uniCorpus.getEntries():
            p = self.calcProbability((x, x_tag))
            totalMass += p
            output.write("%s\t%f\n" % (x, p))

        # accumulate the probabilities of the "unseen" words: each of the
        # V - Vt vocabulary words never observed in training contributes
        # alpha(x_tag) * P(unseen-token)
        alpha = self.alphaDic.get(x_tag, 1)

        V_minus_Vt = V - self.uniCorpus.X()
        Pb_Xstar_given_Xtag = self.uniLidstone.calcProbability("<<<MATANELIMATANELI>>")
        totalMass += V_minus_Vt * alpha * Pb_Xstar_given_Xtag

        output.write("%d\t%.20f\n" % (V_minus_Vt, Pb_Xstar_given_Xtag))
        output.close()

        return totalMass
        