from utils.dict_saver_loader import *
import math
'''
Bigram (conditional) entropy is calculated as:
H = -Sigma_x( p(x) * Sigma_y( p(y|x) * log2(p(y|x)) ) )
where p(y|x) = p(x,y) / p(x)
'''
# Paths to the merged unigram / bigram frequency dictionaries.
merge_dict_word_dir='../../hard_drive/entropy/unigram/merge.txt'
merge_dict_bigram_dir='../../hard_drive/entropy/bigram/merge.txt'

# word -> count mapping (unigram frequencies).
word_dict=load_dict(merge_dict_word_dir)
# Copy of the word dict; calc_entropy() replaces each value with a list
# containing the appearance counts of all bigrams whose first word is that key.
word_dict_startwith=word_dict.copy()

# "word1 word2" -> count mapping (bigram frequencies).
bigram_dict=load_dict(merge_dict_bigram_dir)

# Sanity check: total token count and vocabulary size.
# (sum() iterates the values view directly; no intermediate list is needed,
# and len(d) is the idiomatic form of len(d.keys()).)
print(sum(word_dict.values()))
print(len(word_dict))
def calc_entropy():
    """Compute and print the bigram (conditional) entropy of the corpus.

    H = -sum_x p(x) * sum_y p(y|x) * log2(p(y|x)), where p(y|x) is
    approximated as p(x,y)/p(x) using the unigram counts in ``word_dict``
    and the bigram counts in ``bigram_dict``.

    Side effects:
        Replaces the values of the module-level ``word_dict_startwith``
        with lists of bigram counts grouped by first word, and prints
        progress plus the final entropy.

    Returns:
        float: the computed entropy (also printed, so existing callers
        that ignore the return value are unaffected).
    """
    words_num=sum(word_dict.values())
    bigram_num=sum(bigram_dict.values())
    word_num_processed=0
    entropy_ret=0.0

    # Group bigram counts by their first word:
    # word_dict_startwith[first] becomes a list of counts of all bigrams
    # that start with ``first``.
    for bigram_word in bigram_dict:
        first = bigram_word.split(' ')[0]
        if first == '':
            # A bigram starting with a space splits to '' — the "first
            # word" in that case is the space character itself.
            first = ' '
        # Guard against a bigram whose first word never appeared as a
        # unigram key (the original code raised KeyError here).
        if first not in word_dict_startwith or not isinstance(word_dict_startwith[first], list):
            word_dict_startwith[first]=[]
        word_dict_startwith[first].append(bigram_dict[bigram_word])
    print('word_dict_startwith DONE!')

    for key_word in word_dict:
        px=word_dict[key_word]/words_num
        # Words that never start a bigram still hold their original count
        # (not a list) and contribute nothing; also skip zero-count words
        # to avoid division by zero below.
        if px > 0 and isinstance(word_dict_startwith[key_word], list):
            for second in word_dict_startwith[key_word]:
                pxy=int(second)/bigram_num
                p_cond=pxy/px  # approximates p(y|x) = p(x,y)/p(x)
                # px * p_cond * log2(p_cond) simplifies to pxy * log2(p_cond);
                # dropping the redundant divide-then-multiply by px avoids
                # unnecessary floating-point error.
                entropy_ret-=pxy*math.log2(p_cond)
        word_num_processed+=1
        if word_num_processed%1000==0:
            print(word_num_processed,'processed')
    print(entropy_ret)
    return entropy_ret


if __name__ == '__main__':
    # Script entry point: compute and print the corpus bigram entropy.
    calc_entropy()

'''
Results:
Bigram entropy:
7.992172243205335 on valid data
8.537 on train data
'''