from utils.dict_saver_loader import *
import math
# Path to the merged character-frequency table produced by an earlier step.
merge_dict_dir = '../../hard_drive/entropy/dict_char/merge.txt'
merge_dict = load_dict(merge_dict_dir)  # maps char -> occurrence count (presumably; verify load_dict)

# Sanity check at import time: total character count and vocabulary size.
print(sum(merge_dict.values()))
print(len(merge_dict))
def calc_entropy(counts=None):
    """Compute the Shannon entropy, in bits per symbol, of a frequency table.

    Args:
        counts: mapping of symbol -> occurrence count. Defaults to the
            module-level ``merge_dict`` loaded above, preserving the
            original zero-argument behavior.

    Returns:
        The entropy sigma(-p * log2(p)) over all symbols with a positive
        count (also printed, as the original did). 0.0 for an empty table.
    """
    if counts is None:
        counts = merge_dict
    total = sum(counts.values())  # e.g. 2,817,172,080 chars for news2016zh_train
    entropy_ret = 0.0
    for processed, count in enumerate(counts.values(), start=1):
        # Skip zero counts: they contribute nothing to the entropy and
        # would otherwise raise ValueError in math.log2(0).
        if count <= 0:
            continue
        p = count / total
        entropy_ret -= p * math.log2(p)
        # Progress report for very large vocabularies.
        if processed % 10000 == 0:
            print(processed, 'processed')
    print(entropy_ret)
    return entropy_ret


# Script entry point: compute and print the corpus entropy.
if __name__ == '__main__':
    calc_entropy()

'''
In the news2016zh_train.json file, there are in total 2,817,172,080
(~2.8 billion) chars and 21,420 distinct chars (including symbols and
numbers). The final char-level entropy result is 9.638 bits,
computed as
sigma(-p*log2(p)).
'''