#!/usr/bin/env python
# encoding: utf-8

import sys
from datetime import datetime
import pickle
sys.path.append('.')
from preprocessing.doc_pre.extra_queries_related_docs import *
from preprocessing.doc_pre.seg_and_pos_docs import seg_and_pos_docs
from context_disamb.methods import *
from general.mysqloperations import MySQL
def preprocessing(**kwargs):
    """Run document preprocessing: segmentation and POS tagging.

    NOTE(review): the document-extraction step (extra_doc) is intentionally
    disabled; only segmentation/POS tagging runs.
    """
    # extra_doc(**kwargs)  # extraction step disabled
    seg_and_pos_docs(**kwargs)

def context_disambiguation(**argv):
    """Disambiguate entity-linking queries against their source documents.

    Loads the precomputed dictionaries and a MySQL connection via init_cd(),
    then for each document runs the full pipeline on every query attached to
    it: init_query, term identification, local features, global features.
    Per-stage timings are printed for profiling.

    Keyword args:
        limit: maximum number of queries to process (default 100, matching
            the original hard-coded budget).

    The MySQL connection is released on every exit path, including when a
    pipeline stage raises (the original leaked it on exceptions).
    """
    data_path, queries_dict, keyword_id_dict, entity_id_dict, query_info_dict, mysql = init_cd()
    # Query budget keeps a profiling run bounded; backward compatible default.
    count = argv.get('limit', 100)
    try:
        for docid in queries_dict:
            doc = get_document(docid, data_path, True)
            if not doc:
                continue

            queries_list = queries_dict[docid]
            # Consume the list destructively from the end, as the original
            # did with range(len)+pop(), but idiomatically.
            while queries_list:
                query = queries_list.pop()
                if not count:
                    # Budget exhausted; finally-clause releases the DB.
                    return
                #if query.queryid != "EL_CLCMN_01747": continue
                count -= 1
                t0_s = datetime.now()
                init_query(query, entity_id_dict, keyword_id_dict, query_info_dict, doc)
                t1_s = datetime.now()
                query.identify_terms(doc, mysql)
                t2_s = datetime.now()
                query.cal_local_features(mysql)
                t3_s = datetime.now()
                query.cal_global_features(mysql)
                t3_e = datetime.now()
                print('init_query\t:', t1_s - t0_s)
                print('identify_terms\t:', t2_s - t1_s)
                print('cal_local\t:', t3_s - t2_s)
                print('cal_global\t:', t3_e - t3_s)
                print("Count:", count)
                query.print_result()
                del query
                #query.cal_terms_feature(mysql)
    finally:
        # Release the connection on normal completion, budget exhaustion,
        # or an exception anywhere in the pipeline.
        mysql.release()

def get_wiki_entity_db(xml_path):
    """Parse the KB XML under *xml_path* and pickle the entity dict to 'entity-id.db'.

    Prints the number of entities extracted.  The returned error record from
    get_entity_dict is ignored here, as in the original.
    """
    # Parse first so a parse failure no longer truncates an existing db file.
    error_record, entity_dict = get_entity_dict(xml_path)
    # Context manager closes the file even if pickling raises
    # (the original leaked the handle on an exception).
    with open('entity-id.db', 'wb') as mydb:
        pickle.dump(entity_dict, mydb)
    print(len(entity_dict))

def get_query_db(xml_path):
    """Parse the query XML under *xml_path* and pickle the query dict to 'query.db'."""
    # Parse first so a parse failure no longer truncates an existing db file.
    query_dict = get_query_dict(xml_path)
    # Context manager closes the file even if pickling raises
    # (the original leaked the handle on an exception).
    with open('query.db', 'wb') as mydb:
        pickle.dump(query_dict, mydb)

def get_entity_id_db(dir_path):
    """Collect entity ids from the files under *dir_path* and pickle them to 'id.db'.

    Prints the number of ids collected.
    """
    # Collect first so a failure no longer truncates an existing db file.
    id_set = get_entity_id_from_dir(dir_path)
    # Context manager closes the file even if pickling raises
    # (the original leaked the handle on an exception).
    with open('id.db', 'wb') as mydb:
        pickle.dump(id_set, mydb)
    print(len(id_set))

def temp_build():
    """One-off maintenance helper: rebuild the in-memory category table in MySQL.

    The kpp and entity builders are left disabled; re-enable the commented
    calls below when those tables need rebuilding too.
    """
    db = MySQL()
    db.connect()
    # db.build_kpp_mem()
    # db.build_entity_mem()
    db.build_category_mem()
    db.release()

if __name__ == "__main__":
    # Entry points are toggled by (un)commenting; currently only the
    # disambiguation run is active.
    context_disambiguation()
    #get_wiki_entity_db('/home/fengys/data/TAC_2009_KBP_Evaluation_Reference_Knowledge_Base/data')
    #get_query_db('/home/fengys/data/Chinese_Queries_Related_Source_Corpus/data/new/query/training_set/xml')
    #match_result()
    #temp_build()
    sys.exit(0)
    # NOTE(review): everything below is unreachable because of sys.exit(0)
    # above; kept so the preprocessing entry point can be re-enabled by
    # removing the exit.
    argv = {}
    for arg in sys.argv[1:]:
        # Arguments are "name:value"; split on the FIRST ':' only so values
        # may themselves contain colons (the original split truncated them
        # and evaluated the split twice).
        arg_name, _, arg_value = arg.partition(':')
        argv[arg_name] = arg_value
    # Convert the textual debug flag to a real bool; default to False when
    # the flag is absent (the original raised KeyError in that case).
    argv['debug'] = argv.get('debug') == 'True'
    preprocessing(**argv)
