# -*- coding: utf-8 -*-
import codecs
import sys,os
from st_common import *
from google_browser import *
import wikipedia
from langconv import *
import cPickle
import time 
import Levenshtein
import random

'''
<weibo id = "wanyuangongjijin7">
    <content>【兰州石化被曝最高每月为员工缴万元公积金】近日，一份名为《兰州石化党办2011年度职工住房公积金明细账》的文件显示，名单上52人中，兰州石化为其缴存公积金超过9万元的有10人，缴存额最高达12万元，平均每月一万。网友纷纷吐槽人家公积金比他一个月收入都要多。http://t.cn/zTwmi9i 有钱人……</content>
    <name id = "1">兰州石化</name>
    <startoffset id = "1">66</startoffset>
    <endoffset id = "1">70</endoffset>
    <kb id = "1">NIL</kb>
</weibo>
'''
###################################################################################################
# Target Wikipedia language edition for every wikipedia.page() lookup below.
wikipedia.set_lang("zh")
# Root folders for the shared-task pickles and the knowledge-base files (Windows paths).
BASE_DATA_PATH  = 'D:\\data\\Shared_Task_NLPCC14\\'
BASE_KB_PATH = 'E:\\desktop\\wu-request\\NLPCC 2014 Shared Tasks Guidelines\\Chinese Entity Linking  SAMPLE DATA NLPCC2014_EL_sample\\'

# Pipeline stage selector: a step guarded by `BASELINE_STEP <= k` runs when this is <= k.
BASELINE_STEP = 0
'step0. 加载sample_query_2014和PKBase_key_title'
if BASELINE_STEP<=0:
    kb_path = BASE_KB_PATH
    sp_file = kb_path+'weiboAutoTag_6.txt'
    query_set = readSample(sp_file)
    kb_file = kb_path + 'PKBase_key_title.txt'
    (map_key2name,map_name2key,name_list) = load_KBaseKey(kb_file)
    print 'Load sample and key finished!'
##---------------------------------------------------------------------
#在map_entity2sml_keys中查找最接近的10个name
ftemp = open(BASE_DATA_PATH+'map_entity2sml_keys.dup','r')
map_entity2sml_keys=cPickle.load(ftemp)
ftemp.close()
print 'Load map_entity2sml_keys finished!'
##---------------------------------------------------------------------
#在wikipedia里扩展name的最接近的20个
ftemp = open(BASE_DATA_PATH+'map_query2wikipedia.dup','r')
map_query2wikipedia=cPickle.load(ftemp)
ftemp.close()
print 'Load map_query2wikipedia finished!'
##---------------------------------------------------------------------
# step 1: load the candidate set previously built via wikipedia expansion.
# `with` ensures the handle is closed even if unpickling raises.
BaseLine_Candidate_set = []
with open(BASE_DATA_PATH + 'BaseLine_Candidate_set_old.dup', 'r') as ftemp:
    BaseLine_Candidate_set = cPickle.load(ftemp)
    
# step 3 inputs: wikipedia pages fetched by previous runs, if any dump exists.
# A missing or unreadable dump is tolerated (the cache simply starts empty and
# the fetch loops below re-download everything), so failures are swallowed —
# but narrowed from a bare `except:` so KeyboardInterrupt/SystemExit escape.
wiki_page_set_exact = []
try:
    with open(BASE_DATA_PATH + 'wiki_page_set_exact.dup', 'r') as ftemp:
        wiki_page_set_exact = cPickle.load(ftemp)
except Exception:
    pass

wiki_page_set_suggest = []
try:
    with open(BASE_DATA_PATH + 'wiki_page_set_suggest.dup', 'r') as ftemp:
        wiki_page_set_suggest = cPickle.load(ftemp)
except Exception:
    pass

if True:
    #wiki_page_set = []
    for cs in BaseLine_Candidate_set:
        entity = cs[0]
        query = cs[1]
        (namekeys_base,namekeys_wiki) = cs[2]
        entity_id = entity[0]
        query_id = query[0]
            
        name = entity[1]
        entId = entity[3]
        '**************************************'
        print '#search:', name.encode('gbk','ignore')
        found_name = False
        for wcs in wiki_page_set_exact:
            if wcs[0]==name:
                found_name = True
        if found_name:
            print '@founded:', name.encode('gbk','ignore')
            continue
        for wcs in wiki_page_set_suggest:
            if wcs[0]==name:
                found_name = True
        if found_name:
            print '@founded wiki_page_set_suggest:', name.encode('gbk','ignore')
            continue
        '**********baseline searching**********'
        base_key = 'NIL'
        base_name = name
        
        run_count = 0
        while True:
            try:
                time.sleep(random.randint(0,100)/500.0)
                page = wikipedia.page(name,auto_suggest=False, redirect=True)
                wiki_page_set_exact.append((name, page.title, page.content) )
                break
            except :
                print '#ERROR:\t search error,',name.encode('gbk','ignore'),'rerun',run_count
                run_count=run_count+1
                if run_count>1:
                    break
        
        if not (len(wiki_page_set_exact) % 100):
            ftemp = open(BASE_DATA_PATH+'wiki_page_set_exact[%d].dup' % len(wiki_page_set_exact),'w')
            cPickle.dump(wiki_page_set_exact, ftemp)
            ftemp.close()
    ftemp = open(BASE_DATA_PATH+'wiki_page_set_exact.dup','w')
    cPickle.dump(wiki_page_set_exact, ftemp)
    ftemp.close()
    

'step 4. '


if True:
    #wiki_page_set = []
    for cs in BaseLine_Candidate_set:
        entity = cs[0]
        query = cs[1]
        (namekeys_base,namekeys_wiki) = cs[2]
        entity_id = entity[0]
        query_id = query[0]
            
        name = entity[1]
        entId = entity[3]
        '**************************************'
        print '#search:', name.encode('gbk','ignore')
        found_name = False
        for wcs in wiki_page_set_suggest:
            if wcs[0]==name:
                found_name = True
        if found_name:
            print '@founded wiki_page_set_suggest:', name.encode('gbk','ignore')
            continue
        for wcs in wiki_page_set_exact:
            if wcs[0]==name:
                found_name = True
        if found_name:
            print '@founded wiki_page_set_exact:', name.encode('gbk','ignore')
            continue
        '**********baseline searching**********'
        base_key = 'NIL'
        base_name = name
        
        run_count = 0
        while True:
            try:
                time.sleep(random.randint(0,100)/500.0)
                page = wikipedia.page(name)
                wiki_page_set_suggest.append((name, page.title, page.content) )
                break
            except :
                print '#ERROR:\t search error,',name.encode('gbk','ignore'),'rerun',run_count
                run_count=run_count+1
                if run_count>2:
                    break
        
        if not (len(wiki_page_set_suggest) % 100):
            ftemp = open(BASE_DATA_PATH+'wiki_page_set_suggest[%d].dup' % len(wiki_page_set_suggest),'w')
            cPickle.dump(wiki_page_set_suggest, ftemp)
            ftemp.close()
    ftemp = open(BASE_DATA_PATH+'wiki_page_set_suggest.dup','w')
    cPickle.dump(wiki_page_set_suggest, ftemp)
    ftemp.close()
        
            
        
        
        

    
  
  



        