'''
@Version: 0.0.1
@Author: ider
@Date: 2019-12-27 17:31:32
@LastEditors: ider
@LastEditTime: 2020-03-23 22:54:10
@Description: 计算所有类的有向图的点位情况, 可以输入参数计算指定年，否则计算全部
'''


from util import Sea
from DataGet import WikiLinkYear
import pickle
import numpy as np
from multiprocessing import Process,Queue
from multiprocessing.dummy import Process as DProcess
from multiprocessing.dummy import Queue as DQueue
import pymongo
import logging
from config import SEA_URL,MONGO_URL
import zlib
import os


def update_need_nodes(category_page_ids, in_queues, PSWorkers, year, SEA):
    """Stream per-page small-world data from SeaweedFS into the worker queues.

    For every page id, fetch the zlib-compressed pickle blob for this year
    from the 'small_world_one_lv2' collection, unpack it as
    (distances, target_ids) and broadcast [target-array, distances] to every
    worker input queue.  A ['over', 'over'] string sentinel is sent at the
    end so each worker knows the stream is finished, after which the worker
    processes are joined and closed.
    """
    empty_count = 0
    total = len(category_page_ids)
    for idx, page_id in enumerate(category_page_ids):
        _, _, blob = SEA.get(f'{page_id}_{year}', 'small_world_one_lv2')
        if not blob:
            empty_count += 1
            logging.info(f'empty count:{empty_count}')
            continue

        try:
            sharp, target = pickle.loads(zlib.decompress(blob))
        except Exception as err:  # corrupt / truncated blob — skip it
            logging.exception(err)
            empty_count += 1
            logging.info(f'empty count:{empty_count}')
            continue

        for q in in_queues:
            q.put([np.array(target), sharp])

        logging.debug(f'{idx},{total}')

    # Tell every worker that no more data will arrive.
    for q in in_queues:
        q.put(['over', 'over'])

    logging.info(f'empty count:{empty_count}')
    for proc in PSWorkers:
        proc.join()
        proc.close()
        logging.info('process close')

    

def worker(in_queue, out_queue, v2, ret_dict):
    """Accumulate shortest-path statistics for one (source, target) pair.

    Consumes [target_ids, distances] pairs from in_queue until the string
    sentinel arrives, keeping only the distances whose target id belongs
    to the category node set v2.  The max/sum/count fields of ret_dict are
    updated in place; the finished dict is shipped to out_queue on exit.
    """
    member_ids = np.array(list(v2))
    while True:
        targets, distances = in_queue.get()
        if isinstance(targets, str):  # 'over' sentinel — stream finished
            out_queue.put(ret_dict)
            in_queue.close()
            return

        selected = distances[np.isin(targets, member_ids)]
        if not len(selected):
            continue
        ret_dict['max_distance'] = max(ret_dict['max_distance'], int(np.max(selected)))
        ret_dict['sum_short_dist'] += int(np.sum(selected))
        ret_dict['sum_connect_path'] += int(np.count_nonzero(selected))

            
def result_handle(in_queue, count):
    """Drain finished result dicts from in_queue and persist them to MongoDB.

    Writes to the default collection, or to a count-suffixed collection
    when a node-count limit is in effect.  Terminates on the 'over'
    sentinel.  Documents whose _id already exists are silently skipped,
    which makes replays idempotent.
    """
    client = pymongo.MongoClient(MONGO_URL)
    if count:
        Table = client.small_world[f'wikipedia_category_direct_count{count}']
    else:
        Table = client.small_world.wikipedia_category_direct

    while True:
        doc = in_queue.get()
        if doc == 'over':
            return
        try:
            Table.insert_one(doc)
        except pymongo.errors.DuplicateKeyError:
            pass  # already stored — skip duplicates silently
        else:
            logging.info(doc)


def handle_year(year,OUT_QUEUE,Table,SEA,shflag=False,level=2,count=None):
    '''
    @description: Compute directed-graph distance statistics between every
                  pair of top-level categories for one year, using page ids
                  collected `level` layers deep (default 2).  Spawns one
                  worker process per target category and streams the source
                  category's per-page distance data to all of them.
    @param  year: the snapshot year to process
            OUT_QUEUE: queue that worker processes push finished result dicts to
            Table: MongoDB collection, used only to skip already-computed pairs
            SEA: SeaweedFS client used to fetch per-page distance blobs
            shflag: when True, hard-exit the whole process (os._exit) after
                    the first source category has been handled
            level: wikipedia category depth used to collect article ids
            count: optional cap on the number of node ids per category
    @return: None (results are delivered through OUT_QUEUE)
    '''    
    logging.info(year)
    logging.debug('start')
    wly = WikiLinkYear(year)

    # Big-category page ids for this year: {category: [page ids]}
    categorys_ids_dict, _categorys_ids_set_dict = wly.get_article_ids(level=level)
        


    for k1,v1 in categorys_ids_dict.items():

        if not v1:
            continue

        # Optional cap on node count
        if count:
            v1=v1[:count]
            
        ret_doc_list = []
        for k2,v2 in categorys_ids_dict.items():
            if not v2:
                continue
            # Optional cap on node count
            if count:
                v2=v2[:count]

            # Skip (source, target, year) pairs that already have a stored result
            if Table.find_one({'_id':f'{k1}_{k2}_{year}'},{'_id':1}):
                logging.debug(f'finish: {k1}_{k2}_{year}')
                continue
            
            logging.info(f'start {k1},{k2}') 
            # Result skeleton for this pair; the worker processes accumulate
            # into the three counter fields below.
            ret_dict = {
                '_id':f'{k1}_{k2}_{year}',
                'source':k1,
                'target':k2,
                'year':year,
                'source_count':len(v1),
                'target_count':len(v2),
                'sum_short_dist':0,    # sum of shortest-path lengths
                'sum_connect_path':0,   # number of connected paths
                'max_distance':0,    # longest observed shortest-path distance
            }
            
            ret_doc_list.append([v2,ret_dict])
            
        IN_QUEUES = []
        PSWorkers = []
        if ret_doc_list:
            # One worker process with a bounded input queue per target category.
            for row in ret_doc_list:
                in_queue = Queue(10)
                p = Process(target=worker,args=(in_queue,OUT_QUEUE,row[0],row[1]))
                p.start()
                PSWorkers.append(p)
                IN_QUEUES.append(in_queue)
            logging.info('开启加载 seaweed 数据')
            # Stream this source category's node data to all workers,
            # then join/close them (done inside update_need_nodes).
            update_need_nodes(set(v1),IN_QUEUES,PSWorkers,year,SEA)
            if shflag:
                # Interrupt mode: give workers a moment to flush results,
                # then hard-exit the whole process.
                import time
                time.sleep(5)
                logging.info('exit')
                os._exit(1)

def main(level=2,desyear=None, interrupt=False, count=None):
    '''
    @description: Entry point: compute directed small-world statistics for
                  every category pair, for all years or a single one.
    @param  level: wikipedia category depth used to collect page ids
            desyear: if given, compute only this year
            interrupt: enable auto-exit mode (handle_year hard-exits after
                       the first source category)
            count: optional cap on node ids per category; also switches the
                   output collection to a count-suffixed one
    @return: None
    '''

    logging.info(f'count: {count}')
    if not count:
        Table = pymongo.MongoClient(MONGO_URL).small_world.wikipedia_category_direct
    else:
        Table = pymongo.MongoClient(MONGO_URL).small_world[f'wikipedia_category_direct_count{count}']

    SEA = Sea(SEA_URL)
    # 2010 and 2012 snapshots are excluded.
    years = [i for i in range(2007, 2021) if i not in (2010, 2012)]

    OUT_QUEUE = Queue(10000)
    # Result writer runs as a dummy (thread-based) Process so the pymongo
    # client stays in this process.
    RP = DProcess(target=result_handle,args=(OUT_QUEUE,count))
    RP.start()

    if not desyear and not interrupt:
        for year in years:
            handle_year(year,OUT_QUEUE,Table,SEA,level=level,count=count)
    elif desyear and not interrupt:
        handle_year(desyear,OUT_QUEUE,Table,SEA,level=level,count=count)
    elif interrupt:
        logging.debug('开启自动退出')
        handle_year(desyear,OUT_QUEUE,Table,SEA,True,level=level,count=count)

    # BUG FIX: result_handle blocks forever on in_queue.get(), and the
    # non-daemon writer thread kept the process alive after all years were
    # done.  Send its 'over' sentinel and join it so main can exit cleanly.
    # All worker processes are joined inside handle_year before it returns,
    # so every result dict is already queued ahead of the sentinel.  (In
    # interrupt mode handle_year calls os._exit and never reaches here.)
    OUT_QUEUE.put('over')
    RP.join()



if __name__ == "__main__":
    # Run with defaults: all years, level-2 categories, no node cap.
    main()