'''
@Version: 0.0.1
@Author: ider
@Date: 2019-12-27 17:04:07
@LastEditors: ider
@LastEditTime: 2020-04-02 20:47:57
@Description: 加载 wikipedia 全年节点到图，然后计算 category 集合中类与类之间的最短距离
            结果记录保存到 mongo，结果距离数组保存到 seaweed
            测试表明，多线程传递的对象是同一个
            可以输入参数，选择计算的年份.
            直接计算三层类
'''

from multiprocessing import Queue as PQueue
from multiprocessing import Process as PProcess
import time
from multiprocessing.dummy import Process, Queue
import pymongo
import pika
import json
import csv
import os
import graph_tool.all as gt
import numpy as np
import pickle
from config import MONGO_URL
from DataGet import WikiLinkYear
import logging
import random
import zlib


class Graph:
    """Directed-graph wrapper around graph_tool for shortest-distance queries
    over a WikiLinkYear object's page-link structure."""

    # graph_tool reports unreachable targets as int32 max in the distance array.
    _UNREACHABLE = 2147483647

    def __init__(self, wiki_obj):
        """Build a directed graph: one vertex per page id, one edge per link.

        `wiki_obj` must expose `iterate_edges()` (yielding page-id pairs),
        `page_id_node_id_dict` (page id -> vertex id) and `page_id_count`.
        """
        def get_edges():
            # Translate page-id pairs into vertex-id pairs on the fly.
            for k, v in wiki_obj.iterate_edges():
                yield (wiki_obj.page_id_node_id_dict[k], wiki_obj.page_id_node_id_dict[v])

        # Directed graph (有向图)
        self.graph = gt.Graph(directed=True)
        self.graph.add_vertex(wiki_obj.page_id_count)
        logging.info('添加点完成,开始添加边')
        # Add all edges lazily from the generator.
        self.graph.add_edge_list(get_edges())
        logging.info('添加边完成,开始去除重复边')
        # Parallel-edge removal deliberately left disabled: duplicate edges do
        # not change shortest distances and the pass is expensive on a
        # full-year graph.
#        gt.remove_parallel_edges(self.graph)
#        logging.info('去除重复边完成')

    def get_single_distance(self, node, targets):
        """Shortest distances from `node` to each vertex in `targets` (a numpy
        array of vertex ids).

        Returns `(reachable_targets, distances)`, dropping targets graph_tool
        reports as unreachable.
        """
        all_sp = gt.shortest_distance(self.graph, node, targets, directed=True)
        # Compute the reachability mask once instead of filtering twice.
        reachable = all_sp != self._UNREACHABLE
        return targets[reachable], all_sp[reachable]

    def get_avg_path(self):
        '''整个图的算不完，不能计算 (the full-graph computation is infeasible).

        Average shortest-path length over all ordered vertex pairs.
        '''
        all_sp = gt.shortest_distance(self.graph, directed=True)
        return float(np.sum([np.sum(i.get_array()) for i in all_sp])/(self.graph.num_vertices()**2-self.graph.num_vertices()))

    def get_clustering_coefficient(self):
        '''
        @description: 计算聚集系数 (average local clustering coefficient)
        '''
        clust = gt.local_clustering(self.graph)
        vertex_avgs = gt.vertex_average(self.graph, clust)
        # vertex_average returns (average, standard deviation); keep the mean.
        return vertex_avgs[0]

    def get_num_edges(self):
        """Number of edges in the graph."""
        return self.graph.num_edges()

    def get_num_vertices(self):
        """Number of vertices in the graph."""
        return self.graph.num_vertices()

    def node_dis_nodes(self, node, target):
        # NOTE(review): the original comment claimed this reports path lengths
        # plus (dis)connected counts, but it actually returns a single average
        # normalized by ALL ordered vertex pairs of the graph — confirm intent
        # with callers before relying on the value.
        all_sp = gt.shortest_distance(self.graph, node, target)
        return float(np.sum([np.sum(i.get_array()) for i in all_sp])/(self.graph.num_vertices()**2-self.graph.num_vertices()))

def handle_year(year, Table):
    """Compute shortest distances between all subject pages of one Wikipedia
    `year` and publish the results through `handle_result`.

    Skips the year when `Table` (a Mongo collection) already holds at least
    one document per subject page id; otherwise enqueues every not-yet-done
    page to a pool of worker threads.
    """
    logging.info(year)

    wly = WikiLinkYear(year)

    # Use two category levels (matches the *_lv2 collection name).
    categorys_ids_dict, categorys_ids_set_dict = wly.get_article_ids(level=2)
    # Map page ids onto graph vertex ids.
    wly.mapping_graph(categorys_ids_dict, categorys_ids_set_dict)

    # The year counts as done when the DB holds a document per subject id.
    all_subject_ids_set = set()
    for v in wly.categorys_ids_set_dict.values():
        all_subject_ids_set.update(v)

    count_in_db = Table.count_documents({'year': year})
    if count_in_db >= len(all_subject_ids_set):
        logging.info(f'{year} already finsh')
        return
    logging.info(f"start {year}:{count_in_db} in db,{len(all_subject_ids_set)} in fact")

    # Build the graph (expensive).
    graph = Graph(wly)
    logging.info('graph 生成完成')

    job_queue = Queue(2)      # thread queue feeding the compute workers
    ret_queue = PQueue(100)   # process queue feeding the result publisher

    workers = []
    for _ in range(5):
        w = Process(target=work, args=(job_queue, ret_queue, wly, graph))
        w.start()
        workers.append(w)
    logging.info('工作线程启动完成')

    # The publisher is a real process and consumes ret_queue, not job_queue —
    # keep it OUT of `workers` so shutdown can signal it separately.
    result_proc = PProcess(target=handle_result, args=(ret_queue,))
    result_proc.start()

    # Randomize order so concurrent runs over the same year collide less.
    categorys_ids_set_random = list(all_subject_ids_set)
    np.random.shuffle(categorys_ids_set_random)

    logging.info('开始加载任务')
    for page_id in categorys_ids_set_random:
        # Skip pages already recorded for this year.
        if Table.find_one({'_id': f'{page_id}_{year}'}, {'_id': 1}):
            continue
        job_queue.put(wly.page_id_node_id_dict[page_id])

    # BUG FIX: the original appended the result process to `workers`, then for
    # each entry put one 'over' on job_queue and immediately joined it. That
    # (a) could deadlock — the join may wait on a worker whose sentinel was
    # consumed by a sibling before the next sentinel was queued — and (b) never
    # sent a sentinel on ret_queue, so handle_result never terminated.
    # Queue all worker sentinels first, join the workers, then shut down the
    # publisher through its own queue.
    for _ in workers:
        job_queue.put('over')
    for w in workers:
        w.join()
    ret_queue.put('over')
    result_proc.join()


def work(in_queue, out_queue, wly, graph):
    while 1:
        node_id = in_queue.get()
        if node_id == 'over':
            logging.info('work over')
            return

        page_id = wly.node_id_page_id_dict[node_id]
        _id = f'{page_id}_{wly.year}'

        cisp = wly.node_id_in_subject_set.copy()
        cisp.remove(node_id)
        cisp_list = np.array(list(cisp))
        cisp_list, all_sp = graph.get_single_distance(node_id, cisp_list)

        ret_dict = {
            "_id": _id,
            "page_id": page_id,
            'year': wly.year,
            'd_t':zlib.compress(pickle.dumps((all_sp,list(map(lambda x: wly.node_id_page_id_dict[x], cisp_list)))))
        }
        out_queue.put(ret_dict)
        logging.info(_id)


def _open_channel():
    """Open a fresh RabbitMQ channel with the result queue declared.

    heartbeat=0 disables broker heartbeats so long-idle publishing survives.
    """
    credentials = pika.PlainCredentials('shuzhi', 'lambdax')
    pika_connection = pika.BlockingConnection(pika.ConnectionParameters(
        '192.168.1.224', 5671, '/', credentials, heartbeat=0,))
    channel = pika_connection.channel()
    channel.queue_declare(queue='small_world_direct_one_lv2')
    return channel


def handle_result(in_queue):
    """Consume result dicts from `in_queue` and publish each (pickled) to the
    'small_world_direct_one_lv2' RabbitMQ queue, reconnecting on failure.

    Exits (and closes the channel) when the sentinel string 'over' arrives.
    """
    channel = _open_channel()
    while 1:
        data = in_queue.get()
        if data == 'over':
            logging.info('result over')
            channel.close()
            return
        # Retry forever: rebuild the connection whenever publishing fails.
        while 1:
            try:
                channel.basic_publish(
                    exchange='', routing_key='small_world_direct_one_lv2', body=pickle.dumps(data))
                break
            except Exception:
                # Was a bare `except:`, which also swallowed SystemExit and
                # KeyboardInterrupt; log the failure before reconnecting.
                logging.exception('publish failed, reconnecting')
                channel = _open_channel()


def main(desyear=None):
    """Entry point: process the single year `desyear` when given, otherwise
    sweep 2007-2020, skipping 2010 and 2012 (the same exclusions as before)."""
    Table = pymongo.MongoClient(MONGO_URL).small_world.wikipedia_category_nodes_lv2
    if desyear:
        handle_year(desyear, Table)
    else:
        for year in [y for y in range(2007, 2021) if y not in (2010, 2012)]:
            handle_year(year, Table)

# Script entry point: with no argument, main() sweeps all pending years.
if __name__ == '__main__':
    main()