import unittest
import os,sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)),'../'))


from multiprocessing import Queue as PQueue
from multiprocessing import Process as PProcess
import time
from multiprocessing.dummy import Process, Queue
import json
import csv
import os
import graph_tool.all as gt
import numpy as np
import pickle
from config import MONGO_URL
from DataGet import WikiLinkYear
import logging
import logging.config
logging.config.fileConfig('logging.conf')

import pdb


class Graph:
    """Directed link graph backed by graph_tool.

    Built from a WikiLinkYear-style object that exposes:
      - ``iterate_edges()``: yields (src_page_id, dst_page_id) pairs
      - ``page_id_node_id_dict``: page id -> dense node id mapping
      - ``page_id_count``: total number of vertices to allocate
    """

    # graph_tool marks unreachable targets in a shortest-distance query with
    # the maximum value of the (int32) distance property map.
    _UNREACHABLE = 2147483647

    def __init__(self, wiki_obj):
        """Build the directed graph and strip parallel (duplicate) edges."""
        def get_edges():
            # Translate page-id edge endpoints into dense node ids.
            for src, dst in wiki_obj.iterate_edges():
                yield (wiki_obj.page_id_node_id_dict[src],
                       wiki_obj.page_id_node_id_dict[dst])

        # Directed graph
        self.graph = gt.Graph(directed=True)
        self.graph.add_vertex(wiki_obj.page_id_count)
        # Add all edges
        self.graph.add_edge_list(get_edges())
        # Remove duplicate (parallel) edges
        gt.remove_parallel_edges(self.graph)

    def get_single_distance(self, node, targets):
        """Shortest distances from ``node`` to ``targets``.

        Returns ``(reachable_targets, distances)`` with unreachable targets
        filtered out of both arrays.
        """
        all_sp = gt.shortest_distance(self.graph, node, targets, directed=True)
        reachable = all_sp != self._UNREACHABLE  # compute the mask once
        return targets[reachable], all_sp[reachable]

    def get_avg_path(self):
        """Average shortest-path length over all ordered vertex pairs.

        Infeasible for the full graph — too expensive to compute.
        """
        all_sp = gt.shortest_distance(self.graph, directed=True)
        return float(np.sum([np.sum(i.get_array()) for i in all_sp])/(self.graph.num_vertices()**2-self.graph.num_vertices()))

    def get_clustering_coefficient(self):
        """Return the mean local clustering coefficient of the graph."""
        clust = gt.local_clustering(self.graph)
        # vertex_average returns (mean, std-error); only the mean is needed.
        vertex_avgs = gt.vertex_average(self.graph, clust)
        return vertex_avgs[0]

    def get_num_edges(self):
        """Number of edges in the graph."""
        return self.graph.num_edges()

    def get_num_vertices(self):
        """Number of vertices in the graph."""
        return self.graph.num_vertices()

    def node_dis_nodes(self, node, target):
        # Intended (per original comment): path lengths plus counts of
        # reachable / unreachable targets.
        # NOTE(review): with a source and target given, shortest_distance
        # returns a plain distance array whose elements have no
        # ``.get_array()`` — this method looks broken as written; confirm
        # against callers before relying on it.
        all_sp = gt.shortest_distance(self.graph, node, target)
        return float(np.sum([np.sum(i.get_array()) for i in all_sp])/(self.graph.num_vertices()**2-self.graph.num_vertices()))


def handle_year(year):
    """Compute all-pairs-from-each-node shortest paths for one year snapshot.

    Builds the link graph for ``year``, fans node ids out to 5 worker threads
    via a thread queue, and collects result dicts in a separate process via a
    process queue.
    """
    logging.info(year)

    wly = WikiLinkYear(year)

    # Map page ids onto dense node ids.
    wly.mapping_graph()

    # Build the directed graph.
    graph = Graph(wly)

    # Thread queue feeding the worker threads; process queue feeding the
    # result-handler process (it must cross a process boundary).
    job_queue = Queue(100)
    ret_queue = PQueue(100)

    workers = []
    for _ in range(5):
        p = Process(target=work, args=(job_queue, ret_queue, wly, graph))
        p.start()
        workers.append(p)

    # The result handler runs in its own process and reads ret_queue, so it
    # gets its own sentinel below — NOT one from job_queue.
    result_proc = PProcess(target=handle_result, args=(ret_queue,))
    result_proc.start()

    for node_id in wly.page_id_node_id_dict.values():
        job_queue.put(node_id)

    # One 'over' sentinel per worker, then wait for all of them to finish.
    for _ in workers:
        job_queue.put('over')
    for p in workers:
        p.join()

    # All workers are done, so no further results will arrive: stop the
    # result process via its own queue and wait for it.
    ret_queue.put('over')
    result_proc.join()

def work(in_queue, out_queue, wly, graph):
    """Worker loop: for each node id read from ``in_queue``, compute shortest
    distances from that node to every other node and push a result dict to
    ``out_queue``.

    Terminates when the 'over' sentinel is received.
    """
    # Loop-invariant: the full set of node ids — hoisted so it is not
    # rebuilt O(n) for every job.
    all_nodes = set(wly.page_id_node_id_dict.values())

    while True:
        current_id = in_queue.get()
        if current_id == 'over':
            logging.info('over')
            return

        page_id = wly.node_id_page_id_dict[current_id]
        _id = f'{page_id}_{wly.year}'

        # Targets: every node except the source itself.
        targets = np.array(list(all_nodes - {current_id}))
        # Unreachable targets are filtered out by get_single_distance.
        targets, all_sp = graph.get_single_distance(current_id, targets)

        out_queue.put({
            "_id": _id,
            "page_id": page_id,
            'year': wly.year,
            # Distances and target page ids are pickled — presumably because
            # the consumer stores them as opaque blobs; verify downstream.
            "distance": pickle.dumps(all_sp),
            "target": pickle.dumps(list(map(lambda x: wly.node_id_page_id_dict[x], targets))),
        })
        logging.info(_id)


def handle_result(in_queue):
    """Consume result dicts from ``in_queue`` and log each one.

    Returns once the 'over' sentinel is received.
    """
    while True:
        data = in_queue.get()
        if data == 'over':
            logging.info('result over')
            break
        logging.info(f'result handle: {data["_id"]},{data["page_id"]},{data["year"]}')



if __name__ == '__main__':
    # Entry point: process only the 2007 snapshot.
    handle_year(2007)