# import matplotlib.pyplot as plt
# from collections import OrderedDict
# from matplotlib.backends.backend_pdf import PdfPages
# import re
import copy
import matplotlib.pyplot as plt
import networkx as nx

class BaseAlgorithm:
    """Common base for graph algorithms; records the algorithm's name."""
    def __init__(self, algoname):
        # Fixed typo: original assigned to "self.algoanme", which left
        # the intended `algoname` attribute unset.
        self.algoname = algoname

class PageRank(BaseAlgorithm):
    """PageRank algorithm marker class; delegates setup to BaseAlgorithm."""
    def __init__(self, algoname = 'pr'):
        # Fixed: "BaseAlgorihm" was misspelled (NameError at call time)
        # and `self` was not passed to the base __init__.  Inheriting
        # from BaseAlgorithm is also required for the unbound-method
        # call to be legal under Python 2 old-style classes.
        BaseAlgorithm.__init__(self, algoname)

# Generate a random directed graph for the experiment
def generate_randgraph(v_num = 12, p = 0.3):
    """Build a random directed binomial (Erdos-Renyi) graph.

    Every node starts with a uniform PageRank weight of 1/|V| and every
    edge carries weight 1.  The graph is drawn (but not shown).

    parameters:
        v_num: number of vertices
        p: edge-creation probability
    """
    g = nx.binomial_graph(v_num, p, directed=True)
    # Presumably added so the graph always has at least this edge;
    # its weight is overwritten to 1 below.
    g.add_edge(1, 0, weight=10)
    uniform_weight = 1.0 / len(g.nodes())
    for _, node_data in g.nodes(data=True):
        node_data['weight'] = uniform_weight
    for _, _, edge_data in g.edges(data=True):
        edge_data['weight'] = 1
    nx.draw(g, with_labels=True)
    return g

# ---- Module-level experiment state ----
graph = generate_randgraph()
# Pristine copy kept for later evaluation against networkx's own PageRank.
raw_graph = graph.copy()
intermediateResult = dict()  # NOTE(review): never written below — appears unused
# One inbound message queue per node, keyed by sender node id.
msgqueues = [dict() for i in range(len(graph.nodes()))]
# Number of simulated processes/partitions; node n belongs to
# partition n / proc_num (integer division).
proc_num = 4
subgraphs = [dict() for i in range(proc_num)]
for i in range(proc_num):
    pass  # NOTE(review): partitioning into subgraphs is not implemented

'''
Define the per-round computation work (local PageRank update).
parameters:
    msgs: currently unused; messages are read from the module-level queues
'''
def computing(msgs = None):
    """One local PageRank compute step over the module-level graph.

    parameters:
        msgs: unused; kept for interface compatibility.  Messages are
            read from the module-level `msgqueues`.
    """
    # Update every node's rank from its inbound message queue:
    #   PR(v) = 0.15/N + 0.85 * sum(incoming shares)
    # The original wrapped this sweep in a redundant
    # `for i in range(proc_num)` loop that shadowed its own loop
    # variable and recomputed identical values proc_num times (the
    # queues do not change during the sweep); one pass is equivalent.
    # It also kept an unused `msg_sum` accumulator, removed here.
    node_count = len(graph.nodes())
    for node, msgqueue in enumerate(msgqueues):
        graph.node[node]['weight'] = 0.15 / node_count \
                + sum(msgqueue.values()) * 0.85

    # Re-share the updated rank along outgoing edges, but only to
    # successors in the same partition (local synchronization).
    for node, d in graph.nodes(data = True):
        if len(graph.out_edges(node)) <= 0:
            continue
        share_pr = d['weight'] / len(graph.out_edges(node))
        for u, v in graph.out_edges(node):
            # `//` makes the Python-2 integer division explicit
            # (same result as `/` on ints in Python 2).
            if u // proc_num == v // proc_num:
                msgqueues[v][u] = share_pr
        
'''
Perform the global data exchange: every node shares its rank with all successors.
parameters: none
'''
def dataexchange():
    # send msg to other graph nodes
    for node, d in graph.nodes(data=True):
        if len(graph.out_edges(node)) <= 0:
            continue
        share_pr = d['weight'] / len(graph.out_edges(node))
        for u, v in graph.out_edges(node):
            # global synchronization
            msgqueues[v][u] = share_pr
    print "dataexchange..."
    pass

# Loss function
def loss_function(nodes1, nodes2):
    max_loss = 0.0
    for i, j in zip(nodes1, nodes2):
        i_nodeid, i_weight = i
        j_nodeid, j_weight = j
        # print i_weight.values()[0] - j_weight.values()[0]
        loss = abs(i_weight.values()[0] - j_weight.values()[0])
        if  loss > max_loss:
            max_loss = loss
    return max_loss

'''
Loop entry.
parameters:
    tolerant: convergence threshold (iterative precision)
    rounds: maximum number of iterations
    delta: number of computing rounds per data exchange
'''
def entry(delta = 1, tolerant = 0.0001, rounds = 100):
    round_num = 0
    loss = 1.0
    inner_iter_num = 0
    old_nodes = copy.deepcopy(graph.nodes(data = True))
    while loss > tolerant and round_num < rounds:
        computing()
        if round_num % delta == 0:
            dataexchange()
            inner_iter_num += 1

            # print old_nodes, graph.nodes(data = True)
            loss = loss_function(old_nodes, graph.nodes(data = True)) 
            old_nodes = copy.deepcopy(graph.nodes(data = True))
            print "loss:", loss

        # Take termination condition into consideration 
        round_num += 1
    print "inner_iter_num:", inner_iter_num
    pass

# Display graph pagerank values
def displayPR(graph):
    for v, d in graph.nodes(data = True):
        print str(v) + ":" + str(d['weight'])

# BSP pagerank
def bsp_pagerank(G, tolerant = 0.001, round_num = 100):
    msgqueues = [[] for i in range(len(G.nodes()))]
    loss = 1.0
    rounds = 0
    old_nodes = copy.deepcopy(G.nodes(data = True))
    while loss > tolerant and rounds < round_num:
        # calculate the pr
        msg_sum = 0.0
        for i, msgqueue in enumerate(msgqueues):
            G.node[i]['weight'] = 0.15 / len(G.nodes()) \
                    + sum(msgqueue) * 0.85

        # clear the message queues
        for node in G.nodes():
            msgqueues[node][:] = []

        # send msg to other G.nodes
        for node, d in G.nodes(data=True):
            if len(G.out_edges(node)) <= 0:
                continue
            share_pr = d['weight'] / len(G.out_edges(node))
            for u, v in G.out_edges(node):
                msgqueues[v].append(share_pr)
        loss = loss_function(old_nodes, G.nodes(data = True))
        old_nodes = copy.deepcopy(G.nodes(data = True))
        rounds += 1
    print "BSP PageRank iterative rounds:", rounds
    pass

'''
Verify the result against networkx's built-in PageRank implementation.
'''
def evaluate(g1, g2):
    # G=nx.binomial_graph(10, 0.3, directed=True)
    layout = nx.spring_layout(g1)
    pr=nx.pagerank(g1, alpha=0.85)
    for (i, j), r in zip(g2.nodes(data=True), pr.values()):
        print j.values()[0], ",", r

# Start the program
if __name__ == "__main__":
    # Run the delta-stepping PageRank (data exchange every 6 rounds).
    entry(delta = 6)
    print "-----------Evaluation-----------"
    # Compare our computed weights against networkx's reference PageRank.
    evaluate(raw_graph, graph)
    # Run the BSP baseline on the untouched copy for comparison.
    bsp_pagerank(raw_graph)
    print raw_graph.nodes(data = True)
