import networkx as nx

from algorithm.simplified_graph.compress_graph import compress_graph


def PRDD(G, beta=0.2):
    """Rank the nodes of *G* by a mixed PageRank / discounted-degree score.

    The graph is modified in place: every edge gets a ``'weight'`` attribute
    derived from the endpoints' PageRank shares, the graph is compressed via
    :func:`compress_graph`, and edges whose weight is at or below the mean
    weight are pruned. Each surviving node's importance is then a convex
    combination of its (recomputed) PageRank and a discounted-degree term.

    Parameters
    ----------
    G : networkx graph
        Input graph; mutated (edge weights written, weak edges removed).
    beta : float, optional
        Mixing coefficient: PageRank contributes ``1 - beta``, the
        discounted-degree term contributes ``beta``. Default 0.2.

    Returns
    -------
    list
        Node identifiers sorted by descending importance.
    """
    # PageRank on the original graph drives the edge weights.
    pr = nx.pagerank(G)
    # Weight each edge by the larger endpoint's relative PageRank share,
    # so every weight lies in [0.5, 1).
    for u, v in G.edges():
        share = pr[u] / (pr[u] + pr[v])
        G[u][v]['weight'] = max(share, 1 - share)
    # Threshold = mean edge weight. Guard against an edgeless graph,
    # which previously raised ZeroDivisionError.
    edge_weights = nx.get_edge_attributes(G, 'weight')
    num_edges = G.number_of_edges()
    th = sum(edge_weights.values()) / num_edges if num_edges else 0.0
    compress_graph(G)
    # BUG FIX: snapshot the edges before pruning — removing edges while
    # iterating the live edge view mutates the underlying adjacency dicts
    # and raises RuntimeError in networkx.
    for u, v in list(G.edges()):
        if G[u][v]['weight'] <= th:
            G.remove_edge(u, v)
    # Re-rank on the pruned graph.
    pr = nx.pagerank(G)
    degree = dict(G.degree())
    # Discounted degree: sum of each node's neighbors' degrees.
    discount_degree = {u: sum(degree[v] for v in G.neighbors(u)) for u in G.nodes()}
    # Guards: `default=0` covers an empty graph (max() on an empty sequence
    # raised ValueError); `or 1` covers an all-isolated graph where the max
    # is 0 and the normalization below would divide by zero.
    max_discount_degree = max(discount_degree.values(), default=0) or 1
    importance = {
        u: (1 - beta) * pr[u] + beta * (1 - discount_degree[u] / max_discount_degree)
        for u in G.nodes()
    }
    # Highest importance first.
    sorted_nodes = sorted(importance.items(), key=lambda kv: kv[1], reverse=True)
    return [node for node, _score in sorted_nodes]
