import networkit as nkit
from flask import jsonify
from flask import request
from flask_login import login_required
from flask_socketio import emit
import json
from app import socket
import os
from . import networkitapi

def getIsolatedNodes(degrees):
    """Count isolated nodes, i.e. entries in *degrees* equal to 0.

    :param degrees: iterable of node degree values (non-negative numbers)
    :return: number of zero-degree entries
    """
    count = 0
    # Sorting puts all zero degrees first, so we can stop at the first non-zero.
    for value in sorted(degrees):
        if value != 0:
            break
        count += 1
    return count


def getClusteringCoefficient(G):
    """Return the average local clustering coefficient of *G*.

    Self-loops are stripped first (this mutates the caller's graph) because
    networkit's LocalClusteringCoefficient raises a RuntimeError on graphs
    that contain self-loops.
    """
    if G.numberOfSelfLoops() > 0:
        G.removeSelfLoops()
    scores = nkit.centrality.LocalClusteringCoefficient(G, True).run().scores()
    return sum(scores) / G.numberOfNodes()


def getComponentPartition(G):
    """Partition of *G* into its connected components.

    Directed graphs use StronglyConnectedComponents; undirected graphs use
    plain ConnectedComponents.
    """
    algorithm = (nkit.components.StronglyConnectedComponents(G)
                 if G.isDirected()
                 else nkit.components.ConnectedComponents(G))
    return algorithm.run().getPartition()

@networkitapi.route('/getBasicAttribute',methods=['POST'])
@login_required
def myOverall():
    """Compute basic attributes of the posted graph and return them as JSON.

    Expects a JSON body with 'num_nodes' (int) and 'data' (list of [u, v]
    edge pairs). The response dict is keyed by Chinese display labels
    consumed directly by the front end.
    """
    # Reuse the shared builder instead of duplicating the parse/build loop.
    G = createGraph(request.get_data())
    n = G.numberOfNodes()

    result_dict = {}
    result_dict["节点数"] = n
    result_dict["边数"] = G.numberOfEdges()
    result_dict["有向图"] = "True" if G.isDirected() else "False"
    result_dict["有权重"] = "True" if G.isWeighted() else "False"
    result_dict["密度"] = nkit.graphtools.density(G)
    result_dict["自环数"] = G.numberOfSelfLoops()

    # NOTE(review): this condition looks inverted — self-loops are "ignored"
    # only when there are none (a no-op). Presumably the intent is to count
    # self-loops toward degree when they exist; confirm before changing.
    degrees = nkit.centrality.DegreeCentrality(
        G, ignoreSelfLoops=G.numberOfSelfLoops() == 0).run().scores()
    result_dict["孤立节点数"] = getIsolatedNodes(degrees)
    result_dict["度相似性"] = nkit.correlation.Assortativity(G, degrees).run().getCoefficient()
    result_dict["min/max/平均度"] = [int(min(degrees)), int(max(degrees)), sum(degrees) / n]

    result_dict["去除自环后_聚类系数"] = getClusteringCoefficient(G)

    cp = getComponentPartition(G)
    lcs = max(cp.subsetSizes())
    result_dict["连接的组件数"] = cp.numberOfSubsets()
    # Fixed: size and percentage used to be concatenated with no separator
    # (e.g. "5100.0"), which is unreadable/ambiguous on the front end.
    result_dict["最大组件的大小"] = "{0} ({1}%)".format(lcs, 100 * lcs / n)
    return jsonify(result_dict)

def tools(communities):
    """Build a JSON-serializable summary of a community-detection result.

    :param communities: partition-like object exposing getVector(),
        numberOfElements(), numberOfSubsets() and subsetSizes()
    :return: dict with 'communities' (subset id -> list of member node ids)
        and 'OtherInformation' (human-readable summary string)
    """
    membership = {}
    # getVector() maps node index -> subset id; invert it into groups.
    for node, subset in enumerate(communities.getVector()):
        membership.setdefault(subset, []).append(node)

    summary = ("{0} elements assigned to {1} subsets".format(
        communities.numberOfElements(), communities.numberOfSubsets())
        + ","
        + "the biggest subset has size {0}".format(max(communities.subsetSizes())))

    return {"communities": membership, "OtherInformation": summary}

def createGraph(a):
    """Build a networkit Graph from a raw JSON request body.

    Fixed: the parameter used to be ignored and shadowed by a second
    request.get_data() call, so the function silently depended on a Flask
    request context instead of its own argument.

    :param a: raw request bytes; UTF-8 JSON object with 'num_nodes' (int)
        and 'data' (list of [u, v] edge pairs)
    :return: nkit.Graph with num_nodes nodes and the given edges
    """
    listData = json.loads(a.decode('utf-8'))
    G = nkit.Graph(listData['num_nodes'])
    for u, v in listData['data']:
        G.addEdge(u, v)
    return G

@networkitapi.route('/parallelLouvain',methods=['POST'])
@login_required
def parallelLouvain():
    """Run parallel Louvain (PLM) community detection on the posted graph.

    Fixed: removed dead locals (the body was parsed a second time into
    unused variables) and a debug print on the request path.
    """
    G = createGraph(request.get_data())
    plmCommunities = nkit.community.detectCommunities(
        G, algo=nkit.community.PLM(G, refine=True, gamma=0.5, maxIter=32))
    return jsonify(tools(plmCommunities))


@networkitapi.route('/parallelLabelPropagation',methods=['POST'])
@login_required
def parallelLabelPropagation():
    """Run parallel label propagation (PLP) community detection on the posted graph.

    Fixed: removed dead locals (the body was parsed a second time into
    unused variables).
    """
    G = createGraph(request.get_data())
    plpCommunities = nkit.community.detectCommunities(G, algo=nkit.community.PLP(G))
    return jsonify(tools(plpCommunities))


@networkitapi.route('/getConnectedComponents',methods=['POST'])
@login_required
def getConnectedComponents():
    """Return the connected components of the posted graph as JSON.

    Fixed: removed dead locals and debug prints on the request path.
    """
    G = createGraph(request.get_data())
    cc = nkit.components.ConnectedComponents(G)
    cc.run()
    result_dict = {
        "Components": cc.getComponents(),
        "ComponentSizes": cc.getComponentSizes(),
    }
    return jsonify(result_dict)


@networkitapi.route('/getBiconnectedComponents',methods=['POST'])
@login_required
def getBiconnectedComponents():
    """Return the biconnected components of the posted (undirected) graph.

    Fixed: removed dead locals and debug prints on the request path.
    The 'algo_information' text is user-facing and kept verbatim.
    """
    G = createGraph(request.get_data())
    cc = nkit.components.BiconnectedComponents(G)
    cc.run()
    result_dict = {}
    result_dict["algo_information"]="要求输入为无向图，计算其重连通分量。当且仅当删去G中的顶点 " \
                                    "v及所有依附于v的所有边后，可将图分割成两个或两个以上的连通分量，则称顶点v为关节点，" \
                                    "没有关节点的连通图叫做重连通图。在重连通图上, 任何一对顶点之间至少存在有两条路径, " \
                                    "在删去某个顶点及与该顶点相关联的边时, 也不破坏图的连通性。"
    result_dict["Components"] = cc.getComponents()
    result_dict["ComponentSizes"] = cc.getComponentSizes()
    return jsonify(result_dict)

@networkitapi.route('/getWeaklyComponents',methods=['POST'])
@login_required
def getWeaklyComponents():
    """Weakly connected components of a directed graph.

    On an undirected input networkit raises, and the response carries
    isSuccess=0 plus a user-facing error message instead.

    Fixed: narrowed the bare ``except:`` (which also swallowed
    KeyboardInterrupt/SystemExit) to ``except Exception``, and removed dead
    locals and debug prints.
    """
    G = createGraph(request.get_data())
    result_dict = {}
    try:
        cc = nkit.components.WeaklyConnectedComponents(G)
        cc.run()
        result_dict["algo_information"] = "计算弱连通子图，将有向图的所有的有向边替换为无向边，所得到的图称为原图的基图。如果一个有向图的基图是连通图，则有向图是弱连通图。"
        result_dict["Components"] = cc.getComponents()
        result_dict["ComponentSizes"] = cc.getComponentSizes()
        result_dict["isSuccess"] = 1
    except Exception:
        # networkit rejects undirected graphs for this algorithm.
        result_dict["isSuccess"] = 0
        result_dict["error"] = "输入数据集为无向图，请选择有向图输入"
    return jsonify(result_dict)


def takeThird(elem):
    """Sort key: the third field of a result row (the computed score)."""
    return elem[2]

@networkitapi.route('/CommonNeighborsIndex',methods=['POST'])
@login_required
def CommonNeighborsIndex():
    """Common-neighbor score for every unordered node pair, sorted descending.

    Each row is [i, j, score] where score is the number of shared neighbors
    of i and j divided by the total node count.

    Fixed: removed a dead local and hoisted the repeated num_nodes lookup
    out of the O(n^2) loop.
    """
    a = request.get_data()
    listData = json.loads(a.decode('utf-8'))
    G = createGraph(a)
    num_nodes = listData['num_nodes']
    cni = nkit.linkprediction.CommonNeighborsIndex(G)
    result = []
    # j starts at i+1: skips self-pairs and duplicate unordered pairs.
    for i in range(num_nodes):
        for j in range(i + 1, num_nodes):
            result.append([i, j, cni.run(i, j) / num_nodes])
    result.sort(key=takeThird, reverse=True)
    result_dict = {}
    result_dict["result"] = result
    result_dict["description"] = "该算法返回两个节点间的共有邻居占比"
    return jsonify(result_dict)