import igraph as ig
import leidenalg as la
import pickle
import copy
import json

# Example: read a file as an edge list and build an undirected graph.
# g = ig.Graph.Read_Edgelist("./UserData/graph_test2.txt", directed=False)

# Load the pre-extracted graph data from local pickle files.
# NOTE(review): pickle.load is unsafe on untrusted input — fine here only
# because these files are produced locally.
with open('relationships_new.pkl', 'rb') as fh:
    relationships = pickle.load(fh)
with open('entities_new.pkl', 'rb') as fh:
    entities = pickle.load(fh)





# Build the graph and run Leiden community detection on it.
def get_partition(entities, relationships, resolution_parameter):
    """Partition the entity graph into communities.

    Args:
        entities: iterable of records whose first element is the vertex name.
        relationships: iterable of records whose first two elements are the
            source and target vertex names of an edge.
        resolution_parameter: CPM resolution parameter; larger values tend to
            produce more, smaller communities.

    Returns:
        list[list]: one inner list of vertex names per detected community.
    """
    vertex_names = [entity[0] for entity in entities]
    edge_pairs = [(relation[0], relation[1]) for relation in relationships]

    # Build an undirected graph from the names and name pairs.
    g = ig.Graph(directed=False)
    g.add_vertices(vertex_names)
    g.add_edges(edge_pairs)

    partition = la.find_partition(
        g,
        partition_type=la.CPMVertexPartition,
        resolution_parameter=resolution_parameter,
    )

    # `partition` yields communities as lists of vertex indices; igraph assigns
    # indices in insertion order, so vertex_names[idx] recovers the name.
    return [[vertex_names[idx] for idx in community] for community in partition]




# record maps each entity name to its community path, one index per level:
# {<entity>: [level-0 community, level-1 community, ...]}
record = {}
for entity in entities:
    record[entity[0]] = []


# Recursion settings: partition max_depth + 1 levels deep.
depth = 0       # current depth
max_depth = 2   # total depth
def recording(entities_tmp, i):
    """Append community index `i` to the record of every entity in entities_tmp."""
    global record
    for entity in entities_tmp:
        name = entity[0]
        print(name, i)  # debug trace of (entity, community) assignments
        record[name].append(i)

def dfs(entities_tmp, relationships_tmp, depth, max_depth):
    """Recursively partition the graph, recording each node's community path.

    Each level re-partitions the current sub-graph with a progressively larger
    resolution parameter (finer splits at deeper levels) and appends every
    node's community index to the module-level `record` via recording().

    Args:
        entities_tmp: entities of the current sub-graph.
        relationships_tmp: relationships within the current sub-graph.
        depth: current recursion depth (0-based).
        max_depth: deepest level to partition (inclusive).
    """
    # Stop once the requested depth is exceeded.
    if depth > max_depth:
        return
    # Resolution grows with depth so deeper levels split more finely.
    coordinates = get_partition(
        entities_tmp, relationships_tmp,
        resolution_parameter=0.05 * (1 + depth * 0.3),
    )

    for i, community in enumerate(coordinates):
        # Set gives O(1) membership tests instead of O(n) list scans below.
        members = set(community)
        # Filter from the FULL entity/relationship lists (as the original did);
        # `members` is a subset of this level's entities, so the result is the
        # same sub-graph restricted to community i.
        sub_entities = [entity for entity in entities if entity[0] in members]
        sub_relationships = [
            (relation[0], relation[1])
            for relation in relationships
            if relation[0] in members and relation[1] in members
        ]
        recording(sub_entities, i)
        print(f"第 {i} 点 - C{depth} - 节点数：{len(sub_entities)}")
        # Recurse into this community.
        dfs(sub_entities, sub_relationships, depth + 1, max_depth)
dfs(entities, relationships, depth, max_depth)


# record holds each node's community path:
# {<entity>: [level-0 community, level-1 community, level-2 community]}
# Open as UTF-8 explicitly: ensure_ascii=False writes raw non-ASCII characters,
# which would raise UnicodeEncodeError under a non-UTF-8 platform default.
with open('record.json', 'w', encoding='utf-8') as f:
    json.dump(record, f, ensure_ascii=False)
