import functools
import builtins
import multiprocessing
import time
import MDAnalysis
from matplotlib.image import imread
import matplotlib.pyplot as plt
import graph_tool.all as gt
import pickle
import os
from pathlib import Path
import numpy as np
import networkx as nx
from typing import Dict
from collections import deque
import numpy as np
import time
import networkx as nx

# ============ graph_tool ============
from graph_tool import centrality as gt_cent, _prop
from graph_tool.libgraph_tool_core import Vector_double, Vector_size_t

import GraphTools.GEBC_break as Gb
import GraphTools.analysis_topology as at
import GraphTools.basic_method as bm


# Make print() flush by default so progress logs appear in real time
builtins.print = functools.partial(builtins.print, flush=True)


def example_graph(deep=5, seed=42):
    """Load a sample pickled graph, pick one random edge, and return the
    depth-`deep` neighborhood subgraph around that edge.

    Args:
        deep: neighborhood depth to return.
        seed: numpy random seed, for a reproducible edge choice.

    Returns:
        The depth-`deep` entry of ``all_edge_neighborhoods_nx``.
    """
    base_path = Path(
        "/home/baixj/gel_network/1-crosslink_degree_space/string=bondingprob0.6667/N0")
    graph_file = base_path / "graph_series" / "0-graph.pkl"

    with open(graph_file, 'rb') as fh:
        full_graph = pickle.load(fh)

    # Keep only the largest connected component.
    component = at.simplify_maximum_subgraph(full_graph)

    # Deterministic random edge selection.
    np.random.seed(seed)
    all_edges = list(component.edges())
    chosen_edge = all_edges[np.random.randint(0, len(all_edges))]

    neighborhoods = all_edge_neighborhoods_nx(component, chosen_edge, max_depth=deep)
    return neighborhoods[deep]


def get_edge_neighborhood(G, start_edge, depth=5):
    """Return the subgraph induced by all nodes within `depth` hops of either
    endpoint of `start_edge` (NetworkX implementation).

    Args:
        G: a networkx graph (anything exposing ``neighbors``/``subgraph``).
        start_edge: the (u, v) edge whose neighborhood is extracted.
        depth: maximum BFS distance from the edge's endpoints.

    Returns:
        A copy of the induced subgraph, preserving all graph attributes.
    """
    u, v = start_edge

    # Multi-source BFS from both endpoints of the edge.
    visited_nodes = set()
    queue = deque([(u, 0), (v, 0)])  # (node, depth)

    while queue:
        node, current_depth = queue.popleft()
        if node in visited_nodes or current_depth > depth:
            continue
        visited_nodes.add(node)

        # Only expand while strictly inside the depth limit.
        if current_depth < depth:
            for neighbor in G.neighbors(node):
                if neighbor not in visited_nodes:
                    queue.append((neighbor, current_depth + 1))

    # The induced subgraph already contains every edge between visited
    # nodes, so the original's `visited_edges` bookkeeping (computed but
    # never used) has been removed.
    return G.subgraph(visited_nodes).copy()


def nx_to_gt(G_nx):
    """Convert a NetworkX graph into a graph_tool graph.

    Edge keys in the returned ``edge_map`` are ``frozenset({u, v})`` so that
    lookups are independent of edge orientation.

    Args:
        G_nx: undirected networkx.Graph.

    Returns:
        tuple: (G_gt, vertex_map, edge_map) where
            G_gt       -- graph_tool.Graph (undirected),
            vertex_map -- {nx node: gt vertex},
            edge_map   -- {frozenset({u, v}): gt edge}.
    """
    G_gt = gt.Graph(directed=False)

    # One gt vertex per nx node, remembering the correspondence.
    vertex_map = {node: G_gt.add_vertex() for node in G_nx.nodes()}

    # Mirror every edge, keyed by the unordered endpoint pair.
    edge_map = {}
    for a, b in G_nx.edges():
        edge_map[frozenset((a, b))] = G_gt.add_edge(vertex_map[a], vertex_map[b])

    return G_gt, vertex_map, edge_map


def get_edge_neighborhood_gt(G_gt, start_edge, vertex_map, edge_map, depth=5):
    """Depth-limited neighborhood of an edge, computed on a graph_tool graph.

    Args:
        G_gt: graph_tool.Graph.
        start_edge: the edge in NetworkX terms; a MultiGraph key, if present,
            is ignored.
        vertex_map: {nx node: gt vertex}.
        edge_map: {nx edge key: gt edge} (unused here; kept for interface
            symmetry with the NetworkX variant).
        depth: BFS depth limit.

    Returns:
        graph_tool.GraphView restricted to the visited vertices.
    """
    # Only the two endpoints matter.
    u, v = start_edge[:2]

    seen = set()
    frontier = deque([(vertex_map[u], 0), (vertex_map[v], 0)])  # (vertex, depth)

    # Multi-source BFS from both endpoints.
    while frontier:
        vert, d = frontier.popleft()
        if vert in seen or d > depth:
            continue
        seen.add(vert)

        # Expand only while strictly inside the depth limit.
        if d < depth:
            for nbr in vert.out_neighbors():
                if nbr not in seen:
                    frontier.append((nbr, d + 1))

    # Filter the original graph down to the visited vertex set.
    return gt.GraphView(G_gt, vfilt=lambda w: w in seen)


def compare_subgraphs(subgraph_nx, subgraph_gt, vertex_map):
    """Check that a NetworkX subgraph and a graph_tool subgraph have the
    same topology.

    Args:
        subgraph_nx: NetworkX subgraph.
        subgraph_gt: graph_tool subgraph (or GraphView).
        vertex_map: {nx node: gt vertex} mapping.

    Returns:
        bool: True iff node counts, edge counts and every node's
        neighborhood agree between the two graphs.
    """
    # Node counts must match.
    if subgraph_nx.number_of_nodes() != subgraph_gt.num_vertices():
        print("节点数量不一致")
        return False

    # Edge counts must match.
    if subgraph_nx.number_of_edges() != subgraph_gt.num_edges():
        print("边数量不一致")
        return False

    # Invert vertex_map once. The original re-scanned the whole mapping for
    # every neighbor (O(V*E) overall) and re-built the vertices() generator
    # for every membership test.
    reverse_map = {gt_vertex: nx_node for nx_node, gt_vertex in vertex_map.items()}
    gt_vertex_set = set(subgraph_gt.vertices())

    # Compare each node's neighborhood.
    for node in subgraph_nx.nodes():
        v = vertex_map[node]
        if v not in gt_vertex_set:
            print(f"节点 {node} 在graph_tool子图中不存在")
            return False

        # Neighbors according to NetworkX.
        nx_neighbors = set(subgraph_nx.neighbors(node))

        # Neighbors according to graph_tool, translated back to nx nodes.
        gt_neighbors = set()
        for n in v.out_neighbors():
            if n in gt_vertex_set:
                nx_node = reverse_map.get(n)
                if nx_node is not None:
                    gt_neighbors.add(nx_node)

        if nx_neighbors != gt_neighbors:
            print(f"节点 {node} 的邻居不一致")
            print(f"NetworkX邻居: {nx_neighbors}")
            print(f"graph_tool邻居: {gt_neighbors}")
            return False

    return True


def calculate_diameter_using_distance_matrix(base_path):
    """Compute the graph diameter from a precomputed distance-matrix file.

    Args:
        base_path: directory containing ``graph_distance/0-distance.dat``
            (raw int32 n x n matrix) and ``graph_distance/0-nodes.pkl``
            (pickled node list defining n).

    Returns:
        The maximum entry of the distance matrix, i.e. the diameter.
    """
    distance_dir = Path(base_path) / "graph_distance"

    # The node list determines the matrix dimension.
    with open(distance_dir / "0-nodes.pkl", 'rb') as fh:
        node_list = pickle.load(fh)
    n = len(node_list)

    # Memory-map the matrix instead of loading it wholesale.
    distances = np.memmap(distance_dir / "0-distance.dat",
                          dtype=np.int32, mode='r', shape=(n, n))

    # Diameter = largest pairwise distance.
    return np.max(distances)


def initialize_indices(graph):
    """
    Build index mappings for a graph:
      - node_idx: node -> integer index (enumeration order of graph.nodes())
      - edge_list: list of normalized edges (sorted tuples)
      - edge_id: normalized edge -> its position in edge_list
    """
    node_idx = {}
    for position, node in enumerate(graph.nodes()):
        node_idx[node] = position

    edge_list = []
    for a, b in graph.edges():
        edge_list.append(tuple(sorted((a, b))))

    edge_id = {edge: n for n, edge in enumerate(edge_list)}
    return node_idx, edge_list, edge_id


def compute_edge_occurrences(graph, edge_id):
    """
    Accumulate per-edge betweenness contributions without materializing full
    path lists.

    Returns a list ``edge_occ`` where
    edge_occ[i] = sum over (s, t) of (# times edge i appears) / (# shortest paths s->t).
    """
    occurrences = [0.0] * len(edge_id)

    for source in graph.nodes():
        # Distances double as a reachability test for each target.
        reachable = nx.single_source_shortest_path_length(graph, source)
        for target in graph.nodes():
            if source == target or target not in reachable:
                continue

            # First pass: count the shortest paths for this pair.
            n_paths = sum(
                1 for _ in nx.all_shortest_paths(graph, source=source, target=target))
            if n_paths == 0:
                continue

            # Second pass: add each edge's share, weighted by 1/n_paths.
            for path in nx.all_shortest_paths(graph, source=source, target=target):
                for a, b in zip(path, path[1:]):
                    occurrences[edge_id[tuple(sorted((a, b)))]] += 1.0 / n_paths

    # Undirected graphs visit each (s, t) pair twice; halve to compensate.
    if not graph.is_directed():
        occurrences = [x / 2.0 for x in occurrences]

    return occurrences


def save_variables(output_dir, **variables):
    """
    Pickle each keyword argument into ``output_dir`` as ``<name>.pkl``.

    Args:
        output_dir: target directory (created if missing).
        **variables: name -> object pairs to serialize.
            (Renamed from ``**vars`` to avoid shadowing the ``vars`` builtin;
            callers are unaffected since it is a keyword catch-all.)
    """
    os.makedirs(output_dir, exist_ok=True)
    for name, obj in variables.items():
        # One file per variable, named after the keyword.
        path = os.path.join(output_dir, f"{name}.pkl")
        with open(path, 'wb') as f:
            pickle.dump(obj, f)


def load_variables(output_dir, *names):
    """
    Unpickle variables previously written by ``save_variables``.

    Args:
        output_dir: directory holding the ``.pkl`` files.
        *names: base file names, without the ``.pkl`` suffix.

    Returns:
        dict mapping each name to its unpickled object.
    """
    result = {}
    for name in names:
        file_path = os.path.join(output_dir, f"{name}.pkl")
        with open(file_path, 'rb') as fh:
            result[name] = pickle.load(fh)
    return result


def process_graph(graph, output_dir):
    """
    Build index structures, compute shortest-path edge occurrences, and
    pickle everything into ``output_dir``.

    Variables produced (all written via ``save_variables``):

    | variable           | type                        | meaning                                                          |
    | ------------------ | --------------------------- | ---------------------------------------------------------------- |
    | `node_idx`         | `dict{hashable node → int}` | maps each node to 0…N-1                                          |
    | `edge_list`        | `list[tuple]`               | `edge_list[eid]` is the normalized edge `tuple(sorted((u, v)))`  |
    | `edge_id`          | `dict{edge_norm → int}`     | reverse index: normalized edge → `eid`                           |
    | `pair_total_paths` | `list[list[int]]`           | `pair_total_paths[i][j]` = number of shortest paths i → j        |
    | `edge_occ`         | `list[list[tuple]]`         | `edge_occ[eid]` is a list of triples `[(si, ti, cnt), …]` where `cnt` is how many times the edge appears on all shortest (i, j) paths |

    Returns:
        tuple: (node_idx, edge_list, edge_id, edge_occ, pair_total_paths).
        Note: NOT a GEBC dict (the original docstring was wrong here);
        feed these into ``compute_single_edge_gebc`` to get GEBC values.
    """
    node_idx, edge_list, edge_id = initialize_indices(graph)
    edge_occ, pair_total_paths = compute_edge_occurrences_full(
        graph, edge_id, node_idx)
    save_variables(output_dir,
                   node_idx=node_idx,
                   edge_list=edge_list,
                   edge_id=edge_id,
                   edge_occ=edge_occ,
                   pair_total_paths=pair_total_paths)
    return node_idx, edge_list, edge_id, edge_occ, pair_total_paths


def compute_single_edge_gebc(edge_occ,
                             pair_total_paths,
                             edge_id,
                             target_edge,
                             graph,
                             normalize_edge=None):
    """
    Compute the GEBC of a single edge from precomputed occurrence data.

    Args:
        edge_occ: list[list[tuple]] with edge_occ[eid] = [(si, ti, cnt), ...]
        pair_total_paths: pair_total_paths[si][ti] = # shortest paths si -> ti
        edge_id: {normalize_edge((u, v)): eid}
        target_edge: (u, v), or (u, v, key) for multigraph edges
        graph: networkx Graph/DiGraph; only ``is_directed()`` is consulted
        normalize_edge: callable normalizing undirected edge orientation;
            defaults to the sorted-tuple form

    Returns:
        float: the GEBC value of the target edge.

    Raises:
        ValueError: if the edge is not present in ``edge_id``.
    """
    if normalize_edge is None:
        def normalize_edge(x): return tuple(sorted(x))

    # -- 1. Normalize the edge and resolve its eid ----------------------
    if len(target_edge) == 3:
        u, v, _ = target_edge
    else:
        u, v = target_edge

    e_norm = normalize_edge((u, v))
    if e_norm not in edge_id:
        raise ValueError(f"Edge {target_edge} not found in graph.")
    eid = edge_id[e_norm]

    # -- 2. Sum cnt / total_paths over all recorded (si, ti) pairs ------
    accumulated = 0.0
    for si, ti, cnt in edge_occ[eid]:
        # total_paths > 0 by construction: a pair with zero paths never
        # appears in edge_occ.
        accumulated += cnt / pair_total_paths[si][ti]

    # -- 3. Undirected graphs count each pair twice ---------------------
    return accumulated / 2.0 if not graph.is_directed() else accumulated


def get_gebc_for_edge(u, v, output_dir):
    """
    Load the saved GEBC mapping from ``output_dir`` and print the value for
    edge (u, v).

    Returns:
        The GEBC value, or None if the edge is absent from the mapping.
    """
    # Renamed from `vars` to avoid shadowing the `vars` builtin.
    loaded = load_variables(output_dir, 'gebc')
    gebc = loaded['gebc']
    e = tuple(sorted((u, v)))
    value = gebc.get(e)
    print(f"GEBC for edge {e}: {value}")
    return value


def compute_edge_occurrences_full(graph, edge_id, node_idx):
    """
    For every ordered node pair, record how often each edge appears on the
    pair's shortest paths, plus the number of shortest paths per pair.

    Returns:
        edge_occ: list[list[tuple]], edge_occ[eid] = [(si, ti, cnt), ...]
        pair_total_paths: list[list[int]], pair_total_paths[si][ti] = path count
    """
    n_nodes = graph.number_of_nodes()
    edge_occ = [[] for _ in range(len(edge_id))]
    pair_total_paths = [[0] * n_nodes for _ in range(n_nodes)]

    for source in graph.nodes():
        si = node_idx[source]
        # Distances double as a reachability test.
        reachable = nx.single_source_shortest_path_length(graph, source)
        for target in graph.nodes():
            if source == target or target not in reachable:
                continue
            ti = node_idx[target]

            shortest = list(nx.all_shortest_paths(graph, source=source, target=target))
            n_paths = len(shortest)
            if n_paths == 0:
                continue
            pair_total_paths[si][ti] = n_paths

            # Tally the raw appearance count of every edge on these paths.
            counts = {}
            for path in shortest:
                for a, b in zip(path, path[1:]):
                    eid = edge_id[tuple(sorted((a, b)))]
                    counts[eid] = counts.get(eid, 0) + 1
            for eid, cnt in counts.items():
                edge_occ[eid].append((si, ti, cnt))

    # No division by 2 here: compute_single_edge_gebc handles the
    # undirected double-counting uniformly.
    return edge_occ, pair_total_paths


def add_cos_factor_to_edges(graph):
    """
    Attach a 'cos_factor' attribute to every edge: the |cos| of the angle
    between the (periodic-boundary-corrected) edge vector and the x axis.

    Args:
        graph: networkx.MultiGraph with node position attributes and a
            'box' graph attribute holding the simulation box.

    Returns:
        The same graph, with 'cos_factor' set on every edge.

    Raises:
        ValueError: if the box or the node position key cannot be found.
    """
    # Box dimensions are required for the minimum-image correction.
    box = graph.graph.get('box', None)
    if box is None:
        raise ValueError("图中缺少box属性")
    box = bm.get_box_dimensions(box)

    # Autodetect which node attribute holds the coordinates.
    pos_key = bm.detect_position_key(graph)
    if pos_key is None:
        raise ValueError("无法找到节点位置信息")

    box_array = np.array(box, dtype=float)

    for u, v, key, data in graph.edges(data=True, keys=True):
        # Edge vector between the endpoint coordinates.
        start = np.array(graph.nodes[u][pos_key], dtype=float)
        end = np.array(graph.nodes[v][pos_key], dtype=float)
        delta = start - end

        # Minimum-image convention, axis by axis (zero-length axes skipped).
        for axis in range(len(box_array)):
            length = box_array[axis]
            if length != 0:
                delta[axis] -= round(delta[axis] / length) * length

        delta_length = np.linalg.norm(delta)
        if delta_length == 0:
            # Coincident endpoints: define the factor as 1.
            cos_factor = 1
        else:
            cos_factor = abs(delta[0] / delta_length)

        graph[u][v][key]['cos_factor'] = float(cos_factor)

    return graph


# ----------------------------------------------------------------------
# 1 · 单次 BFS 生成所有子图
# ----------------------------------------------------------------------
def __old_all_edge_neighborhoods_nx(
    G: nx.Graph, edge, max_depth: int = 5, copy_graph: bool = True
) -> Dict[int, nx.Graph]:
    """
    Return a {depth: subgraph} dict built from a single multi-source BFS.

    Notes (kept from the original):
        the number of entries matches the graph diameter; entry 0 holds just
        the starting edge and entry diameter-1 is the whole graph.
        See: /home/baixj/gel_network/GEBC_local/_test/t8-所有子图生成程序检查.py
    """
    src_a, src_b = edge
    distance = {src_a: 0, src_b: 0}
    frontier = deque([(src_a, 0), (src_b, 0)])

    # BFS from both endpoints, capped at max_depth.
    while frontier:
        node, d = frontier.popleft()
        if d == max_depth:
            continue
        for nbr in G.neighbors(node):
            if nbr not in distance:
                distance[nbr] = d + 1
                frontier.append((nbr, d + 1))

    # Bucket nodes by distance, then take cumulative unions.
    buckets = [set() for _ in range(max_depth + 1)]
    for node, d in distance.items():
        buckets[d].add(node)

    subgraphs = {}
    accumulated = set()
    for d, bucket in enumerate(buckets):
        accumulated |= bucket
        view = G.subgraph(accumulated)
        subgraphs[d] = view.copy() if copy_graph else view

    return subgraphs


def all_edge_neighborhoods_nx(
    G: nx.Graph,
    edge,
    max_depth: int = 5,
    copy_graph: bool = True
) -> Dict[int, nx.Graph]:
    """
    Return a {depth: subgraph} dict built from one multi-source BFS.

    Depths run from 0 to min(max_depth, actual maximum distance):
      - depth 0 contains only the edge's two endpoints;
      - depth k contains every node within distance k of either endpoint,
        together with all edges among those nodes;
      - once k reaches the graph diameter the subgraph is the whole graph.
    """
    src_a, src_b = edge
    # Multi-source BFS: distance of each node to the nearer endpoint.
    distance = {src_a: 0, src_b: 0}
    frontier = deque([(src_a, 0), (src_b, 0)])
    while frontier:
        node, d = frontier.popleft()
        if d == max_depth:
            continue
        for nbr in G.neighbors(node):
            if nbr not in distance:
                distance[nbr] = d + 1
                frontier.append((nbr, d + 1))

    # The BFS may stop short of max_depth on small graphs; only generate
    # depths that actually exist.
    deepest = min(max_depth, max(distance.values()))

    # Bucket nodes by their BFS distance.
    buckets = [set() for _ in range(deepest + 1)]
    for node, d in distance.items():
        buckets[d].add(node)  # d <= deepest by construction

    # Cumulative unions of the buckets give the nested neighborhoods.
    subgraphs = {}
    accumulated = set()
    for d in range(deepest + 1):
        accumulated |= buckets[d]
        view = G.subgraph(accumulated)
        subgraphs[d] = view.copy() if copy_graph else view

    return subgraphs


def graphs_equal(g1: nx.Graph, g2: nx.Graph) -> bool:
    """
    Return True iff both graphs have identical node and edge sets.

    (NetworkX reports undirected edges as unordered pairs in a consistent
    orientation, so plain set equality suffices; for a DiGraph the ordered
    pairs are compared as-is.)
    """
    same_nodes = set(g1.nodes) == set(g2.nodes)
    return same_nodes and set(g1.edges) == set(g2.edges)

# ------------------- 核心函数 -------------------


def set_edge_attributes_for_multigraph(G, edge_attr_dict, attr_name):
    """
    Apply simple-graph edge attributes to every keyed edge of a MultiGraph.

    Args:
        G: networkx.MultiGraph.
        edge_attr_dict: {(u, v): value} attributes keyed by simple edges.
        attr_name: name of the attribute to set.

    Note:
        Matches (u, v) in either orientation, mirroring
        ``add_edge_attributes_for_multigraph``; the original only matched
        the stored orientation and silently skipped reversed edges.
    """
    # Collect the per-(u, v, key) values before writing them back in bulk.
    multi_edge_attr = {}

    for u, v, key in G.edges(keys=True):
        simple_edge = (u, v)
        # Fall back to the reversed orientation for undirected inputs.
        if simple_edge not in edge_attr_dict and (v, u) in edge_attr_dict:
            simple_edge = (v, u)
        if simple_edge in edge_attr_dict:
            multi_edge_attr[(u, v, key)] = edge_attr_dict[simple_edge]

    nx.set_edge_attributes(G, multi_edge_attr, attr_name)


def add_edge_attributes_for_multigraph(G, edge_attr_dict, attr_name):
    """
    Merge new key/value pairs into an existing dict-valued edge attribute
    of a MultiGraph.

    Args:
        G: networkx.MultiGraph.
        edge_attr_dict: mapping from simple edges to the NEW entries,
            i.e. {(u, v): {k1: v1, k2: v2, ...}, ...}.
        attr_name: attribute to update (e.g. 'gebc_local', 'gebc_m1_local',
            'node_count').

    Raises:
        ValueError: if an edge already carries ``attr_name`` but its value
            is not a dict.

    Example:
        # add {'a': 10, 'b': 20} from simple edge (1, 2) onto G
        edge_attr_dict = {(1, 2): {'a': 10, 'b': 20}}
        add_edge_attributes_for_multigraph(G, edge_attr_dict, 'gebc_local')
    """
    # Stage the merged dicts, then write them back in one call.
    pending = {}

    for u, v, key, data in G.edges(keys=True, data=True):
        # Match the simple edge in either orientation (undirected graphs).
        lookup = (u, v)
        if lookup not in edge_attr_dict and (v, u) in edge_attr_dict:
            lookup = (v, u)
        if lookup not in edge_attr_dict:
            continue

        additions = edge_attr_dict[lookup]
        current = data.get(attr_name)

        if current is None:
            # Nothing there yet: start from a copy of the new entries.
            merged = additions.copy()
        elif isinstance(current, dict):
            # Merge into a copy of the existing dict (never mutate in place).
            merged = current.copy()
            merged.update(additions)
        else:
            raise ValueError(
                f"Edge ({u},{v},{key}) 上的属性 '{attr_name}' 不是 dict，无法 update"
            )

        pending[(u, v, key)] = merged

    nx.set_edge_attributes(G, pending, attr_name)


def GEBC_local_full(base_path):
    """
    Compute local GEBC (graph_tool edge betweenness) and GEBC_m1 for every
    edge of the largest connected component, at every neighborhood depth
    1..diameter-1, then attach the results as edge attributes of the FULL
    graph and pickle it next to the input.

    Args:
        base_path: directory containing graph_series/0-graph.pkl and the
            precomputed distance matrix used for the diameter.
    """
    base_path = Path(base_path)
    graph_file = base_path / "graph_series" / "0-graph.pkl"

    # Load the network and keep the largest connected component.
    with open(graph_file, 'rb') as f:
        G_full = pickle.load(f)

    # Attach cos factors before any analysis.
    G_full = add_cos_factor_to_edges(G_full)

    G = at.simplify_maximum_subgraph(G_full)
    diameter = calculate_diameter_using_distance_matrix(base_path)

    # graph_tool mirror of G for the fast betweenness kernel.
    G_gt, vertex_map, edge_map = nx_to_gt(G)

    # Result containers.
    edge_gebc_dict = {}        # edge -> {depth: GEBC}
    edge_node_count_dict = {}  # edge -> {depth: subgraph node count}
    edge_gebc_m1_dict = {}     # edge -> {depth: GEBC_m1}

    GEBC_weight = {}
    GEBC_weight['box'] = bm.get_box_dimensions(G.graph['box'])
    GEBC_weight['mode'] = 'm1'
    # BUGFIX: the original called bm.detect_position_key(subgraph_nx), but
    # subgraph_nx is not defined until inside the depth loop -> NameError.
    key_name = bm.detect_position_key(G)
    # The position dict depends only on G, so build it once instead of
    # rebuilding it for every edge and depth as the original did.
    position = {node: data[key_name] for node, data in G.nodes(
        data=True) if key_name in data}
    GEBC_weight['position'] = position

    # Iterate over every edge of the component.
    for edge in G.edges():
        edge_start_time = time.time()

        edge_gebc_dict[edge] = {}
        edge_node_count_dict[edge] = {}
        edge_gebc_m1_dict[edge] = {}

        print(f"\n开始计算边 {edge}")

        # All neighborhood subgraphs of this edge from a single BFS.
        subgraphs_dict = all_edge_neighborhoods_nx(
            G, edge, max_depth=diameter-1)
        # The BFS may stop short of diameter-1 for this particular edge.
        deepest_available = max(subgraphs_dict)

        # Depths 1, 2, ..., diameter-1.
        for depth in range(1, diameter):
            print(f"计算深度 {depth}/{diameter-1}")

            # 1. Local subgraph around the edge (NetworkX); clamped to the
            #    deepest subgraph that actually exists to avoid a KeyError.
            nx_start_time = time.time()
            subgraph_nx = subgraphs_dict[min(depth, deepest_available)]
            nx_time = time.time() - nx_start_time

            # 2. Convert the NetworkX subgraph to graph_tool.
            convert_start_time = time.time()
            subgraph_gt1, sub_vertex_map, sub_edge_map = nx_to_gt(subgraph_nx)
            convert_time = time.time() - convert_start_time

            # 3. GEBC via graph_tool edge betweenness.
            gebc_start_time = time.time()
            bc_v_gt1, bc_e_gt1 = gt.betweenness(
                subgraph_gt1, eprop=None, norm=False)

            # Locate the target edge inside the converted subgraph.
            target_edge_gt1 = None
            u, v = edge
            if u in sub_vertex_map and v in sub_vertex_map:
                source = sub_vertex_map[u]
                target = sub_vertex_map[v]
                for e in subgraph_gt1.edges():
                    if (e.source() == source and e.target() == target) or \
                       (e.source() == target and e.target() == source):
                        target_edge_gt1 = e
                        break

            if target_edge_gt1 is None:
                print(f"警告：在子图1中未找到目标边 {edge}")
                gebc_value1 = 0.0
            else:
                gebc_value1 = bc_e_gt1[target_edge_gt1]

            gebc_time = time.time() - gebc_start_time

            # 4. GEBC_m1 via the customized edge betweenness centrality.
            m1_start_time = time.time()
            edge_betweenness = nx.edge_betweenness_centrality(
                subgraph_nx,
                GEBC_weight=GEBC_weight,
                normalized=True
            )

            # The stored orientation of the edge may be reversed.
            edge_key = edge
            if edge not in edge_betweenness:
                edge_key = (edge[1], edge[0])
                if edge_key not in edge_betweenness:
                    print(f"警告：边 {edge} 在子图中不存在，跳过GEBC_m1计算")
                    gebc_m1_value = 0.0
                else:
                    gebc_m1_value = edge_betweenness[edge_key]
            else:
                gebc_m1_value = edge_betweenness[edge_key]
            m1_time = time.time() - m1_start_time

            # Timing report for this depth.
            print(f"各部分用时:")
            print(f"  - NetworkX子图构建: {nx_time:.2f}秒")
            print(f"  - NetworkX转graph_tool: {convert_time:.2f}秒")
            print(f"  - graph_tool GEBC计算: {gebc_time:.2f}秒")
            print(f"  - GEBC_m1计算: {m1_time:.2f}秒")

            # Store this depth's results.
            edge_gebc_dict[edge][depth] = gebc_value1
            edge_node_count_dict[edge][depth] = subgraph_nx.number_of_nodes()
            edge_gebc_m1_dict[edge][depth] = gebc_m1_value

        edge_time = time.time() - edge_start_time
        print(f"边 {edge} 计算完成，总用时: {edge_time:.2f} 秒")

    # Keep the results on the full graph (sol phase included) so later
    # analyses can still see them.
    nx.set_edge_attributes(G_full, edge_gebc_dict, 'gebc_local')
    nx.set_edge_attributes(G_full, edge_node_count_dict, 'node_count')
    nx.set_edge_attributes(G_full, edge_gebc_m1_dict, 'gebc_m1_local')

    # Save the annotated graph.
    output_file = base_path / "graph_series" / "0-graph_with_gebc_local.pkl"
    with open(output_file, 'wb') as f:
        pickle.dump(G_full, f)

    print("计算完成，结果已保存到:", output_file)


def GEBCs_local(base_path):
    """
    Compute local GEBC and GEBC_m1 for every edge at every neighborhood
    depth (both via graph_tool kernels), then attach the results as edge
    attributes of the full graph and pickle it.

    Args:
        base_path: directory containing graph_series/0-graph.pkl and the
            precomputed distance matrix used for the diameter.
    """
    base_path = Path(base_path)
    graph_file = base_path / "graph_series" / "0-graph.pkl"

    # Load the network and keep the largest connected component.
    with open(graph_file, 'rb') as f:
        G_full = pickle.load(f)

    # Attach cos factors before any analysis.
    G_full = add_cos_factor_to_edges(G_full)

    G = at.simplify_maximum_subgraph(G_full)
    diameter = calculate_diameter_using_distance_matrix(base_path)

    # Result containers.
    edge_gebc_dict = {}        # edge -> {depth: GEBC}
    edge_gebc_m1_dict = {}     # edge -> {depth: GEBC_m1}
    edge_node_count_dict = {}  # edge -> {depth: subgraph node count}

    # Progress accounting.
    total_edges = len(list(G.edges()))
    completed_edges = 0

    for edge in G.edges():
        edge_start_time = time.time()

        edge_gebc_dict[edge] = {}
        # BUGFIX: this dict was never initialized per edge in the original,
        # so the depth assignment below raised KeyError immediately.
        edge_gebc_m1_dict[edge] = {}
        edge_node_count_dict[edge] = {}

        print(f"\n开始计算边 {edge}")

        # All neighborhood subgraphs of this edge from one BFS.
        subgraphs_dict = all_edge_neighborhoods_nx(
            G, edge, max_depth=diameter-1)
        # The BFS may stop short of diameter-1 for this particular edge.
        deepest_available = max(subgraphs_dict)

        # Depths 1, 2, ..., diameter-1.
        for depth in range(1, diameter):
            # Clamp to the deepest subgraph that actually exists.
            subgraph_nx = subgraphs_dict[min(depth, deepest_available)]

            # GEBC via graph_tool betweenness.
            subgraph_gt1, sub_vertex_map, sub_edge_map = nx_to_gt(subgraph_nx)
            _, gebc = gt.betweenness(
                subgraph_gt1, norm=True)
            # GEBC_m1 via the custom kernel.
            gebc_m1 = gebc_m1_gt(subgraph_nx)

            # BUGFIX: nx_to_gt keys edge_map by frozenset({u, v}) and
            # gebc_m1_gt returns frozenset keys too; the original tuple
            # lookups always raised KeyError.
            edge_key = frozenset(edge)
            edge_gebc_dict[edge][depth] = gebc[sub_edge_map[edge_key]]
            edge_gebc_m1_dict[edge][depth] = gebc_m1[edge_key]
            edge_node_count_dict[edge][depth] = subgraph_nx.number_of_nodes()

        edge_time = time.time() - edge_start_time
        completed_edges += 1
        completion_percentage = (completed_edges / total_edges) * 100
        print(f"边 {edge} 计算完成，总用时: {edge_time:.2f} 秒")
        print(
            f"完成进度: {completion_percentage:.2f}% ({completed_edges}/{total_edges})")

    # Keep the results on the full graph (sol phase included) so later
    # analyses can still see them.
    nx.set_edge_attributes(G_full, edge_gebc_dict, 'gebc_local')
    nx.set_edge_attributes(G_full, edge_node_count_dict, 'node_count')
    # Persist GEBC_m1 too: the original computed it but never saved it.
    nx.set_edge_attributes(G_full, edge_gebc_m1_dict, 'gebc_m1_local')

    # Save the annotated graph.
    output_file = base_path / "graph_series" / "0-graph_with_gebc_local.pkl"
    with open(output_file, 'wb') as f:
        pickle.dump(G_full, f)

    print("计算完成，结果已保存到:", output_file)


def process_edge_chunk(args):
    """Worker for the parallel GEBC computation.

    Handles one chunk of edges, using frozenset edge keys so that lookups
    are independent of edge orientation.

    Args:
        args: tuple (edges, G, diameter) as packed by GEBC_local_parall.

    Returns:
        tuple of three dicts: (gebc, gebc_m1, node_count), each keyed by
        edge then by depth.
    """
    edges, G, diameter = args

    gebc_out = {}
    gebc_m1_out = {}
    node_count_out = {}

    start = time.time()
    for edge in edges:
        gebc_out[edge] = {}
        gebc_m1_out[edge] = {}
        node_count_out[edge] = {}

        # Neighborhood subgraphs for every depth; diameter-2 excludes the
        # full graph.
        subgraphs_dict = all_edge_neighborhoods_nx(
            G, edge, max_depth=diameter-2)
        # Skip depth 0, which is just the starting edge itself.
        depths = sorted(subgraphs_dict)[1:]

        for depth in depths:
            d = int(depth)
            edge_data = G.edges[edge]
            # Skip quantities already present on the graph for this depth.
            has_gebc = ('gebc_local' in edge_data
                        and d in edge_data['gebc_local'])
            has_gebc_m1 = ('gebc_m1_local' in edge_data
                           and d in edge_data['gebc_m1_local'])

            sub_nx = subgraphs_dict[depth]
            sub_gt, v_map, e_map = nx_to_gt(sub_nx)

            # Orientation-free edge key into e_map / gebc_m1 results.
            key = frozenset(edge)

            if not has_gebc:
                _, gebc_map = gt.betweenness(sub_gt, norm=True)
                gebc_out[edge][d] = gebc_map[e_map[key]]
            if not has_gebc_m1:
                gebc_m1_map = gebc_m1_gt(sub_nx, normalized=True)
                gebc_m1_out[edge][d] = gebc_m1_map[key]

            node_count_out[edge][d] = sub_nx.number_of_nodes()

    elapsed = time.time() - start
    print(
        f"进程 {multiprocessing.current_process().name} 处理 {len(edges)} 条边，耗时 {elapsed:.2f}s")
    return gebc_out, gebc_m1_out, node_count_out


def GEBC_local_parall(graph_file, num_processes=36):
    """Parallel (server-side) GEBC_local computation using frozenset edge keys.

    Args:
        graph_file: path to the pickled graph; it is overwritten in place
            with the annotated result.
        num_processes: size of the worker pool.
    """
    # Load and reduce to the largest connected component.
    with open(graph_file, 'rb') as f:
        G_full = pickle.load(f)
    G = at.simplify_maximum_subgraph(G_full)
    diameter = G.graph['diameter']

    all_edges = list(G.edges())
    # Split the edge list into one chunk per worker (ceiling division).
    size = (len(all_edges) + num_processes - 1) // num_processes
    chunks = [all_edges[pos:pos+size] for pos in range(0, len(all_edges), size)]

    args = [(chunk, G, diameter) for chunk in chunks]
    print(f"开始并行计算，使用 {num_processes} 个进程，每块大小约 {size} 条边。")
    t0 = time.time()
    with multiprocessing.Pool(num_processes) as pool:
        results = pool.map(process_edge_chunk, args)

    # Merge the per-chunk result dicts.
    gebc_all = {}
    gebc_m1_all = {}
    nodecnt_all = {}
    for chunk_gebc, chunk_m1, chunk_counts in results:
        gebc_all.update(chunk_gebc)
        gebc_m1_all.update(chunk_m1)
        nodecnt_all.update(chunk_counts)

    print(f"并行计算完成，耗时 {time.time()-t0:.2f}s")

    # Write results back as the gebc_local / gebc_m1_local / node_count
    # edge attributes.
    if gebc_all:
        add_edge_attributes_for_multigraph(G_full, gebc_all, 'gebc_local')
    if gebc_m1_all:
        add_edge_attributes_for_multigraph(
            G_full, gebc_m1_all, 'gebc_m1_local')
    if nodecnt_all:
        add_edge_attributes_for_multigraph(G_full, nodecnt_all, 'node_count')

    # Overwrite the input file in place.
    with open(graph_file, 'wb') as f:
        pickle.dump(G_full, f)
    print(f"结果已保存到: {graph_file}")


# ================================
# 利用 graph_tool 计算的关键
# ================================


def gebc_m1_gt(G_nx, normalized=True):
    """
    Compute GEBC_m1 with the graph-tool C++ backend.

    Args:
        G_nx: NetworkX graph carrying node positions and a 'box' graph
            attribute.
        normalized: whether to normalize, default True.
            (The original docstring incorrectly claimed the default is False.)

    Returns:
        dict: {frozenset({u, v}): GEBC_m1 value} for every edge of G_nx.

    Raises:
        ValueError: if a graph_tool vertex has no matching node/position.
    """
    def to_double_vec(seq):
        """Python iterable -> Vector_double"""
        v = Vector_double()
        v.extend(float(x) for x in seq)
        return v

    def to_size_t_vec(seq):
        """Python iterable of ints -> Vector_size_t"""
        v = Vector_size_t()
        v.extend(int(x) for x in seq)
        return v

    # Convert to a graph-tool graph.
    g_gt, vertex_map, edge_map = nx_to_gt(G_nx)

    # Positions and simulation-box dimensions.
    box = bm.get_box_dimensions(G_nx.graph.get('box'))
    key_name = Gb.detect_position_key(G_nx)
    position_dict = {node: data[key_name] for node,
                     data in G_nx.nodes(data=True) if key_name in data}

    # Invert vertex_map once; the original scanned the whole mapping for
    # every vertex, which made this step O(V^2).
    reverse_vertex_map = {gt_vertex: node
                          for node, gt_vertex in vertex_map.items()}

    # Per-vertex coordinate property for the C++ kernel.
    pos = g_gt.new_vertex_property("vector<double>")
    for v in g_gt.vertices():
        nx_node = reverse_vertex_map.get(v)
        if nx_node is not None and nx_node in position_dict:
            pos[v] = np.array(position_dict[nx_node])
        else:
            raise ValueError("找不到对应的节点或没有坐标")

    # ----------- 1. default pivots: all vertices ----------
    pivots = list(range(g_gt.num_vertices()))

    # ----------- 2. properties -> std::any ----
    pos_any = _prop("v", g_gt, pos)

    # Output betweenness properties for edges and vertices.
    ebc = g_gt.new_edge_property("double")
    vbc = g_gt.new_vertex_property("double")
    ebc_any = _prop("e", g_gt, ebc)
    vbc_any = _prop("v", g_gt, vbc)

    # ----------- 3. lists -> Vector_* ---
    box_vec = to_double_vec(box)
    pivots_vec = to_size_t_vec(pivots)

    # ----------- 4. call into C++ ------------
    gt_cent.get_betweenness_m1(
        g_gt._Graph__graph,   # GraphInterface *
        pos_any,
        box_vec,
        ebc_any,
        vbc_any,
        pivots_vec
    )

    # Normalize by the number of unordered vertex pairs (the graph_tool
    # kernel itself returns unnormalized values).
    if normalized:
        n = g_gt.num_vertices()
        if n > 2:
            ebc.a /= ((n-1)*(n)/2)
            # vbc.a /= ((n-1)*(n)/2)

    # Map results back onto frozenset edge keys; the /2 corrects the
    # undirected double count.
    result = {}
    for nx_edge in G_nx.edges():
        gt_edge = edge_map[frozenset(nx_edge)]
        result[frozenset(nx_edge)] = float(ebc[gt_edge])/2

    return result


# 依赖函数和模块，需要在环境中已定义或导入：
# all_edge_neighborhoods_nx, nx_to_gt, at.simplify_maximum_subgraph,
# gt.betweenness, gebc_m1_gt, set_edge_attributes_for_multigraph


def GEBC_local_serial(graph_file, chunk_num=10):
    """
    Serial GEBC_local computation.

    Args:
        graph_file (str): path to the pickled graph.
        chunk_num (int): number of evenly spaced depths sampled per edge
            (default 10).

    Side effects:
        Writes 0-graph_with_gebc_local_serial.pkl next to the input file,
        carrying 'gebc_local', 'gebc_m1_local' and 'node_count' edge
        attributes.
    """
    # Load the network and reduce to the largest connected component.
    with open(graph_file, 'rb') as f:
        G_full = pickle.load(f)

    G = at.simplify_maximum_subgraph(G_full)
    diameter = G.graph['diameter']

    all_edges = list(G.edges())
    edge_gebc_dict = {}
    edge_gebc_m1_dict = {}
    edge_node_count_dict = {}

    start_time = time.time()
    total_edges = len(all_edges)

    for idx, edge in enumerate(all_edges, start=1):
        edge_gebc_dict[edge] = {}
        edge_gebc_m1_dict[edge] = {}
        edge_node_count_dict[edge] = {}

        # Neighborhood subgraphs of every depth for this edge.
        subgraphs_dict = all_edge_neighborhoods_nx(
            G, edge, max_depth=max(diameter-1, 1))
        # all_edge_neighborhoods_nx stops at the deepest reachable level,
        # which can be smaller than diameter-1 for this particular edge.
        deepest_available = max(subgraphs_dict)

        # Sample chunk_num evenly spaced depths in [1, diameter-1].
        depths = np.linspace(1, max(diameter-1, 1),
                             min(chunk_num, max(diameter-1, 1)), dtype=int)

        for depth in depths:
            # BUGFIX: clamp the sampled depth; the original indexed
            # subgraphs_dict directly and could raise KeyError when the
            # sampled depth exceeded the deepest existing subgraph.
            depth = min(int(depth), deepest_available)
            subgraph_nx = subgraphs_dict[depth]
            subgraph_gt1, sub_vertex_map, sub_edge_map = nx_to_gt(subgraph_nx)

            # GEBC via graph_tool; GEBC_m1 via the custom kernel. Both are
            # looked up with frozenset keys (orientation independent).
            _, bc_e_gt1 = gt.betweenness(subgraph_gt1, norm=True)
            GEBC_m1 = gebc_m1_gt(subgraph_nx, normalized=True)

            edge_gebc_dict[edge][depth] = bc_e_gt1[sub_edge_map[frozenset(
                edge)]]
            edge_gebc_m1_dict[edge][depth] = GEBC_m1[frozenset(edge)]

            edge_node_count_dict[edge][depth] = subgraph_nx.number_of_nodes()

        # Progress report.
        elapsed = time.time() - start_time
        print(f"[{idx}/{total_edges}] 完成边 {edge}，已用时 {elapsed:.2f} 秒")

    total_time = time.time() - start_time
    print(f"串行计算完成，总用时: {total_time:.2f} 秒")

    # Persist results as edge attributes on the full graph.
    set_edge_attributes_for_multigraph(G_full, edge_gebc_dict, 'gebc_local')
    set_edge_attributes_for_multigraph(
        G_full, edge_node_count_dict, 'node_count')
    # BUGFIX: GEBC_m1 was computed but never saved in the original.
    set_edge_attributes_for_multigraph(
        G_full, edge_gebc_m1_dict, 'gebc_m1_local')

    # Output file lives next to the input.
    output_file = os.path.join(os.path.dirname(
        graph_file), "0-graph_with_gebc_local_serial.pkl")
    with open(output_file, 'wb') as f:
        pickle.dump(G_full, f)

    print(f"结果已保存到: {output_file}")
