import numpy as np
import itertools
from collections import Counter
import os
import pickle
import networkx as nx
import matplotlib.pyplot as plt
from GraphTools.analysis_topology import plot_correlation_function,  average_generational_space


def simplify_maximum_subgraph(graph):
    """
    Extract and simplify the largest connected component of a graph.

    Parameters
    ----------
    graph : networkx.Graph
        Input graph (may be a MultiGraph with parallel edges).

    Returns
    -------
    networkx.Graph
        A simple-Graph copy of the largest connected component.
    """
    # Restrict to the biggest connected component.
    biggest_component = max(nx.connected_components(graph), key=len)
    component = graph.subgraph(biggest_component).copy()
    # Rebuilding as nx.Graph collapses any MultiGraph parallel edges.
    return nx.Graph(component)


def get_edge_distance(edge1, edge2, D, node_to_idx):
    """Look up the distance between two edges from the precomputed matrix D.

    The indices are ordered so the lookup always hits D[row, col] with
    row >= col — presumably D is populated on its lower triangle
    (TODO confirm against the matrix-generation code).
    """
    a = node_to_idx[edge1]
    b = node_to_idx[edge2]
    return D[max(a, b), min(a, b)]


def count_pair_num_new(G, edges, dist_map, node_to_idx, max_depth=None, mass_weight_flag=None):
    """
    Histogram the pairwise distances between edges, optionally mass-weighted.

    Distances are read from the precomputed matrix *dist_map* (indexed so
    that row >= col, matching how the matrix is stored). When
    *mass_weight_flag* is set, each edge's weight is the corresponding
    edge attribute of *G* (defaulting to 1.0); otherwise every edge
    weighs 1.0.

    Returns a numpy array of accumulated weights for k = 1..max_depth
    (the k = 0 slot is dropped).
    """
    # Per-edge weights, stored under both orientations of each edge.
    if mass_weight_flag:
        weights = {}
        for u, v in edges:
            w = G[u][v].get(mass_weight_flag, 1.0)
            weights[(u, v)] = w
            weights[(v, u)] = w
    else:
        weights = {pair: 1.0 for u, v in edges for pair in ((u, v), (v, u))}

    # NOTE(review): each pair contributes only the weight of its *first*
    # edge, so the weighted histogram depends on edge ordering — confirm
    # this asymmetry is intended.
    hist = Counter()
    for e1, e2 in itertools.combinations(edges, 2):
        a = node_to_idx[e1]
        b = node_to_idx[e2]
        k = dist_map[max(a, b), min(a, b)]
        if max_depth is None or k <= max_depth:
            hist[k] += weights[e1]

    depth = max_depth if max_depth is not None else (max(hist) if hist else 0)
    out = np.zeros(depth + 1)
    for k, w in hist.items():
        if k <= depth:
            out[k] = w
    return out[1:]


def topological_pair_corr_distance_matrix(G, marked_edges, dist_map, node_to_idx, max_depth=None, sample_size=50,
                                          seed=None, mass_weight_flag=None):
    """
    Compute the (optionally mass-weighted) topological pair-correlation
    function g_rel^(m)(k), using the precomputed distance matrix.

    Parameters
    ----------
    G : networkx.Graph
        The graph the marked edges belong to.
    marked_edges : list of (u, v)
        Edges whose pairwise correlation is measured (at least two).
    dist_map : array-like
        Precomputed edge-to-edge distance matrix.
    node_to_idx : dict
        Maps a marked edge to its row/column index in ``dist_map``.
    max_depth : int, optional
        Largest distance k to evaluate; inferred from the data if None.
    sample_size, seed :
        Forwarded to ``average_generational_space``.
    mass_weight_flag : str, optional
        Edge-attribute name used as the mass weight; unweighted if None.

    Returns
    -------
    (numpy.ndarray, numpy.ndarray)
        ks (1..len) and the corresponding g_rel values.

    Raises
    ------
    ValueError
        If fewer than two edges are marked.
    """
    M = len(marked_edges)
    if M < 2:
        raise ValueError("至少需要两条被标记的边")

    cnt = count_pair_num_new(G, marked_edges, dist_map,
                             node_to_idx, max_depth, mass_weight_flag)
    S_bar = average_generational_space(
        G, max_depth=len(cnt), sample_size=sample_size, seed=seed)

    if mass_weight_flag:
        # FIX: default a missing mass attribute to 1.0, consistent with
        # count_pair_num_new (previously defaulted to 0 here, which would
        # silently shrink rho0_m whenever an edge lacked the attribute).
        total_mass = sum(G[u][v].get(mass_weight_flag, 1.0)
                         for u, v in marked_edges)
        rho0_m = total_mass / (2 * G.number_of_edges())
    else:
        rho0_m = M / (2 * G.number_of_edges())
    # Guard against S_bar == 0 bins: leave g_rel at 0 there instead of inf/nan.
    with np.errstate(divide='ignore', invalid='ignore'):
        g_rel = np.where(S_bar > 0, cnt / (M * rho0_m * S_bar), 0)

    ks = np.arange(1, len(cnt) + 1)
    return ks, g_rel


def intial_end_GEBC_GEBC_m1_topo_PCF_distance_matrix(base_path, sample_ratio=None):
    """
    Compute the GEBC- and GEBC_m1-weighted PCFs for the initial and final
    graph snapshots, using the precomputed distance matrix to accelerate
    the pairwise edge-distance lookups.

    Parameters
    ----------
    base_path : str
        Task root directory; must contain the ``graph_series`` and
        ``graph_distance`` subdirectories.
    sample_ratio : float, optional
        Fraction of edges to sample, in (0, 1]. ``None`` means use all
        edges (e.g. 0.01 keeps 1% of the edges, 1.0 keeps them all).
    """
    series_dir = os.path.join(base_path, 'graph_series/')

    # Snapshot file names (directories excluded), sorted so the first and
    # last entries are the earliest and latest snapshots.
    snapshots = sorted(
        f for f in os.listdir(series_dir)
        if os.path.isfile(os.path.join(series_dir, f)))

    first_path = os.path.join(series_dir, snapshots[0]) if snapshots else None
    last_path = os.path.join(series_dir, snapshots[-1]) if snapshots else None

    output_dir = os.path.join(
        base_path, 'analysis_topology/corr_func_GEBC_weighted')
    os.makedirs(output_dir, exist_ok=True)

    # Load the node -> index map and the memory-mapped distance matrix.
    dist_path = os.path.join(base_path, 'graph_distance/0-distance.dat')
    nodes_path = os.path.join(base_path, 'graph_distance/0-nodes.pkl')
    with open(nodes_path, 'rb') as f:
        nodes = pickle.load(f)
    node_to_idx = {node: i for i, node in enumerate(nodes)}
    n = len(nodes)
    dist_map = np.memmap(dist_path, dtype=np.int32, mode='r', shape=(n, n))

    # 'N0' runs analyse both snapshots; otherwise only the final one.
    if 'N0' in base_path:
        targets = [(first_path, 'initial'), (last_path, 'end')]
    else:
        targets = [(last_path, 'end')]

    for snapshot_path, suffix in targets:
        with open(snapshot_path, 'rb') as f:
            graph = pickle.load(f)

        # Keep only the largest connected component, as a simple Graph.
        graph = simplify_maximum_subgraph(graph)

        all_edges = list(graph.edges())
        n_edges = len(all_edges)

        if sample_ratio is not None and 0 < sample_ratio <= 1:
            # Fixed seed so repeated runs sample the same edges.
            np.random.seed(42)
            chosen = np.random.choice(
                n_edges, int(sample_ratio * n_edges), replace=False)
            sampled_edges = [all_edges[i] for i in chosen]
        else:
            sampled_edges = all_edges
        print(f'{suffix} calculate edges: {len(sampled_edges)}')

        # GEBC-weighted correlation function via the distance matrix.
        ks_GEBC, g_rel_GEBC = topological_pair_corr_distance_matrix(
            graph, sampled_edges, dist_map, node_to_idx,
            sample_size=50, seed=1234, mass_weight_flag='GEBC'
        )

        # GEBC_m1-weighted correlation function via the distance matrix.
        ks_GEBC_m1, g_rel_GEBC_m1 = topological_pair_corr_distance_matrix(
            graph, sampled_edges, dist_map, node_to_idx,
            sample_size=50, seed=1234, mass_weight_flag='GEBC_m1'
        )

        # Encode the sampling setting in the output file names.
        sample_suffix = f"_sample{sample_ratio}" if sample_ratio is not None else "_full"

        plot_correlation_function(
            ks_GEBC, g_rel_GEBC,
            f'GEBC Weighted Correlation Function ({suffix})',
            os.path.join(
                output_dir, f'GEBC_weighted_correlation_{suffix}_distance_matrix{sample_suffix}')
        )

        plot_correlation_function(
            ks_GEBC_m1, g_rel_GEBC_m1,
            f'GEBC_m1 Weighted Correlation Function ({suffix})',
            os.path.join(
                output_dir, f'GEBC_m1_weighted_correlation_{suffix}_distance_matrix{sample_suffix}')
        )
