from scipy.stats import pointbiserialr
import os
import pickle
from collections import defaultdict
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import pearsonr
from concurrent.futures import ProcessPoolExecutor, as_completed

import GraphTools.network_method as nm
import GraphTools.graph_method as gm
import GraphTools.frame_series_method as fm
import GraphTools.basic_method as bm
import GraphTools.analysis as am

import os
import pickle
from collections import defaultdict
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import pearsonr
from concurrent.futures import ProcessPoolExecutor, as_completed


# Module-level global shared with worker processes: holds the deserialized
# initial graph. It stays None in the parent; each worker sets it via
# _init_worker (the ProcessPoolExecutor initializer).
INITIAL_GRAPH = None


def _init_worker(initial_graph_path):
    """
    子进程初始化函数:设置全局 INITIAL_GRAPH
    """
    global INITIAL_GRAPH
    with open(initial_graph_path, 'rb') as f:
        INITIAL_GRAPH = pickle.load(f)


def _process_mission(mission_dir):
    """Worker task: compute the broken bonds for one mission directory.

    Loads the frame series from ``mission_dir/frame_series.pkl``, takes
    the graph at the largest timestamp as the final state, and compares
    it against the shared INITIAL_GRAPH (set beforehand by _init_worker).

    Returns:
        The broken-bond list produced by fm.collect_broken_bonds.
    """
    # Load this mission's frame series and pick the final-state graph.
    series_path = os.path.join(mission_dir, 'frame_series.pkl')
    with open(series_path, 'rb') as fh:
        frame_series = pickle.load(fh)

    # The final state lives at the largest timestamp key.
    final_graph = frame_series[max(frame_series)]['graph']

    # Compare against the worker-global initial graph.
    return fm.collect_broken_bonds(INITIAL_GRAPH, final_graph)


def analyze_crosslink_folder_parallel(crosslink_path, max_workers=10, plot_flag=False):
    """Parallel bond-breakage analysis for every mission under *crosslink_path*.

    Each mission's final-frame graph is compared with the shared initial
    graph (inside worker processes) to collect broken bonds.  The per-bond
    breakage probability across missions is then correlated (Pearson) with
    the initial graph's GEBC / GEBC_m1 / GEBC_m2 edge attributes.  All
    outputs are written to ``crosslink_path/results``.

    Args:
        crosslink_path: Folder that contains the mission directories.
        max_workers: Process-pool size; ``None`` uses one worker per mission.
        plot_flag: When True, also save a bar chart of the breakage
            probability distribution.
    """
    # 1. Locate all mission folders.
    mission_dirs = bm.find_lammps_files(crosslink_path)

    if not mission_dirs:
        print(f"No missions found in {crosslink_path}")
        return

    if max_workers is None:
        max_workers = len(mission_dirs)

    # 2. Load the initial graph from the first mission's graph_series dir.
    initial_graph_path = os.path.join(
        mission_dirs[0], 'graph_series', '0-graph.pkl')
    with open(initial_graph_path, 'rb') as f:
        initial_graph = pickle.load(f)

    # 3. Fan the missions out to worker processes; each worker loads the
    #    initial graph once through the pool initializer.
    broken_list = []
    with ProcessPoolExecutor(
        max_workers=max_workers,
        initializer=_init_worker,
        initargs=(initial_graph_path,)
    ) as executor:
        futures = [executor.submit(_process_mission, md)
                   for md in mission_dirs]
        for fut in as_completed(futures):
            broken_list.append(fut.result())

    # 4. Aggregate breakage counts into per-bond probabilities over all
    #    missions; bonds that never broke get probability 0.
    key_broken_count = defaultdict(int)
    total = len(broken_list)
    for bl in broken_list:
        for bond in bl:
            key_broken_count[bond] += 1

    broken_prob = {
        (u, v, k): key_broken_count.get((u, v, k), 0) / total
        for u, v, k in initial_graph.edges(keys=True)
    }

    # 5. Prepare the results directory.
    results_dir = os.path.join(crosslink_path, 'results')
    os.makedirs(results_dir, exist_ok=True)

    # 6. Bar chart of the breakage-probability distribution.
    if plot_flag:
        probs = list(broken_prob.values())
        unique_probs = np.arange(0, 1.1, 0.1)
        # Bin with a histogram instead of exact float comparison: a
        # probability such as 1/3 never equals a rounded 0.1-grid value,
        # so the previous list.count(round(p, 1)) silently undercounted.
        # The edges are centered so each bin collects values nearest to
        # 0.0, 0.1, ..., 1.0.
        counts, _ = np.histogram(probs, bins=np.arange(-0.05, 1.15, 0.1))

        plt.figure()
        x = np.arange(len(unique_probs))
        plt.bar(x, counts, width=0.6, edgecolor='black', alpha=0.7)
        plt.xticks(x, [f"{p:.1f}" for p in unique_probs])
        plt.title('Distribution of Bond Breaking Probabilities')
        plt.xlabel('Breakage Probability')
        plt.ylabel('Counts')
        plt.grid(True, axis='y', linestyle='--', alpha=0.7)
        plt.savefig(os.path.join(results_dir, 'bond_broken.png'))
        plt.close()

    # 7. Extract GEBC / GEBC_m1 / GEBC_m2 edge attributes (0.0 when an
    #    edge lacks the attribute) and correlate with breakage probability.
    GEBC = {}
    GEBC_m1 = {}
    GEBC_m2 = {}
    for u, v, k, data in initial_graph.edges(keys=True, data=True):
        eid = (u, v, k)
        GEBC[eid] = data.get('GEBC', 0.0)
        GEBC_m1[eid] = data.get('GEBC_m1', 0.0)
        GEBC_m2[eid] = data.get('GEBC_m2', 0.0)

    edges = list(broken_prob.keys())
    prob_vals = [broken_prob[e] for e in edges]
    gebc_vals = [GEBC[e] for e in edges]
    gebc_m1_vals = [GEBC_m1[e] for e in edges]
    gebc_m2_vals = [GEBC_m2[e] for e in edges]

    corr_gebc,    p_gebc = pearsonr(gebc_vals,    prob_vals)
    corr_gebc_m1, p_gebc_m1 = pearsonr(gebc_m1_vals, prob_vals)
    corr_gebc_m2, p_gebc_m2 = pearsonr(gebc_m2_vals, prob_vals)

    # 8. Write the correlation summary.
    txt_path = os.path.join(results_dir, 'pearson_correlation_result.txt')
    with open(txt_path, 'w') as f:
        f.write(f"Pearson correlation (GEBC): {corr_gebc}\n")
        f.write(f"P-value (GEBC):            {p_gebc}\n")
        f.write(f"Pearson correlation (GEBC_m1): {corr_gebc_m1}\n")
        f.write(f"P-value (GEBC_m1):             {p_gebc_m1}\n")
        f.write(f"Pearson correlation (GEBC_m2): {corr_gebc_m2}\n")
        f.write(f"P-value (GEBC_m2):             {p_gebc_m2}\n")

    print(f"Results saved in {results_dir}")


def plot_gebc_and_bb_diff(x_strain_label, broken_nums,
                          correlations_GEBC_time, correlations_GEBC_m1_time,
                          save_path='./', pic_name=''):
    """
    Plot GEBC correlations and broken bonds differential in two subplots.

    Parameters:
    - x_strain_label: array-like, common x-axis data (strain)
    - broken_nums: array-like, broken bonds data
    - correlations_GEBC_time: array-like, first correlation series
    - correlations_GEBC_m1_time: array-like, second correlation series
    - save_path: file path the figure is written to (should name a file,
      not a directory — the default './' will fail in savefig)
    - pic_name: optional overall figure title (suptitle); empty for none
    """
    # Create figure with two subplots sharing the strain axis
    fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 12), sharex=True)

    # Per-step change in broken bonds; prepending the first value keeps
    # len(diffs) == len(broken_nums) so it plots against the same x data.
    diffs = np.diff(broken_nums, prepend=broken_nums[0])

    # Upper subplot: GEBC correlations
    ax1.plot(x_strain_label, correlations_GEBC_time,
             label='GEBC Time Correlation',
             color=(0.1216, 0.4667, 0.7059),  # Default blue
             marker='.')
    ax1.plot(x_strain_label, correlations_GEBC_m1_time,
             label='GEBC m1 Time Correlation',
             color=(1.0, 0.4980, 0.0549),  # Default orange
             marker='.')
    ax1.axhline(0, color='red', linestyle='-.', linewidth=1)
    ax1.set_ylabel('GEBC Correlations', color='black')
    ax1.tick_params(axis='y', labelcolor='black')
    ax1.legend(loc='best')
    ax1.set_title('GEBC Correlations')
    ax1.grid(True)

    # Lower subplot: Broken bonds differential
    ax2.plot(x_strain_label, diffs,
             label='Broken Bonds Differential',
             color='red',
             marker='.')
    ax2.set_xlabel('Strain')
    ax2.set_ylabel('Broken Bonds Differential', color='red')
    ax2.tick_params(axis='y', labelcolor='red')
    ax2.legend(loc='best')
    ax2.set_title('Broken Bonds Differential')
    ax2.grid(True)

    # Honor pic_name (previously accepted but silently ignored).
    if pic_name:
        fig.suptitle(pic_name)

    plt.tight_layout()
    plt.savefig(save_path)
    # Release the figure: repeated batch calls otherwise accumulate open
    # figures and leak memory.
    plt.close(fig)

def analyze_gebc_broken_corr(mission_dir: str) -> None:
    """
    Analyze point-biserial correlation between broken bonds and GEBC values
    over time for a mission directory containing frame series data.

    Steps:
    1. Load frame_series.pkl, which maps timestamps to {'graph': MultiGraph, ...}.
    2. Sort timestamps and extract the initial graph to compute reference box size.
    3. Compute x-strain for each frame relative to the initial state using compute_box_strain_rate.
    4. Collect broken bonds between consecutive frames.
    5. For each frame, compute:
       - number of broken bonds
       - point-biserial correlation and p-value between broken status and GEBC/GEBC_m1
    6. Plot and save both raw and differential correlation graphs.
    7. Serialize results (strains, counts, correlations, p-values) to a pickle file.

    Args:
        mission_dir: Path to the mission folder containing frame_series.pkl.
    """
    # 1. Load time series of graphs
    series_path = os.path.join(mission_dir, 'frame_series.pkl')
    with open(series_path, 'rb') as f:
        graph_series = pickle.load(f)

    # 2. Sort timestamps and get initial graph
    times = sorted(graph_series.keys())
    initial = graph_series[times[0]]['graph']

    # 3. Compute x-strains: zero for t0, then (Lx(t)-Lx0)/Lx0
    #    (strains[i] corresponds to times[i])
    strains = [0.0]
    for t in times[1:]:
        current = graph_series[t]['graph']
        eps = bm.compute_box_strain_rate(initial, current)
        strains.append(eps)

    # 4. Track broken bonds per timestamp: broken[t] holds the bonds lost
    #    between the previous frame and frame t.
    broken = {}
    for prev_t, curr_t in zip(times, times[1:]):
        G0 = graph_series[prev_t]['graph']
        G1 = graph_series[curr_t]['graph']
        broken[curr_t] = fm.collect_broken_bonds(G0, G1)

    # 5. Prepare containers for analysis
    x_vals, b_nums = [], []
    corr0, corr1 = [], []  # GEBC, GEBC_m1 correlations
    p0, p1 = [], []         # corresponding p-values

    # 5a. Precompute broken bond counts vs strain from plotting helper
    # NOTE(review): two_x appears to be a pair of x-axes with index 1 being
    # the strain axis — confirm against fm.plot_broken_bonds.
    two_x, all_counts = fm.plot_broken_bonds(graph_series, save_flag=False)
    strain_bins, count_bins = two_x[1], all_counts

    # 5b. Loop frames, skipping initial
    for idx, t in enumerate(times[1:], start=1):
        # Skip frames with no breakage events (empty or missing list).
        bc_set = broken.get(t)
        if not bc_set:
            continue

        eps = strains[idx]
        # Map strain to total broken count
        # NOTE(review): relies on an exact float match between eps and the
        # strain values returned by fm.plot_broken_bonds; this only works if
        # both derive from identical arithmetic, and raises ValueError
        # otherwise — TODO confirm / consider a tolerance-based lookup.
        j = strain_bins.index(eps)
        total_broken = count_bins[j]

        x_vals.append(eps)
        b_nums.append(total_broken)

        # Use previous graph to get GEBC values and labels
        # (edges are taken from the frame BEFORE the breakage happened, so
        # broken edges still exist and can be labeled 1).
        G_prev = graph_series[times[idx-1]]['graph']
        edges = list(G_prev.edges(keys=True))
        labels = [1 if e in bc_set else 0 for e in edges]
        gebc = [G_prev.edges[u, v, k].get('GEBC', 0) for u, v, k in edges]
        gebc_m1 = [G_prev.edges[u, v, k].get(
            'GEBC_m1', 0) for u, v, k in edges]

        # Compute point-biserial correlation (binary broken label vs GEBC)
        r0 = pointbiserialr(labels, gebc)
        r1 = pointbiserialr(labels, gebc_m1)

        corr0.append(r0.correlation)
        p0.append(r0.pvalue)
        corr1.append(r1.correlation)
        p1.append(r1.pvalue)

    # 6. Create output directory
    out = os.path.join(mission_dir, 'analysis_correlations')
    os.makedirs(out, exist_ok=True)

    # 7. Plot and save correlations with broken bonds differential
    base = 'GEBCs-Broken_Chains_Correlation_overTime'
    plot_gebc_and_bb_diff(x_vals, b_nums, corr0, corr1,
                          save_path=os.path.join(out, base + '.png'),
                          pic_name=base)

    # 8. Save results to pickle
    # NOTE(review): 'strains' covers every frame while the other lists only
    # cover frames that had breakage events, so the lists are not aligned
    # index-for-index — confirm downstream consumers expect this.
    results = {
        'strains': strains,
        'broken_nums': b_nums,
        'corr_GEBC': corr0,
        'corr_GEBC_m1': corr1,
        'p_GEBC': p0,
        'p_GEBC_m1': p1,
    }
    with open(os.path.join(out, base + '.pkl'), 'wb') as f:
        pickle.dump(results, f)

    print(f'Analysis complete: {out}')
