import base64
from io import BytesIO
import torch
import numpy as np
import networkx as nx
from tqdm import tqdm
import matplotlib.pyplot as plt
from torch.distributions import Categorical
from torch_geometric.loader import DataLoader
from web.AttGnn.model import Att_Gnn
from web.AttGnn.util import simulate, recur, generate_data, num_of_l
from web.AttGnn.loss import weighted_loss

def run_er():
    """Compare GNN prevalence predictions to simulation on Erdos-Renyi graphs.

    Sweeps edge probability so average degree ranges roughly 0..8, runs the
    SIS ground-truth simulation and the trained GNN on each graph, and plots
    prevalence vs. average degree.

    Returns:
        str: a ``data:image/png;base64,...`` URI of the scatter plot.
    """
    # map_location guards against a model pickled on GPU being loaded on a
    # CPU-only web host (torch.load would otherwise raise).
    model = torch.load("./web/AttGnn/models/Web_Gnn_Model_1.pkl", map_location="cpu")
    model.to("cpu")
    er_sim = []

    for i in tqdm(range(1, 50)):
        G = nx.erdos_renyi_graph(2000, 8e-5 * i)
        avg = G.number_of_edges() * 2 / 2000  # average degree = 2|E|/|V|
        ground_truth = simulate(0.04, 0.08, G)
        recur_data = recur(model, G)
        er_sim.append([avg, ground_truth.item(), recur_data.item()])

    er_sim = np.array(er_sim)
    fig, ax = plt.subplots()

    ax.scatter(er_sim[..., 0], er_sim[..., 1], label="ground_truth")
    ax.scatter(er_sim[..., 0], er_sim[..., 2], label="gnn")
    ax.axvline(4, linestyle='dashed')  # theoretical epidemic-threshold marker
    ax.set_xlabel("average degree", fontsize=12)
    ax.set_ylabel("prevalence", fontsize=12)
    ax.set_title("SIS -- ER", fontsize=15)
    ax.legend()

    buffer = BytesIO()
    # Explicit PNG format: the data URI below declares image/png.
    fig.savefig(buffer, format="png")
    # Close the figure so repeated web requests don't leak pyplot figures.
    plt.close(fig)
    img_data = buffer.getvalue()
    imb = base64.b64encode(img_data)
    ims = imb.decode()
    imd = "data:image/png;base64," + ims

    return imd

def run_ba():
    """Compare GNN predictions to simulation on BA and powerlaw-cluster graphs.

    Produces two plots: (1) prevalence vs. average degree on Barabasi-Albert
    graphs, and (2) prevalence vs. clustering coefficient on powerlaw-cluster
    graphs at several average degrees.

    Returns:
        list[str]: two ``data:image/png;base64,...`` URIs, [BA plot, cluster plot].
    """
    # map_location + .to("cpu") keep this consistent with run_er and safe on
    # hosts without a GPU.
    model = torch.load("./web/AttGnn/models/Web_Gnn_Model_1.pkl", map_location="cpu")
    model.to("cpu")
    ba_sim = []

    for i in range(1, 9):
        G = nx.barabasi_albert_graph(2000, i)
        avg = G.number_of_edges() * 2 / 2000  # average degree = 2|E|/|V|
        ground_truth = simulate(0.04, 0.08, G)
        recur_data = recur(model, G)
        ba_sim.append([avg, ground_truth.item(), recur_data.item()])

    cluster_sim = []

    # Grid over attachment parameter i (sets average degree ~2i) and triangle
    # probability j/10 (sets clustering).
    for i in range(1, 9):
        cluster_temp = []
        for j in range(1, 10):
            G = nx.powerlaw_cluster_graph(2000, i, j / 10)
            avg = G.number_of_edges() * 2 / 2000
            ground_truth = simulate(0.04, 0.08, G)
            recur_data = recur(model, G)
            clustering = np.mean(list(nx.clustering(G).values()))
            cluster_temp.append([clustering, ground_truth.item(), recur_data.item()])
        cluster_sim.append(cluster_temp)

    ba_sim = np.array(ba_sim)

    fig, ax = plt.subplots()

    ax.scatter(ba_sim[..., 0], ba_sim[..., 1], label="ground_truth")
    ax.scatter(ba_sim[..., 0], ba_sim[..., 2], label="gnn")
    ax.axvline(4, linestyle='dashed')  # theoretical epidemic-threshold marker
    ax.set_xlabel("average degree", fontsize=12)
    ax.set_ylabel("prevalence", fontsize=12)
    ax.set_title("SIS -- BA", fontsize=15)
    ax.legend()

    buffer1 = BytesIO()
    # Save via the figure handle (plt.savefig targets the "current" figure,
    # which is fragile in a multi-figure/web context) and close it afterwards
    # to avoid leaking pyplot figures across requests.
    fig.savefig(buffer1, format="png")
    plt.close(fig)
    img_data = buffer1.getvalue()
    imb = base64.b64encode(img_data)
    ims = imb.decode()
    imd1 = "data:image/png;base64," + ims

    cluster_sim = np.array(cluster_sim)
    fig, ax = plt.subplots(figsize=(10, 7))
    i = 1
    for item in cluster_sim:
        avg_label = "avg_deg=" + str(i * 2)
        i += 1
        ax.plot(item[..., 0], item[..., 1], label=avg_label)
        ax.scatter(item[..., 0], item[..., 2])
        # Annotate each curve next to its last point.
        ax.text(item[-1, 0]+0.02, item[-1, 2] + 0.01, avg_label)
    ax.set_xlabel("clustering coefficient", fontsize=16)
    ax.set_ylabel("prevalence", fontsize=16)
    ax.set_title("SIS -- powerlaw cluster graph", fontsize=20)
    ax.set_xlim((-0.05, 1))
    ax.set_ylim((0.1, 1))
    ax.legend()

    buffer2 = BytesIO()
    fig.savefig(buffer2, format="png")
    plt.close(fig)
    img_data = buffer2.getvalue()
    imb = base64.b64encode(img_data)
    ims = imb.decode()
    imd2 = "data:image/png;base64," + ims

    return [imd1, imd2]

def run_fig():
    """Train Att_Gnn briefly on a BA graph and plot learned transition curves.

    Trains for 10 epochs on data generated from a 1000-node Barabasi-Albert
    graph, saves the model, then plots the learned S->I and I->S transition
    probabilities against the number of infected neighbors, alongside the
    analytic ground truth 1 - (1 - 0.04)**l.

    Returns:
        str: a ``data:image/png;base64,...`` URI of the plot.
    """
    G = nx.barabasi_albert_graph(1000, 2)
    dataset = generate_data(0.04, 0.08, G)
    batch_size = 1
    data_iter = DataLoader(dataset, batch_size=batch_size, drop_last=True)  # mini-batch training

    model = Att_Gnn()

    model.train()

    optimizer = torch.optim.RAdam(
        model.parameters(), betas=(0.9, 0.999), eps=4.0e-4, lr=1e-3, weight_decay=1.0e-4
    )
    for epoch in range(10):
        loss_avg = 0
        count = 0
        for data in data_iter:
            optimizer.zero_grad()
            out = model(data)

            loss_func = weighted_loss(data.edge_index, 0.5)
            loss = loss_func(out, data)
            loss.backward()
            optimizer.step()
            # .item() detaches the scalar; accumulating the raw tensor would
            # keep every iteration's autograd graph alive.
            loss_avg += loss.item()
            count += 1
        loss_avg /= count

    torch.save(model, "./web/AttGnn/models/Web_Gnn_Model.pkl")

    # Maps (state, num_infected_neighbors) -> predicted transition probabilities.
    trans_pro = {}

    # Switch to eval mode so dropout/batch-norm layers (if any) behave
    # deterministically during inference.
    model.eval()
    with torch.no_grad():
        for item in dataset:
            l = num_of_l()(item)
            ltp = model(item)
            temp = dict(
                map(
                    lambda x: (
                        (x[0].to(torch.long).item(), x[1].to(torch.long).item()),
                        x[2].tolist(),
                    ),
                    zip(item.x, l, ltp),
                )
            )
            trans_pro.update(temp)

    fig, ax = plt.subplots(figsize=(7, 5))
    S_I_l = []  # neighbor counts seen for susceptible nodes
    S_I_p = []  # predicted P(S -> I)
    I_S_l = []  # neighbor counts seen for infected nodes
    I_S_p = []  # predicted P(I -> S)

    for i in range(60):
        if (0, i) in trans_pro:
            S_I_l.append(i)
            S_I_p.append(trans_pro[(0, i)][1])
        if (1, i) in trans_pro:
            I_S_l.append(i)
            I_S_p.append(trans_pro[(1, i)][0])

    ground_x = np.arange(0, 60)
    # Analytic SIS infection probability with per-contact rate 0.04.
    ground_I = 1 - (1 - 0.04) ** ground_x
    ax.plot(S_I_l, S_I_p, label="gnn_S_to_I")
    ax.plot(ground_x, ground_I, label="ground_truth")  # fixed "gound_truth" typo
    ax.plot(I_S_l, I_S_p, label="gnn_I_to_S")
    ax.set_xlabel("number of infected neighbors", fontsize=12)
    ax.set_ylabel("transition probability", fontsize=12)
    ax.set_title("Simple", fontsize=15)
    ax.legend()

    buffer = BytesIO()
    fig.savefig(buffer, format="png")
    # Close the figure so repeated web requests don't leak pyplot figures.
    plt.close(fig)
    img_data = buffer.getvalue()
    imb = base64.b64encode(img_data)
    ims = imb.decode()
    imd = "data:image/png;base64," + ims

    return imd