import torch
import numpy as np
import draw
import pandas as pd
from sentence_transformers import SentenceTransformer


def calc_importance_factor(r, d=0.95, iterations=100):
    """Compute a PageRank-style importance vector from relation matrix r.

    Column i of the transition matrix M is the softmax of row i of r
    (scaled by 3), so M is column-stochastic.  The importance vector R
    is then power-iterated with damping:

        R <- d * M @ R + (1 - d) / n

    which keeps R summing to 1 at every step.

    Parameters
    ----------
    r : (n, n) array-like
        Pairwise relation strengths between the n nodes.
    d : float, optional
        Damping factor in [0, 1] (default 0.95).
    iterations : int, optional
        Number of power-iteration steps (default 100, matching the
        original hard-coded count).

    Returns
    -------
    R : (1, n) ndarray
        Importance factors; entries sum to 1.
    gap : list of float
        L2 distance between successive iterates, one per iteration
        (useful for convergence plots).
    """
    r = np.asarray(r, dtype=float)
    n = len(r)

    # Vectorized replacement of the original O(n^2) Python loops:
    # row-wise softmax of 3*r, transposed so each *column* sums to 1.
    E = np.exp(3.0 * r)
    M = (E / E.sum(axis=1, keepdims=True)).T

    R = np.full((1, n), 1.0 / n)  # start from the uniform distribution
    gap = []
    for _ in range(iterations):
        R_prev = R
        R = d * (M @ R.T).T + (1 - d) / n
        gap.append(np.sqrt(((R - R_prev) ** 2).sum()))

    # Debug output kept from the original: the factors, their sum (should
    # be 1), and the 1-based node ranking by descending importance.
    print(list(R[0]))
    print(R.sum())
    print((-R).argsort()[0] + 1)
    return R, gap


# Sentence embedding model; requires a CUDA device to be available.
model = SentenceTransformer('multi-qa-MiniLM-L6-cos-v1', device='cuda:0')

# get embeddings of every goal
n = 17  # number of goals (SDG 1..17)
v = []       # per-goal mean embedding, L2-normalised (torch tensor on GPU)
all_v = []   # every individual sentence embedding (numpy, for scatter plots)
lb_v = []    # 1-based goal label for each entry of all_v
to_draw = [5, 7, 14]  # goals highlighted in the single-goal scatter plot
for i in range(n):
    print(i)
    with open('./data-grabber/static-data/description/' + str(i + 1) + '.txt', "r", encoding='utf-8') as g1:
        cnt = 0
        # 384 assumed to be the MiniLM embedding width -- TODO confirm via
        # model.get_sentence_embedding_dimension()
        embeddings = torch.zeros(384, device=torch.device('cuda:0'))
        for line in g1:
            # One embedding per line of the goal's description file.
            embedding = model.encode(line, convert_to_tensor=True)
            embeddings += embedding
            all_v.append(embedding.detach().cpu().numpy())
            lb_v.append(i + 1)
            cnt += 1
        # NOTE(review): an empty description file leaves cnt == 0 and this
        # divides by zero -- verify the data files are never empty.
        embeddings /= cnt
        # Unit-normalise so the dot products below are cosine similarities.
        embeddings /= embeddings.norm(p=2)
        v.append(embeddings)
all_v = np.array(all_v)
lb_v = np.array(lb_v)
draw.draw_sentences(all_v, lb_v, "pictures/sentences.png")
draw.draw_sentences_single(all_v, lb_v, to_draw, "pictures/sentences1.png")
# calculate the Cosine correlation coefficient r
r = np.zeros((n, n))
for i in range(n):
    for j in range(n):
        r[i][j] = (v[i] * v[j]).sum().item()

# Element-wise weight/mask from the relation CSV; the first column is
# dropped -- presumably row labels, verify against the CSV layout.
csv_file = pd.read_csv("data-grabber/csv_results/relation.csv")
csv_value = np.float32(csv_file.values[:, 1:])
# print(r)
# print(csv_value * r)
r = csv_value * r

draw.draw_graph(r, None, 'pictures/graph.png', use_node_values=False)

if __name__ == '__main__':
    # Goals to exclude from the ranking (0-based indices, i.e. SDG1 and SDG11).
    done = [0, 10]
    r_pi = np.zeros((n, n))
    for i in range(n):
        for j in range(n):
            if i not in done and j not in done:
                r_pi[i][j] = r[i][j]
            else:
                # -100 drives exp(3 * r) inside calc_importance_factor to
                # ~0, effectively removing these edges from the transition
                # matrix rather than deleting rows/columns.
                r_pi[i][j] = -100

    # r_pi = r
    R, _ = calc_importance_factor(r_pi)

    """draw heatmap tor r_pi"""

    import matplotlib.pyplot as plt

    y_labels = [f"SDG{i}" for i in range(1, len(r_pi) + 1)]
    x_labels = [f"SDG{i}" for i in range(1, len(r_pi) + 1)]
    draw.draw_heatmap(np.array(r_pi), x_labels,
                      y_labels, "pictures/heatmap.png")

    # Grouped-bar layout with two series; the second series is all zeros,
    # so this effectively draws one bar per goal at width w / 2.
    data = [[R[0][i], 0] for i in range(len(R[0]))]
    dim = len(data[0])
    w = 0.75
    dimw = w / dim

    fig, ax = plt.subplots(figsize=(20, 20))
    x = np.arange(len(data))
    for i in range(len(data[0])):
        y = [d[i] for d in data]
        # bottom=0.001 keeps near-zero bars visible on the axis.
        b = ax.bar(x + i * dimw, y, dimw, bottom=0.001)

    ax.set_xticks(x + dimw / 2)
    ax.set_xticklabels(['SDG' + str(i + 1) for i in x])

    ax.set_xlabel('Goal')
    ax.set_ylabel('Importance')
    plt.savefig("pictures/goal_importance.png")
    # Disabled experiment below: plots the per-iteration L2 convergence gap
    # for several damping factors d.
    """
    l = []
    R, gap = calc_importance_factor(r_pi, d=0.75)
    l.append(gap)
    R, gap = calc_importance_factor(r_pi, d=0.8)
    l.append(gap)
    R, gap = calc_importance_factor(r_pi, d=0.85)
    l.append(gap)
    R, gap = calc_importance_factor(r_pi, d=0.9)
    l.append(gap)
    R, gap = calc_importance_factor(r_pi, d=0.95)
    l.append(gap)
    print(l)

    fig, ax = plt.subplots()
    ax.plot(range(100), l[0], label='d=0.75')
    ax.plot(range(100), l[1], label='d=0.80')
    ax.plot(range(100), l[2], label='d=0.85')
    ax.plot(range(100), l[3], label='d=0.90')
    ax.plot(range(100), l[4], label='d=0.95')
    ax.set_xlabel('Iterations')
    ax.set_ylabel('L2 distance')
    ax.legend()
    plt.show()
    """