import os
import joblib
import numpy as np

import paddle
import paddle.nn as nn
from paddle.optimizer import Adam

import utils
import Evaluation
from Discriminator import Discriminator
from Generator import Generator


class Config():
    """Static configuration for GraphGAN training (paths and hyper-parameters)."""
    data_path = '../data/graph.csv'  # local graph edge-list file
    data_web_path = 'http://mirror.coggle.club/dataset/graph-wiki/graph.txt.zip'  # remote copy of the dataset
    embed_path = '../data/node2vec_emb.pkl'  # pretrained node2vec embedding file
    app = 'link_prediction'  # evaluation task
    modes = ["gen", "dis"]  # model labels written to the result file
    result_filename = '../result/result.txt'  # evaluation output file
    # mini-batch size
    batch_size = 256
    # learning rates for discriminator / generator
    lr_dis = 1e-3
    lr_gen = 1e-3
    # epoch counts: outer adversarial epochs and per-network inner epochs
    epochs = 10
    dis_epochs = 30
    gen_epochs = 30
    # sample-regeneration intervals (data rebuilt when inner_epoch % interval == 0)
    dis_interval = dis_epochs
    gen_interval = gen_epochs
    # probability that a root node is used in each data-preparation pass
    update_ratio = 1
    # L2 regularization weights
    lambda_dis = 1e-5
    lambda_gen = 1e-5
    # context window size for generating node pairs from paths
    window_size = 2


def sampling(root, tree, sample_num, for_d, all_score):
    """Sample nodes from a BFS-tree via a relevance-guided random walk.

    Starting at ``root``, repeatedly steps to a neighbor drawn with
    probability proportional to softmax of the generator scores; a walk
    terminates when it steps back to the node it just came from.

    Args:
        root: int, root node of each walk
        tree: dict, BFS-tree mapping node -> list of tree neighbors
        sample_num: int, the number of required samples
        for_d: bool, True when samples are for the discriminator
               (1-hop neighbors of root are skipped as positive samples)
        all_score: paddle tensor of pairwise relevance scores
    Returns:
        samples: list, the indices of the sampled nodes (None if the tree
                 is trivial or only positives are reachable)
        paths: list, paths from the root to the sampled nodes (None as above)
    """
    tmp_all_score = all_score.clone().numpy()
    samples = []
    paths = []
    n = 0
    while len(samples) < sample_num:
        current_node = root
        previous_node = -1
        paths.append([])
        is_root = True
        paths[n].append(current_node)

        while True:
            # Copy the neighbor list: the original code aliased
            # tree[current_node] in the non-root branch, so the
            # `remove(root)` below permanently mutated the shared BFS tree.
            node_neighbor = tree[current_node][1:] if is_root else list(tree[current_node])
            is_root = False
            if len(node_neighbor) == 0:  # the tree only has a root
                return None, None
            if for_d:  # skip 1-hop nodes (positive samples)
                if node_neighbor == [root]:
                    # in current version, None is returned for simplicity
                    return None, None
                if root in node_neighbor:
                    node_neighbor.remove(root)

            relevance_probability = tmp_all_score[current_node, node_neighbor]
            relevance_probability = utils.softmax(relevance_probability)
            next_node = np.random.choice(node_neighbor, size=1, p=relevance_probability)[0]  # select next node
            paths[n].append(next_node)
            if next_node == previous_node:  # terminating condition: walk stepped back
                samples.append(current_node)
                break

            previous_node = current_node
            current_node = next_node

        n = n + 1

    return samples, paths


def prepare_data_for_d(root_nodes, graph, trees, all_score):
    """Build positive/negative training pairs for the discriminator.

    Each root node is kept with probability ``config.update_ratio``; its
    graph neighbors become positive samples (label 1) and tree-sampled
    nodes become negative samples (label 0).

    Returns:
        (center_nodes, neighbor_nodes, labels): three parallel lists.
    """
    center_nodes, neighbor_nodes, labels = [], [], []
    for root in root_nodes:
        if np.random.rand() >= config.update_ratio:
            continue
        pos = graph[root]
        neg, _ = sampling(root, trees[root], len(pos), for_d=True, all_score=all_score)
        if len(pos) == 0 or neg is None:
            continue

        # positive pairs: root paired with each real neighbor
        center_nodes += [root] * len(pos)
        neighbor_nodes += pos
        labels += [1] * len(pos)

        # negative pairs: root paired with each sampled fake neighbor
        center_nodes += [root] * len(pos)
        neighbor_nodes += neg
        labels += [0] * len(neg)

    return center_nodes, neighbor_nodes, labels


def get_node_pairs_from_path(path, window_size=None):
    """
    given a path from root to a sampled node, generate all the node pairs
    within the given window size
    e.g., path = [1, 0, 2, 4, 2], window_size = 2 -->
    node pairs= [[1, 0], [1, 2], [0, 1], [0, 2], [0, 4], [2, 1], [2, 0], [2, 4], [4, 0], [4, 2]]
    :param path: a path from root to the sampled node
    :param window_size: context window radius; defaults to config.window_size
                        (kept as a parameter so the function is usable without
                        the module-global config)
    :return pairs: a list of [center, context] node pairs
    """
    if window_size is None:
        window_size = config.window_size

    # Drop the last node: by the walk's terminating condition it repeats
    # the next-to-last node's predecessor.
    path = path[:-1]
    pairs = []
    for i, center_node in enumerate(path):
        lo = max(i - window_size, 0)
        hi = min(i + window_size + 1, len(path))
        for j in range(lo, hi):
            if i != j:
                pairs.append([center_node, path[j]])

    return pairs


def prepare_data_for_g(discriminator, root_nodes, trees, all_score, config):
    """Sample walk paths for the generator and score the resulting node
    pairs with the discriminator.

    Args:
        discriminator: discriminator network (callable on node-id arrays)
        root_nodes: list of root node ids
        trees: dict of BFS trees, one per root
        all_score: paddle tensor of pairwise relevance scores
        config: training configuration
    Returns:
        node_1: list of center nodes
        node_2: list of context nodes
        reward: discriminator-derived reward for each pair
    """
    paths = []
    for root in root_nodes:
        if np.random.rand() < config.update_ratio:
            # Only the walk paths are needed here; the sampled node ids
            # (first return value) are unused.
            _, paths_from_root = sampling(root, trees[root], config.gen_epochs,
                                          for_d=False, all_score=all_score)
            if paths_from_root is not None:
                paths.extend(paths_from_root)

    # Flatten every path into (center, context) pairs within the window.
    node_1 = []
    node_2 = []
    for path in paths:
        for center, context in get_node_pairs_from_path(path):
            node_1.append(center)
            node_2.append(context)

    score, _, _, _ = discriminator(np.array(node_1), np.array(node_2))
    reward = discriminator.get_reward(score)

    return node_1, node_2, reward


def evaluation(n_node):
    '''
    Run link-prediction evaluation and append one result line per model
    mode to the result file.
    :param n_node: number of nodes (unused here; kept for interface parity)
    :return:
    '''
    results = []
    if config.app == "link_prediction":
        for mode in config.modes:
            evaluator = Evaluation.LinkPredictEval(config.embed_path, config.data_path)
            outcome = evaluator.eval_link_prediction()
            results.append(mode + ":" + str(outcome) + "\n")

    with open(config.result_filename, mode="a+") as f:
        f.writelines(results)


def train(generator, discriminator, n_node, graph, root_nodes, trees, config: Config):
    '''
    Adversarially train the generator and discriminator (GraphGAN-style).
    :param generator: generator network
    :param discriminator: discriminator network
    :param n_node: number of nodes in the graph
    :param graph: adjacency mapping (node -> neighbor list)
    :param root_nodes: list of all node ids used as walk roots
    :param trees: BFS trees, one per root node
    :param config: training configuration
    :return:
    '''
    print('初始化训练工具')
    # Persist the initial generator/discriminator embeddings.
    utils.write_embeddings_to_file(generator, discriminator, n_node)
    # Baseline evaluation before any training.
    evaluation(n_node)

    # Initial pairwise relevance scores from the generator's embeddings.
    all_score = paddle.matmul(generator.embedding_matrix, generator.embedding_matrix.t()) + generator.bias_vector
    criterion1 = nn.BCEWithLogitsLoss()
    # One optimizer per network.
    d_optimizer = Adam(parameters=discriminator.parameters(), learning_rate=config.lr_dis)
    g_optimizer = Adam(parameters=generator.parameters(), learning_rate=config.lr_gen)
    print('开始训练')
    for epoch in range(config.epochs):
        print('--------epoch {}--------'.format(epoch))

        # --------------- discriminator --------------- #
        center_nodes = []
        neighbor_nodes = []
        labels = []
        for d_epoch in range(config.dis_epochs):
            # Regenerate training pairs every dis_interval inner epochs.
            if d_epoch % config.dis_interval == 0:
                center_nodes, neighbor_nodes, labels = prepare_data_for_d(root_nodes, graph, trees, all_score)
            # Mini-batch training over shuffled batch start offsets.
            train_size = len(center_nodes)
            start_list = list(range(0, train_size, config.batch_size))
            np.random.shuffle(start_list)
            for start in start_list:
                end = start + config.batch_size

                score, node_embedding, node_neighbor_embedding, bias = discriminator(np.array(center_nodes[start:end]),
                                                                                     np.array(
                                                                                         neighbor_nodes[start:end]))
                label = np.array(labels[start:end])
                label = paddle.to_tensor(label, dtype='float32')
                # BCE loss plus L2 regularization on the embeddings/bias
                # touched by this batch.
                d_loss = paddle.sum(criterion1(score, label)) + config.lambda_dis * (
                        paddle.sum(node_embedding ** 2) / 2 +
                        paddle.sum(node_neighbor_embedding ** 2) / 2 + paddle.sum(bias ** 2) / 2)

                d_loss.backward()
                d_optimizer.step()
                d_optimizer.clear_grad()
            print('[Total Epoch {}/{}] [D Epoch {}/{}] [D Loss: {}]'.format(epoch, config.epochs, d_epoch,
                                                                            config.dis_epochs, d_loss.item()))

            if d_epoch == config.dis_epochs - 1:
                print('Discrimination finished(Epoch {}).'.format(epoch))
        # ------------------------------------- #

        # --------------- generator --------------- #
        node_1 = []
        node_2 = []
        reward = []
        for g_epoch in range(config.gen_epochs):
            # Refresh scores from the (just-updated) generator each inner epoch.
            all_score = generator.get_all_score()
            if g_epoch % config.gen_interval == 0:
                node_1, node_2, reward = prepare_data_for_g(discriminator, root_nodes, trees, all_score, config)
                reward = reward.detach()  # Prevent the gradient flowing in the discriminator

            # Mini-batch training over shuffled batch start offsets.
            train_size = len(node_1)
            start_list = list(range(0, train_size, config.batch_size))
            np.random.shuffle(start_list)
            for start in start_list:
                end = start + config.batch_size

                node_embedding, node_neighbor_embedding, prob = generator(np.array(node_1[start:end]),
                                                                          np.array(node_2[start:end]))
                reward_p = reward[start:end]

                # Policy-gradient-style loss: log-prob weighted by the
                # discriminator reward, plus L2 regularization.
                g_loss = -paddle.mean(paddle.log(prob) * reward_p) + config.lambda_gen * (
                        paddle.sum(node_embedding ** 2) / 2 +
                        paddle.sum(node_neighbor_embedding ** 2) / 2)

                g_loss.backward()
                g_optimizer.step()
                g_optimizer.clear_grad()

            print('[Total Epoch {}/{}] [G Epoch {}/{}] [G Loss: {}]'.format(epoch, config.epochs, g_epoch,
                                                                            config.gen_epochs, g_loss.item()))
            if g_epoch == config.gen_epochs - 1:
                print('Generation finished (Epoch {}).'.format(epoch))

        # ------------------------------------- #
        # Persist updated embeddings and re-evaluate after each outer epoch.
        utils.write_embeddings_to_file(generator, discriminator, n_node)
        evaluation(n_node)
    print('训练完成')


if __name__ == '__main__':
    # configuration
    config = Config()
    # select the compute device (GPU when available)
    print('设置训练驱动')
    if paddle.is_compiled_with_cuda() is True:
        paddle.device.set_device("gpu")
    else:
        paddle.device.set_device("cpu")
    # load the graph data
    print('读取图数据')
    n_node, graph = utils.read_graph_data(config.data_path)
    root_nodes = [i for i in range(n_node)]
    # initialize embeddings — both networks start from the same
    # pretrained node2vec embedding file
    node_embed_init_d = utils.read_graph_embedding(config.embed_path)  # discriminator
    node_embed_init_g = utils.read_graph_embedding(config.embed_path)  # generator

    # build the BFS trees used by the tree-walk sampler
    trees = utils.construct_trees(root_nodes, graph)

    # instantiate the networks
    discriminator = Discriminator(n_node, node_embed_init_d)
    generator = Generator(n_node, node_embed_init_g)

    # run adversarial training
    train(generator, discriminator, n_node, graph, root_nodes, trees, config)