"""
@Filename       : inf2vec.py
@Create Time    : 2021/1/12 8:54
@Author         : Rylynn
@Description    : 

"""
import random

import torch as th
import torch.nn as nn
from torch.utils.data import DataLoader
from tqdm import tqdm

import networkx as nx

from utils.dataloader import DiffuseSequenceDataset, sequence_collate_fn
from utils.eval import Evaluator


class Inf2Vec(nn.Module):
    def __init__(self, config):
        super(Inf2Vec, self).__init__()
        self.alpha = config['alpha']
        self.negative_samples_num = config['negative_samples_num']

        self.source_embed = nn.Embedding(config['user_num'], config['embed_size'], padding_idx=0)
        self.target_embed = nn.Embedding(config['user_num'], config['embed_size'], padding_idx=0)

        self.source_bias = nn.Embedding(config['user_num'], config['embed_size'], padding_idx=0)
        self.target_bias = nn.Embedding(config['user_num'], config['embed_size'], padding_idx=0)

        nn.init.uniform_(self.source_embed.weight, -1/config['embed_size'], 1/config['embed_size'])
        nn.init.uniform_(self.target_embed.weight, -1 / config['embed_size'], 1 / config['embed_size'])

        self.sigmoid = nn.Sigmoid()

    def random_walk_restart(self, g,  p, seq_length):
        ...

    def generate_path(self, g, sequence, seq_length):
        alpha = self.alpha
        random_walk_length = seq_length * alpha
        global_sample_length = seq_length * (1 - alpha)

    def negative_sample(self, user):
        ...

    def forward(self, source, target, is_positive=True):
        source_embed = self.source_embed(source)
        target_embed = self.target_embed(target)

        sources_bias = self.source_bias(source)
        targets_bias = self.target_bias(target)

        score = th.dot(source_embed, target_embed) + sources_bias + targets_bias

        loss = th.sigmoid(score) if is_positive else th.sigmoid(-score)
        return loss


def train(root_path, dataset):
    """Train an Inf2Vec model on the diffusion sequences found under
    ``root_path``/``dataset``.

    One positive SGD step is taken per (user, positive-context) pair, each
    followed by one step per negative sample. Requires a CUDA device.

    Args:
        root_path: dataset root directory (passed to DiffuseSequenceDataset).
        dataset: dataset name (passed to DiffuseSequenceDataset).
    """
    config = {
        'user_num': 4378,
        'embed_size': 50,
        'lr': 0.01,
        'epoches': 20,
        # Required by Inf2Vec.__init__ -- previously missing, which made
        # model construction raise KeyError. Defaults below: TODO tune.
        'alpha': 0.5,
        'negative_samples_num': 5,
    }

    train_dataloader = DataLoader(DiffuseSequenceDataset(root_path=root_path, dataset=dataset, train=True),
                                  batch_size=1,
                                  shuffle=True,
                                  collate_fn=sequence_collate_fn)

    test_dataloader = DataLoader(DiffuseSequenceDataset(root_path=root_path, dataset=dataset, train=False),
                                 batch_size=128,
                                 num_workers=4,
                                 collate_fn=sequence_collate_fn)
    evaluator = Evaluator()

    # NOTE(review): the graph is empty here; generate_path presumably needs
    # the real diffusion graph loaded into it -- confirm against the dataset.
    g = nx.Graph()
    model = Inf2Vec(config)
    model = model.cuda()
    optimizer = th.optim.Adam(params=model.parameters(), lr=config['lr'])
    for epoch in range(config['epoches']):
        batch_count = 0
        epoch_loss = 0
        for sequences, _, seq_length in tqdm(train_dataloader):
            sequences = sequences[0].cuda()
            pos_instances = model.generate_path(g, sequences, seq_length)
            for user, pos in zip(sequences, pos_instances):
                # One optimizer step per positive pair.
                loss = model(user, pos, True)
                optimizer.zero_grad()
                batch_count += 1
                epoch_loss += loss.item()
                loss.backward()
                optimizer.step()

                # One optimizer step per negative sample of the same user.
                neg_instances = model.negative_sample(user)
                for neg in neg_instances:
                    loss = model(user, neg, False)
                    optimizer.zero_grad()
                    batch_count += 1
                    epoch_loss += loss.item()
                    loss.backward()
                    optimizer.step()

        # max(..., 1) avoids ZeroDivisionError on an empty dataloader.
        print('Epoches {}/{}, with average loss of {}'.format(epoch + 1, config['epoches'],
                                                              epoch_loss / max(batch_count, 1)))
        if epoch % 3 == 0:
            # TODO: evaluate on test_dataloader with evaluator.
            ...

        ...


