"""
@Filename       : model.py
@Create Time    : 2020/11/2 16:00
@Author         : Rylynn
@Description    : Vanilla LSTM baseline for diffusion-sequence modeling:
                  embeds node ids, encodes each cascade with an LSTM, and
                  scores the next node over all candidates.

"""
import datetime
import pickle as pkl

import torch
import torch.nn as nn
import torch.nn.utils.rnn as rnn_utils
import torch.optim as optim
from torch.utils.data import DataLoader
from tqdm import tqdm

from evaluate.metric import run_evaluation
from util.dataloader import DiffuseSequenceDataSet, sequence_collate_fn


class VanillaLSTM(nn.Module):
    """Vanilla LSTM next-node predictor for diffusion sequences.

    Embeds node ids, runs a single-layer LSTM over each padded sequence,
    and projects the final hidden state onto scores over all nodes.

    Args:
        config: dict with keys
            'node_num'  -- number of distinct nodes; one extra embedding row
                           is allocated (presumably the padding index --
                           TODO confirm against the dataloader).
            'embed_dim' -- node-embedding dimension.
            'state_dim' -- LSTM hidden-state dimension.
    """

    def __init__(self, config):
        super().__init__()
        self.user_embed = nn.Embedding(config['node_num'] + 1, config['embed_dim'])
        # self.user_embed.weight.requires_grad = False
        self.lstm = nn.LSTM(config['embed_dim'], config['state_dim'], batch_first=True)
        # BUG FIX: this projection consumes the LSTM hidden state, whose size
        # is state_dim -- the original used embed_dim, which crashes whenever
        # embed_dim != state_dim.
        self.linear = nn.Linear(config['state_dim'], config['node_num'], bias=False)
        self.cross_entropy = nn.CrossEntropyLoss()

    def forward(self, batch_seqs, batch_seqs_length):
        """Return unnormalized next-node scores, shape (batch, node_num)."""
        out = self.encode(batch_seqs, batch_seqs_length)
        out = self.linear(out)
        return out

    def encode(self, batch_seqs, batch_seqs_length):
        """Encode padded id sequences into the LSTM's final hidden state.

        Args:
            batch_seqs: LongTensor (batch, max_len) of node ids, right-padded.
            batch_seqs_length: true length of each sequence. NOTE(review):
                recent PyTorch requires lengths on CPU (list or CPU tensor)
                for pack_padded_sequence -- verify against the collate fn.

        Returns:
            FloatTensor (batch, state_dim): final hidden state per sequence,
            in the original batch order.
        """
        batch_seqs_embed = self.user_embed(batch_seqs)
        # enforce_sorted=False accepts batches in any order (PyTorch sorts and
        # unsorts internally); for already length-sorted batches the result is
        # identical to the previous behavior.
        batch_seqs_pack = rnn_utils.pack_padded_sequence(
            batch_seqs_embed, batch_seqs_length,
            batch_first=True, enforce_sorted=False)
        # h_n: (num_layers * num_directions, batch, state_dim); with one
        # unidirectional layer the leading axis is 1, so squeeze it away.
        _, (h_n, _) = self.lstm(batch_seqs_pack)
        return h_n.squeeze(0)

    def query_embed(self, key):
        """Look up embedding vectors for the given node-id tensor."""
        return self.user_embed(key)

    def loss(self, probs, true_nodes):
        """Cross-entropy between predicted scores and true next-node ids."""
        return self.cross_entropy(probs, true_nodes)
