import torch
import numpy as np
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
import torch.optim as optim

import sys
sys.path.append(".")
from utils import EntityDictionary, WordDictionary

class ModelArgs:
    """Hyper-parameters shared by the BiLSTM model definition."""

    # Width of each learned token embedding vector.
    embedding_dim: int = 256
    # Hidden-state size of the LSTM, per direction.
    hidden_dim: int = 200


class BiLSTM(nn.Module):
    def __init__(self, d1:WordDictionary, d2:EntityDictionary):
        super(BiLSTM, self).__init__()

        self.embedding_layer = nn.Embedding(len(d1), ModelArgs.embedding_dim)
        self.bilstm = nn.LSTM(ModelArgs.embedding_dim, ModelArgs.hidden_dim, batch_first=True, num_layers=3, bidirectional=True, dropout=0.3)
        self.score_layer = nn.Linear(2*ModelArgs.hidden_dim, len(d2))

    def forward(self, padded, length):
        embedding = self.embedding_layer(padded)
        packed = pack_padded_sequence(embedding, length, batch_first=True, enforce_sorted=False)
        bout, _ = self.bilstm(packed)
        bout, _ = pad_packed_sequence(bout, batch_first=True)
        score = self.score_layer(bout)

        return score
