import torch
from torch import nn

from P03_NER.LSTM_CRF.utils.data_loader import *

# Module-level configuration object; `Config` is brought in by the star
# import from data_loader above (holds embedding_dim, hidden_dim, dropout,
# tag2id, etc. — see usage in testNerLstm below).
config = Config()

class NERLSTM(nn.Module):
    """Bidirectional-LSTM encoder for sequence labeling (NER).

    Pipeline: token ids -> embedding lookup -> BiLSTM -> zero out padded
    positions via the attention mask -> dropout -> linear projection to
    per-token tag scores.
    """

    def __init__(self, embedding_dim, hidden_dim, dropout, word2id, tag2id):
        super().__init__()
        self.name = 'BiLSTM'
        self.embedding_dim, self.hidden_dim = embedding_dim, hidden_dim
        self.vocab_size, self.tag_size = len(word2id), len(tag2id)

        # Token-id -> dense-vector lookup table.
        self.word_embeds = nn.Embedding(self.vocab_size, self.embedding_dim)
        self.dropout = nn.Dropout(p=dropout)
        # Each direction gets hidden_dim // 2 units, so the concatenated
        # forward/backward output is hidden_dim wide.
        self.bi_lstm = nn.LSTM(
            input_size=self.embedding_dim,
            hidden_size=self.hidden_dim // 2,
            bidirectional=True,
            batch_first=True,
        )
        self.hidden2tag = nn.Linear(in_features=hidden_dim, out_features=self.tag_size)

    def forward(self, input, mask):
        # (batch, seq) -> (batch, seq, embedding_dim)
        embedded = self.word_embeds(input)
        lstm_out, _state = self.bi_lstm(embedded)
        # Zero the LSTM outputs at padded timesteps before the projection.
        # NOTE(review): assumes mask is (batch, seq) with 1 at real tokens
        # and 0 at padding — confirm against the data loader.
        masked = lstm_out * mask.unsqueeze(-1)
        dropped = self.dropout(masked)
        # (batch, seq, hidden_dim) -> (batch, seq, tag_size) emission scores.
        return self.hidden2tag(dropped)

def testNerLstm():
    """Smoke-test NERLSTM: build the model from the global config, push one
    training batch through it, and print the emission-score shape.
    """
    ner_lstm = NERLSTM(config.embedding_dim, config.hidden_dim, config.dropout, word2index, config.tag2id)
    print(f'ner_lstm-->{ner_lstm}')
    train_dataloader, dev_dataloader = get_data()
    for input_ids_padded, labels_padded, attention_mask in train_dataloader:
        result = ner_lstm(input_ids_padded, attention_mask)
        print(f'result-->{result.shape}')
        # Only one batch is needed for the smoke test. Use `break` instead of
        # exit(): exit() kills the whole interpreter, which would prevent any
        # caller (or interactive session) from continuing after this check.
        break

if __name__ == '__main__':
    # Run the smoke test only when this file is executed as a script.
    testNerLstm()
