import torch
import torch.nn as nn
import numpy as np

from torchcrf import CRF


class XModel(nn.Module):
    """BiLSTM sequence labeler with an optional CRF output layer.

    Expected ``config`` keys:
        vocab_size (int): size of the vocabulary (embedding table has one
            extra row — presumably for a padding/unknown id; TODO confirm).
        hidden_size (int): embedding size and per-direction LSTM size.
        hidden_dropout_prob (float): dropout applied to LSTM outputs.
        num_labels (int): number of target classes.
        num_layers (int, optional): LSTM layers, default 1.
        use_crf (bool, optional): decode/score with a CRF, default False.
    """

    def __init__(self, config):
        super().__init__()
        self.config = config
        # +1 extra embedding row beyond vocab_size (padding id? — verify against tokenizer).
        self.encoder = nn.Embedding(config["vocab_size"] + 1, config["hidden_size"])
        self.lstm = nn.LSTM(
            config["hidden_size"],
            config["hidden_size"],
            num_layers=config.get("num_layers", 1),
            bidirectional=True,
            batch_first=True,
        )
        self.dropout = nn.Dropout(config["hidden_dropout_prob"])
        # Bidirectional LSTM doubles the feature dimension.
        self.classifier = nn.Linear(config["hidden_size"] * 2, config["num_labels"])
        # ignore_index=-1 keeps the cross-entropy path consistent with the
        # CRF mask (y_true.gt(-1)), which treats label -1 as padding.
        self.loss_fct = nn.CrossEntropyLoss(ignore_index=-1)

        self.use_crf = config.get("use_crf", False)
        # Build the CRF only when it is used: an unconditional CRF adds
        # unused trainable parameters and a hard torchcrf dependency.
        # NOTE(review): checkpoints saved by the previous version with
        # use_crf=False contain (unused) CRF parameters; load with
        # strict=False if needed.
        self.crf_layer = CRF(config["num_labels"], batch_first=True) if self.use_crf else None

    def forward(self, input_ids, y_true=None):
        """Compute the loss (training) or predictions (inference).

        Args:
            input_ids: LongTensor [batch, seq_len] of token ids.
            y_true: optional LongTensor [batch, seq_len] of label ids;
                positions with value -1 are treated as padding.

        Returns:
            Scalar loss tensor when ``y_true`` is given; otherwise logits
            [batch, seq_len, num_labels], or (with use_crf) a list of the
            best label paths per sequence.
        """
        x = self.encoder(input_ids)   # [batch, seq_len, hidden]
        x, _ = self.lstm(x)           # [batch, seq_len, hidden * 2]
        x = self.dropout(x)
        x = self.classifier(x)        # [batch, seq_len, num_labels]
        if y_true is not None:
            if self.use_crf:
                mask = y_true.gt(-1)  # False where label == -1 (padding)
                # torchcrf returns the log-likelihood; negate for a loss.
                loss = -self.crf_layer(x, y_true, mask=mask, reduction="mean")
            else:
                loss = self.loss_fct(x.view(-1, self.config["num_labels"]), y_true.view(-1))
            return loss
        if self.use_crf:
            return self.crf_layer.decode(x)  # list[list[int]] best paths
        return x

if __name__ == '__main__':
    # Smoke test: push a tiny batch through the model and print the logits.
    config = {
        "vocab_size": 4621,
        "num_layers": 1,
        "hidden_size": 128,
        "hidden_dropout_prob": 0.01,
        "num_labels": 9,
    }

    model = XModel(config)
    model.eval()  # disable dropout for a deterministic demo pass
    x = torch.LongTensor([[1, 2, 3], [4, 5, 6]])
    # Call the module itself rather than .forward() so nn.Module hooks run;
    # no_grad avoids building an unused autograd graph.
    with torch.no_grad():
        print(model(x))