from transformers import Seq2SeqTrainer
import torch.nn as nn

# Module-level criterion: NLLLoss expects log-probabilities and by default
# ignores target index -100, matching the HF label-padding convention.
seq_criterion = nn.NLLLoss()
# Alternative: nn.CrossEntropyLoss() takes raw logits directly, which would
# make the explicit log_softmax step unnecessary.
# seq_criterion = nn.CrossEntropyLoss()


class MyTrainer(Seq2SeqTrainer):
    """Seq2SeqTrainer that overrides the model's built-in loss with an
    explicit NLLLoss over log-softmaxed logits.

    Note: the labels are left inside ``inputs``, so the model may also
    compute its own loss internally; that value is discarded here.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # NLLLoss defaults to ignore_index=-100, matching the HF
        # convention for padded label positions.
        self.seq_criterion = nn.NLLLoss()

    def compute_loss(self, model, inputs, return_outputs=False, **kwargs):
        """Compute the sequence NLL loss for one batch.

        Args:
            model: the model to run the forward pass with.
            inputs: batch dict; must contain a ``labels`` tensor.
            return_outputs: if True, return ``(loss, outputs)``.
            **kwargs: absorbs extra arguments passed by newer transformers
                versions (e.g. ``num_items_in_batch``) so the override
                stays forward-compatible.

        Returns:
            The scalar loss tensor, or ``(loss, outputs)`` when
            ``return_outputs`` is True.
        """
        labels = inputs.get("labels")
        outputs = model(**inputs)
        logits = outputs.get("logits")

        # NLLLoss expects log-probabilities, so apply log_softmax first.
        log_probs = nn.functional.log_softmax(logits, dim=-1)

        # Fix: use the criterion configured on this instance (set in
        # __init__) rather than the module-level global, so subclasses
        # and individual instances can swap the loss function.
        loss = self.seq_criterion(
            log_probs.view(-1, log_probs.size(-1)),
            labels.view(-1),
        )

        return (loss, outputs) if return_outputs else loss
