import torch
import torch.nn as nn
import torch.nn.functional as F
from .utils import shift_right_mask_loss


class LanguageModelCrossEntropy(nn.Module):
    """Token-level cross-entropy loss with optional label smoothing.

    Args:
        eps: label-smoothing factor passed to ``F.cross_entropy``
            (0 disables smoothing). Only applied on torch >= 1.10,
            where the ``label_smoothing`` argument exists.
    """

    def __init__(self, eps=0.):
        super().__init__()
        self.eps = eps

    @shift_right_mask_loss
    def forward(self, logits, targets):
        # BUG FIX: the old check was `torch.__version__.startswith('1.10')`,
        # which matched only 1.10.x — on 1.11+ and 2.x the smoothing factor
        # was silently dropped. `label_smoothing` exists since torch 1.10,
        # so compare (major, minor) properly.
        version = tuple(
            int(part) for part in torch.__version__.split('+')[0].split('.')[:2]
        )
        if version >= (1, 10):
            return F.cross_entropy(logits, targets, label_smoothing=self.eps)
        return F.cross_entropy(logits, targets)

class LanguageModelSparseCrossEntropy(nn.Module):
    """Sparse approximation of cross-entropy.

    The log-partition term is computed over only the top-k logits
    instead of the full vocabulary, trading exactness for speed.

    Args:
        k: number of highest-scoring logits used in the logsumexp.
    """

    def __init__(self, k=10):
        super().__init__()
        self.k = k

    @shift_right_mask_loss
    def forward(self, logits, targets):
        # Score assigned to the gold token at each position.
        target_scores = logits.gather(-1, targets.unsqueeze(-1)).squeeze(-1)
        # Approximate the normalizer with the k largest logits only.
        topk_scores, _ = logits.topk(self.k, dim=-1)
        log_partition = topk_scores.logsumexp(dim=-1)
        return (log_partition - target_scores).mean()


class CopyNll(nn.Module):
    """Negative log-likelihood over an already-normalized distribution.

    NOTE(review): despite the parameter name, ``logits`` is expected to
    be probabilities (e.g. a copy/pointer distribution) — the loss takes
    their log directly. Verify against callers.

    Args:
        eps: floor applied before ``log()`` to avoid ``-inf`` losses and
            NaN gradients when a probability is exactly zero.
    """

    def __init__(self, eps=1e-12):
        super().__init__()
        self.eps = eps

    @shift_right_mask_loss
    def forward(self, logits, targets):
        # BUG FIX: the original `logits.log()` yields -inf (and NaN
        # gradients on backward) wherever a probability is exactly 0;
        # clamp to a tiny positive floor first.
        return F.nll_loss(logits.clamp_min(self.eps).log(), targets)
