import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from .utils import shift_right_mask_loss


class LanguageModelCrossEntropy(nn.Layer):
    """Token-level cross-entropy loss over raw language-model logits.

    With ``eps == 0`` this is plain ``F.cross_entropy``; otherwise the
    targets are label-smoothed with ``F.label_smooth`` before computing
    the cross-entropy by hand.

    NOTE(review): the ``shift_right_mask_loss`` decorator (from .utils,
    not visible here) presumably right-shifts targets and masks padding
    before this forward runs — confirm against its definition.
    """

    def __init__(self, eps=0.):
        """
        Args:
            eps (float): label-smoothing epsilon; 0 disables smoothing.
        """
        super().__init__()
        self.eps = eps

    @shift_right_mask_loss
    def forward(self, logits, targets):
        """Compute the (optionally label-smoothed) cross-entropy.

        Args:
            logits: unnormalized class scores; last dim is the vocab size.
            targets: integer class indices.

        Returns:
            Scalar mean loss (non-negative in both branches).
        """
        if self.eps == 0:
            return F.cross_entropy(logits, targets)
        one_hot_target = F.one_hot(targets, logits.shape[-1])
        soft_target = F.label_smooth(one_hot_target, epsilon=self.eps)
        # Cross-entropy is -sum(p * log q). The original code omitted the
        # negation, producing a negative "loss" that optimization would
        # drive toward -inf instead of toward the true distribution.
        return -(F.log_softmax(logits, -1) * soft_target).sum(-1).mean()


class CopyNll(nn.Layer):
    """Negative log-likelihood loss over *probabilities* (e.g. the output
    of a copy/pointer mechanism that already sums attention weights).

    Unlike ``LanguageModelCrossEntropy``, this class expects ``logits`` to
    be normalized probabilities and applies ``paddle.log`` itself.
    Inputs must be strictly positive; zero probabilities yield -inf.

    NOTE(review): the ``shift_right_mask_loss`` decorator (from .utils,
    not visible here) presumably right-shifts targets and masks padding
    before this forward runs — confirm against its definition.
    """

    def __init__(self, eps=0.):
        """
        Args:
            eps (float): label-smoothing epsilon; 0 disables smoothing.
        """
        super().__init__()
        self.eps = eps

    @shift_right_mask_loss
    def forward(self, logits, targets):
        """Compute the (optionally label-smoothed) NLL.

        Args:
            logits: class probabilities; last dim is the vocab size.
            targets: integer class indices.

        Returns:
            Scalar mean loss (non-negative in both branches).
        """
        if self.eps == 0:
            return F.nll_loss(paddle.log(logits), targets)
        one_hot_target = F.one_hot(targets, logits.shape[-1])
        soft_target = F.label_smooth(one_hot_target, epsilon=self.eps)
        # NLL is -sum(p * log q). The original code omitted the negation,
        # making the smoothed branch return a negative value while the
        # eps == 0 branch (F.nll_loss) is non-negative — inconsistent and
        # unminimizable. Negate to match.
        return -(paddle.log(logits) * soft_target).sum(-1).mean()
