"""NLLLoss"""

from mindspore.nn.loss.loss import LossBase
from mindspore import ops
from mindspore.ops import functional as F

__all__ = ['NLLLoss']


class NLLLoss(LossBase):
    """Negative log-likelihood loss with optional flattening and weighting.

    Computes ``loss = -sum(logits * one_hot(labels))`` along the class axis,
    then applies the reduction inherited from ``LossBase``.

    Args:
        reduction (str): Reduction mode forwarded to ``LossBase``
            ('mean', 'sum' or 'none'). Default: "mean".
        num_classes (int): Class count used to flatten ``logits`` when
            ``construct`` is called with ``method=True``. Default: 50
            (preserves the previously hard-coded value).
    """

    def __init__(self, reduction="mean", num_classes=50):
        super(NLLLoss, self).__init__(reduction)
        self.one_hot = ops.OneHot()
        self.reduce_sum = ops.ReduceSum()
        self.reshape = ops.Reshape()
        # Hoisted out of construct(): primitives should be created once at
        # cell init, not re-instantiated on every forward call.
        self.log_softmax = ops.LogSoftmax(1)
        self.num_classes = num_classes

    def construct(self, logits, labels, weights=None, method=None):
        """Compute the NLL loss.

        Args:
            logits: Log-probabilities of shape (N, C); when ``method`` is
                True, any shape reshapable to (-1, num_classes).
            labels: Integer class indices.
            weights: Optional per-sample weights applied during reduction;
                this branch also applies log-softmax to ``logits`` first.
            method: When exactly True, flatten logits/labels before the loss
                (sequence-style inputs).

        Returns:
            The reduced loss tensor.
        """
        if method is True:
            # Flatten sequence-shaped inputs down to (N, num_classes).
            logits = self.reshape(logits, (-1, self.num_classes))
            labels = labels.view(-1, 1).squeeze()
            label_one_hot = self.one_hot(labels, F.shape(logits)[-1],
                                         F.scalar_to_array(1.0), F.scalar_to_array(0.0))
            loss = self.reduce_sum(-1.0 * logits * label_one_hot, (1,))
            return self.get_loss(loss)

        if weights is not None:
            # Weighted variant: raw scores are converted to log-probabilities
            # here before the one-hot gather.
            logits = self.log_softmax(logits)
            label_one_hot = self.one_hot(labels, F.shape(logits)[-1],
                                         F.scalar_to_array(1.0), F.scalar_to_array(0.0))
            loss = self.reduce_sum(-1.0 * logits * label_one_hot, (1,))
            return self.get_loss(loss, weights)

        # Default path: logits are assumed to already be log-probabilities.
        label_one_hot = self.one_hot(labels, F.shape(logits)[-1],
                                     F.scalar_to_array(1.0), F.scalar_to_array(0.0))
        loss = self.reduce_sum(-1.0 * logits * label_one_hot, (1,))
        return self.get_loss(loss)
