"""Softmax Cross Entropy with Logits"""

import mindspore.nn as nn
from mindspore.nn.loss.loss import _Loss

class SoftmaxCrossEntropyWithLogits(_Loss):
    """Loss cell wrapping ``nn.SoftmaxCrossEntropyWithLogits``.

    Args:
        sparse (bool): If True, labels are class indices rather than
            one-hot vectors. Default: True.
        reduction (str): Reduction applied to the per-sample loss,
            forwarded to the underlying op (e.g. 'mean'). Default: 'mean'.
    """

    def __init__(self, sparse=True, reduction='mean'):
        super().__init__()
        # Keep the configuration visible on the cell, then build the
        # underlying loss op once at construction time.
        self.sparse = sparse
        self.reduction = reduction
        self.loss = nn.SoftmaxCrossEntropyWithLogits(sparse=sparse, reduction=reduction)

    def construct(self, data, label):
        """Return the softmax cross-entropy of logits *data* against *label*."""
        return self.loss(data, label)
