import torch

def loss_label_smoothing(outputs, labels, alpha=0.1):
    """Cross-entropy loss with label smoothing regularization.

    Each one-hot target is softened: the true class receives probability
    ``1 - alpha`` and the remaining ``alpha`` mass is spread uniformly
    over the other ``C - 1`` classes.

    Args:
        outputs: raw logits of shape (N, C).
        labels: ground-truth class indices of shape (N,), integer dtype.
        alpha: smoothing factor in [0, 1); default 0.1 preserves the
            previously hard-coded value.

    Returns:
        Scalar tensor: smoothed cross-entropy averaged over the batch.
    """
    N = outputs.size(0)  # batch_size
    C = outputs.size(1)  # number of classes
    # Allocate the smoothed targets on the same device/dtype as the logits
    # instead of hard-coding .cuda(), so the loss also works on CPU and on
    # the correct GPU in multi-device setups.
    smoothed_labels = torch.full(
        size=(N, C),
        fill_value=alpha / (C - 1),
        device=outputs.device,
        dtype=outputs.dtype,
    )
    # Put 1 - alpha at each sample's true-class position.
    smoothed_labels.scatter_(dim=1, index=torch.unsqueeze(labels, dim=1), value=1 - alpha)

    log_prob = torch.nn.functional.log_softmax(outputs, dim=1)
    # Cross-entropy between smoothed targets and predicted log-probabilities.
    loss = -torch.sum(log_prob * smoothed_labels) / N

    return loss


def loss_gauss_label_smoothing(outputs, teacher_smooth, params):
    """Cross-entropy loss against a pre-smoothed (teacher) target distribution.

    Args:
        outputs: raw student logits of shape (N, C).
        teacher_smooth: target probability distribution of shape (N, C),
            e.g. Gaussian-smoothed teacher outputs; presumably each row
            sums to 1 — TODO confirm with caller.
        params: config object; ``params.cuda`` is truthy when CUDA is in use.

    Returns:
        Scalar tensor: cross-entropy averaged over the batch.
    """
    N = outputs.size(0)  # batch_size
    log_prob = torch.nn.functional.log_softmax(outputs, dim=1)
    if params.cuda:
        # Fix: `async` became a reserved keyword in Python 3.7, making the
        # original `log_prob.cuda(async=True)` a SyntaxError; the Tensor.cuda()
        # keyword argument is now `non_blocking`.
        log_prob = log_prob.cuda(non_blocking=True)
    loss = -torch.sum(log_prob * teacher_smooth) / N
    return loss