import torch
import numpy as np
import torch.nn as nn
import random

def loss_label_smoothing(outputs, labels, alpha=0.1):
    """
    Cross-entropy loss with label smoothing regularization.

    The one-hot target for each sample is softened: the true class gets
    probability ``1 - alpha`` and the remaining ``alpha`` is spread
    uniformly over the other ``C - 1`` classes.

    Args:
        outputs: (N, C) tensor of unnormalized logits.
        labels: (N,) tensor of integer class indices in [0, C).
        alpha: smoothing factor in [0, 1); default 0.1 (the original
            hard-coded value, so existing callers are unaffected).

    Returns:
        Scalar tensor: mean smoothed cross-entropy over the batch.
    """
    N = outputs.size(0)  # batch size
    C = outputs.size(1)  # number of classes

    # Build smoothed targets on the same device/dtype as `outputs`
    # (the original hard-coded .cuda(), which breaks on CPU-only hosts
    # and mismatches dtype for float64 inputs).
    smoothed_labels = torch.full(
        size=(N, C),
        fill_value=alpha / (C - 1),
        dtype=outputs.dtype,
        device=outputs.device,
    )
    smoothed_labels.scatter_(dim=1, index=torch.unsqueeze(labels, dim=1), value=1 - alpha)

    log_prob = torch.nn.functional.log_softmax(outputs, dim=1)
    # Mean (over the batch) of the per-sample smoothed cross-entropy.
    loss = -torch.sum(log_prob * smoothed_labels) / N

    return loss




if __name__ == '__main__':
    # Smoke-test the loss on random data; report whether CUDA is present.
    print(torch.cuda.is_available())

    # Draw the whole (128, 10) batch of fake logits in one vectorized call
    # instead of appending 128 separate ndarrays and converting the list
    # (torch.tensor on a list of ndarrays is slow and warned against).
    batch = torch.tensor(np.random.normal(10, 3, size=(128, 10)))

    # One random class label in [0, 9] per sample.
    labels = torch.tensor([random.randint(0, 9) for _ in range(128)])

    # Print the result so the demo actually shows the computed loss
    # (the original discarded the return value).
    print(loss_label_smoothing(batch, labels))