import torch.nn as nn
import torch.nn.functional as F
import torch
class CELoss(nn.Module):
    """Cross-entropy loss over logits, with optional label smoothing.

    Args:
        label_smooth: smoothing factor eps in (0, 1); ``None`` disables
            smoothing and uses plain cross entropy.
        class_num: number of classes, needed to build one-hot targets
            when smoothing is enabled.
    """

    def __init__(self, label_smooth=None, class_num=518):
        super(CELoss, self).__init__()
        self.label_smooth = label_smooth
        self.class_num = class_num

    def forward(self, pred, target):
        """Return the mean loss over the batch.

        Args:
            pred: raw logits of shape (N, class_num).
            target: integer class indices of shape (N,).

        Returns:
            Scalar tensor, mean loss over the N samples.
        """
        if self.label_smooth is not None:
            logprobs = F.log_softmax(pred, dim=1)
            target = F.one_hot(target, self.class_num)
            # Label smoothing via clamp on the one-hot rows:
            # 1 -> 1 - eps and 0 -> eps / (class_num - 1),
            # so each row still sums to 1.
            target = torch.clamp(
                target.float(),
                min=self.label_smooth / (self.class_num - 1),
                max=1.0 - self.label_smooth,
            )
            loss = -1 * torch.sum(target * logprobs, 1)
        else:
            # CE(x, y) = -x[y] + logsumexp(x).
            # BUG FIX vs. original: the gather result is (N, 1) while the
            # log-sum-exp term is (N,); the original added them without
            # squeezing, broadcasting to an (N, N) matrix whose mean is
            # wrong. Also replaced log(exp(pred + eps).sum(1)) — eps inside
            # the exponent does nothing useful and exp() can overflow —
            # with the numerically stable torch.logsumexp.
            loss = (
                -pred.gather(1, target.unsqueeze(-1)).squeeze(1)
                + torch.logsumexp(pred, dim=1)
            )
        return loss.mean()


class SoftTargetCrossEntropy(nn.Module):
    """Cross entropy against soft targets (per-class probability rows)."""

    def __init__(self):
        super(SoftTargetCrossEntropy, self).__init__()

    def forward(self, x, target):
        """Return the batch mean of -sum(target * log_softmax(x)) over the last dim.

        Args:
            x: logits, shape (..., C).
            target: soft label distribution with the same shape as ``x``.
        """
        log_probs = F.log_softmax(x, dim=-1)
        per_sample = -(target * log_probs).sum(dim=-1)
        return per_sample.mean()

if __name__=="__main__":
    loss = CELoss(label_smooth=0.05,class_num=3)
    x = torch.tensor([[1,8,1],[1,1,8]],dtype = torch.float)
    y = torch.tensor([1,2])
    logprobs = F.log_softmax(x, dim=1)
    target = F.one_hot(y, 3)
    print(x.gather(1,y.unsqueeze(-1)))