import torch 
import torch.nn as nn 


def my_loss(x, label):
    """Hand-written binary cross entropy, matching nn.BCELoss(reduction="none").

    Args:
        x: predicted probabilities in [0, 1] (already passed through sigmoid).
        label: binary targets (0.0 or 1.0), same shape as x.

    Returns:
        Element-wise BCE loss tensor, same shape as x.
    """
    # Clamp log outputs at -100 exactly like PyTorch's BCELoss, so that
    # x == 0 or x == 1 gives a large finite loss instead of inf/nan.
    log_x = torch.clamp(torch.log(x), min=-100)
    log_1mx = torch.clamp(torch.log(1 - x), min=-100)
    loss = -label * log_x - (1 - label) * log_1mx
    return loss

def my_loss_mean(x, label):
    """Hand-written binary cross entropy, matching nn.BCELoss(reduction="mean").

    Args:
        x: predicted probabilities in [0, 1] (already passed through sigmoid).
        label: binary targets (0.0 or 1.0), same shape as x.

    Returns:
        Scalar tensor: mean of the element-wise BCE loss.
    """
    # Clamp log outputs at -100 exactly like PyTorch's BCELoss, so boundary
    # probabilities (0 or 1) contribute a large finite loss instead of inf/nan.
    log_x = torch.clamp(torch.log(x), min=-100)
    log_1mx = torch.clamp(torch.log(1 - x), min=-100)
    loss = -label * log_x - (1 - label) * log_1mx
    return loss.mean()


def my_entropy_loss(x, label):
    """Hand-written cross entropy over raw logits with one-hot targets.

    Args:
        x: raw logits, shape (batch, num_classes).
        label: one-hot (or soft) targets, same shape as x.

    Returns:
        Element-wise -label * log_softmax(x), same shape as x; summing over
        dim=1 reproduces nn.CrossEntropyLoss(reduction="none") per sample.
    """
    # log_softmax is the numerically stable form: the naive
    # exp(x) / exp(x).sum() overflows to inf (then nan) for large logits.
    loss = -label * torch.log_softmax(x, dim=1)
    return loss

if __name__ == "__main__":
    # --- Binary cross entropy: compare nn.BCELoss with the hand-written one ---
    probs = torch.tensor([0.5, 0.6, 0.9], dtype=torch.float32)
    targets = torch.tensor([0, 1, 1], dtype=torch.float32)

    bce_none = nn.BCELoss(reduction="none")  # reduction="mean" is the usual choice
    print(bce_none(probs, targets))
    print(my_loss(probs, targets))

    print("~~~~~~~~~~~~~")
    bce_mean = nn.BCELoss(reduction="mean")  # reduction="mean" is the usual choice
    print(bce_mean(probs, targets))
    print(my_loss_mean(probs, targets))

    print("~~~~~~~~~~~")
    # --- Cross entropy over 3 classes: compare nn.CrossEntropyLoss with ours ---
    logits = torch.tensor([[1, 2, 3], [3, 2, 1]], dtype=torch.float32)
    class_ids = torch.tensor([0, 1])  # integer class targets
    ce_none = nn.CrossEntropyLoss(reduction="none")
    print(ce_none(logits, class_ids))

    one_hot = torch.tensor([[1, 0, 0], [0, 1, 0]])  # class_ids as one-hot
    print(my_entropy_loss(logits, one_hot))
