import torch
from torch import nn
from torch.nn import functional as F
def one_hot(label, depth=10):
    """Convert 1-D integer class indices into a one-hot float matrix.

    Args:
        label: class indices of shape (N,) — a tensor or any sequence of ints.
        depth: number of classes, i.e. the width of the encoding (default 10).

    Returns:
        FloatTensor of shape (N, depth) with exactly one 1.0 per row.
    """
    # torch.as_tensor accepts both tensors and plain sequences; the legacy
    # torch.LongTensor(tensor) constructor is deprecated and raises on
    # non-long tensor inputs.
    idx = torch.as_tensor(label, dtype=torch.long).view(-1, 1)
    # Size the output from idx so sequence inputs (no .size()) also work.
    out = torch.zeros(idx.size(0), depth)
    out.scatter_(dim=1, index=idx, value=1)
    return out

criteon = nn.CrossEntropyLoss() # Cross-entropy for multi-class classification; expects raw logits, so no Softmax layer in front of it.
bceloss = nn.BCELoss() # Binary cross-entropy; inputs must already be probabilities, so apply a Sigmoid before this layer.
bcelosslogits = nn.BCEWithLogitsLoss() # Binary cross-entropy on raw logits; do NOT apply a Sigmoid beforehand.
mseloss = nn.MSELoss() # Mean squared error loss.

# Element-wise sigmoid module. A single-child nn.Sequential adds nothing but
# indirection, so instantiate nn.Sigmoid directly; the interface (a callable
# mapping tensor -> tensor) is unchanged.
sigmoid = nn.Sigmoid()

# Demo: BCELoss on sigmoid-activated inputs agrees with BCEWithLogitsLoss on
# the raw logits, and the functional API reproduces both module results.
logits = torch.rand(128)
target = torch.ones(128)

probs = sigmoid(logits)
loss1 = bceloss(probs, target)
loss2 = bcelosslogits(logits, target)
for loss_val in (loss1, loss2):
    print(loss_val.item())

# Functional equivalents of the two module losses above.
print('F:', F.binary_cross_entropy(probs, target).item())
print('F:', F.binary_cross_entropy_with_logits(logits, target).item())

# Demo: MSE between a softmax distribution and its one-hot target, computed
# through both the module and the functional API (the two must agree).
data1 = F.softmax(torch.randn(128, 100), dim=1)
data2 = torch.randint(0, 100, [128,])
one_hot_data2 = one_hot(data2, 100)
loss3 = mseloss(data1, one_hot_data2)

print(loss3.item())
print('F:', F.mse_loss(data1, one_hot_data2).item())

# Demo: multi-class cross-entropy on raw logits — module wrapper versus the
# functional call; both yield the same scalar loss.
data1, data2 = torch.randn(128, 100), torch.randint(0, 100, (128,))
loss4 = criteon(data1, data2)
print(loss4.item())

print('F:', F.cross_entropy(data1, data2).item())