import torch
import torch.nn as nn


'''
CrossEntropyLoss: the input is raw logits (no softmax needed — it is applied
internally); the target is integer class indices (no one-hot encoding needed).
'''

# Demo: nn.CrossEntropyLoss consumes raw (unnormalized) logits and integer
# class indices. Softmax + log are applied internally, so the inputs must
# NOT be pre-normalized and the targets must NOT be one-hot encoded.
loss_fn = nn.CrossEntropyLoss()

# Raw model scores for a batch of 3 samples over 3 classes.
logits = torch.randn(3, 3)  # shape: (batch_size=3, num_classes=3)

# One integer class index per sample (no one-hot encoding).
targets = torch.tensor([0, 2, 1])  # shape: (batch_size=3,)

loss = loss_fn(logits, targets)
print(loss)

'''
NLLLoss: the input must be log-probabilities, i.e. logits passed through
softmax and then log (= log_softmax); the target is integer class indices
(no one-hot encoding needed).
'''

# Demo: nn.NLLLoss expects LOG-probabilities as input — i.e. logits that have
# already been passed through log_softmax — together with integer class
# indices as targets. NLLLoss(log_softmax(logits)) == CrossEntropyLoss(logits).
# (`torch` and `torch.nn` are already imported at the top of the file.)

# Raw model scores for a batch of 3 samples over 3 classes.
logits = torch.randn(3, 3)  # shape: (batch_size=3, num_classes=3)

# log_softmax computes softmax followed by log in one numerically stable step.
log_probs = torch.log_softmax(logits, dim=1)

# One integer class index per sample (no one-hot encoding).
targets = torch.tensor([0, 2, 1])  # shape: (batch_size=3,)

loss_fn = nn.NLLLoss()
loss = loss_fn(log_probs, targets)
print(loss)

