import torch

# Demonstration: CrossEntropyLoss(logits, labels) is exactly
# NLLLoss applied to LogSoftmax(logits) — CE = NLL(log_softmax(x)).
#
# Fix: torch.autograd.Variable has been deprecated since PyTorch 0.4
# (plain tensors are autograd-aware), and torch.LongTensor(...) is the
# legacy constructor — use torch.tensor(..., dtype=torch.long) instead.
logits = torch.tensor([[2.0, 0.5, 6.0], [0.1, 0.0, 3.0]])
labels = torch.tensor([2, 1], dtype=torch.long)
print(logits)
print(labels)

ss = torch.nn.Softmax(dim=1)  # instantiate a softmax over the class dimension
print('Softmax:', ss(logits))

logsoftmax = torch.nn.LogSoftmax(dim=1)(logits)
print('logsoftmax:', logsoftmax)

# NLLLoss expects log-probabilities, so LogSoftmax + NLLLoss together
# compute the same value as the fused CrossEntropyLoss below.
output = torch.nn.NLLLoss()(logsoftmax, labels)
print('NLLLoss:', output)

crossLoss = torch.nn.CrossEntropyLoss()
print('CrossEntropyLoss:', crossLoss(logits, labels))