import torch
import torch.nn as nn

# Prepare data. CrossEntropyLoss expects the target tensor to be of Long (int64) dtype.
batch_size = 5
torch.manual_seed(42)  # fix the RNG so the random inputs/weights are reproducible
# Fake input: 5 samples of 16x16 "images" (values in [0, 1)).
x = torch.rand(batch_size, 16, 16)
# Mapping from class index to human-readable label name.
label_name = {
    0: "汽车",
    1: "飞机",
    2: "轮船",
}
# Ground-truth class index per sample; int list -> int64 tensor, as required by the loss.
y = torch.tensor([1, 2, 1, 0, 1])

# 1. Hyperparameters
lr = 0.01
Epochs = 10000

# MLP classifier: flatten the 16x16 input, pass through three hidden ReLU
# layers, and emit 3 raw class logits.
# The output layer is deliberately left un-activated: CrossEntropyLoss applies
# LogSoftmax internally. (The alternative would be an explicit Softmax/LogSoftmax
# output paired with NLLLoss.)
_layers = [
    nn.Flatten(),
    nn.Linear(16 * 16, 64),
    nn.ReLU(),
    nn.Linear(64, 128),
    nn.ReLU(),
    nn.Linear(128, 64),
    nn.ReLU(),
    nn.Linear(64, 3),
]
model = nn.Sequential(*_layers)
loss_fn = nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(model.parameters(), lr=lr)

# Full-batch training with plain SGD; log the loss on the first epoch and
# every 100th epoch thereafter.
model.train()
for epoch in range(Epochs):
    # Forward pass: compute logits and the cross-entropy loss.
    logits = model(x)
    loss = loss_fn(logits, y)

    # Standard update: clear stale gradients, backprop, apply the step.
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    should_log = (epoch == 0) or ((epoch + 1) % 100 == 0)
    if should_log:
        print(f"[{epoch + 1}/{Epochs}] Loss:{loss.item():.4f}")


# Inference: switch to eval mode and disable autograd tracking. eval() is a
# no-op for this architecture (no dropout/batchnorm) but keeps the
# train/eval phases explicit; no_grad() avoids building a needless graph.
model.eval()
with torch.no_grad():
    y_pre = model(x)
print(y_pre)

# Take the argmax over the class (last) dimension to get the predicted
# class index for each sample, then map it to its label name.
idx = y_pre.argmax(dim=-1)
print(idx)

for i in idx:
    print(label_name[i.item()])
