# Cosine-annealing LR schedule over the whole run. T_max is tied to
# num_epochs (was hard-coded to 20): with a fixed T_max the schedule only
# completes exactly one cosine decay when num_epochs happens to equal 20;
# otherwise the learning rate is cut short or cycles back up mid-training.
scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=num_epochs)

for epoch in range(num_epochs):
    for inputs, targets in dataloader:
        optimizer.zero_grad()          # clear accumulated gradients from the previous step
        outputs = model(inputs)        # forward pass
        loss = nn.functional.cross_entropy(outputs, targets)
        loss.backward()                # backpropagate
        optimizer.step()               # apply the parameter update

    # Advance the LR schedule once per epoch, after the epoch's optimizer
    # steps (calling scheduler.step() before optimizer.step() skips the
    # first LR value and triggers a PyTorch warning).
    scheduler.step()
