import torch.nn
import torch.optim as optim
from torch.optim import lr_scheduler

# Setup before training: a toy linear model, its optimizer, and an LR schedule.
# Linear requires in_features/out_features; (10, 2) are arbitrary demo sizes
# (the original `torch.nn.Linear()` raised TypeError).
net = torch.nn.Linear(10, 2)
optimizer = optim.Adam(net.parameters(), lr=0.001)
# Multiply the learning rate by 0.1 every 10 epochs.
scheduler = lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.1)

# Training loop. Since PyTorch 1.1, optimizer.step() must be called before
# scheduler.step(), otherwise the first value of the LR schedule is skipped
# and a UserWarning is emitted.
for epoch in range(3):
    # ... forward pass, loss computation, loss.backward() would go here ...
    optimizer.step()
    scheduler.step()
