import torch
import torch.optim as optim

def adam_optimizer_demo(num_epochs=100, lr=0.001, batch_size=32, seed=None):
    """Demonstrate training a small MLP regressor with the Adam optimizer.

    Args:
        num_epochs (int): number of training iterations (default 100).
        lr (float): Adam learning rate (default 0.001).
        batch_size (int): samples per synthetic batch (default 32).
        seed (int | None): when given, seeds torch's RNG so that model
            initialization and the synthetic data are reproducible.

    Returns:
        list[float]: the MSE loss recorded at every epoch.
    """
    if seed is not None:
        torch.manual_seed(seed)

    # Simple two-layer MLP: 10 inputs -> 50 hidden units -> 1 output
    model = torch.nn.Sequential(
        torch.nn.Linear(10, 50),
        torch.nn.ReLU(),
        torch.nn.Linear(50, 1),
    )

    optimizer = optim.Adam(
        model.parameters(),
        lr=lr,
        betas=(0.9, 0.999),  # decay rates for first/second moment estimates
        eps=1e-8,            # numerical-stability constant
    )

    losses = []
    for epoch in range(num_epochs):
        # Fresh synthetic data each epoch; targets are random noise, so the
        # loss will not converge to zero — this is only an optimizer demo.
        inputs = torch.randn(batch_size, 10)
        targets = torch.randn(batch_size, 1)

        # Forward pass
        outputs = model(inputs)
        loss = torch.nn.functional.mse_loss(outputs, targets)

        # Backward pass
        optimizer.zero_grad()  # clear accumulated gradients
        loss.backward()        # compute gradients
        optimizer.step()       # apply the Adam update

        losses.append(loss.item())

        if epoch % 20 == 0:
            print(f'Epoch {epoch}, Loss: {loss.item():.4f}')

    return losses

# Run the Adam demo only when executed as a script, so importing this
# module does not kick off a training loop as a side effect.
if __name__ == "__main__":
    losses = adam_optimizer_demo()