import streamlit as st
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
import torch.optim as optim
from sklearn.datasets import make_moons
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler

# Use the wide layout so the demo figures get the full viewport width.
# NOTE: set_page_config must be the FIRST Streamlit call in the script,
# so keep this statement ahead of any other st.* usage.
st.set_page_config(layout="wide")

st.title("学习率 (Learning Rate)")

# ================= 基础介绍 =================
with st.form("lr_form1"):
    st.markdown("## 一、学习率基础")
    # BUGFIX: this markdown embeds LaTeX, so it must be a *raw* string.
    # In the original non-raw literal the escapes were interpreted:
    # "\theta" became TAB+"heta", "\alpha" became BEL+"lpha" and
    # "\nabla" became NEWLINE+"abla", corrupting the rendered formula.
    st.markdown(r"""
    ### 1. 定义
    学习率决定了每次参数更新的步长，是最重要的超参数之一：
    $$ \theta_{new} = \theta_{old} - \alpha \cdot \nabla J(\theta) $$

    ### 2. 影响
    - 学习率过大：Loss 震荡甚至发散
    - 学习率过小：收敛过慢，易陷入局部最优
    - 学习率合适：Loss 平稳下降

    ### 3. 学习率调度策略
    - **StepLR**：每隔一定 epoch 降低学习率
    - **ExponentialLR**：指数衰减
    - **CosineAnnealingLR**：余弦退火
    - **ReduceLROnPlateau**：当指标不再改善时降低学习率
    """)

    # Confirmation button, pushed to the right via a wide/narrow column split.
    col1, col2 = st.columns([10, 1])
    with col2:
        submit = st.form_submit_button("我已学习")
    if submit:
        st.success("恭喜你，已了解学习率基础")
        st.balloons()

# ================= 代码演示 =================
with st.form("lr_form2"):
    st.markdown("## 二、学习率实验与可视化")

    # Toy binary dataset: two interleaving half-moons with noise, then
    # standardized; fixed random_state keeps every rerun reproducible.
    X, y = make_moons(n_samples=500, noise=0.2, random_state=42)
    X = StandardScaler().fit_transform(X)
    # NOTE(review): X_test / y_test are never used later in this file —
    # the split only serves to carve out the training subset.
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

    # Tensors consumed by train_with_lr below; CrossEntropyLoss requires
    # float inputs and long (int64) class labels.
    X_train_t = torch.tensor(X_train, dtype=torch.float32)
    y_train_t = torch.tensor(y_train, dtype=torch.long)
    # 简单模型
    class SimpleNet(nn.Module):
        """Tiny 2-8-2 MLP used by the learning-rate demos.

        Maps 2-D inputs to two class logits through one ReLU hidden layer.
        """

        def __init__(self):
            super().__init__()
            # Keep layer creation order stable: seeded initialization
            # depends on the order parameters are allocated.
            self.fc1 = nn.Linear(2, 8)
            self.fc2 = nn.Linear(8, 2)

        def forward(self, x):
            hidden = self.fc1(x)
            activated = torch.relu(hidden)
            logits = self.fc2(activated)
            return logits

    # 训练函数
    def train_with_lr(lr, epochs=50):
        """Train a fresh SimpleNet with plain SGD and return its loss curve.

        Args:
            lr: learning rate passed to the SGD optimizer.
            epochs: number of full-batch updates (default 50, matching the
                original hard-coded demo length; parameterized for reuse).

        Returns:
            List of per-epoch training-loss floats, length ``epochs``.
        """
        model = SimpleNet()
        criterion = nn.CrossEntropyLoss()
        optimizer = optim.SGD(model.parameters(), lr=lr)
        losses = []
        for _ in range(epochs):
            # Full-batch gradient descent: one optimizer step per epoch
            # over the entire training set.
            optimizer.zero_grad()
            outputs = model(X_train_t)
            loss = criterion(outputs, y_train_t)
            loss.backward()
            optimizer.step()
            losses.append(loss.item())
        return losses

    # Compare training curves across a too-small / reasonable / too-large
    # learning rate on the same model and data.
    lr_values = [0.001, 0.05, 1.0]
    losses_dict = {}
    for lr in lr_values:
        losses_dict[f"lr={lr}"] = train_with_lr(lr)

    fig1, ax1 = plt.subplots()
    for curve_label, curve in losses_dict.items():
        ax1.plot(curve, label=curve_label)
    ax1.set_xlabel("Epoch")
    ax1.set_ylabel("Loss")
    ax1.set_title("不同学习率下的训练Loss对比")
    ax1.legend()
    st.pyplot(fig1)

    # ========== Learning-rate schedule comparison ==========
    # Each entry pairs a display name with a factory that wraps the given
    # optimizer in the corresponding scheduler.
    scheduler_factories = [
        ("StepLR", lambda opt: optim.lr_scheduler.StepLR(opt, step_size=10, gamma=0.5)),
        ("ExponentialLR", lambda opt: optim.lr_scheduler.ExponentialLR(opt, gamma=0.9)),
        ("CosineAnnealingLR", lambda opt: optim.lr_scheduler.CosineAnnealingLR(opt, T_max=50)),
    ]

    fig2, ax2 = plt.subplots()
    for sched_name, make_scheduler in scheduler_factories:
        model = SimpleNet()
        optimizer = optim.SGD(model.parameters(), lr=0.1)
        scheduler = make_scheduler(optimizer)

        lr_trace = []
        for _ in range(50):
            # Record the lr in effect for this epoch, then advance the
            # optimizer before the scheduler (required call order).
            lr_trace.append(optimizer.param_groups[0]['lr'])
            optimizer.step()
            scheduler.step()
        ax2.plot(lr_trace, label=sched_name)

    ax2.set_xlabel("Epoch")
    ax2.set_ylabel("Learning Rate")
    ax2.set_title("不同调度策略下学习率变化")
    ax2.legend()
    st.pyplot(fig2)

    # Static reference snippet shown to the reader (display only — the
    # string content is not executed here and must stay verbatim).
    st.code("""
# 固定学习率
optimizer = optim.SGD(model.parameters(), lr=0.05)

# StepLR 每10个epoch 学习率减半
scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=10, gamma=0.5)

for epoch in range(50):
    optimizer.step()
    scheduler.step()
    print(optimizer.param_groups[0]['lr'])
""", language="python")

    # Confirmation button, pushed to the right via a wide/narrow column split.
    col1, col2 = st.columns([10, 1])
    with col2:
        submit = st.form_submit_button("我已学习")
    if submit:
        st.success("恭喜你，已掌握学习率影响与调度策略")
        st.balloons()
