import streamlit as st
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score
from config.matplotlib_config import apply_matplotlib_cn


# Apply the project's shared matplotlib configuration before any figure is
# created (presumably CJK font setup, given the "_cn" suffix — confirm in
# config/matplotlib_config.py).
apply_matplotlib_cn()

# NOTE: st.set_page_config must be the first Streamlit command on the page.
st.set_page_config(page_title="线性回归", page_icon="📈")

# Auth gate: the home page sets session_state['logged_in'] on login.
# st.stop() aborts script execution so nothing below renders for guests.
if not st.session_state.get('logged_in', False):
    st.warning("请先在首页登录以访问此内容")
    st.stop()

st.title("线性回归算法")

# Three-tab layout (theory / code sample / interactive demo); each tab is
# populated by a `with tabN:` block below.
tab1, tab2, tab3 = st.tabs(["原理讲解", "代码示例", "试一试"])

with tab1:
    st.header("线性回归算法原理")

    # Theory write-up, rendered as LaTeX-enabled Markdown.  Kept in a local
    # variable so the content reads separately from the rendering call; the
    # text itself is reproduced exactly as before.
    theory_md = """
        线性回归是一种利用直线(线性)关系来建模自变量和因变量之间关系的统计方法。

        ### 数学模型
        简单线性回归方程：$y = \\beta_0 + \\beta_1x + \\epsilon$

        多元线性回归方程：$y = \\beta_0 + \\beta_1x_1 + \\beta_2x_2 + ... + \\beta_nx_n + \\epsilon$

        ### 参数估计
        最小二乘法：通过最小化误差的平方和来估计参数

        $$\\min_{\\beta} \\sum_{i=1}^{n}(y_i - \\hat{y_i})^2$$

        ### 评估指标
        - 均方误差(MSE)：$\\frac{1}{n}\\sum_{i=1}^{n}(y_i - \\hat{y_i})^2$
        - 决定系数(R²)：$1 - \\frac{\\sum_{i=1}^{n}(y_i - \\hat{y_i})^2}{\\sum_{i=1}^{n}(y_i - \\bar{y})^2}$

        ### 优缺点
        **优点**：
        - 简单易懂，解释性强
        - 计算效率高
        - 适用于线性关系的数据

        **缺点**：
        - 对非线性关系建模能力差
        - 对异常值敏感
        - 假设特征之间相互独立
        """
    st.markdown(theory_md)

with tab2:
    st.header("线性回归代码示例")

    # Self-contained sklearn walkthrough shown to the learner.  This string
    # is display-only — it is never executed by the page.
    example_code = """
from sklearn.linear_model import LinearRegression
from sklearn.datasets import make_regression
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error, r2_score

# 生成回归数据
X, y = make_regression(n_samples=100, n_features=1, noise=10, random_state=42)

# 划分训练集和测试集
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# 创建线性回归模型
model = LinearRegression()

# 训练模型
model.fit(X_train, y_train)

# 预测
y_pred = model.predict(X_test)

# 评估模型
mse = mean_squared_error(y_test, y_pred)
r2 = r2_score(y_test, y_pred)

print(f"均方误差(MSE): {mse:.2f}")
print(f"决定系数(R²): {r2:.2f}")
print(f"系数: {model.coef_}")
print(f"截距: {model.intercept_}")
        """
    st.code(example_code, language="python")

with tab3:
    st.header("线性回归演示")

    col1, col2 = st.columns(2)

    with col1:
        # Interactive knobs: noise magnitude of the synthetic data and the
        # held-out test fraction.
        noise_level = st.slider("噪声水平", 1, 30, 10)
        test_size = st.slider("测试集比例", 0.1, 0.5, 0.2)

        # Generate the 1-feature regression problem once, using the slider
        # values.  (The previous version also generated a throwaway dataset
        # before reading the sliders; that dead call is removed.)
        X, y = make_regression(
            n_samples=100, n_features=1, noise=noise_level, random_state=42
        )
        X_train, X_test, y_train, y_test = train_test_split(
            X, y, test_size=test_size, random_state=42
        )

        # Fit ordinary least squares and evaluate on the held-out split.
        model = LinearRegression()
        model.fit(X_train, y_train)
        y_pred = model.predict(X_test)

        mse = mean_squared_error(y_test, y_pred)
        r2 = r2_score(y_test, y_pred)

        st.metric("均方误差(MSE)", f"{mse:.4f}")
        st.metric("决定系数(R²)", f"{r2:.4f}")
        st.write(f"系数: {model.coef_[0]:.4f}")
        st.write(f"截距: {model.intercept_:.4f}")

    with col2:
        # Draw on the explicit Axes (not the implicit pyplot state machine)
        # so the plot cannot land on some other currently-active figure.
        fig, ax = plt.subplots(figsize=(8, 6))
        ax.scatter(X_test, y_test, color='blue', label='实际值')
        # Sort by x so the fitted line is traced left-to-right.
        order = np.argsort(X_test[:, 0])
        ax.plot(X_test[order], y_pred[order], color='red', linewidth=2, label='预测值')
        ax.set_title("线性回归拟合结果")
        ax.set_xlabel("特征")
        ax.set_ylabel("目标值")
        ax.legend()
        st.pyplot(fig)
        # Release the figure; Streamlit reruns this script on every widget
        # change, and unclosed figures accumulate in memory.
        plt.close(fig)