{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "7998ad23",
   "metadata": {},
   "source": [
    "# 线性回归 vs 逻辑回归：解析解与梯度下降求解对比"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "072d4937",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.linear_model import LinearRegression, LogisticRegression\n",
    "from sklearn.datasets import make_classification, make_regression\n",
    "\n",
    "# 设置中文显示\n",
    "plt.rcParams[\"font.family\"] = [\"SimHei\", \"WenQuanYi Micro Hei\", \"Heiti TC\"]\n",
    "plt.rcParams['axes.unicode_minus'] = False  # 解决负号显示问题\n",
    "\n",
    "# ===== 线性回归：解析解 vs 梯度下降 =====\n",
    "# 生成数据\n",
    "X_reg, y_reg = make_regression(n_samples=100, n_features=1, noise=10, random_state=42)\n",
    "\n",
    "# 解析解\n",
    "X_bias = np.hstack([np.ones((X_reg.shape[0], 1)), X_reg])\n",
    "beta_analytical = np.linalg.inv(X_bias.T @ X_bias) @ X_bias.T @ y_reg\n",
    "\n",
    "# 梯度下降解\n",
    "lr_reg = LinearRegression()\n",
    "lr_reg.fit(X_reg, y_reg)\n",
    "beta_gradient = [lr_reg.intercept_, lr_reg.coef_[0]]\n",
    "\n",
    "# 可视化\n",
    "plt.figure(figsize=(15, 6))\n",
    "\n",
    "# 左图：线性回归解析解与梯度下降解对比\n",
    "plt.subplot(1, 2, 1)\n",
    "plt.scatter(X_reg, y_reg, color='blue', alpha=0.6, label='数据点')\n",
    "x_line = np.linspace(X_reg.min(), X_reg.max(), 100)\n",
    "plt.plot(x_line, beta_analytical[0] + beta_analytical[1] * x_line, 'r-', \n",
    "         linewidth=2, label=f'解析解: y={beta_analytical[0]:.2f}+{beta_analytical[1]:.2f}x')\n",
    "plt.plot(x_line, beta_gradient[0] + beta_gradient[1] * x_line, 'g--', \n",
    "         linewidth=2, label=f'梯度下降: y={beta_gradient[0]:.2f}+{beta_gradient[1]:.2f}x')\n",
    "plt.title('线性回归：解析解 vs 梯度下降')\n",
    "plt.xlabel('X')\n",
    "plt.ylabel('Y')\n",
    "plt.legend()\n",
    "plt.grid(True)\n",
    "\n",
    "# ===== 逻辑回归：无解析解 =====\n",
    "# 生成数据\n",
    "X_clf, y_clf = make_classification(n_samples=200, n_features=2, n_redundant=0, random_state=42)\n",
    "\n",
    "# 训练逻辑回归模型（梯度下降）\n",
    "lr_clf = LogisticRegression(solver='lbfgs')\n",
    "lr_clf.fit(X_clf, y_clf)\n",
    "\n",
    "# 可视化决策边界\n",
    "plt.subplot(1, 2, 2)\n",
    "plt.scatter(X_clf[y_clf==0][:, 0], X_clf[y_clf==0][:, 1], c='lightblue', \n",
    "            marker='o', edgecolor='k', label='类别0')\n",
    "plt.scatter(X_clf[y_clf==1][:, 0], X_clf[y_clf==1][:, 1], c='lightcoral', \n",
    "            marker='x', edgecolor='k', label='类别1')\n",
    "\n",
    "# 绘制决策边界\n",
    "h = 0.02\n",
    "x_min, x_max = X_clf[:, 0].min() - 1, X_clf[:, 0].max() + 1\n",
    "y_min, y_max = X_clf[:, 1].min() - 1, X_clf[:, 1].max() + 1\n",
    "xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))\n",
    "Z = lr_clf.predict(np.c_[xx.ravel(), yy.ravel()])\n",
    "Z = Z.reshape(xx.shape)\n",
    "plt.contourf(xx, yy, Z, alpha=0.2, cmap=plt.cm.coolwarm)\n",
    "plt.contour(xx, yy, Z, levels=[0.5], linewidths=2, colors=['black'])\n",
    "\n",
    "plt.title('逻辑回归：决策边界（梯度下降求解）')\n",
    "plt.xlabel('特征1')\n",
    "plt.ylabel('特征2')\n",
    "plt.legend()\n",
    "plt.grid(True)\n",
    "\n",
    "plt.tight_layout()\n",
    "plt.show()\n",
    "\n",
    "# ===== 打印参数对比 =====\n",
    "print(\"线性回归参数对比：\")\n",
    "print(f\"解析解: β₀={beta_analytical[0]:.4f}, β₁={beta_analytical[1]:.4f}\")\n",
    "print(f\"梯度下降: β₀={beta_gradient[0]:.4f}, β₁={beta_gradient[1]:.4f}\")\n",
    "\n",
    "print(\"\\n逻辑回归参数：\")\n",
    "print(f\"梯度下降: β₀={lr_clf.intercept_[0]:.4f}, β₁={lr_clf.coef_[0][0]:.4f}, β₂={lr_clf.coef_[0][1]:.4f}\")\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "my_env",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "name": "python",
   "version": "3.12.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
