{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3663ee33-c7c4-463c-9e24-54e205a4d908",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import sympy as sp\n",
    "import matplotlib.pyplot as plt\n",
    "from mpl_toolkits.mplot3d import Axes3D\n",
    "%matplotlib widget\n",
    "import pandas as pd"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "077e6187-0177-4297-a574-9b35cc84fa62",
   "metadata": {},
   "source": [
    "**梯度下降法是用来求目标函数极值的方法。**\n",
    "[梯度下降介绍](https://mp.weixin.qq.com/s/EXumVg7EPcl0ZeRVeUk82g?poc_token=HBbc9WejaZE2S56dRjDqhYPXS7zZ7KvYanIsxWF0)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d0578996-b18f-4da7-ae8b-ff099a6a2288",
   "metadata": {},
   "source": [
    "**一元函数的梯度下降**\n",
    "$$\n",
    "f(x) = x\\sin(x)\n",
    "$$"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "ea474fa4-7250-4b25-b159-cd9fc10b54f4",
   "metadata": {},
   "outputs": [],
   "source": [
    "def f1(x):\n",
    "    \"\"\"Objective: f(x) = x*sin(x).\"\"\"\n",
    "    return x * np.sin(x)\n",
    "\n",
    "def grad1(x):\n",
    "    \"\"\"Analytic derivative: f'(x) = sin(x) + x*cos(x).\"\"\"\n",
    "    return np.sin(x) + np.cos(x) * x\n",
    "\n",
    "# Gradient descent on f1 (x_cur is the scalar iterate; the plotting\n",
    "# array below is named xs so the two no longer shadow each other).\n",
    "x_cur = 3.0               # initial point\n",
    "learning_rate = 0.01      # step size\n",
    "tolerance = 1e-7          # stop once |f'(x)| < tolerance\n",
    "max_iterations = 3000     # iteration cap\n",
    "\n",
    "history = []    # f(x) at each iteration\n",
    "x_history = []  # x at each iteration\n",
    "for i in range(max_iterations):\n",
    "    val = f1(x_cur)\n",
    "    dx = grad1(x_cur)\n",
    "    x_history.append(x_cur)\n",
    "    history.append(val)\n",
    "\n",
    "    if i % 50 == 0:\n",
    "        print(f\"Iteration {i}: x = {x_cur:.6f}, f(x) = {val:.6f}, gradient = {dx:.6f}\")\n",
    "\n",
    "    # convergence test before the step, so the logged point matches dx\n",
    "    if abs(dx) < tolerance:\n",
    "        break\n",
    "\n",
    "    x_cur = x_cur - learning_rate * dx  # descend along -f'(x)\n",
    "\n",
    "print(f\"Total iterations: {i+1}\")\n",
    "print(f\"Final x = {x_cur:.6f}, f(x) = {val:.6f}, gradient = {dx:.6f}\")\n",
    "\n",
    "# Plot the curve with the descent path overlaid.  Using the explicit\n",
    "# fig/ax API avoids the original's reliance on whichever figure was\n",
    "# \"current\": the scatter used to be issued after plt.show() and before\n",
    "# the next plt.figure(), which is fragile under %matplotlib widget.\n",
    "xs = np.linspace(-10, 10, 1000)\n",
    "fig, ax = plt.subplots(figsize=(10, 6))\n",
    "ax.plot(xs, f1(xs), label=\"f(x) = x sin(x)\", color=\"blue\")\n",
    "ax.scatter(x_history, history, s=25, c='red', alpha=0.5, label=\"descent path\")\n",
    "ax.set(xlabel=\"x\", ylabel=\"f(x)\", title=\"Graph of f(x) = x sin(x)\")\n",
    "ax.legend()\n",
    "plt.show()\n",
    "\n",
    "# Convergence of the objective value over iterations.\n",
    "fig2, ax2 = plt.subplots(figsize=(10, 6))\n",
    "ax2.plot(history)\n",
    "ax2.set(xlabel=\"Iteration\", ylabel=\"f(x)\", title=\"Convergence of Gradient Descent\")\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "11e3f6f5-7f42-4f6a-b603-37f6e5d650c6",
   "metadata": {},
   "source": [
    "**二元函数的梯度下降**\n",
    "$$\n",
    "f(x,y) = (1-x)^2 + 10(y-x^2)^2\n",
    "$$"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2ed659fd-c443-40ee-9a76-eebf02a62d01",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Rosenbrock-style objective f(x, y) = (1 - x)^2 + 10*(y - x^2)^2.\n",
    "def f2(x, y):\n",
    "    return (1 - x)**2 + 10 * (y - x**2)**2\n",
    "\n",
    "# Analytic gradient of f2.\n",
    "def grad2(x, y):\n",
    "    dx = -2 * (1 - x) - 40 * x * (y - x**2)\n",
    "    dy = 20 * (y - x**2)\n",
    "    return dx, dy\n",
    "\n",
    "# Surface plot of f2 over [-3, 3] x [-3, 3].\n",
    "x = np.linspace(-3, 3, 500)\n",
    "y = np.linspace(-3, 3, 500)\n",
    "X, Y = np.meshgrid(x, y)\n",
    "Z = f2(X, Y)\n",
    "\n",
    "fig = plt.figure()\n",
    "# add_subplot(projection='3d') is the supported way to create 3-D axes;\n",
    "# fig.add_axes(Axes3D(fig)) misused add_axes (which expects a rect) and\n",
    "# the direct-Axes3D pattern was removed in modern matplotlib.\n",
    "ax = fig.add_subplot(projection='3d')\n",
    "ax.plot_surface(X, Y, Z,\n",
    "                rstride=1, cstride=1,  # mesh stride; larger = coarser/faster\n",
    "                cmap=plt.get_cmap('rainbow'))\n",
    "\n",
    "# Gradient descent from (2, 2).\n",
    "x = 2.0                 # initial x\n",
    "y = 2.0                 # initial y\n",
    "learning_rate = 0.01    # small step: the Rosenbrock gradient is steep\n",
    "tolerance = 1e-7        # stop once ||grad|| < tolerance\n",
    "max_iterations = 10000\n",
    "\n",
    "history = []  # rows of [x, y, f(x, y), dx, dy] per iteration\n",
    "\n",
    "for i in range(max_iterations):\n",
    "    dx, dy = grad2(x, y)\n",
    "    history.append([x, y, f2(x, y), dx, dy])\n",
    "    gradient_magnitude = np.sqrt(dx**2 + dy**2)\n",
    "    if i % 500 == 0:\n",
    "        # log BEFORE stepping so the printed gradient matches the printed\n",
    "        # point (the original printed a stale gradient after the update)\n",
    "        print(f\"Iteration {i}: x = {x:.3f}, y = {y:.3f}, f(x, y) = {f2(x, y):.6f}, grad = [{dx},{dy}]\")\n",
    "    if gradient_magnitude < tolerance:\n",
    "        break\n",
    "    x = x - learning_rate * dx\n",
    "    y = y - learning_rate * dy\n",
    "\n",
    "print(f\"Total iterations: {i+1}\")\n",
    "print(f\"Final x = {x:.6f}, y = {y:.6f}, f(x, y) = {f2(x, y):.6f}\")\n",
    "\n",
    "# Overlay the descent path on the surface.\n",
    "x_history, y_history, f_history, _, _ = zip(*history)\n",
    "ax.scatter(x_history, y_history, f_history, color='black', s=11, label='Gradient Descent Path')\n",
    "ax.set_title(\"3D Surface Plot with Gradient Descent Path\")\n",
    "ax.set_xlabel(\"x\")\n",
    "ax.set_ylabel(\"y\")\n",
    "ax.set_zlabel(\"z\")\n",
    "ax.legend(loc=\"upper right\")\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "dc986571-b043-4f21-b95b-85eaf9303461",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Save the gradient-descent history to an Excel file.\n",
    "# NOTE(review): depends on `history` produced by the Rosenbrock cell\n",
    "# above (hidden-state coupling between cells); pandas' to_excel also\n",
    "# requires the openpyxl package to be installed — confirm availability.\n",
    "df = pd.DataFrame(history, columns=['x', 'y', 'f(x, y)', 'dx', 'dy'])\n",
    "df.to_excel('gradient_descent_history.xlsx', index=False)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "65790922-aa9f-4564-8c59-7c7e4bc33972",
   "metadata": {},
   "source": [
    "**三元函数的梯度下降**\n",
    "$$\n",
    "f(x, y, z) = e^{-x^2 - y^2 - z^2} + \\sin(x) + \\sin(y) + \\sin(z) + xy + yz + zx\n",
    "$$"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e5fc9c48-2571-4b7f-a66c-20b98d84c23f",
   "metadata": {},
   "outputs": [],
   "source": [
    "def f(x, y, z):\n",
    "    \"\"\"Objective: exp(-x^2-y^2-z^2) + sin(x)+sin(y)+sin(z) + xy + yz + zx.\"\"\"\n",
    "    return np.exp(-x**2 - y**2 - z**2) + np.sin(x) + np.sin(y) + np.sin(z) + x*y + y*z + z*x\n",
    "\n",
    "def gradient(x, y, z):\n",
    "    \"\"\"Analytic gradient of f as np.array([df/dx, df/dy, df/dz]).\"\"\"\n",
    "    exp_term = np.exp(-x**2 - y**2 - z**2)  # shared factor, computed once\n",
    "    dx = -2*x*exp_term + np.cos(x) + y + z\n",
    "    dy = -2*y*exp_term + np.cos(y) + x + z\n",
    "    dz = -2*z*exp_term + np.cos(z) + x + y\n",
    "    return np.array([dx, dy, dz])\n",
    "\n",
    "# Gradient descent setup\n",
    "x, y, z = 1.0, 1.0, 1.0  # initial point\n",
    "learning_rate = 0.1       # step size\n",
    "tolerance = 1e-6          # stop once ||grad|| < tolerance\n",
    "max_iterations = 1000     # iteration cap\n",
    "\n",
    "for i in range(max_iterations):\n",
    "    grad = gradient(x, y, z)\n",
    "    # Test convergence BEFORE stepping, so the reported point and\n",
    "    # gradient describe the same location.  (The original updated first\n",
    "    # and then tested the pre-update gradient, overshooting by one step\n",
    "    # and printing a new point paired with an old gradient.)\n",
    "    if np.linalg.norm(grad) < tolerance:\n",
    "        print(f\"循环次数：{i+1}\")\n",
    "        break\n",
    "    x -= learning_rate * grad[0]\n",
    "    y -= learning_rate * grad[1]\n",
    "    z -= learning_rate * grad[2]\n",
    "else:\n",
    "    # iteration cap reached: re-evaluate the gradient at the final point\n",
    "    grad = gradient(x, y, z)\n",
    "\n",
    "print(f\"最小值点为： x = {x}, y = {y}, z = {z}\")\n",
    "print(f\"该点梯度为：dx = {grad[0]}, dy = {grad[1]}, dz = {grad[2]}\")\n",
    "print(f\"该点梯度二范数：{np.linalg.norm(grad)}\")\n",
    "print(\"函数值为：\",f(x,y,z))"
   ]
  },
  {
   "cell_type": "raw",
   "id": "292be622-a797-44f0-997b-e5a4c9beb01c",
   "metadata": {},
   "source": [
    "方法1：手动计算梯度表达式"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "bbf4e651-90b3-43ae-8798-51a6d030e08d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Method 1: hand-derived gradient expression.\n",
    "# Renamed from f2/grad to f_manual/grad_manual: the original redefined\n",
    "# f2 (the two-argument Rosenbrock function above) with a one-argument\n",
    "# version, silently shadowing it across re-runs.  Both names are used\n",
    "# only within this cell.\n",
    "def f_manual(x):\n",
    "    \"\"\"f(x) = x*sin(x).\"\"\"\n",
    "    return(np.sin(x)*x)\n",
    "\n",
    "def grad_manual(x):\n",
    "    \"\"\"Analytic derivative f'(x) = sin(x) + x*cos(x).\"\"\"\n",
    "    return(np.sin(x)+np.cos(x)*x)\n",
    "\n",
    "# Plot the objective over [-10, 10].\n",
    "x = np.linspace(-10, 10, 1000)\n",
    "y = f_manual(x)\n",
    "\n",
    "plt.figure(figsize=(10, 6))\n",
    "plt.plot(x, y, label=\"f(x) = x sin(x)\", color=\"blue\")\n",
    "plt.xlabel(\"x\")\n",
    "plt.ylabel(\"f(x)\")\n",
    "plt.title(\"Graph of f(x) = x sin(x)\")\n",
    "plt.grid(True)\n",
    "plt.legend()\n",
    "plt.show()\n",
    "\n",
    "\n",
    "x = 3  # initial point\n",
    "learning_rate = 0.01       # step size\n",
    "tolerance = 1e-8          # stop once |f'(x)| < tolerance\n",
    "max_iterations = 3000     # iteration cap\n",
    "\n",
    "for i in range(max_iterations):\n",
    "    val = f_manual(x)\n",
    "    dx = grad_manual(x)\n",
    "    if i%50 == 0:\n",
    "        print(f\"x = {x}, current_val = {val}, dx = {dx}\")\n",
    "    if abs(dx) < tolerance:\n",
    "        break\n",
    "    x = x - learning_rate * dx\n",
    "print(f\"循环总次数：{i+1}\")\n",
    "print(f\"x = {x}, current_val = {val}, dx = {dx}\")"
   ]
  },
  {
   "cell_type": "raw",
   "id": "20e69a29-f169-4ea4-9027-b7eec323805a",
   "metadata": {},
   "source": [
    "方法2：自动计算梯度表达式"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e5e5b185-c5d0-4f82-b5d1-dbfa7ba3c38e",
   "metadata": {},
   "outputs": [],
   "source": [
    "import sympy as sp\n",
    "import numpy as np\n",
    "# Symbolic variables\n",
    "x, y, z = sp.symbols('x y z')\n",
    "\n",
    "# Symbolic objective\n",
    "func = sp.exp(-x**2 - y**2 - z**2) + sp.sin(x) + sp.sin(y) + sp.sin(z) + x*y + y*z + z*x\n",
    "\n",
    "# Symbolic gradient via automatic differentiation\n",
    "gradient = [sp.diff(func, var) for var in (x, y, z)]\n",
    "print(\"梯度表达式：\", gradient)\n",
    "\n",
    "# Compile the symbolic expressions into fast numpy callables\n",
    "gradient_func = sp.lambdify((x, y, z), gradient, 'numpy')\n",
    "f_func = sp.lambdify((x, y, z), func, 'numpy')\n",
    "\n",
    "def gradient_descent(func, variables, initial_point, learning_rate=0.1, tolerance=1e-7, max_iterations=1000, grad_func=None):\n",
    "    \"\"\"Gradient descent driven by a numeric gradient callable.\n",
    "\n",
    "    func and variables are retained for signature compatibility but are\n",
    "    not used; grad_func (new, optional) supplies the gradient and\n",
    "    defaults to the lambdified gradient_func built above.  The original\n",
    "    silently ignored its arguments and read the global directly.\n",
    "    Returns the final point as a float ndarray of length 3.\n",
    "    \"\"\"\n",
    "    if grad_func is None:\n",
    "        grad_func = gradient_func\n",
    "    current_point = np.array(initial_point, dtype=float)\n",
    "\n",
    "    for i in range(max_iterations):\n",
    "        current_gradient = np.array(grad_func(*current_point))\n",
    "        # test convergence before stepping so the returned point matches\n",
    "        # the gradient that triggered the stop\n",
    "        if np.linalg.norm(current_gradient) < tolerance:\n",
    "            print(f\"循环次数：{i+1}\")\n",
    "            break\n",
    "        current_point = current_point - learning_rate * current_gradient\n",
    "\n",
    "    return current_point\n",
    "\n",
    "# Starting point\n",
    "initial_point = [1.0, 1.0, 1.0]\n",
    "\n",
    "# Run gradient descent.  Pass f_func (defined in THIS cell); the\n",
    "# original passed `f`, which only exists if an earlier cell was run\n",
    "# (hidden-state dependency).\n",
    "result = gradient_descent(f_func, (x, y, z), initial_point)\n",
    "\n",
    "print(\"找到的极值点：\", result)\n",
    "\n",
    "# Gradient and function value at the returned point\n",
    "final_gradient = gradient_func(*result)\n",
    "final_function_value = f_func(*result)\n",
    "\n",
    "print(\"极值点处的梯度：\", final_gradient)\n",
    "print(\"极值点处的函数值：\", final_function_value)"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "144f35fd-06d0-4bd5-b7fd-a32703249ed6",
   "metadata": {},
   "source": [
    "$$\n",
    "f(x, y, z) =x^2 + y^2 + z^2 + 3\\cos(x) + 3\\cos(y) + 3\\cos(z) + e^{-x^2 - y^2 - z^2} + 0.1(xy + yz + zx)\n",
    "$$"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0254a8d9-f960-4212-bcf9-f09dbaca42bd",
   "metadata": {},
   "outputs": [],
   "source": [
    "def fun(x, y, z):\n",
    "    \"\"\"Objective: x^2+y^2+z^2 + 3(cos x+cos y+cos z) + exp(-r^2) + 0.1(xy+yz+zx).\"\"\"\n",
    "    quadratic = x**2 + y**2 + z**2\n",
    "    cosines = 3 * (np.cos(x) + np.cos(y) + np.cos(z))\n",
    "    bump = np.exp(-x**2 - y**2 - z**2)\n",
    "    coupling = 0.1 * (x*y + y*z + z*x)\n",
    "    return quadratic + cosines + bump + coupling\n",
    "\n",
    "def fun_gradient(x, y, z):\n",
    "    \"\"\"Analytic gradient of `fun` as np.array([df/dx, df/dy, df/dz]).\"\"\"\n",
    "    exp_term = np.exp(-x**2 - y**2 - z**2)  # shared exponential, computed once\n",
    "    return np.array([\n",
    "        2*x - 3*np.sin(x) - 2*x*exp_term + 0.1*(y + z),\n",
    "        2*y - 3*np.sin(y) - 2*y*exp_term + 0.1*(x + z),\n",
    "        2*z - 3*np.sin(z) - 2*z*exp_term + 0.1*(x + y),\n",
    "    ])\n",
    "\n",
    "# Starting point and stopping criteria\n",
    "x, y, z = -5, -5, -5\n",
    "tolerance = 1e-7\n",
    "max_iterations = 3000\n",
    "\n",
    "history = []  # (iteration, x, y, z, f, |grad|) snapshot every 100 iterations\n",
    "for i in range(max_iterations):\n",
    "    g = fun_gradient(x, y, z)\n",
    "    g_norm = np.linalg.norm(g)\n",
    "\n",
    "    if i % 100 == 0:\n",
    "        f_now = fun(x, y, z)\n",
    "        history.append((i, x, y, z, f_now, g_norm))\n",
    "        print(f\"Iter {i:5d}: f={f_now:.6f}, |grad|={g_norm:.6f}\")\n",
    "\n",
    "    # convergence test precedes the parameter update\n",
    "    if g_norm < tolerance:\n",
    "        print(f\"在第 {i} 次迭代收敛\")\n",
    "        break\n",
    "\n",
    "    # step size decays by 5% every 100 iterations\n",
    "    learning_rate = 0.01 * (0.95 ** (i // 100))\n",
    "    x -= learning_rate * g[0]\n",
    "    y -= learning_rate * g[1]\n",
    "    z -= learning_rate * g[2]\n",
    "else:\n",
    "    print(\"达到最大迭代次数未收敛\")\n",
    "\n",
    "# Recompute the gradient at the final point for an accurate report\n",
    "final_grad = fun_gradient(x, y, z)\n",
    "final_grad_norm = np.linalg.norm(final_grad)\n",
    "\n",
    "print(\"\\n=== 优化结果 ===\")\n",
    "print(f\"最小值点: x = {x:.8f}, y = {y:.8f}, z = {z:.8f}\")\n",
    "print(f\"最终梯度模长: {final_grad_norm:.8e}\")\n",
    "print(f\"函数值: {fun(x, y, z):.8f}\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "f4ed7662-07e2-470b-a34b-cfb44de83aee",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
