{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "provenance": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "cells": [
    {
      "cell_type": "code",
      "execution_count": 1,
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "aUP269oZ8734",
        "outputId": "8279bb02-c4ce-4969-88f7-b51c62ce349c"
      },
      "outputs": [
        {
          "output_type": "stream",
          "name": "stdout",
          "text": [
            "===== 迭代前后的函数关系变化 =====\n",
            "1. 函数结构（不变）：\n",
            "   输出y_hat = sigmoid(W3·sigmoid(W2·sigmoid(W1·x + b1) + b2) + b3)\n",
            "   （多层sigmoid嵌套结构不变，仅参数W/b更新）\n",
            "\n",
            "2. 关键参数变化（部分）：\n",
            "   W3: 从 [[1.1 1.2]] → [[1.0995 1.1995]]\n",
            "   b3: 从 [[1.3]] → [[1.2994]]\n",
            "   W2[0][0]: 从 0.5 → 0.4999\n",
            "   b1[0][0]: 从 0.3 → 0.3\n",
            "\n",
            "3. 各层计算结果变化（输入x=0.5）：\n",
            "   隐藏层1预激活z1: 从 [[0.35]\n",
            " [0.5 ]] → [[0.35]\n",
            " [0.5 ]]\n",
            "   隐藏层1输出a1: 从 [[0.5866]\n",
            " [0.6225]] → [[0.5866]\n",
            " [0.6225]]\n",
            "   隐藏层2预激活z2: 从 [[1.5668]\n",
            " [1.9086]] → [[1.5666]\n",
            " [1.9085]]\n",
            "   隐藏层2输出a2: 从 [[0.8273]\n",
            " [0.8709]] → [[0.8273]\n",
            " [0.8708]]\n",
            "   输出层预激活z3: 从 [[3.2551]] → [[3.2536]]\n",
            "   预测值y_hat: 从 [[0.9629]] → [[0.9628]]（更接近真实标签0.8）\n",
            "   损失L: 从 [[0.013261]] → [[0.013252]]（损失下降）\n",
            "\n",
            "4. 函数关系变化本质：\n",
            "   输入x到输出y_hat的非线性映射被微调，参数向“减少损失”的方向更新，\n",
            "   对于相同输入x，输出更接近真实标签y，映射关系更贴合数据规律。\n"
          ]
        }
      ],
      "source": [
        "import numpy as np\n",
        "\n",
        "# 1. 激活函数sigmoid及其导数\n",
        "def sigmoid(x):\n",
        "    return 1 / (1 + np.exp(-x))\n",
        "\n",
        "def sigmoid_derivative(x):\n",
        "    \"\"\"Derivative of the sigmoid at x: sigmoid(x) * (1 - sigmoid(x)).\"\"\"\n",
        "    activated = sigmoid(x)\n",
        "    return activated * (1.0 - activated)\n",
        "\n",
        "\n",
        "# 2. Initialize parameters (same values as the worked example)\n",
        "W1 = np.array([[0.1], [0.2]])  # hidden layer 1 weights (2x1)\n",
        "b1 = np.array([[0.3], [0.4]])  # hidden layer 1 biases (2x1)\n",
        "W2 = np.array([[0.5, 0.6], [0.7, 0.8]])  # hidden layer 2 weights (2x2)\n",
        "b2 = np.array([[0.9], [1.0]])  # hidden layer 2 biases (2x1)\n",
        "W3 = np.array([[1.1, 1.2]])  # output layer weights (1x2)\n",
        "b3 = np.array([[1.3]])  # output layer bias (1x1)\n",
        "\n",
        "x = np.array([[0.5]])  # input sample\n",
        "y = np.array([[0.8]])  # ground-truth label\n",
        "learning_rate = 0.1  # gradient-descent step size\n",
        "\n",
        "\n",
        "# 3. Forward pass (returns intermediates so before/after iterations can be compared)\n",
        "def forward_propagation(x, W1, b1, W2, b2, W3, b3, y_true=None):\n",
        "    \"\"\"Run the 3-layer network forward and compute the squared-error loss.\n",
        "\n",
        "    y_true: optional ground-truth label; defaults to the module-level `y`\n",
        "    for backward compatibility (previously this function silently read the\n",
        "    global `y`, a hidden dependency not visible in its signature).\n",
        "\n",
        "    Returns (z1, a1, z2, a2, z3, y_hat, loss): pre-activations z*,\n",
        "    activations a*, the prediction y_hat, and loss = 0.5*(y_hat - target)^2.\n",
        "    \"\"\"\n",
        "    target = y if y_true is None else y_true\n",
        "    z1 = np.dot(W1, x) + b1   # hidden layer 1 pre-activation\n",
        "    a1 = sigmoid(z1)\n",
        "    z2 = np.dot(W2, a1) + b2  # hidden layer 2 pre-activation\n",
        "    a2 = sigmoid(z2)\n",
        "    z3 = np.dot(W3, a2) + b3  # output layer pre-activation\n",
        "    y_hat = sigmoid(z3)\n",
        "    loss = 0.5 * np.square(y_hat - target)\n",
        "    return z1, a1, z2, a2, z3, y_hat, loss\n",
        "\n",
        "\n",
        "# 4. Backward pass: backpropagate the error through all three layers\n",
        "def backward_propagation(x, y, z1, a1, z2, a2, z3, y_hat, W2, W3):\n",
        "    \"\"\"Compute gradients of the loss w.r.t. every weight and bias.\n",
        "\n",
        "    Returns (dW1, db1, dW2, db2, dW3, db3), ordered to match the\n",
        "    parameter order expected by update_parameters.\n",
        "    \"\"\"\n",
        "    # Output layer: dL/dz3 = (y_hat - y) * sigma'(z3)\n",
        "    grad_z3 = (y_hat - y) * sigmoid_derivative(z3)\n",
        "    grad_W3, grad_b3 = grad_z3 @ a2.T, grad_z3\n",
        "\n",
        "    # Hidden layer 2: propagate the error back through W3\n",
        "    grad_z2 = (W3.T @ grad_z3) * sigmoid_derivative(z2)\n",
        "    grad_W2, grad_b2 = grad_z2 @ a1.T, grad_z2\n",
        "\n",
        "    # Hidden layer 1: propagate the error back through W2\n",
        "    grad_z1 = (W2.T @ grad_z2) * sigmoid_derivative(z1)\n",
        "    grad_W1, grad_b1 = grad_z1 @ x.T, grad_z1\n",
        "\n",
        "    return grad_W1, grad_b1, grad_W2, grad_b2, grad_W3, grad_b3\n",
        "\n",
        "\n",
        "# 5. 参数更新\n",
        "def update_parameters(W1, b1, W2, b2, W3, b3, dW1, db1, dW2, db2, dW3, db3, lr):\n",
        "    return (\n",
        "        W1 - lr*dW1, b1 - lr*db1,\n",
        "        W2 - lr*dW2, b2 - lr*db2,\n",
        "        W3 - lr*dW3, b3 - lr*db3\n",
        "    )\n",
        "\n",
        "\n",
        "# 6. Main script: run one training iteration and compare the input-to-output mapping\n",
        "if __name__ == \"__main__\":\n",
        "    # Forward pass BEFORE the update (keep intermediates for comparison)\n",
        "    z1_old, a1_old, z2_old, a2_old, z3_old, y_hat_old, loss_old = forward_propagation(\n",
        "        x, W1, b1, W2, b2, W3, b3\n",
        "    )\n",
        "\n",
        "    # Backward pass and one gradient-descent parameter update\n",
        "    dW1, db1, dW2, db2, dW3, db3 = backward_propagation(\n",
        "        x, y, z1_old, a1_old, z2_old, a2_old, z3_old, y_hat_old, W2, W3\n",
        "    )\n",
        "    W1_new, b1_new, W2_new, b2_new, W3_new, b3_new = update_parameters(\n",
        "        W1, b1, W2, b2, W3, b3, dW1, db1, dW2, db2, dW3, db3, learning_rate\n",
        "    )\n",
        "\n",
        "    # Forward pass AFTER the update (fresh intermediates with the new parameters)\n",
        "    z1_new, a1_new, z2_new, a2_new, z3_new, y_hat_new, loss_new = forward_propagation(\n",
        "        x, W1_new, b1_new, W2_new, b2_new, W3_new, b3_new\n",
        "    )\n",
        "\n",
        "    # ==============================================\n",
        "    # Print the before/after comparison of the learned mapping\n",
        "    # (output text is in Chinese; headings: structure, parameter changes,\n",
        "    #  per-layer value changes, and the qualitative interpretation)\n",
        "    # ==============================================\n",
        "    print(\"===== 迭代前后的函数关系变化 =====\")\n",
        "    print(\"1. 函数结构（不变）：\")\n",
        "    print(\"   输出y_hat = sigmoid(W3·sigmoid(W2·sigmoid(W1·x + b1) + b2) + b3)\")\n",
        "    print(\"   （多层sigmoid嵌套结构不变，仅参数W/b更新）\\n\")\n",
        "\n",
        "    print(\"2. 关键参数变化（部分）：\")\n",
        "    print(f\"   W3: 从 {W3.round(4)} → {W3_new.round(4)}\")\n",
        "    print(f\"   b3: 从 {b3.round(4)} → {b3_new.round(4)}\")\n",
        "    print(f\"   W2[0][0]: 从 {W2[0][0].round(4)} → {W2_new[0][0].round(4)}\")\n",
        "    print(f\"   b1[0][0]: 从 {b1[0][0].round(4)} → {b1_new[0][0].round(4)}\\n\")\n",
        "\n",
        "    print(\"3. 各层计算结果变化（输入x=0.5）：\")\n",
        "    print(f\"   隐藏层1预激活z1: 从 {z1_old.round(4)} → {z1_new.round(4)}\")\n",
        "    print(f\"   隐藏层1输出a1: 从 {a1_old.round(4)} → {a1_new.round(4)}\")\n",
        "    print(f\"   隐藏层2预激活z2: 从 {z2_old.round(4)} → {z2_new.round(4)}\")\n",
        "    print(f\"   隐藏层2输出a2: 从 {a2_old.round(4)} → {a2_new.round(4)}\")\n",
        "    print(f\"   输出层预激活z3: 从 {z3_old.round(4)} → {z3_new.round(4)}\")\n",
        "    print(f\"   预测值y_hat: 从 {y_hat_old.round(4)} → {y_hat_new.round(4)}（更接近真实标签0.8）\")\n",
        "    print(f\"   损失L: 从 {loss_old.round(6)} → {loss_new.round(6)}（损失下降）\\n\")\n",
        "\n",
        "    print(\"4. 函数关系变化本质：\")\n",
        "    print(\"   输入x到输出y_hat的非线性映射被微调，参数向“减少损失”的方向更新，\")\n",
        "    print(\"   对于相同输入x，输出更接近真实标签y，映射关系更贴合数据规律。\")"
      ]
    }
  ]
}