{
 "metadata": {
  "orig_nbformat": 2,
  "kernelspec": {
   "name": "python3",
   "display_name": "Python 3",
   "language": "python"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2,
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Load PaddlePaddle and related libraries\n",
    "import paddle"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "True\nFalse\n"
     ]
    }
   ],
   "source": [
     "# The stop_gradient attribute of a Tensor\n",
     "a = paddle.to_tensor([1.0, 2.0, 3.0]) # gradients are not computed by default\n",
     "# enable gradient computation\n",
     "b = paddle.to_tensor([1.0, 2.0, 3.0], stop_gradient=False)\n",
     "print(a.stop_gradient) # True\n",
     "print(b.stop_gradient) # False"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "output_type": "stream",
     "name": "stdout",
     "text": [
      "Tensor x的梯度值： [10. 14. 18.]\nTensor y的梯度值： [2. 4. 6.]\nTensor z的梯度值： [1. 1. 1.]\n"
     ]
    }
   ],
   "source": [
    "x = paddle.to_tensor([1.0, 2.0, 3.0], stop_gradient=False)\n",
    "y = paddle.to_tensor([4.0, 5.0, 6.0], stop_gradient=False)\n",
     "# assume the loss function is z\n",
     "z = x ** 2 + 2 * x * y\n",
     "# call backward() on z to backpropagate and compute gradients;\n",
     "# the gradient values are stored in each Tensor's grad attribute\n",
    "z.backward()\n",
    "print('Tensor x的梯度值：', x.grad)\n",
    "print('Tensor y的梯度值：', y.grad)\n",
    "print('Tensor z的梯度值：', z.grad)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
     "# An example of using automatic differentiation with a model\n",
     "from paddle.vision.models import vgg11\n",
     "import paddle.nn.functional as F\n",
     "import numpy as np\n",
     "\n",
     "model = vgg11()\n",
     "x = paddle.rand([1, 3, 224, 224])\n",
     "label = paddle.randint(0, 1000)\n",
     "# forward pass\n",
     "predicts = model(x) # get the predictions\n",
     "# compute the loss: cross entropy\n",
     "loss = F.cross_entropy(predicts, label)\n",
     "# start backpropagation; computed gradients are stored in the grad attribute\n",
     "loss.backward()\n",
     "# pass all model parameters to configure the optimizer\n",
     "optim = paddle.optimizer.Adam(learning_rate=0.001, \n",
     "    parameters=model.parameters())\n",
     "# call step() to run the optimizer and update the model parameters\n",
     "optim.step()\n",
     "\n",
     "# print('loss.grad: ', loss.grad)\n",
     "# print('model.parameters: \\n', model.parameters())"
   ]
  }
 ]
}