{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "from graphviz import Digraph\n",
    "import math\n",
    "import torch"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# -*- coding: UTF-8 -*-\n",
    "'''\n",
    "此脚本用于定义Scalar类，以及相应的可视化工具\n",
    "'''\n",
    "\n",
    "\n",
    "from graphviz import Digraph\n",
    "import math\n",
    "\n",
    "\n",
    "class Scalar:\n",
    "    \n",
    "    def __init__(self, value, prevs=[], op=None, label='', requires_grad=True):\n",
    "        # 节点的值\n",
    "        self.value = value\n",
    "        # 节点的标识（label）和对应的运算（op），用于作图\n",
    "        self.label = label\n",
    "        self.op = op\n",
    "        # 节点的前节点，即当前节点是运算的结果，而前节点是参与运算的量\n",
    "        self.prevs = prevs\n",
    "        # 是否需要计算该节点偏导数，即∂loss/∂self（loss表示最后的模型损失）\n",
    "        self.requires_grad = requires_grad\n",
    "        # 该节点偏导数，即∂loss/∂self\n",
    "        self.grad = 0.0\n",
    "        # 如果该节点的prevs非空，存储所有的∂self/∂prev\n",
    "        self.grad_wrt = dict()\n",
    "        # 作图需要，实际上对计算没有作用\n",
    "        self.back_prop = dict()\n",
    "        \n",
    "    def __repr__(self):\n",
    "        return f'Scalar(value={self.value:.2f}, grad={self.grad:.2f})'\n",
    "    \n",
    "    def __add__(self, other):\n",
    "        '''\n",
    "        定义加法，self + other将触发该函数\n",
    "        '''\n",
    "        if not isinstance(other, Scalar):\n",
    "            other = Scalar(other, requires_grad=False)\n",
    "        # output = self + other\n",
    "        output = Scalar(self.value + other.value, [self, other], '+')\n",
    "        output.requires_grad = self.requires_grad or other.requires_grad\n",
    "        # 计算偏导数 ∂output/∂self = 1\n",
    "        output.grad_wrt[self] = 1\n",
    "        # 计算偏导数 ∂output/∂other = 1\n",
    "        output.grad_wrt[other] = 1\n",
    "        return output\n",
    "    \n",
    "    def __sub__(self, other):\n",
    "        '''\n",
    "        定义减法，self - other将触发该函数\n",
    "        '''\n",
    "        if not isinstance(other, Scalar):\n",
    "            other = Scalar(other, requires_grad=False)\n",
    "        # output = self - other\n",
    "        output = Scalar(self.value - other.value, [self, other], '-')\n",
    "        output.requires_grad = self.requires_grad or other.requires_grad\n",
    "        # 计算偏导数 ∂output/∂self = 1\n",
    "        output.grad_wrt[self] = 1\n",
    "        # 计算偏导数 ∂output/∂other = -1\n",
    "        output.grad_wrt[other] = -1\n",
    "        return output\n",
    "    \n",
    "    def __mul__(self, other):\n",
    "        '''\n",
    "        定义乘法，self * other将触发该函数\n",
    "        '''\n",
    "        if not isinstance(other, Scalar):\n",
    "            other = Scalar(other, requires_grad=False)\n",
    "        # output = self * other\n",
    "        output = Scalar(self.value * other.value, [self, other], '*')\n",
    "        output.requires_grad = self.requires_grad or other.requires_grad\n",
    "        # 计算偏导数 ∂output/∂self = other\n",
    "        output.grad_wrt[self] = other.value\n",
    "        # 计算偏导数 ∂output/∂other = self\n",
    "        output.grad_wrt[other] = self.value\n",
    "        return output\n",
    "    \n",
    "    def __pow__(self, other):\n",
    "        '''\n",
    "        定义乘方，self**other将触发该函数\n",
    "        '''\n",
    "        assert isinstance(other, (int, float))\n",
    "        # output = self ** other\n",
    "        output = Scalar(self.value ** other, [self], f'^{other}')\n",
    "        output.requires_grad = self.requires_grad\n",
    "        # 计算偏导数 ∂output/∂self = other * self**(other-1)\n",
    "        output.grad_wrt[self] = other * self.value**(other - 1)\n",
    "        return output\n",
    "    \n",
    "    def sigmoid(self):\n",
    "        '''\n",
    "        定义sigmoid\n",
    "        '''\n",
    "        s = 1 / (1 + math.exp(-1 * self.value))\n",
    "        output = Scalar(s, [self], 'sigmoid')\n",
    "        output.requires_grad = self.requires_grad\n",
    "        # 计算偏导数 ∂output/∂self = output * (1 - output)\n",
    "        output.grad_wrt[self] = s * (1 - s)\n",
    "        return output\n",
    "    \n",
    "    def __rsub__(self, other):\n",
    "        '''\n",
    "        定义右减法，other - self将触发该函数\n",
    "        '''\n",
    "        if not isinstance(other, Scalar):\n",
    "            other = Scalar(other, requires_grad=False)\n",
    "        output = Scalar(other.value - self.value, [self, other], '-')\n",
    "        output.requires_grad = self.requires_grad or other.requires_grad\n",
    "        # 计算偏导数 ∂output/∂self = -1\n",
    "        output.grad_wrt[self] = -1\n",
    "        # 计算偏导数 ∂output/∂other = 1\n",
    "        output.grad_wrt[other] = 1\n",
    "        return output\n",
    "    \n",
    "    def __radd__(self, other):\n",
    "        '''\n",
    "        定义右加法，other + self将触发该函数\n",
    "        '''\n",
    "        return self.__add__(other)\n",
    "    \n",
    "    def __rmul__(self, other):\n",
    "        '''\n",
    "        定义右乘法，other * self将触发该函数\n",
    "        '''\n",
    "        return self * other\n",
    "    \n",
    "    def backward(self, fn=None):\n",
    "        '''\n",
    "        由当前节点出发，求解以当前节点为顶点的计算图中每个节点的偏导数，i.e. ∂self/∂node\n",
    "        参数\n",
    "        ----\n",
    "        fn ：画图函数，如果该变量不等于None，则会返回向后传播每一步的计算的记录\n",
    "        返回\n",
    "        ----\n",
    "        re ：向后传播每一步的计算的记录\n",
    "        '''\n",
    "        def _topological_order():\n",
    "            '''\n",
    "            利用深度优先算法，返回计算图的拓扑排序（topological sorting）\n",
    "            '''\n",
    "            def _add_prevs(node):\n",
    "                if node not in visited:\n",
    "                    visited.add(node)\n",
    "                    for prev in node.prevs:\n",
    "                        _add_prevs(prev)\n",
    "                    ordered.append(node)\n",
    "            ordered, visited = [], set()\n",
    "            _add_prevs(self)\n",
    "            return ordered\n",
    "\n",
    "        def _compute_grad_of_prevs(node):\n",
    "            '''\n",
    "            由node节点出发，向后传播\n",
    "            '''\n",
    "            # 作图需要，实际上对计算没有作用\n",
    "            node.back_prop = dict()\n",
    "            # 得到当前节点在计算图中的梯度。由于一个节点可以在多个计算图中出现，\n",
    "            # 使用cg_grad记录当前计算图的梯度\n",
    "            dnode = cg_grad[node]\n",
    "            # 使用node.grad记录节点的累积梯度\n",
    "            node.grad += dnode\n",
    "            for prev in node.prevs:\n",
    "                # 由于node节点的偏导数已经计算完成，可以向后扩散（反向传播）\n",
    "                # 需要注意的是，向后扩散到上游节点是累加关系\n",
    "                grad_spread = dnode * node.grad_wrt[prev]\n",
    "                cg_grad[prev] = cg_grad.get(prev, 0.0) + grad_spread\n",
    "                node.back_prop[prev] = node.back_prop.get(prev, 0.0) + grad_spread\n",
    "        \n",
    "        # 当前节点的偏导数等于1，因为∂self/∂self = 1。这是反向传播算法的起点\n",
    "        cg_grad = {self: 1}\n",
    "        # 为了计算每个节点的偏导数，需要使用拓扑排序的倒序来遍历计算图\n",
    "        ordered = reversed(_topological_order())\n",
    "        re = []\n",
    "        for node in ordered:\n",
    "            _compute_grad_of_prevs(node)\n",
    "            # 作图需要，实际上对计算没有作用\n",
    "            if fn is not None:\n",
    "                re.append(fn(self, 'backward'))\n",
    "        return re\n",
    "\n",
    "\n",
    "def _get_node_attr(node, direction='forward'):\n",
    "    '''\n",
    "    Build the graphviz attribute dict for a node, depending on its type\n",
    "    and on whether the forward or the backward pass is being drawn.\n",
    "    '''\n",
    "    kind = _get_node_type(node)\n",
    "    # Shared font setting\n",
    "    attrs = {'fontname': 'Menlo'}\n",
    "    def _forward_attr():\n",
    "        if kind == 'param':\n",
    "            text = f'{{ grad=None | value={node.value:.2f} | {node.label}}}'\n",
    "            attrs.update(\n",
    "                dict(label=text, shape='record', fontsize='10', fillcolor='lightgreen', style='filled, bold'))\n",
    "            return attrs\n",
    "        if kind == 'computation':\n",
    "            text = f'{{ grad=None | value={node.value:.2f} | {node.op}}}'\n",
    "            attrs.update(\n",
    "                dict(label=text, shape='record', fontsize='10', fillcolor='gray94', style='filled, rounded'))\n",
    "            return attrs\n",
    "        if kind == 'input':\n",
    "            text = f'input={node.value:.2f}' if node.label == '' else f'{node.label}={node.value:.2f}'\n",
    "            attrs.update(dict(label=text, shape='oval', fontsize='10'))\n",
    "            return attrs\n",
    "\n",
    "    def _backward_attr():\n",
    "        attr = _forward_attr()\n",
    "        attr['label'] = attr['label'].replace('grad=None', f'grad={node.grad:.2f}')\n",
    "        if not node.requires_grad:\n",
    "            attr['style'] = 'dashed'\n",
    "        # Cosmetic only: draw the node dashed when every gradient it spreads\n",
    "        # backwards is zero or goes to nodes that do not require gradients\n",
    "        spread = [v if k.requires_grad else 0 for (k, v) in node.back_prop.items()]\n",
    "        if len(spread) > 0 and sum(spread) == 0:\n",
    "            attr['style'] = 'dashed'\n",
    "        return attr\n",
    "\n",
    "    return _forward_attr() if direction == 'forward' else _backward_attr()\n",
    "    \n",
    "    \n",
    "def _get_node_type(node):\n",
    "    '''\n",
    "    决定节点的类型，计算节点、参数以及输入数据\n",
    "    '''\n",
    "    if node.op is not None:\n",
    "        return 'computation'\n",
    "    if node.requires_grad:\n",
    "        return 'param'\n",
    "    return 'input'\n",
    "\n",
    "\n",
    "def _trace(root):\n",
    "    '''\n",
    "    遍历图中的所有点和边\n",
    "    '''\n",
    "    nodes, edges = set(), set()\n",
    "    def _build(v):\n",
    "        if v not in nodes:\n",
    "            nodes.add(v)\n",
    "            for prev in v.prevs:\n",
    "                edges.add((prev, v))\n",
    "                _build(prev)\n",
    "    _build(root)\n",
    "    return nodes, edges\n",
    "\n",
    "\n",
    "def _draw_node(graph, node, direction='forward'):\n",
    "    '''\n",
    "    Render a single node into the graphviz graph.\n",
    "    '''\n",
    "    # id(node) keeps identifiers unique; the direction suffix separates\n",
    "    # the forward and backward renderings of the same node\n",
    "    uid = str(id(node)) + direction\n",
    "    graph.node(name=uid, **_get_node_attr(node, direction))\n",
    "\n",
    "\n",
    "def _draw_edge(graph, n1, n2, direction='forward'):\n",
    "    '''\n",
    "    画边\n",
    "    '''\n",
    "    uid1 = str(id(n1)) + direction\n",
    "    uid2 = str(id(n2)) + direction\n",
    "    def _draw_back_edge():\n",
    "        if n1.requires_grad and n2.requires_grad:\n",
    "            grad = n2.back_prop.get(n1, None)\n",
    "            if grad is None:\n",
    "                graph.edge(uid2, uid1, arrowhead='none', color='deepskyblue')   \n",
    "            elif grad == 0:\n",
    "                graph.edge(uid2, uid1, style='dashed', label=f'{grad:.2f}', color='deepskyblue', fontname='Menlo')\n",
    "            else:\n",
    "                graph.edge(uid2, uid1, label=f'{grad:.2f}', color='deepskyblue', fontname='Menlo')\n",
    "        else:\n",
    "            graph.edge(uid2, uid1, style='dashed', arrowhead='none', color='deepskyblue')\n",
    "\n",
    "    if direction == 'forward':\n",
    "        graph.edge(uid1, uid2)\n",
    "    elif direction == 'backward':\n",
    "        _draw_back_edge()\n",
    "    else:\n",
    "        _draw_back_edge()\n",
    "        graph.edge(uid1, uid2)\n",
    "\n",
    "\n",
    "def draw_graph(root, direction='forward'):\n",
    "    '''\n",
    "    Visualize the computation graph whose top node is root.\n",
    "    Parameters\n",
    "    ----\n",
    "    root : Scalar, top node of the computation graph\n",
    "    direction : str, forward pass ('forward') or backward pass ('backward')\n",
    "    Returns\n",
    "    ----\n",
    "    graph : Digraph, the rendered computation graph\n",
    "    '''\n",
    "    nodes, edges = _trace(root)\n",
    "    # Bottom-to-top layout for the forward pass, top-to-bottom otherwise\n",
    "    rankdir = 'BT' if direction == 'forward' else 'TB'\n",
    "    graph = Digraph(format='svg', graph_attr={'rankdir': rankdir})\n",
    "    for node in nodes:\n",
    "        _draw_node(graph, node, direction)\n",
    "    for tail, head in edges:\n",
    "        _draw_edge(graph, tail, head, direction)\n",
    "    return graph"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 用于定义线性模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "def mse(errors):\n",
    "    '''\n",
    "    Mean squared error of a list of Scalar errors, returned as a single\n",
    "    Scalar node wired into the computation graph.\n",
    "    '''\n",
    "    n = len(errors)\n",
    "    wrt = {}\n",
    "    total = 0.0\n",
    "    needs_grad = False\n",
    "    for err in errors:\n",
    "        total += err.value ** 2 / n\n",
    "        # d(mse)/d(err) = 2 * err / n\n",
    "        wrt[err] = 2 / n * err.value\n",
    "        needs_grad = needs_grad or err.requires_grad\n",
    "    output = Scalar(total, errors, 'mse')\n",
    "    output.requires_grad = needs_grad\n",
    "    output.grad_wrt = wrt\n",
    "    return output\n",
    "\n",
    "\n",
    "class Linear:\n",
    "    \n",
    "    def __init__(self):\n",
    "        '''\n",
    "        Define the parameters of the linear regression model: a, b\n",
    "        '''\n",
    "        self.a = Scalar(0.0, label='a')\n",
    "        self.b = Scalar(0.0, label='b')\n",
    "\n",
    "    def forward(self, x):\n",
    "        '''\n",
    "        Predict the output for x using the current parameter estimates\n",
    "        '''\n",
    "        return self.a * x + self.b\n",
    "    \n",
    "    def error(self, x, y):\n",
    "        '''\n",
    "        Model error on a single observation (x, y)\n",
    "        '''\n",
    "        return y - self.forward(x)\n",
    "\n",
    "    def string(self):\n",
    "        '''\n",
    "        Human-readable description of the current fitted model\n",
    "        '''\n",
    "        return f'y = {self.a.value:.2f} * x + {self.b.value:.2f}'"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "import matplotlib.pyplot as plt"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "x = torch.linspace(100,300,200)\n",
    "x = (x - x.mean()) / x.std()\n",
    "epsilon = torch.randn(x.shape)\n",
    "y = 10 * x + 5 + epsilon"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Visual sanity check of the synthetic data: a noisy line y ~ 10*x + 5\n",
    "plt.plot(x,y)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "model = Linear()\n",
    "\n",
    "# Mini-batch gradient descent hyper-parameters\n",
    "batch_size = 32\n",
    "lr = 0.1\n",
    "\n",
    "for t in range(20):\n",
    "    # Start index of the mini-batch; the modulo wraps around the data\n",
    "    ix = (t * batch_size) % len(x)\n",
    "    xx = x[ix:ix+batch_size]\n",
    "    yy = y[ix:ix+batch_size]\n",
    "    loss = mse([model.error(_x,_y) for _x,_y in zip(xx,yy)])\n",
    "    loss.backward()\n",
    "    # Update the parameter values in place. Using `model.a -= ...` would\n",
    "    # rebind model.a to a new computation node on every step, so the graph\n",
    "    # would keep growing (computation-graph inflation) and the parameter\n",
    "    # labels would be lost.\n",
    "    model.a.value -= lr * model.a.grad\n",
    "    model.b.value -= lr * model.b.grad\n",
    "    # Reset the accumulated gradients before the next batch\n",
    "    model.a.grad = 0\n",
    "    model.b.grad = 0\n",
    "\n",
    "    print(model.string())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": []
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 计算图膨胀"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "base",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
