{
 "metadata": {
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.3-final"
  },
  "orig_nbformat": 2,
  "kernelspec": {
   "name": "python383jvsc74a57bd08c207cf98bc84bffc44ccf9494458b90a53a5c17a03e11155797855f32f2c5f8",
   "display_name": "Python 3.8.3 64-bit"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2,
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor([[-0.2160, -0.1143, -0.0337,  0.1489, -0.4111,  0.1263,  0.1162,  0.0039,\n",
       "          0.1488, -0.1439],\n",
       "        [-0.0418, -0.1601, -0.0392,  0.1658, -0.3759,  0.0549,  0.1396,  0.0470,\n",
       "          0.2667, -0.1451]], grad_fn=<AddmmBackward>)"
      ]
     },
     "metadata": {},
     "execution_count": 1
    }
   ],
   "source": [
    "import torch \n",
    "from torch import nn \n",
    "from torch.nn import functional as F \n",
    "\n",
    "net = nn.Sequential(nn.Linear(20, 256), nn.ReLU(), nn.Linear(256, 10))\n",
    "\n",
    "X = torch.rand(2, 20)\n",
    "net(X)"
   ]
  },
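  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Quick illustrative check: nn.Sequential registers each layer under a string index,\n",
    "# so the block prints its structure and supports positional indexing.\n",
    "print(net)\n",
    "print(net[2])  # the second nn.Linear layer, mapping 256 -> 10"
   ]
  },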
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "class MLP(nn.Module):\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        self.hidden = nn.Linear(20, 256)\n",
    "        self.out = nn.Linear(256, 10)\n",
    "    \n",
    "    def forward(self, X):\n",
    "        return self.out(F.relu(self.hidden(X)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor([[ 0.2052, -0.0978, -0.1020, -0.1729,  0.1571,  0.0185, -0.2298, -0.2130,\n",
       "          0.0804,  0.0998],\n",
       "        [ 0.1945, -0.0903, -0.1311, -0.1092,  0.2217,  0.1485, -0.1845, -0.2118,\n",
       "          0.0992, -0.0764]], grad_fn=<AddmmBackward>)"
      ]
     },
     "metadata": {},
     "execution_count": 4
    }
   ],
   "source": [
    "net = MLP()\n",
    "net(X)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "class MySeqential(nn.Module):\n",
    "    def __init__(self, *args):\n",
    "        super().__init__()\n",
    "        for block in args:\n",
    "            self._modules[block] = block\n",
    "        \n",
    "    def forward(self, X):\n",
    "        for block in self._modules.values():\n",
    "            X = block(X)\n",
    "        return X "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor([[ 0.0077,  0.0162,  0.0025,  0.1602, -0.1094, -0.0490, -0.0182, -0.0246,\n",
       "         -0.0120,  0.1802],\n",
       "        [ 0.0675,  0.0551, -0.0742,  0.2513, -0.0506,  0.0222, -0.1162, -0.0975,\n",
       "          0.0189,  0.1680]], grad_fn=<AddmmBackward>)"
      ]
     },
     "metadata": {},
     "execution_count": 6
    }
   ],
   "source": [
    "net = MySeqential(nn.Linear(20, 256), nn.ReLU(), nn.Linear(256, 10))\n",
    "net(X)"
   ]
  },
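  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Quick illustrative check: because MySequential stores its blocks in self._modules,\n",
    "# their parameters are registered and visible to the parent module.\n",
    "for name, param in net.named_parameters():\n",
    "    print(name, param.shape)"
   ]
  },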
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "class FixedHiddenMLP(nn.Module):\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        self.rand_weight = torch.rand((20, 20), requires_grad=False)\n",
    "        self.linear = nn.Linear(20, 20)\n",
    "    \n",
    "    def forward(self, X):\n",
    "        X = self.linear(X)\n",
    "        X = F.relu(torch.mm(X, self.rand_weight) + 1)\n",
    "        X = self.linear(X)\n",
    "        while X.abs().sum() > 1:\n",
    "            X /= 2 \n",
    "        return X.sum()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor(-0.1400, grad_fn=<SumBackward0>)"
      ]
     },
     "metadata": {},
     "execution_count": 8
    }
   ],
   "source": [
    "net = FixedHiddenMLP()\n",
    "net(X)"
   ]
  },
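  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Quick illustrative check: rand_weight is a plain tensor rather than an nn.Parameter,\n",
    "# so only the shared linear layer contributes trainable parameters.\n",
    "print([name for name, _ in net.named_parameters()])"
   ]
  },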
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "output_type": "execute_result",
     "data": {
      "text/plain": [
       "tensor(-0.1024, grad_fn=<SumBackward0>)"
      ]
     },
     "metadata": {},
     "execution_count": 12
    }
   ],
   "source": [
    "class NestMLP(nn.Module):\n",
    "    def __init__(self):\n",
    "        super().__init__()\n",
    "        self.net = nn.Sequential(nn.Linear(20, 64), nn.ReLU(),\n",
    "                                 nn.Linear(64, 32), nn.ReLU())\n",
    "        self.linear = nn.Linear(32, 16)\n",
    "    \n",
    "    def forward(self, X):\n",
    "        return self.linear(self.net(X))\n",
    "\n",
    "chimera = nn.Sequential(NestMLP(), nn.Linear(16, 20), FixedHiddenMLP())\n",
    "chimera(X)"
   ]
  },
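  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Quick illustrative check: printing the composed block shows how Sequential,\n",
    "# NestMLP, and FixedHiddenMLP nest inside one another.\n",
    "print(chimera)"
   ]
  },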
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ]
}