{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#|default_exp models.TCN"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# TCN"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "This is an unofficial PyTorch implementation by Ignacio Oguiza (oguiza@timeseriesAI.co) based on:\n",
    "\n",
    "* Bai, S., Kolter, J. Z., & Koltun, V. (2018). <span style=\"color:dodgerblue\">**An empirical evaluation of generic convolutional and recurrent networks for sequence modeling**</span>. arXiv preprint arXiv:1803.01271.\n",
    "* Official TCN PyTorch implementation: https://github.com/locuslab/TCN"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#|export\n",
    "from torch.nn.utils import weight_norm\n",
    "from tsai.imports import *\n",
    "from tsai.utils import *\n",
    "from tsai.models.layers import *\n",
    "from tsai.models.utils import *"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#|export\n",
    "# This is an unofficial PyTorch implementation by Ignacio Oguiza - oguiza@timeseriesAI.co based on:\n",
    "\n",
    "# Bai, S., Kolter, J. Z., & Koltun, V. (2018). An empirical evaluation of generic convolutional and recurrent networks for sequence modeling. arXiv preprint arXiv:1803.01271.\n",
    "# Official TCN PyTorch implementation: https://github.com/locuslab/TCN\n",
    "\n",
    "\n",
    "class TemporalBlock(Module):\n",
    "    def __init__(self, ni, nf, ks, stride, dilation, padding, dropout=0.):\n",
    "        self.conv1 = weight_norm(nn.Conv1d(ni,nf,ks,stride=stride,padding=padding,dilation=dilation))\n",
    "        self.chomp1 = Chomp1d(padding)\n",
    "        self.relu1 = nn.ReLU()\n",
    "        self.dropout1 = nn.Dropout(dropout)\n",
    "        self.conv2 = weight_norm(nn.Conv1d(nf,nf,ks,stride=stride,padding=padding,dilation=dilation))\n",
    "        self.chomp2 = Chomp1d(padding)\n",
    "        self.relu2 = nn.ReLU()\n",
    "        self.dropout2 = nn.Dropout(dropout)\n",
    "        self.net = nn.Sequential(self.conv1, self.chomp1, self.relu1, self.dropout1, \n",
    "                                 self.conv2, self.chomp2, self.relu2, self.dropout2)\n",
    "        self.downsample = nn.Conv1d(ni,nf,1) if ni != nf else None\n",
    "        self.relu = nn.ReLU()\n",
    "        self.init_weights()\n",
    "\n",
    "    def init_weights(self):\n",
    "        self.conv1.weight.data.normal_(0, 0.01)\n",
    "        self.conv2.weight.data.normal_(0, 0.01)\n",
    "        if self.downsample is not None: self.downsample.weight.data.normal_(0, 0.01)\n",
    "\n",
    "    def forward(self, x):\n",
    "        out = self.net(x)\n",
    "        res = x if self.downsample is None else self.downsample(x)\n",
    "        return self.relu(out + res)\n",
    "\n",
    "def TemporalConvNet(c_in, layers, ks=2, dropout=0.):\n",
    "    temp_layers = []\n",
    "    for i in range(len(layers)):\n",
    "        dilation_size = 2 ** i\n",
    "        ni = c_in if i == 0 else layers[i-1]\n",
    "        nf = layers[i]\n",
    "        temp_layers += [TemporalBlock(ni, nf, ks, stride=1, dilation=dilation_size, padding=(ks-1) * dilation_size, dropout=dropout)]\n",
    "    return nn.Sequential(*temp_layers)\n",
    "\n",
    "class TCN(Module):\n",
    "    def __init__(self, c_in, c_out, layers=8*[25], ks=7, conv_dropout=0., fc_dropout=0.):\n",
    "        self.tcn = TemporalConvNet(c_in, layers, ks=ks, dropout=conv_dropout)\n",
    "        self.gap = GAP1d()\n",
    "        self.dropout = nn.Dropout(fc_dropout) if fc_dropout else None\n",
    "        self.linear = nn.Linear(layers[-1],c_out)\n",
    "        self.init_weights()\n",
    "\n",
    "    def init_weights(self):\n",
    "        self.linear.weight.data.normal_(0, 0.01)\n",
    "\n",
    "    def forward(self, x):\n",
    "        x = self.tcn(x)\n",
    "        x = self.gap(x)\n",
    "        if self.dropout is not None: x = self.dropout(x)\n",
    "        return self.linear(x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "TCN(\n",
       "  (tcn): Sequential(\n",
       "    (0): TemporalBlock(\n",
       "      (conv1): Conv1d(3, 25, kernel_size=(7,), stride=(1,), padding=(6,))\n",
       "      (chomp1): Chomp1d()\n",
       "      (relu1): ReLU()\n",
       "      (dropout1): Dropout(p=0.0, inplace=False)\n",
       "      (conv2): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(6,))\n",
       "      (chomp2): Chomp1d()\n",
       "      (relu2): ReLU()\n",
       "      (dropout2): Dropout(p=0.0, inplace=False)\n",
       "      (net): Sequential(\n",
       "        (0): Conv1d(3, 25, kernel_size=(7,), stride=(1,), padding=(6,))\n",
       "        (1): Chomp1d()\n",
       "        (2): ReLU()\n",
       "        (3): Dropout(p=0.0, inplace=False)\n",
       "        (4): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(6,))\n",
       "        (5): Chomp1d()\n",
       "        (6): ReLU()\n",
       "        (7): Dropout(p=0.0, inplace=False)\n",
       "      )\n",
       "      (downsample): Conv1d(3, 25, kernel_size=(1,), stride=(1,))\n",
       "      (relu): ReLU()\n",
       "    )\n",
       "    (1): TemporalBlock(\n",
       "      (conv1): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(12,), dilation=(2,))\n",
       "      (chomp1): Chomp1d()\n",
       "      (relu1): ReLU()\n",
       "      (dropout1): Dropout(p=0.0, inplace=False)\n",
       "      (conv2): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(12,), dilation=(2,))\n",
       "      (chomp2): Chomp1d()\n",
       "      (relu2): ReLU()\n",
       "      (dropout2): Dropout(p=0.0, inplace=False)\n",
       "      (net): Sequential(\n",
       "        (0): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(12,), dilation=(2,))\n",
       "        (1): Chomp1d()\n",
       "        (2): ReLU()\n",
       "        (3): Dropout(p=0.0, inplace=False)\n",
       "        (4): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(12,), dilation=(2,))\n",
       "        (5): Chomp1d()\n",
       "        (6): ReLU()\n",
       "        (7): Dropout(p=0.0, inplace=False)\n",
       "      )\n",
       "      (relu): ReLU()\n",
       "    )\n",
       "    (2): TemporalBlock(\n",
       "      (conv1): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(24,), dilation=(4,))\n",
       "      (chomp1): Chomp1d()\n",
       "      (relu1): ReLU()\n",
       "      (dropout1): Dropout(p=0.0, inplace=False)\n",
       "      (conv2): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(24,), dilation=(4,))\n",
       "      (chomp2): Chomp1d()\n",
       "      (relu2): ReLU()\n",
       "      (dropout2): Dropout(p=0.0, inplace=False)\n",
       "      (net): Sequential(\n",
       "        (0): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(24,), dilation=(4,))\n",
       "        (1): Chomp1d()\n",
       "        (2): ReLU()\n",
       "        (3): Dropout(p=0.0, inplace=False)\n",
       "        (4): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(24,), dilation=(4,))\n",
       "        (5): Chomp1d()\n",
       "        (6): ReLU()\n",
       "        (7): Dropout(p=0.0, inplace=False)\n",
       "      )\n",
       "      (relu): ReLU()\n",
       "    )\n",
       "    (3): TemporalBlock(\n",
       "      (conv1): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(48,), dilation=(8,))\n",
       "      (chomp1): Chomp1d()\n",
       "      (relu1): ReLU()\n",
       "      (dropout1): Dropout(p=0.0, inplace=False)\n",
       "      (conv2): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(48,), dilation=(8,))\n",
       "      (chomp2): Chomp1d()\n",
       "      (relu2): ReLU()\n",
       "      (dropout2): Dropout(p=0.0, inplace=False)\n",
       "      (net): Sequential(\n",
       "        (0): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(48,), dilation=(8,))\n",
       "        (1): Chomp1d()\n",
       "        (2): ReLU()\n",
       "        (3): Dropout(p=0.0, inplace=False)\n",
       "        (4): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(48,), dilation=(8,))\n",
       "        (5): Chomp1d()\n",
       "        (6): ReLU()\n",
       "        (7): Dropout(p=0.0, inplace=False)\n",
       "      )\n",
       "      (relu): ReLU()\n",
       "    )\n",
       "    (4): TemporalBlock(\n",
       "      (conv1): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(96,), dilation=(16,))\n",
       "      (chomp1): Chomp1d()\n",
       "      (relu1): ReLU()\n",
       "      (dropout1): Dropout(p=0.0, inplace=False)\n",
       "      (conv2): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(96,), dilation=(16,))\n",
       "      (chomp2): Chomp1d()\n",
       "      (relu2): ReLU()\n",
       "      (dropout2): Dropout(p=0.0, inplace=False)\n",
       "      (net): Sequential(\n",
       "        (0): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(96,), dilation=(16,))\n",
       "        (1): Chomp1d()\n",
       "        (2): ReLU()\n",
       "        (3): Dropout(p=0.0, inplace=False)\n",
       "        (4): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(96,), dilation=(16,))\n",
       "        (5): Chomp1d()\n",
       "        (6): ReLU()\n",
       "        (7): Dropout(p=0.0, inplace=False)\n",
       "      )\n",
       "      (relu): ReLU()\n",
       "    )\n",
       "    (5): TemporalBlock(\n",
       "      (conv1): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(192,), dilation=(32,))\n",
       "      (chomp1): Chomp1d()\n",
       "      (relu1): ReLU()\n",
       "      (dropout1): Dropout(p=0.0, inplace=False)\n",
       "      (conv2): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(192,), dilation=(32,))\n",
       "      (chomp2): Chomp1d()\n",
       "      (relu2): ReLU()\n",
       "      (dropout2): Dropout(p=0.0, inplace=False)\n",
       "      (net): Sequential(\n",
       "        (0): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(192,), dilation=(32,))\n",
       "        (1): Chomp1d()\n",
       "        (2): ReLU()\n",
       "        (3): Dropout(p=0.0, inplace=False)\n",
       "        (4): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(192,), dilation=(32,))\n",
       "        (5): Chomp1d()\n",
       "        (6): ReLU()\n",
       "        (7): Dropout(p=0.0, inplace=False)\n",
       "      )\n",
       "      (relu): ReLU()\n",
       "    )\n",
       "    (6): TemporalBlock(\n",
       "      (conv1): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(384,), dilation=(64,))\n",
       "      (chomp1): Chomp1d()\n",
       "      (relu1): ReLU()\n",
       "      (dropout1): Dropout(p=0.0, inplace=False)\n",
       "      (conv2): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(384,), dilation=(64,))\n",
       "      (chomp2): Chomp1d()\n",
       "      (relu2): ReLU()\n",
       "      (dropout2): Dropout(p=0.0, inplace=False)\n",
       "      (net): Sequential(\n",
       "        (0): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(384,), dilation=(64,))\n",
       "        (1): Chomp1d()\n",
       "        (2): ReLU()\n",
       "        (3): Dropout(p=0.0, inplace=False)\n",
       "        (4): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(384,), dilation=(64,))\n",
       "        (5): Chomp1d()\n",
       "        (6): ReLU()\n",
       "        (7): Dropout(p=0.0, inplace=False)\n",
       "      )\n",
       "      (relu): ReLU()\n",
       "    )\n",
       "    (7): TemporalBlock(\n",
       "      (conv1): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(768,), dilation=(128,))\n",
       "      (chomp1): Chomp1d()\n",
       "      (relu1): ReLU()\n",
       "      (dropout1): Dropout(p=0.0, inplace=False)\n",
       "      (conv2): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(768,), dilation=(128,))\n",
       "      (chomp2): Chomp1d()\n",
       "      (relu2): ReLU()\n",
       "      (dropout2): Dropout(p=0.0, inplace=False)\n",
       "      (net): Sequential(\n",
       "        (0): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(768,), dilation=(128,))\n",
       "        (1): Chomp1d()\n",
       "        (2): ReLU()\n",
       "        (3): Dropout(p=0.0, inplace=False)\n",
       "        (4): Conv1d(25, 25, kernel_size=(7,), stride=(1,), padding=(768,), dilation=(128,))\n",
       "        (5): Chomp1d()\n",
       "        (6): ReLU()\n",
       "        (7): Dropout(p=0.0, inplace=False)\n",
       "      )\n",
       "      (relu): ReLU()\n",
       "    )\n",
       "  )\n",
       "  (gap): GAP1d(\n",
       "    (gap): AdaptiveAvgPool1d(output_size=1)\n",
       "    (flatten): Flatten(full=False)\n",
       "  )\n",
       "  (linear): Linear(in_features=25, out_features=2, bias=True)\n",
       ")"
      ]
     },
     "execution_count": null,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "bs = 16\n",
    "nvars = 3\n",
    "seq_len = 128\n",
    "c_out = 2\n",
    "xb = torch.rand(bs, nvars, seq_len)\n",
    "model = TCN(nvars, c_out, fc_dropout=.5)\n",
    "test_eq(model(xb).shape, (bs, c_out))\n",
    "model = TCN(nvars, c_out, conv_dropout=.2)\n",
    "test_eq(model(xb).shape, (bs, c_out))\n",
    "model = TCN(nvars, c_out)\n",
    "test_eq(model(xb).shape, (bs, c_out))\n",
    "model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "application/javascript": "IPython.notebook.save_checkpoint();",
      "text/plain": [
       "<IPython.core.display.Javascript object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "/Users/nacho/notebooks/tsai/nbs/113_models.TCN.ipynb saved at 2022-11-09 13:14:18\n",
      "Correct notebook to script conversion! 😃\n",
      "Wednesday 09/11/22 13:14:21 CET\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "\n",
       "                <audio  controls=\"controls\" autoplay=\"autoplay\">\n",
       "                    <source src=\"data:audio/wav;base64,UklGRvQHAABXQVZFZm10IBAAAAABAAEAECcAACBOAAACABAAZGF0YdAHAAAAAPF/iPh/gOoOon6w6ayCoR2ZeyfbjobxK+F2Hs0XjKc5i3DGvzaTlEaraE+zz5uLUl9f46fHpWJdxVSrnfmw8mYEScqUP70cb0Q8X41uysJ1si6Eh1jYzXp9IE2DzOYsftYRyoCY9dJ/8QICgIcEun8D9PmAaBPlfT7lq4MFIlh61tYPiCswIHX+yBaOqT1QbuW7qpVQSv9lu6+xnvRVSlyopAypbGBTUdSalrSTaUBFYpInwUpxOzhti5TOdndyKhCGrdwAfBUcXIJB69p+Vw1egB76+n9q/h6ADglbf4LvnIHfF/981ODThF4m8HiS0riJVjQ6c+/EOZCYQfJrGrhBmPVNMmNArLKhQlkXWYqhbaxXY8ZNHphLuBJsZUEckCTFVHMgNKGJytIDeSUmw4QN4Qx9pReTgb3vYX/TCBuApf75f+P5Y4CRDdN+B+tngk8c8nt03CKGqipgd13OhotwOC5x9MCAknFFcmlmtPmagFFFYOCo0qRzXMhVi57pryNmIEqJlRi8bm52PfuNM8k4dfQv+4cO12l6zCGdg3jl730uE/KAPvS+f0wEAoAsA89/XfXQgBESIn6S5luDtiC8eh/YmIfpLqt1OMp5jXg8/24MveqUNUnPZsqw0Z3yVDldnaUOqIZfXlKrm36zzWhjRhaT+r+ncHI5/otUzfd2uSt7hl/bqXtoHaCC6+mqfrAOeoDD+PJ/xf8RgLMHfH/b8GeBihZIfSXidoQSJWB52NM1iRkzz3MkxpKPbUCrbDu5d5fgTAxkSK3JoEhYD1p2omere2LZTuqYLbdWa49Cx5Dww7tyXDUnioXRkHhwJyKFvd/AfPoYy4Fl7j1/LQorgEr9/X89+0qAOAwAf13sJoL8Gkd8wt25hWIp3Heez/eKODfPcSPCzpFNRDVqf7UlmnNQKGHgqd+jgVvJVm2f265QZTpLS5byur1tpT6ajvrHq3Q2MXWIxtUCehoj8YMk5LB9hRQegeTypn+nBQWA0QHgf7f2q4C5EFt+5ucOg2YfHXtq2SSHpS0ydnTL4IxFO6pvNb4ulBdInWfcsfSc7VMmXpSmE6eeXmZThJxpsgRohEfOk86+AHCoOpOMFsx1dv8s6oYT2k17uR7ngpXod34IEJqAaPfnfyABCIBZBpl/NPI2gTQVjX134x2ExSPMeR7VtYjZMWJ0W8ftjkA/YW1durCWykvjZFKu4p9LVwVbZKNkqpxh6U+6mRC2mGq2Q3SRvsIgcpc2sIpD0Bp4uiiFhW3ecXxOGgaCDe0Vf4cLPoDv+/5/mfw1gN4KKX+17emBqBmYfBHfVYUZKFR44NBtiv41bHJUwx+RJkP1apu2VJlkTwli4qrwoo1ax1dToNCtemRSTBGXz7kJbdM/PY/Dxht0dTLziH7Ul3loJEiE0uJsfdsVTYGL8Yt/AgcMgHYA7X8S+IqAYA+QfjzpxIIVHnp7tdqzhmAstXaxzEqMETpScGC/dJP3Rmdo8LIZnOVSEF+Opxumsl1sVF+dVrE5Z6NIiZSkvVdv2zsqjdnK8HVDLlyHyNjuegogM4NA5z9+YRG9gA722H97AgOA/gSyf43zCIHdE899yuTIg3ciNXpm1jmImTDwdJPITI4RPhRugbvslbFKt2Vfr/6eTFb4W1WkY6m6YPdQjJr2tNZp3EQlko7BgXHRNz2LAc+gdwMq7IUf3R58ohtFgrbr6n7hDFWAlPr8f/T9I4CECU9/De+vgVQY5nxh4POEzybJeCTS5YnCNAZzhsRzkP1Bsmu4t4aYU07nYuerA6KWWcJYO6HHrKJjaE3Zl624UWz/QOOPjcWHc7QzdIk40yl5tCWjhIDhJX0xF4CBMvBsf10IF4Ac//Z/bPlsgAcO
wn6S6n6CwxzUewLcRoYaKzV38M23i9o493CNwL6S1UUuaQe0QpvbUfdfiqglpcRccFU+nkWwambASUiVfLyqbg49xY2eyWh1hy/Sh37XjHpaIYKD7OUEfrgS5IC09MV/1gMBgKMDyH/n9N6AhhINfh7mdoMoIZt6r9fAh1cvfHXNya6N4DzDbqi8K5WWSYlmbbAdnkpV6FxJpWSo1V8DUmGb3rMRaQBG2JJgwN9wCDnNi8HNI3dKK1aG0dvHe/UciIJf6rt+Og5wgDn59X9P/xWAKQhxf2XweYH+FjB9suGVhIMlOnlo02GJhTOdc7vFyo/TQGxs2Li7lz9NwmPurBihnVi7WSWiwKvGYntOpJiOt5drKUKMkFnE8HLxNPmJ9NG4eP8mAYUv4Np8hhi3gdruSX+3CSWAwP38f8f6UoCuDPF+6Os8gnAbKnxQ3d2F0imydzDPKIuiN5lxu8EKkrFE82kftW2az1DbYImpMqTUW3FWIJ83r5hl2koJlla7+m0+PmSOZcjcdMgwS4g11iZ6qCLUg5jkxn0QFA6BWvOvfzEFBIBHAtp/Qfa3gC4RSH5y5yeD2B/8evnYS4cULgR2CMsUja47cG/QvW6UeEhXZ3+xP51GVNVdP6Zpp+1eDFM5nMeySWghR4+TNL85cD46YIyCzKJ2kCzEhoTabXtGHs+CCemJfpMPjoDe9+t/qQALgM8Gj3++8UaBqRV2fQTjO4Q3JKd5r9TgiEYyMHTxxiWPpz8jbfq585YpTJpk960xoKFXsVoTo7yq6GGMTw==\" type=\"audio/wav\" />\n",
       "                    Your browser does not support the audio element.\n",
       "                </audio>\n",
       "              "
      ],
      "text/plain": [
       "<IPython.lib.display.Audio object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
     "#|eval: false\n",
     "#|hide\n",
     "# Convert this notebook's #|export cells into a library script (target set by #|default_exp above)\n",
     "from tsai.export import get_nb_name; nb_name = get_nb_name(locals())\n",
     "from tsai.imports import create_scripts; create_scripts(nb_name)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "python3",
   "language": "python",
   "name": "python3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
