{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "rnn_pytorch.ipynb",
      "version": "0.3.2",
      "provenance": [],
      "collapsed_sections": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "metadata": {
        "collapsed": true,
        "id": "jqZrGQOG2ng-",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "# Recurrent Neural Network"
      ]
    },
    {
      "metadata": {
        "id": "EB7M0gMP2ng_",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 1. Libraries\n",
        "*Installing and importing necessary packages*\n",
        "\n",
        "*Working with **Python 3.6** and **PyTorch 1.0.1** *"
      ]
    },
    {
      "metadata": {
        "id": "-HDxCZeVAPb-",
        "colab_type": "code",
        "outputId": "08d09105-fa52-4196-d4d7-cca9cd92c630",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 289
        }
      },
      "cell_type": "code",
      "source": [
        "!nvidia-smi"
      ],
      "execution_count": 11,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Thu Apr  4 12:34:13 2019       \n",
            "+-----------------------------------------------------------------------------+\n",
            "| NVIDIA-SMI 418.56       Driver Version: 410.79       CUDA Version: 10.0     |\n",
            "|-------------------------------+----------------------+----------------------+\n",
            "| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |\n",
            "| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |\n",
            "|===============================+======================+======================|\n",
            "|   0  Tesla K80           Off  | 00000000:00:04.0 Off |                    0 |\n",
            "| N/A   73C    P0    73W / 149W |   2430MiB / 11441MiB |      0%      Default |\n",
            "+-------------------------------+----------------------+----------------------+\n",
            "                                                                               \n",
            "+-----------------------------------------------------------------------------+\n",
            "| Processes:                                                       GPU Memory |\n",
            "|  GPU       PID   Type   Process name                             Usage      |\n",
            "|=============================================================================|\n",
            "+-----------------------------------------------------------------------------+\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "Xdemin4U2nhA",
        "colab_type": "code",
        "outputId": "066af4ef-98e5-463b-e84f-e062e16a45e0",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 292
        }
      },
      "cell_type": "code",
      "source": [
        "import sys\n",
        "import os\n",
        "# !{sys.executable} -m pip install http://download.pytorch.org/whl/cu80/torch-0.4.0-cp36-cp36m-linux_x86_64.whl\n",
        "# !{sys.executable} -m pip install torch torchvision matplotlib\n",
        "!{sys.executable} -m pip install https://download.pytorch.org/whl/cu100/torch-1.0.1.post2-cp36-cp36m-linux_x86_64.whl\n",
        "!{sys.executable} -m pip install torchvision matplotlib\n",
        "\n",
        "import torch\n",
        "import torch.nn as nn\n",
        "import torchvision.datasets as datasets\n",
        "import torchvision.transforms as transforms\n",
        "import torch.nn.functional as F\n",
        "from torch.autograd import Variable\n",
        "\n",
        "%matplotlib inline\n",
        "import matplotlib\n",
        "import matplotlib.pyplot as plt\n",
        "import numpy as np\n",
        "\n",
        "from timeit import default_timer as timer\n",
        "\n",
        "print(\"PyTorch version: {}\".format(torch.__version__))\n",
        "cudnn_enabled = torch.backends.cudnn.enabled\n",
        "print(\"CuDNN enabled\" if cudnn_enabled else \"CuDNN disabled\")"
      ],
      "execution_count": 12,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Requirement already satisfied: torch==1.0.1.post2 from https://download.pytorch.org/whl/cu100/torch-1.0.1.post2-cp36-cp36m-linux_x86_64.whl in /usr/local/lib/python3.6/dist-packages (1.0.1.post2)\n",
            "Requirement already satisfied: torchvision in /usr/local/lib/python3.6/dist-packages (0.2.2.post3)\n",
            "Requirement already satisfied: matplotlib in /usr/local/lib/python3.6/dist-packages (3.0.3)\n",
            "Requirement already satisfied: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision) (4.1.1)\n",
            "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.11.0)\n",
            "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.14.6)\n",
            "Requirement already satisfied: torch in /usr/local/lib/python3.6/dist-packages (from torchvision) (1.0.1.post2)\n",
            "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib) (2.5.3)\n",
            "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib) (2.3.1)\n",
            "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib) (1.0.1)\n",
            "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib) (0.10.0)\n",
            "Requirement already satisfied: olefile in /usr/local/lib/python3.6/dist-packages (from pillow>=4.1.1->torchvision) (0.46)\n",
            "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from kiwisolver>=1.0.1->matplotlib) (40.8.0)\n",
            "PyTorch version: 1.0.1.post2\n",
            "CuDNN enabled\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "ateauqQT2nhF",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 2. Variables\n",
        "*Indicate the root directory where the data must be downloaded, the directory where the results should be saved and the type of RNN (conventional, LSTM, GRU) and its respective hyper-parameters*"
      ]
    },
    {
      "metadata": {
        "id": "Jt3gMpMY2nhG",
        "colab_type": "code",
        "outputId": "7c0dcb74-324f-4c13-9ee7-3f843aa0fc86",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        }
      },
      "cell_type": "code",
      "source": [
        "# Make reproducible run\n",
        "torch.manual_seed(1)\n",
        "\n",
        "# Settable parameters\n",
        "params = {'root': './data/',\n",
        "          'results_dir': './results/',\n",
        "          'hidden_size': 128,\n",
        "          'input_size': 28,  # MNIST data input (img shape: 28*28)\n",
        "          'sequence_length': 28,\n",
        "          'lr': 1e-3,\n",
        "          'weight_decay': 1e-10, # 5e-4,  # 1e-10,\n",
        "          'momentum': 0.9,\n",
        "          'num_classes': 10,  # class 0-9\n",
        "          'batch_size': 128,\n",
        "          'model_type': 'GRU',  # Options = [RNN, LSTM, GRU]\n",
        "          'optim_type': 'Adam',  # Options = [Adam, SGD, RMSprop]\n",
        "          'criterion_type': 'CrossEntropyLoss', # Options = [L1Loss, SmoothL1Loss, NLLLoss, CrossEntropyLoss]\n",
        "          'num_layers': 1,\n",
        "          'epochs': 30,\n",
        "          'save_step': 200,\n",
        "          'use_cuda': True,\n",
        "         }\n",
        "\n",
        "# GPU usage\n",
        "print(\"GPU: {}, number: {}\".format(torch.cuda.is_available(), torch.cuda.device_count()))\n",
        "device = torch.device('cuda') if params['use_cuda'] and torch.cuda.is_available() else torch.device('cpu')\n",
        "\n",
        "# Ensure results directory exists\n",
        "if not os.path.exists(params['results_dir']):\n",
        "    os.mkdir(params['results_dir'])"
      ],
      "execution_count": 138,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "GPU: True, number: 1\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "BamOukfB2nhL",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 3. Dataset\n",
        "\n",
        "*Normalizing between (0.1307, 0.3081): global mean and standard deviation of the MNIST dataset*"
      ]
    },
    {
      "metadata": {
        "id": "jpbW8mtb2nhN",
        "colab_type": "code",
        "outputId": "b0bc736b-2f2e-4544-83ab-f9ca88489ee7",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        }
      },
      "cell_type": "code",
      "source": [
        "# Get train and test datasets\n",
        "# trans = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (1.0,))])\n",
        "trans = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081,))])\n",
        "# trans = transforms.Compose([transforms.ToTensor()])\n",
        "mnist_train = datasets.MNIST(\n",
        "    root=params['root'],  # directory where the data is or where it will be saved\n",
        "    train=True,  # train dataset\n",
        "    download=True,  # download if you don't have it\n",
        "    transform=trans)  # converts PIL.image or np.ndarray to torch.FloatTensor of shape (C, H, W) and normalizes from (0.0, 1.0)\n",
        "mnist_test = datasets.MNIST(root=params['root'], train=False, download=True, transform=trans)  # transforms.ToTensor()\n",
        "print(\"MNIST Train {}, Test {}\".format(len(mnist_train), len(mnist_test)))\n",
        "\n",
        "# Dataloader: mini-batch during training\n",
        "mnist_train_dataloader = torch.utils.data.DataLoader(dataset=mnist_train, batch_size=params['batch_size'], shuffle=True)\n",
        "mnist_test_dataloader = torch.utils.data.DataLoader(dataset=mnist_test, batch_size=params['batch_size'], shuffle=True)"
      ],
      "execution_count": 117,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "MNIST Train 60000, Test 10000\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "QhKhpBRaU4b4",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "*Dataset examples*"
      ]
    },
    {
      "metadata": {
        "id": "ikWHBC-2U6-B",
        "colab_type": "code",
        "outputId": "750904ab-edcf-4c33-b891-6c9c044833cd",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 140
        }
      },
      "cell_type": "code",
      "source": [
        "# Plot examples\n",
        "examples = enumerate(mnist_test_dataloader)\n",
        "batch_idx, (example_data, example_targets) = next(examples)\n",
        "\n",
        "fig, axes = plt.subplots(nrows=1, ncols=4)\n",
        "for i, ax in enumerate(axes.flat):\n",
        "  ax.imshow(example_data[i][0]) \n",
        "  ax.set_title('{}'.format(example_targets[i]))\n",
        "  ax.set_xticks([])\n",
        "  ax.set_yticks([])\n",
        "  plt.tight_layout()"
      ],
      "execution_count": 72,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAagAAAB7CAYAAAAhbxT1AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAADzhJREFUeJzt3X+wlVPfx/HPKf2gRHFOftSIcBXR\ndJD4A49GzfO4yaj7jsFDJPlZzTMaJMN4ECNTmUJ+FXfRmTQ43OE2UuE4JkVp6kqlntL4TT+oker5\nA8t3XZ192uecffa19j7v11/f1Vp778V19lnnWmtd31WyZ88eAQAQmmZpdwAAgJowQAEAgsQABQAI\nEgMUACBIDFAAgCAxQAEAgrRf2h3ItSiKBkn63+Q/S2oXx/HWFLqEGkRRdISk6ZKOk7RF0k1xHC9I\nt1f4E9+j8EVR1EXS55LWmH/+KI7j/06nR7lXdANUHMezJc3+sxxF0T8kDeZLFZzpkubGcXxeFEX/\nIekmSQxQgeB7VDC+jOO4W9qdaCxFN0BZURS11u9/Bf5n2n3BX6Io6izpFEn/JUlxHM+TNC/VTiEj\nvkdIS1EPUJKukfR+HMdr9tkS+dRT0heSxkVR9DdJX0kaGcfxknS7hQz4HoWrXRRFL0vqJmmdpFFx\nHK9It0u5U7SbJKIoaibpfyQ9nHZfsJeDJZ0kaUEcx5Gkf0qaE0VRsf/BVHD4HgVtq6SZkkZKOkHS\nvyW9Ukzfo6IdoCSdIWlbHMfL0+4I9rJZ0tdxHL/yR/kpSR0kHZ9el5AB36NAxXH8fRzHN8VxvC6O\n492SHpHUUUX0PSrmAepvkv6VdidQo/WSDvzjr3PFcbxH0m5Ju1LtFWrC9yhQURS1j6Lo6MQ/N5e0\nM43+NIZiHqB6Siqaudgis0zSJklDJSmKor9L+lH+dlmEge9RuE6T9E4URaV/lK+V9H+S1qbXpdwq\n5gGqk35ffEdg/rhjGiRpaBRFa/X7Gsff4zj+Ld2eoQZ8jwIVx/FbkqZIej+KopWSBksaGMdx0cxE\nlHAeFAAgRMV8BwUAKGAMUACAIDFAAQCCxAAFAAjSvp44ZgdFekrq0JbrlJ5srxPXKD1co/DVeI24\ngwIABIkBCgAQJAYoAECQGKAAAEFigAIABIkBCgAQJAYoAECQGKAAAEFigAIABIkBCgAQJAYoAECQ\nGKAAAEFigAIABGlf2cyBepk7d65XjqLIxWVlZS5u27Zt3voEoLBwBwUACBIDFAAgSEzxoU4qKyu9\n8rx581y8ZcsWF0+fPt1r16JFCxc3b97cxcuWLfPadenSJRfdBJAF+51dtGiRV1dVVZXxdU899ZSL\n169f7+I2bdp47VavXu3ijh071rl/3EEBAILEAAUACBJTfKiTlStXeuXZs2e7+Msvv8z4urPOOsvF\nRx99tIsff/xxr924ceMa2sUm6ZJLLnHxm2++6dXZ63LAAQfkrU9ouM2bN3vl+fPnu3jdunUuXrJk\nScZ2e/bs8epKSkpcvHXrVhd///33DeqrJP38889e+ccff3QxU3wAgKLBAAUACBIDFAAgSDlZgxo7\ndqxXPuecc1zct2/fXHwEUvTVV1+5eMKECV7dN9984+Lrr7/exWPGjPHadejQwcX77ffXj91vv/2W\ns342ZcuXL3dxct3Cfgdr2zqMMMycOdPFkydP9uoa8/q1bt3aK/fs2dPFn332mVc3aNAgF+/YscPF\nvXr18to19LER7qAAAEFigAIABKkkuQUxodZK9yZm26IkHX744S62TycfccQRdepcaOxU14wZM7y6\nG2+80cXJW+V6Ktl3Eyer65StZLaIoUOHuvi7777z6uy03qRJk1zcrFmT+dsn2+uU02uU9OCDD7r4\n9ttvz9iue/fuLk5Ow/bv37/B
/Tj44INdbDOGpCyIa1Qbuz27U6dOLk5O12ZiH92QpC+++MLFZ555\npld36aWXuri8vNzFJ554oteuXbt2WX12jtR4jZrMbxEAQGFhgAIABIkBCgAQpJxsM2/fvr1X/vrr\nr11cUVHhYrtOI/kZrkOxfft2FydTxtj1Fru9WpI2bdrk4vHjxzdS7/LDZiiX9l53suw6RhNadwpO\naWlpxjq7Jrp27VoXX3755Tnvh12btWsdqN2qVatcXNu6k11fvOaaa1xs160kadeuXS5O/p61j3mE\njt8oAIAgMUABAIKUk23mn3zyiVc+5ZRTamw3bNgwr3zHHXe42G5PlaQDDzwwm4/Omr1tXrFihVc3\nZ84cF7/22msujuM46/fv1q2bi+1T/Q2Q2jbz/fff3yv/+uuvLrbTnJI0ceJEFwe0rTifgtjCbJ/0\nP/nkk706O1VttxLbn3VJOumkk1ycPHjOsr8zbNYYSercubOLly5duo9e500Q16g21dXVLj7jjDNc\nnJw2t79re/To0fgdyx+2mQMACgcDFAAgSDmZ4tu9e7dXvvLKK138wgsv/PVmic+yU0ktW7b06lq1\nauXiY445xsW//PKL187uZLGSh2/ZJKdbtmyp8TUN8cADD7h49OjRuXjLvE7x/fDDDy62mUAkP6Hr\nxo0bvbpk23yxPwfJ3aF2qip5QJ892C85rVxPQUwfTZkyxcX33HOPV2d31eZacveg/ewbbrih0T63\njoK4RlYySXLXrl1dvGHDBheXlZV57Ww2myLDFB8AoHAwQAEAgsQABQAIUk4eKU5uhXz++edd3Lt3\nbxfff//9XjubjcFmcEhKZm2wRowYkXU//5Rct7LbbWubr7fbqKdNm+bVDRw4sM79CMm4ceNcXNsh\ngjlat2mwp59+2sXJjB/PPfdcxtdNnTrVxTZjxkEHHZTD3qUrebpAY0p+9/P52YXMZi+X/HUna/bs\n2fnoTrC4gwIABIkBCgAQpEbPGnjzzTe7+Pzzz/fqZs6c6WK7DVzyE1zarcyLFy/22mXKXnDrrbd6\nZfvZkydP9uoybTtPJlW0GSjs1vdisH79ehcfddRRXl1ya3la7FTvggULXJyclrWPKNiD/CRp5MiR\nLn711VddfMUVV+Ssn2no27eviz/44ING/Syb2NQ+noDs2Z/fJLt132bmkKSdO3e6OMRk27nGHRQA\nIEgMUACAIOUkk0Rjs2fY2HOXpMy7ymxSTMnPaJHtdM4zzzzjlW2GjDzIayYJu/uqe/fuXt3q1atd\nnDyrJplYNpeS51D17NnTxbU9UW/P4xo+fLhXd+yxx7rYTh2/8847Xrs6JCsOLktBY/v0009d3KtX\nL6/OTp8nEwunKLhrVFlZ6ZUHDBiQ1etsItlHH33UxeXl5bnpWHrIJAEAKBwMUACAIDFAAQCCVBCH\n09st3dlu706urVVUVGT1Ops92Ga+LnY2G/udd97p1dn/l8kt+bleg7KHIybXoDKtO9l5eUkaMmSI\ni+3jCpK/LnLXXXe5eNasWV67oUOHZtnjpuett95KuwsF77TTTvPK9me4qqoq4+ts3amnnuri008/\n3Wt36KGHujh5qKTNvpN8lCY03EEBAILEAAUACFLY93cNkEwwm9zWaXXo0MHFCxcudLHNSFDsbrvt\nNhe//PLLXt3HH3/s4vvuu8+rswmA27Zt2+B+2ES1L730UsZ2o0aNcvHYsWO9Opv41U4ZStL8+fNr\nfL/GPNQvVDt27PDKNoHpIYcckvF1NrNIcip98ODBOepdcTvssMO88rvvvuvie++918UfffSR1+7D\nDz90sZ1ur66uzvhZr7/+uldet26dix955BEXh5iZgjsoAECQGKAAAEFigAIABKkgUh3Vxy233OKV\nbQqWdu3aeXX28LuLL764cTuWvbymOqpN165dXWznryXphBNOcPHcuXNd3L59e69dmzZt6vy5Ns
O6\n5D9i0K1bNxcvWbLEa9eyZcuM72nX0Ow2c7stV6rTmlRwaXSyNWzYMK9sTxewh3pedtllXruJEye6\n2KYQk/zDSmtbI7HrIjblVPI9bHqrBijYa5T07bffunjbtm0unjNnjtfuiSeecLFNVZZkH7tInvKQ\nZ6Q6AgAUDgYoAECQimqKz26bTU7j7dq1y8VPPvmkV3f11Vc3bsfqJ5gpPpvBoU+fPl7dhg0banzN\n8ccf75UvvPDCOn+unc6QpOnTp9fY7qKLLvLKNmN50tKlS11sMyLYKRGpTpkkCnb6KPn/d/To0S7O\n9P+6vuzWf8mfQkxmDLngggtcPHDgwFx8fMFeo/r66aefXHz33Xd7dZMmTXKxnXrfunVro/erFkzx\nAQAKBwMUACBIRZVJYsqUKS62U3pJzZs3z0d3ioZ96t0+yS75h6Y99NBDLl61apXX7uGHH26k3u2d\n+aI25513nouPPPJIF/fv3z+nfSoEpaWlXtlOfdvko9OmTfPa2QMLk+8xdepUF9uDL5MHQNrsLcg9\ne5Br586dM7br169fPrpTb9xBAQCCxAAFAAgSAxQAIEgFv83cbnO2mQZ2796d8TXJp64HDBiQ+441\nXDDbzLP19ttvu3jevHle3WOPPVbn9xs5cqRXfu+991y8aNEiF1977bVeO5vJ/uyzz/bqli1b5uLx\n48fXuU81aHJbmO11WblypVf3xhtv5Ls72QjiGq1Zs8bFyewn9lGJ+hwiuHPnTq/87LPPunj48OEZ\nX2czp48ZM6bOn5tDbDMHABQOBigAQJAKforPZo/o3bu3i5cvX57xNZ9//rlXtlODASm4Kb4mKojp\no3waMWKEi1esWOHV2ewcAUnlGm3atMkr20cZkr+frrrqKhfbxzVsFhfJT8C7YMECF8+aNctrZx8F\nSP6O79Kli4vt9WvduvVe/w15xBQfAKBwMEABAILEAAUACFLBpzpq0aKFi5MZzC07v2pT3ACom5KS\nv5YLmjXjb9xMNm/e7JVrWxe36aRmzJjh4mTKttoen8kkeXjoqFGjXJzyutM+8dMFAAgSAxQAIEgF\nP8W3ceNGF1dVVWVsZw9Ia9WqVaP2CWgqFi5c6JUXL17s4vLy8nx3JygdO3b0yp06dXKx/b2VlMwK\nkQ2bvVySzj33XBdfd911Xp3N6B867qAAAEFigAIABKngp/iyNWTIkLS7ABSd7du3e+Xq6moXN/Up\nvuShjBUVFS5+8cUXvbrKykoX9+jRw8XHHXec185mvbHTeMnpxOTOvULFHRQAIEgMUACAIDFAAQCC\nVPBrUGVlZS7u16+fi+3hdsl2AJBvffr0qTGWpAkTJuS7OwWBOygAQJAYoAAAQSr4Awstu+V127Zt\nXl1paWm+u9NQHFhYGJrcgYUFiGsUPg4sBAAUDgYoAECQGKAAAEEqqjWoIsMaVGFgfSN8XKPwsQYF\nACgcDFAAgCDta4oPAIBUcAcFAAgSAxQAIEgMUACAIDFAAQCCxAAFAAgSAxQAIEj/D49OSB5oVIuT\nAAAAAElFTkSuQmCC\n",
            "text/plain": [
              "<Figure size 432x288 with 4 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "metadata": {
        "id": "dWCCngwF2nhS",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 4. The Model: RNN\n",
        "$h_t = \\sigma(W x_t + U h_{t-1})$\n",
        "\n",
        "Some important information is: \n",
        "* Input size: number of expected features in input $x$\n",
        "* Hidden size: number of features in hidden state $h$\n",
        "* After forward propagation, output has shape (batch_size, seq_length, hidden_size)\n",
        "* If you want to initialize of RNN with hidden and cell states different than zero, modify variables $h0$ and $c0$. Otherwise, you may set them as *None*."
      ]
    },
    {
      "metadata": {
        "id": "N2UAizM92nhT",
        "colab_type": "code",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "class RNN(nn.Module):\n",
        "    def __init__(self):\n",
        "        super(RNN, self).__init__()\n",
        "        self.input_size = params['input_size']\n",
        "        self.hidden_size = params['hidden_size']\n",
        "        self.num_layers = params['num_layers']\n",
        "        self.model_type = params['model_type']\n",
        "        \n",
        "        if self.model_type == 'RNN':\n",
        "            self.rnn = nn.RNN(self.input_size, self.hidden_size, num_layers=self.num_layers, bias=True, nonlinearity='tanh', dropout=0.2, batch_first=True)\n",
        "        elif self.model_type == 'GRU':\n",
        "            self.rnn = nn.GRU(self.input_size, self.hidden_size, num_layers=self.num_layers, bias=True, dropout=0.2, batch_first=True)\n",
        "        else:  # 'LSTM'\n",
        "            self.rnn = nn.LSTM(self.input_size, self.hidden_size, num_layers=self.num_layers, dropout=0.2, batch_first=True)\n",
        "\n",
        "        self.bn = nn.BatchNorm1d(self.hidden_size)\n",
        "        self.fc = nn.Linear(self.hidden_size, params['num_classes'])\n",
        "        self.softmax = nn.LogSoftmax()  # nn.ReLU()  # nn.LogSoftmax()  # Softmax()\n",
        "        \n",
        "    def forward(self, x):           \n",
        "        # Set initial hidden state $h0$ and cell state $c0$\n",
        "        h0 = torch.zeros(self.num_layers, self.input_size, self.hidden_size, dtype=torch.float32)\n",
        "        c0 = torch.zeros(self.num_layers, self.input_size, self.hidden_size, dtype=torch.float32)\n",
        "\n",
        "        # Forward propagate RNN\n",
        "        if self.model_type == 'LSTM':\n",
        "            out, hidden = self.rnn(x, None)  # (h0, c0)) \n",
        "        else:\n",
        "            out, hidden = self.rnn(x, None)  # h0)\n",
        "        \n",
        "        # Decode last hidden state\n",
        "        out_fc = self.bn(out[:, -1, :])\n",
        "        out_fc = self.fc(out_fc)\n",
        "        out_fc = self.softmax(out_fc)\n",
        "        # print(\"Input {} -> Output shape {} -> {} | Last shape {}\".format(x.shape, out.shape, out_fc.shape, out[:, -1, :].shape))\n",
        "        return out_fc"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "metadata": {
        "id": "cw-AqMCfOgPN",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "*Instantiate model and optimizer*"
      ]
    },
    {
      "metadata": {
        "id": "9T6W1JeOOlTW",
        "colab_type": "code",
        "outputId": "15884eb1-b75a-4dce-c0fc-5c2f20db7884",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 88
        }
      },
      "cell_type": "code",
      "source": [
        "# Instantiate model\n",
        "model = RNN()\n",
        "# Allow for parallelism if multiple GPUs are detected\n",
        "#  model = nn.DataParallel(model)\n",
        "# Transfer model to device (CPU or GPU according to your preference and what's available)\n",
        "model = model.to(device)\n",
        "\n",
        "# Loss criterion\n",
        "if 'CrossEntropyLoss' in params['criterion_type']:\n",
        "  criterion = nn.CrossEntropyLoss()\n",
        "elif 'L1Loss' in params['criterion_type']:\n",
        "  criterion = nn.L1Loss()\n",
        "elif 'SmoothL1Loss' in params['criterion_type']:\n",
        "  criterion = nn.SmoothL1Loss()\n",
        "else:  # NLLLoss\n",
        "  criterion = nn.NLLLoss()  \n",
        "\n",
        "# Optimizer\n",
        "if 'Adam' in params['optim_type']:\n",
        "  optimizer = torch.optim.Adam(model.parameters(), lr=params['lr'], weight_decay=params['weight_decay'])\n",
        "elif 'SGD' in params['optim_type']:\n",
        "  optimizer = torch.optim.SGD(model.parameters(), lr=params['lr'], weight_decay=params['weight_decay'], momentum=params['momentum'])\n",
        "elif 'RMSprop' in params['optim_type']:\n",
        "  optimizer = torch.optim.RMSprop(model.parameters(), lr=params['lr'], weight_decay=params['weight_decay'], momentum=params['momentum'])\n",
        "\n",
        "# Scheduler to reduce learning rate after it plateaus\n",
        "scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer)\n",
        "\n",
        "# New results dir based on model's parameters\n",
        "res_dir = params['results_dir'] + '{}_{}layers_sgd_lr{}_weight{}_trainSize_{}_testSize_{}/'.\\\n",
        "    format(params['model_type'], params['num_layers'], params['lr'],\n",
        "           params['weight_decay'], len(mnist_train), len(mnist_test))\n",
        "\n",
        "if not os.path.exists(res_dir):\n",
        "    os.mkdir(res_dir)\n",
        "\n",
        "print(\"res_dir: {}\".format(res_dir))\n",
        "log_file = open(res_dir + 'log.txt', 'w')"
      ],
      "execution_count": 139,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "res_dir: ./results/GRU_1layers_sgd_lr0.001_weight1e-10_trainSize_60000_testSize_10000/\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/torch/nn/modules/rnn.py:46: UserWarning: dropout option adds dropout after all but last recurrent layer, so non-zero dropout expects num_layers greater than 1, but got dropout=0.2 and num_layers=1\n",
            "  \"num_layers={}\".format(dropout, num_layers))\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "metadata": {
        "id": "t7ZCKLry2nhV",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 5. Train"
      ]
    },
    {
      "metadata": {
        "id": "qaLuhdcu2nhW",
        "colab_type": "code",
        "outputId": "7e0dd432-08ef-4edb-9d5e-b9d9f7fb9b4d",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1849
        }
      },
      "cell_type": "code",
      "source": [
        "start_timer = timer()\n",
        "\n",
        "loss_arr = []\n",
        "train_acc_arr = []\n",
        "first_time = True\n",
        "total_num_steps = len(mnist_train_dataloader)\n",
        "\n",
        "# model.train()\n",
        "model.zero_grad()\n",
        "optimizer.zero_grad()\n",
        "for e in range(1, params['epochs']+1):\n",
        "    for i, (img, label) in enumerate(mnist_train_dataloader):\n",
        "        img = Variable(torch.squeeze(img)).to(device)\n",
        "        label = Variable(label).to(device)\n",
        "        \n",
        "        # Forward\n",
        "        out = model(img)\n",
        "        loss = criterion(out, label)\n",
        "        \n",
        "        # Backward\n",
        "        optimizer.zero_grad()\n",
        "        \n",
        "        # start debugger\n",
        "        # import pdb; pdb.set_trace()\n",
        "\n",
        "        loss.backward()\n",
        "        optimizer.step()\n",
        "        scheduler.step(loss)\n",
        "\n",
        "        loss_arr.append(loss.item())\n",
        "        \n",
        "        if i % params['save_step'] == 0:\n",
        "            # Train Accuracy\n",
        "            _, predicted = torch.max(out.data, 1)\n",
        "            total = label.size(0)\n",
        "            correct = (predicted == label).sum().item()\n",
        "            acc = 100 * correct / total\n",
        "            train_acc_arr.append(acc)\n",
        "            # Print update\n",
        "            perc = 100 * ((e-1)*total_num_steps + (i+1))/float(params['epochs'] * total_num_steps)\n",
        "            str_res = \"Completed {:.2f}%: Epoch/step [{}/{} - {}/{}], loss {:.4f}, acc {:.2f}, best acc {:.2f}\".format(perc, e, params['epochs'], i+1, total_num_steps, loss.item(), acc, max(train_acc_arr))\n",
        "            print(str_res)  # print(\"\\r\" + str_res, end=\"\")\n",
        "            # Save log\n",
        "            log_file.write(str_res)\n",
        "            \n",
        "# Save training loss\n",
        "plt.plot(loss_arr)\n",
        "# plt.semilogy(range(len(loss_arr)), loss_arr)\n",
        "plt.savefig(res_dir + 'loss.png')\n",
        "\n",
        "# Save model checkpoint\n",
        "torch.save(model.state_dict(), res_dir + 'model.ckpt')\n",
        "plt.show()\n",
        "log_file.close()\n",
        "\n",
        "end_timer = timer() - start_timer\n",
        "print(\"Model took {:.4f} mins ({:.4f} hrs) to finish training with best train accuracy of {:.4f}%\".format(end_timer/60, end_timer/3600, max(train_acc_arr)))"
      ],
      "execution_count": 140,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:34: UserWarning: Implicit dimension choice for log_softmax has been deprecated. Change the call to include dim=X as an argument.\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "Completed 0.01%: Epoch/step [1/30 - 1/469], loss 2.3744, acc 7.81, best acc 7.81\n",
            "Completed 1.43%: Epoch/step [1/30 - 201/469], loss 0.5953, acc 82.81, best acc 82.81\n",
            "Completed 2.85%: Epoch/step [1/30 - 401/469], loss 0.6471, acc 82.81, best acc 82.81\n",
            "Completed 3.34%: Epoch/step [2/30 - 1/469], loss 0.6236, acc 82.03, best acc 82.81\n",
            "Completed 4.76%: Epoch/step [2/30 - 201/469], loss 0.6567, acc 81.25, best acc 82.81\n",
            "Completed 6.18%: Epoch/step [2/30 - 401/469], loss 0.5539, acc 85.94, best acc 85.94\n",
            "Completed 6.67%: Epoch/step [3/30 - 1/469], loss 0.6836, acc 77.34, best acc 85.94\n",
            "Completed 8.10%: Epoch/step [3/30 - 201/469], loss 0.6340, acc 78.91, best acc 85.94\n",
            "Completed 9.52%: Epoch/step [3/30 - 401/469], loss 0.6380, acc 82.03, best acc 85.94\n",
            "Completed 10.01%: Epoch/step [4/30 - 1/469], loss 0.5994, acc 82.81, best acc 85.94\n",
            "Completed 11.43%: Epoch/step [4/30 - 201/469], loss 0.5376, acc 88.28, best acc 88.28\n",
            "Completed 12.85%: Epoch/step [4/30 - 401/469], loss 0.5952, acc 85.94, best acc 88.28\n",
            "Completed 13.34%: Epoch/step [5/30 - 1/469], loss 0.6287, acc 77.34, best acc 88.28\n",
            "Completed 14.76%: Epoch/step [5/30 - 201/469], loss 0.6367, acc 82.81, best acc 88.28\n",
            "Completed 16.18%: Epoch/step [5/30 - 401/469], loss 0.6259, acc 82.81, best acc 88.28\n",
            "Completed 16.67%: Epoch/step [6/30 - 1/469], loss 0.6444, acc 85.94, best acc 88.28\n",
            "Completed 18.10%: Epoch/step [6/30 - 201/469], loss 0.6338, acc 77.34, best acc 88.28\n",
            "Completed 19.52%: Epoch/step [6/30 - 401/469], loss 0.6168, acc 85.16, best acc 88.28\n",
            "Completed 20.01%: Epoch/step [7/30 - 1/469], loss 0.6262, acc 81.25, best acc 88.28\n",
            "Completed 21.43%: Epoch/step [7/30 - 201/469], loss 0.5777, acc 85.94, best acc 88.28\n",
            "Completed 22.85%: Epoch/step [7/30 - 401/469], loss 0.6334, acc 79.69, best acc 88.28\n",
            "Completed 23.34%: Epoch/step [8/30 - 1/469], loss 0.5715, acc 82.03, best acc 88.28\n",
            "Completed 24.76%: Epoch/step [8/30 - 201/469], loss 0.5828, acc 83.59, best acc 88.28\n",
            "Completed 26.18%: Epoch/step [8/30 - 401/469], loss 0.6105, acc 83.59, best acc 88.28\n",
            "Completed 26.67%: Epoch/step [9/30 - 1/469], loss 0.5210, acc 86.72, best acc 88.28\n",
            "Completed 28.10%: Epoch/step [9/30 - 201/469], loss 0.5730, acc 88.28, best acc 88.28\n",
            "Completed 29.52%: Epoch/step [9/30 - 401/469], loss 0.7762, acc 74.22, best acc 88.28\n",
            "Completed 30.01%: Epoch/step [10/30 - 1/469], loss 0.7092, acc 75.78, best acc 88.28\n",
            "Completed 31.43%: Epoch/step [10/30 - 201/469], loss 0.5960, acc 85.94, best acc 88.28\n",
            "Completed 32.85%: Epoch/step [10/30 - 401/469], loss 0.6185, acc 86.72, best acc 88.28\n",
            "Completed 33.34%: Epoch/step [11/30 - 1/469], loss 0.6776, acc 77.34, best acc 88.28\n",
            "Completed 34.76%: Epoch/step [11/30 - 201/469], loss 0.5619, acc 87.50, best acc 88.28\n",
            "Completed 36.18%: Epoch/step [11/30 - 401/469], loss 0.6417, acc 84.38, best acc 88.28\n",
            "Completed 36.67%: Epoch/step [12/30 - 1/469], loss 0.6395, acc 81.25, best acc 88.28\n",
            "Completed 38.10%: Epoch/step [12/30 - 201/469], loss 0.5894, acc 82.81, best acc 88.28\n",
            "Completed 39.52%: Epoch/step [12/30 - 401/469], loss 0.6575, acc 82.03, best acc 88.28\n",
            "Completed 40.01%: Epoch/step [13/30 - 1/469], loss 0.6557, acc 82.03, best acc 88.28\n",
            "Completed 41.43%: Epoch/step [13/30 - 201/469], loss 0.5732, acc 86.72, best acc 88.28\n",
            "Completed 42.85%: Epoch/step [13/30 - 401/469], loss 0.6268, acc 82.81, best acc 88.28\n",
            "Completed 43.34%: Epoch/step [14/30 - 1/469], loss 0.5557, acc 87.50, best acc 88.28\n",
            "Completed 44.76%: Epoch/step [14/30 - 201/469], loss 0.5617, acc 85.16, best acc 88.28\n",
            "Completed 46.18%: Epoch/step [14/30 - 401/469], loss 0.6255, acc 80.47, best acc 88.28\n",
            "Completed 46.67%: Epoch/step [15/30 - 1/469], loss 0.4741, acc 89.06, best acc 89.06\n",
            "Completed 48.10%: Epoch/step [15/30 - 201/469], loss 0.5339, acc 87.50, best acc 89.06\n",
            "Completed 49.52%: Epoch/step [15/30 - 401/469], loss 0.5534, acc 81.25, best acc 89.06\n",
            "Completed 50.01%: Epoch/step [16/30 - 1/469], loss 0.5734, acc 85.94, best acc 89.06\n",
            "Completed 51.43%: Epoch/step [16/30 - 201/469], loss 0.6231, acc 82.81, best acc 89.06\n",
            "Completed 52.85%: Epoch/step [16/30 - 401/469], loss 0.6168, acc 80.47, best acc 89.06\n",
            "Completed 53.34%: Epoch/step [17/30 - 1/469], loss 0.6456, acc 78.12, best acc 89.06\n",
            "Completed 54.76%: Epoch/step [17/30 - 201/469], loss 0.5973, acc 82.03, best acc 89.06\n",
            "Completed 56.18%: Epoch/step [17/30 - 401/469], loss 0.5220, acc 85.16, best acc 89.06\n",
            "Completed 56.67%: Epoch/step [18/30 - 1/469], loss 0.6613, acc 81.25, best acc 89.06\n",
            "Completed 58.10%: Epoch/step [18/30 - 201/469], loss 0.5319, acc 87.50, best acc 89.06\n",
            "Completed 59.52%: Epoch/step [18/30 - 401/469], loss 0.6313, acc 82.81, best acc 89.06\n",
            "Completed 60.01%: Epoch/step [19/30 - 1/469], loss 0.6134, acc 82.81, best acc 89.06\n",
            "Completed 61.43%: Epoch/step [19/30 - 201/469], loss 0.6066, acc 85.94, best acc 89.06\n",
            "Completed 62.85%: Epoch/step [19/30 - 401/469], loss 0.5751, acc 85.16, best acc 89.06\n",
            "Completed 63.34%: Epoch/step [20/30 - 1/469], loss 0.5277, acc 85.16, best acc 89.06\n",
            "Completed 64.76%: Epoch/step [20/30 - 201/469], loss 0.7158, acc 78.91, best acc 89.06\n",
            "Completed 66.18%: Epoch/step [20/30 - 401/469], loss 0.5503, acc 86.72, best acc 89.06\n",
            "Completed 66.67%: Epoch/step [21/30 - 1/469], loss 0.7054, acc 79.69, best acc 89.06\n",
            "Completed 68.10%: Epoch/step [21/30 - 201/469], loss 0.6488, acc 75.78, best acc 89.06\n",
            "Completed 69.52%: Epoch/step [21/30 - 401/469], loss 0.6451, acc 84.38, best acc 89.06\n",
            "Completed 70.01%: Epoch/step [22/30 - 1/469], loss 0.6415, acc 77.34, best acc 89.06\n",
            "Completed 71.43%: Epoch/step [22/30 - 201/469], loss 0.6619, acc 79.69, best acc 89.06\n",
            "Completed 72.85%: Epoch/step [22/30 - 401/469], loss 0.6728, acc 79.69, best acc 89.06\n",
            "Completed 73.34%: Epoch/step [23/30 - 1/469], loss 0.6324, acc 82.03, best acc 89.06\n",
            "Completed 74.76%: Epoch/step [23/30 - 201/469], loss 0.5901, acc 84.38, best acc 89.06\n",
            "Completed 76.18%: Epoch/step [23/30 - 401/469], loss 0.5975, acc 83.59, best acc 89.06\n",
            "Completed 76.67%: Epoch/step [24/30 - 1/469], loss 0.6897, acc 77.34, best acc 89.06\n",
            "Completed 78.10%: Epoch/step [24/30 - 201/469], loss 0.6770, acc 82.03, best acc 89.06\n",
            "Completed 79.52%: Epoch/step [24/30 - 401/469], loss 0.6065, acc 84.38, best acc 89.06\n",
            "Completed 80.01%: Epoch/step [25/30 - 1/469], loss 0.5896, acc 83.59, best acc 89.06\n",
            "Completed 81.43%: Epoch/step [25/30 - 201/469], loss 0.6912, acc 82.03, best acc 89.06\n",
            "Completed 82.85%: Epoch/step [25/30 - 401/469], loss 0.5715, acc 84.38, best acc 89.06\n",
            "Completed 83.34%: Epoch/step [26/30 - 1/469], loss 0.5760, acc 84.38, best acc 89.06\n",
            "Completed 84.76%: Epoch/step [26/30 - 201/469], loss 0.6141, acc 82.03, best acc 89.06\n",
            "Completed 86.18%: Epoch/step [26/30 - 401/469], loss 0.5529, acc 85.16, best acc 89.06\n",
            "Completed 86.67%: Epoch/step [27/30 - 1/469], loss 0.6197, acc 79.69, best acc 89.06\n",
            "Completed 88.10%: Epoch/step [27/30 - 201/469], loss 0.6477, acc 77.34, best acc 89.06\n",
            "Completed 89.52%: Epoch/step [27/30 - 401/469], loss 0.6163, acc 85.16, best acc 89.06\n",
            "Completed 90.01%: Epoch/step [28/30 - 1/469], loss 0.7884, acc 74.22, best acc 89.06\n",
            "Completed 91.43%: Epoch/step [28/30 - 201/469], loss 0.6281, acc 82.03, best acc 89.06\n",
            "Completed 92.85%: Epoch/step [28/30 - 401/469], loss 0.5852, acc 83.59, best acc 89.06\n",
            "Completed 93.34%: Epoch/step [29/30 - 1/469], loss 0.5584, acc 84.38, best acc 89.06\n",
            "Completed 94.76%: Epoch/step [29/30 - 201/469], loss 0.6711, acc 79.69, best acc 89.06\n",
            "Completed 96.18%: Epoch/step [29/30 - 401/469], loss 0.6539, acc 82.81, best acc 89.06\n",
            "Completed 96.67%: Epoch/step [30/30 - 1/469], loss 0.6186, acc 83.59, best acc 89.06\n",
            "Completed 98.10%: Epoch/step [30/30 - 201/469], loss 0.6031, acc 82.03, best acc 89.06\n",
            "Completed 99.52%: Epoch/step [30/30 - 401/469], loss 0.5808, acc 82.81, best acc 89.06\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD4CAYAAAAXUaZHAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3XeYVNX5wPHvbIOtsMDAUqQJHEFA\nhSAg0kSxBDtqjCUajYlg7FES87NGY1TEmhhii41YUSwgKCBNBBHpHOoK7LKwsH3ZvvP7496Znb6z\nuzO7e8f38zw87Nz6TrnvPfecc++xORwOhBBCRJeYlg5ACCFE+ElyF0KIKCTJXQghopAkdyGEiEKS\n3IUQIgrFtXQATrm5xY3utpOenkR+/rFwhhMxVooVrBWvxBoZVooVrBVvOGK121Nt/qZHRck9Li62\npUMImZViBWvFK7FGhpViBWvFG8lYoyK5CyGE8CTJXQghopAkdyGEiEKS3IUQIgpJchdCiCgkyV0I\nIaKQJHchhIhCreYmpsZap3OJz8xnaO/0lg5FCCFaDcuX3D9ZsYfXP9/a0mEIIUSrYvnkXuuAmhoZ\ncEQIIdxZPrkbD1WQ5C6EEO4sn9yxgYwUKIQQniyf3G1IuV0IIbxZPrlL0V0IIXxZPrnbbFJyF0II\nb9ZP7kjBXQghvFk/uduk1l0IIbxZPrljM/q6CyGEqBPS4weUUk8AY83l/661/sht3kTg70ANoIEb\ngXHA+8AWc7FNWus/hjFuF6mWEUIIX/UmdzN5D9Zaj1ZKdQTWAx+5LTIbmKi1PqCUeh84BzgGfKO1\nnhqJoN3ZJLsLIYSPUKpllgGXmX8XAMlKKfdRXYdrrQ+Yf+cCHcMYXwhsUuMuhBBebI4GlHqVUjcB\nY7XW1/iZ1xVYDowEhgD/BHYBHYCHtNaLgm27urrG0ZiRwO9+dhm7swqZ+8T5DV5XCCGigM3fxJAf\n+auUuhC4AZjsZ15n4FNgmtb6qFJqJ/AQ8B7QF1iilOqnta4MtP38/GOhhuKhuroGcJCbW9yo9Zub\n3Z5qmVjBWvFKrJFhpVjBWvGGI1a7PdXv9FAbVM8G7gPO0VoXes1LA+YD92mtFwJorbOAd81Fdiul\ncoDuwN5GRR+M3KAqhBA+6q1zV0q1A54Epmit8/wsMhOYpbVe4LbOVUqpu82/M4AuQFZ4QvZkkzp3\nIYTwEUrJ/QqgE/CeUso5bTGwCfgSuBbor5S60Zz3DjAHeMesykkAbg5WJdMk0ltGCCF81Jvctdaz\nMbo7BtImwPRmaeGU+1OFEMKX5e9QlYK7EEL4snxyN+5iEkII4c7yyd2Z2hvSX18IIaKd9ZO7md0l\ntQshRB3LJ3cXye5CCOFi+eRuM4vuDsnuQgjhYvnk7iRV7kIIUcfyyV06ywghhC/rJ3fzfym5CyFE\nHcsn97qiu2R3IYRwsnxyl5K7EEL4snxyR/q5CyGED8snd5tkdyGE8GH95O7K7ZLdhRDCyfLJ3Unq\n3IUQoo7lk7t0cxdCCF+hjqH6BDDWXP7vWuuP3OadCTwG1ABfaK0fMafPAkZh1IbfprVeG+bYAbfH\nD0jJXQghXEIZQ3UiMFhrPRo4B3jGa5HngEuBMcBkpdQgpdR4oL+5zg3mMhEm2V0IIZxCqZZZBlxm\n/l0AJCulYgGUUn2BPK31fq11LfAFMMn89zGA1nobkK6USgt38CCP/BVCCH9CGUO1Big1X96AUfVS\nY77OAHLdFj8MHI8xoPY6t+m55rJFgfaTnp5EXFxs6JGb2rQx3kLHjimkJiU0eP2WYLentnQIDWKl\neCXWyLBSrGCteCMVa0h17gBKqQsxkvvkIIsFat+st90zP/9YqKF4qKw0zjNHjpRQnhjfqG00J7s9\nldzc4pYOI2RWildijQwrxQrW
ijccsQY6OYTaoHo2cB9wjta60G1WNkaJ3Km7Oa3Sa3o34GAD4g2Z\nDLMnhBC+QmlQbQc8CUzRWue5z9NaZwJpSqneSqk4YAqw0Pw31Vx/GJCttY7MqVTq3IUQwkcoJfcr\nMOrQ31NKOactBjZprecCNwNzzOnvaq13ADuUUuuUUquAWmB6eMOuY5MWVSGE8BFKg+psYHaQ+cuA\n0X6mz2haaKGRahkhhPBl/TtUpeAuhBA+LJ/cnaTgLoQQdSyf3G0yiKoQQviwfHLfuPsoADU1tS0c\niRBCtB6WT+5lFdUA7M2xxk0LQgjRHCyf3LukJwLQs3NKC0cihBCth+WTu+qZDkhvGSGEcGf55B5j\ntqfW1kp6F0IIJ8sn97rBOiS5CyGEk+WTe4yMxCSEED4sn9yd3dxrJbsLIYRLFCR3KbkLIYS3KEju\nxv8O6S8jhBAulk/uUucuhBC+LJ/cv92SA8CqzTktHIkQQrQelk/uhaWVACzfkN3CkQghROth+eTu\nJPcwCSFEnVAHyB4MfALM0lq/4Da9O/C226J9gRlAAvAIsNucvkhr/WhYIg5AbmISQog69SZ3pVQy\n8Dzwtfc8rXUWMMFcLg5YCszDGBz7Xa313WGMNSjJ7UIIUSeUapkK4Dygvkrt64APtdYlTQ2qMeQm\nJiGEqBPKANnVQLVSqr5FbwQmu70er5RaAMQDd2ut1wdbOT09ibi42Pr2EZTdntqk9ZuLVeJ0slK8\nEmtkWClWsFa8kYo1pDr3+iilRgPbtdZF5qTVQK7W+nNz3hvAkGDbyM8/1uQ4cnNb/4AddnuqJeJ0\nslK8EmtkWClWsFa84Yg10MkhXL1lpgBfOV9orbdrrT83//4WsCulmlYsF0IIEbJwJfcRwAbnC6XU\nPUqpK82/B2OU4mvCtC8hhBD1CKW3zHBgJtAbqFJKTcXoEbNXaz3XXKwrcNhttXeAN5VSfzD3cUM4\ngxZCCBFcKA2q6zC7OwZZZojX6wPAxCZFJoQQotGi5g5VIYQQdSS5CyFEFJLkLoQQUcjyyX3s0K4A\njBmc0cKRCCFE62H55J59pBSAlfI8dyGEcLF8cs/MscadaEII0Zwsn9ydY6gKIYSoY/nkDpLdhRDC\nm+WTe4zkdiGE8GH55C4FdyGE8GX55G6T7C6EED6sn9wltwshhA9J7kIIEYUsn9wT4mUMECGE8Gb5\n5H75xH4AjDqxSwtHIoQQrYflk3t6ShsAOrdPbOFIhBCi9QhpgGxzqLxPgFla6xe85mUC+wHnMHpX\naa2zlFKzgFGAA7hNa702XEG7c9a5OxyR2LoQQlhTKMPsJQPPA18HWexcrXWJ2zrjgf5a69FKqYHA\nq8Dopgbrj83M7g4kuwshhFMo1TIVwHlAdgO2Own4GEBrvQ1IV0qlNTy8+knJXQghfIUyhmo1UK2U\nCrbYS0qp3sAK4M9ABrDObX6uOa0o0AbS05OIi2t4z5ejpVUAJCYmYLenNnj9lmCVOJ2sFK/EGhlW\nihWsFW+kYg2pzr0e9wMLgDyM0vqlfpaptzd6fv6xRu28oMBYr6S0gtzc1v/4X7s91RJxOlkpXok1\nMqwUK1gr3nDEGujk0OTeMlrrN7TWh80S/hfAEIwqHPehkboBB5u6L39KyoyS+4Lv9kVi80IIYUlN\nSu5KqXZKqS+VUgnmpPHAZmAhMNVcZhiQrbWOyKk0J69xJX4hhIhmofSWGQ7MBHoDVUqpqcA8YK/W\neq5S6gtgtVKqDFgPfKC1diil1imlVgG1wPSIvQMhhBA+QmlQXQdMCDL/WeBZP9NnNCmyEMmjZYQQ\nwpfl71AVQgjhS5K7EEJEIesnd3nmrxBC+LB8cpfULoQQviyf3O3p8jRIIYTwZvnk3rNzCgBdOiS1\ncCRCCNF6WD65l5ZXA3BIbmYSQggXyyf3iqqa+hcSQoifGcsnd3mMuxBC+LJ8cpdBOoQQwpflk3
ti\nQjieWiyEENHF8sm9SwfpCimEEN4sn9zlNiYhhPBl+eQuTx8QQghflk/uQgghfFk+ucdI0V0IIXyE\n1NVEKTUY+ASYpbV+wWveRODvQA2ggRuBccD7wBZzsU1a6z+GK2gPktuFEMJHKMPsJQPPA18HWGQ2\nMFFrfUAp9T5wDnAM+EZrPTVskQYguV0IIXyFUi1TAZwHZAeYP1xrfcD8OxfoGI7AQmWTahkhhPBR\nb3LXWldrrcuCzC8CUEp1BSYDX5izBiml5imlViilzgpLtEIIIUISlts7lVKdgU+BaVrro0qpncBD\nwHtAX2CJUqqf1roy0DbS05OIi4ttUhx2e2qT1m8uVonTyUrxSqyRYaVYwVrxRirWJid3pVQaMB+4\nT2u9EEBrnQW8ay6yWymVA3QH9gbaTn5+4x/Zm9w2jg5pbcnNLW70NpqL3Z5qiTidrBSvxBoZVooV\nrBVvOGINdHIIR1fImRi9aBY4JyilrlJK3W3+nQF0AbLCsK+AHPL8MCGEcAmlt8xwjATeG6hSSk0F\n5mGUwr8ErgX6K6VuNFd5B5gDvKOUuhBIAG4OViXTZNKoKoQQHupN7lrrdcCEIIu0CTD9/MYEJIQQ\nouksf4eqEEIIX1GU3KXSXQghnKJipIvSsipKy6paOgwhhGg1oqjkLoQQwkmSuxBCRCFJ7kIIEYUk\nuQshRBSS5C6EEFFIkrsQQkQhSe5CCBGFoiK5J7Zp2qOChRAi2kRFcu/bvb0MtyeEEG6iIrmD8fAB\nhzz3VwghgChJ7lv2HAUgv7iihSMRQojWISqSu1P20dKWDkEIIVqFqErutbUtHYEQQrQO0ZXcpc5d\nCCGAEB/5q5QaDHyCMVbqC17zzgQeA2qAL7TWj5jTZwGjMNo6b9Narw1n4P44aiW5CyEEhDaGajLw\nPPB1gEWeA87GGAD7G6XUh4Ad6K+1Hq2UGgi8CowOT8iBSW4XQghDKNUyFcB5QLb3DKVUXyBPa71f\na10LfAFMMv99DKC13gakK6XSwhZ1ANIVUgghDKEMkF0NVCul/M3OAHLdXh8Gjgc6AevcpueayxYF\n2k96ehJxcU270zQlpS12e2qTttEcrBCjOyvFK7FGhpViBWvFG6lYwz3MXqAbReu9gTQ//1iTd15Q\ndIzc3OImbyeS7PbUVh+jOyvFK7FGhpViBWvFG45YA50cmprcszFK5E7dzWmVXtO7AQebuK96OaQr\npBBCAE3sCqm1zgTSlFK9lVJxwBRgoflvKoBSahiQrbWO+Km0rTxATAghgNB6ywwHZgK9gSql1FRg\nHrBXaz0XuBmYYy7+rtZ6B7BDKbVOKbUKqAWmRyJ4p5P6d2LDziN0SU+K5G6EEMIyQmlQXQdMCDJ/\nGX66OWqtZzQpsgbo1imFDTuPNNfuhBCi1YuKO1RzC8oAuUNVCCGcoiK5f7/tEAAz//djC0cihBCt\nQ1Qkd6fC0sqWDkEIIVqFqEruQgghDFGV3ONio+rtCCFEo0VVNmxtz5apqq5p6RCEED9TUZbcWzqC\nOp+tyuT3T31DZk7Ax+mIMCkpq2rpEIRodaIrudN6svtHy/YA8KP0v4+oLZl53Prscuat3NvSodSr\nVp5J3WCHC8ooPiYdJRojupJ7E4+dDbuO8IeZS8k6ImOxWsWPO4yT56K1+1s4kuDmrdjLjU8s4Uhh\nWcT2se9QMet0bv0LWsiMl77ltudWtHQYlhRVyb2pXp+/ncqqWr76vnUnClGnNV2tBfPxCuPKYmtm\nfsT28eBra3lx7iZqZDDhVuHLNft4c6HmWHmV3xssyyqqI7p/Se4WVVvrYOGafeQVlbd0KC3KecjY\nbPU+VbrZVNfUNrlx/0hhWaOrcVpT25NTSVkVFZU/rw4G7y7exZIfsrjlmeXMem+Dx7y12w8zfdYy\nFn73U8T2L8ndjfMmqNZ4cHj7btsh/rd4F/9454ewb7uqup
Y92UVs3ns04DI5ecdY+mNW2Pfd3ErK\nqjgchrEEnKprarnpyaU898HGRm9jd1Yh9/zrW175fKtr2lsLNZ+uyvTYj5Uakm99djm3PLOs2fdb\nU+t5os3MKeKdRTua3P5x8GgpL3y0icKSipCW37I3j/99vdP1euUm4wno81dFrq1IknszWqdz+fdH\nGz1+bLuyCvnt44vZmpnXoG0VlhgnotwC35J79pFSqmuCX5rvzi5k1Wb/j9h/8LU1/O2N73n63Q0c\nDVBH/JfZq3ljgW7W3kD+Lm3X72haHfNtzy5nxr9XN2kb7pwJd8Nu/yfGQyGcSHYeKATg2y2HXNMW\n/5DFXLORHuD+V9Zw67PL6/2e/amqro14lYBTQUkFB48abVg1QRJqWUU1DoeDHfsLwnrS+sNT3/DQ\na2tdrx9+/Xu+WneAH3c1rKNDdU0tBw6XuF6/9MkWftiRy4ff7AmylqeFzdwuFJXJfe6yPQ3qpeJw\nOLxKbw62ZOah94W3fvTFuZv4bOVeCkrqWv8/XZkJwAdLd3OsvIrS8qb9sHcdKOSvL3/HS59s8ZlX\nWVVDgVnSePSNdbz82Ta/yeHg0brPor4Draw8skli056jZOYU8dGy3dz4jyUepVnA47NsDGe62Xeo\nacMNlFdWM/PdH9mxvyDocvNX73P9nVdUzm8fX8yS9Q2/AsrJM76jiqrQqjpqHQ5XFd5tzy1n+izf\nUvSyDdm8+vm2BscSzJ0vrOS+/3wXdJms3BKmz1rG0+9t4PG3f+Ch19aEbf81tQ72uSVlp0o/96C4\nHwvrd+aS7dax4uXPtnL/q2vYYhbCKs3P3d92GiKSlQRRl9zLKqr5dFUmz324kc17j7Irq7DeddZu\nP+xRelu24SAz//cj/3hnfaNicB54APnFFT71r/7qY202uOWZ5fzxmeUh7cM7GVXX1PLknPV88M1u\nAH7wU6L98+zV3PnCSo8fcb1VUG7za2r9lPiC1HUvXLufV7/wTBafrNjLCx9tqmendWa9t4GHX/+e\nz1YZdZMrN+UAsPdgEZv2+JaO84srQno66NbMPI/v6cHX1roO2EAcDgc/7MilpKyK5z/cyLebc1zz\nVm7KYcvePL8n1UDWmd/Rm19qz/1E4JD/7/zt3P3PVew8UEB5gLrv1+dvZ8WmgyGfMJrK+Tvcvs84\nIW7ZayTOo0UVHCuvYv5qz/rowtJKdh4IfvJsrIVr93PTk0tZviGb8spqnv9wE399ue6ktGbbYQAy\nD5pXqk1s42mOFqJwj6Ha4tyP66ffNRoxXp1xRtB1/CWJQGodDrZl5tO/Rzt2ZRXSvVMy7VLauOYf\nzj/GX2bXnSiWbzxIYps4fjWpv2vanmzjB9IhrW3QA7mmtpacvDK6dUxyNRg6HA6Wbchm9da6S/aS\nsir2Hixi20/BrzTyi41Se2VVXXKvrXWw5IcDDBtg93gfTs7oKiprePi/azl49Bgv3zPRNT/Yj9RZ\nx/jb8wa6pn2ywn8dY1lFNd/rw4wa1IX4EAZKf+S/3/tM27k/n7teXEnn9ESKj1Xx2E2jaJec4Hdf\nT/l5gmhldS0J8YH3/dbCHSxZn0Vim1jKKmpYv/MIowcbo0lu93OV982PWby3ZBd//73PcAc+7n/l\nO36hOnPB6X0aXJx7b/EubG7FtJWbDjL+5O4eyyzfaFTBhVLYWbHxIPb2ifTqksL3Ope3F+1g0vAe\nXHXWAI/lKqtq2HGggEG9OhAT4/tLCHbV97c3vmdPdhEPXDfC7/x3vtrJKreTZ3VNLTP+/S0VlTXM\nnD6G9FTf32owL3y0ifEndws43/lbfW3+dk4ZYA+6rbKKag6ZBYM12w5z0wUOYhqZ7CPZvhdScldK\nzQJGYfzsbtNarzWndwfedlu0LzADSAAeAXab0xdprR8NV9De+nRLY6+ZMNfvDFwHW1hSwZL1WZx9\nak8S2zTuvLZ8Qzb/XV
BX0oqLjWH2nyZQWl5FaVkVOXm+ddRL1md5JPd/frwZgH/dOd5tqbofx4ff\n7OaisX14Z9FOlqzP4ndTBjGwdzrtU9rw464jHvsH+Ptb67jijP6Eru4XtXLzQd5auIO3Fu7glRln\n+L2qqHU4uPnpb1yv3etO/f2mHQ4HC77b5zvDS2FJBT8dKmHo8R15e9EOVm3OIbegnEvG9Q26nr8e\nQiVlVaw3+3gfzje+gzueX+H3xB6o6utwfhk/7DAux2tqHR7JbPPeo67qk7IK35Ktv/7lzu9pU4D6\nd/ckfiC3lAO5e7ng9D4+ud39MRbVNbUede/V1bUsWOP5Wf93gWbsSd2IsdmorXV4fEe2AKfjtdsP\nu/5+e9EOn/lfrzvgk9zfWrSDFRsPcs3kAUwc1sNnnXtfWuUzzeFwUFpe7SrgLN+YTZcOviOoOb9D\np5ueXOr6+64XV/LyvRMblFB/2JHr92oWfNtyvAt73lWXf/mPZxvNvkPF9M5IY96KvaQkxTPh5O71\nXj26ene1ZHJXSo0H+mutRyulBgKvYo68pLXOwhylyRxDdSnGEHxTMYbcuzsyYXtq41baeiVIneF/\nPtvK1sx85pn13C/cPpbisqqgH/C8FXs5bUgGVdW11NQ6yMzxrQ4BuO3ZFdQ6HFxxRr+Q47756W8Y\n1Dsd8EySn3/7E907JbuSyX8+M+qZX51xBkcKfRObex25u4KSCvS+Ak4d2DlgV0HnQeQAfsop5qHX\n1/os49uzwPcDyysqp31qG2JsNvZkF/H+0t0+yzgHVXF64LW1FJVW8vANp/KT+bkedKvnDFRNcu9L\n3/qd/uZ83+++tLyKjbuP4nA46NQukeqaWr+ldjBKk+7ck5nzKtCf8srg7Q7OnhGhKnUr8R44XML9\nr9bVQbsnOYCZAeLak13E8d3SuPnpb8hwS57uBeyi0krSzCsb7+qzUDirUbyPCSd/J8E5X+/kq+8P\nuF4v/sF/e0N9VVO1tQ5iYhtfuVFcWvcZz/lqp8e8/3zq2a5T5PYocX8NqM7bCpz3Miz4bh9HA3RR\nzj5SSnLbuAY36DZGKMXXScDHAFrrbUqpdKVUmtbau5vEdcCHWusSpVSYwwyuvj7O7y3exSXj+/ok\nl1tCqN/+eMVevt16yHUZNsHPpV1trcN1pn538S6f+VXVtazekuMzHYySl/v/Tv4aCn/7+OIgkfoe\nDI++sY6jReWkJScwsFe63yXdW/D9JXaHw8FafdhnupPNZuN/X+9k4dr9nNg7nVunnsSjb67zWW7z\nnqM87dbX91DeMddBk1dU4bor2GaDO19YQfdOyUy7eIjffQbrdeHtpU+2uJJQQ+UVldMhrW3QZcor\nq5n2dPAufs46ZW/+3kV1TS3z3a563BO7PwdyfRsLAR5z+w72uzUouu9zd1Yhpwyws/dgUaP6oDt/\n8+6H30fL9pCSGM+Xa/xfubkn9qYoKasixmYjLTmBqupaNuw6QkVVDas25/Dn608F/F/hOc35eicH\n845x7dmKxT8EjmlLZh6rNvk/dp28T0T+CmBO7vX4AHuy668ma6xQknsG4H605prTvJP7jcBkt9fj\nlVILgHjgbq110NbJ9PQk4kKoa22MBWv2cWJ/O8XHGtcT5ZBbw9vSH7N95rdrX//A3LO9SgNOO8xu\nb94t+u8t8T1JBBMb7/tVOksPT85Zz6M3n+aa/uLczQ3atndJplOnVNffj79d189+S2Y+v39qqc/6\ndnsqP3lV0/zZrV3iO7cqgTZt4ykoqaSgpJIn5jSuQdtdKHXMgcx6fwP/undS0MLDwYLQ+jm769Ax\nBYAUP20c3iXzcHMvfPz7063MeeRcHglaaKiTkJhATIyNguIKbp251HXVmpiYwOHiSjI6JvGZW1/8\npqiv3eXOF1YC8PBNo7l/tueV3Ptf7+Smi4bUUxiCpeuzuP3KYUHrvUMZ3c0RE8OxmsbX
r9jtqfUv\n1AiNqXj2+aUrpUYD291K86uBXK315+a8NwD/xTBTfhNuJAml6u27jdkBewk01ZEj/ktPzWlmPTcz\n3fevuvrP7fU0vLq7deZSn2lHjjSs2+Cib/dSVBw4CW5yu0Rd43aF81OAy/2GaMpdkVm5pVxw9zye\nvfX0gMs88mrwbn7+XPSnecy6fTzLgpQYm0NlVQ0ffe1bvx7I1Q8s8Dv9y9U/8aVXz5amCvU3+sUK\n32qST5fvoX1SfEjrX3TPpw2Ky5/H/FzxNsQ/31/PZRNCr871FujkEEpXyGyMkrpTN8C7EnEK8JXz\nhdZ6u9b6c/PvbwG7UioyxfIQrWhgvWdDfLjMt345mm3a07Bqjuc+2MiiIM/rce9VUVXd+p6LEokH\nV93xzDf19olvDv4aT63kSICqF+/upa2Z+70P4RRKcl+I0UCKUmoYkK219i5SjQBcFapKqXuUUlea\nfw/GKMVHrPNsSz8uIFz1iFbRkH7qQkTSrgORq7O2unqrZbTWq5RS65RSq4BaYLpS6jqgUGs911ys\nK+De6vYO8KZS6g/mPm4Ib9hCCCGCCanOXWs9w2vSBq/5Q7xeHwAmIoQQokVE3eMHhBBCSHIXQoio\nJMldCCGiUFQk96aOeiOEEC0pEgOvREVy75xe/x2iQgjRWjX2ERnBREVyb5PQovdHCSFEk4QyBkFD\nRUVyF0IIK2vqmK7+REVynzyyV0uHIIQQjRaJZsOoSO79jmvf0iEIIUSjRaJTSFQkd+ktI4SwMqlz\nD6g5hpsVQojIqG7C8+ADiYrkHh8XFW9DCPEztfNA+B//LFlRCCFaWGVV+McxkOQuhBAtLJTR5BpK\nkrsQQkQhSe5CCNHCpJ97lElPbdPSITSrn9v7FaIlhZTclVKzlFLfKqVWKaVGeM3LVEotV0otNf91\nr2+dSEhs03qfL/PMraf7nT6sv72ZI2mc6RcPqX+hEDxw3QiO754Wlm1Fo/492rV0CKIZtU9JiOj2\n603uSqnxQH+t9WiMsVCf87PYuVrrCea/rBDXCasnbj6NLumJDVpn6PEdIxSNp7SkBK6ZPMB3Rpga\nUR67aRRJbUIaMbFRhis7CfENv8i7dHxf7O3bAtArI5W05ATuu+YX9MpIDXeIYfHqjDPqXWb4ADu/\nmtQ/7Pt+atppzLhqWMD5iRH8fkMRzva+my4YFMat1elhT2bECZ0btM71554QkVhC8fQt/gt94RLK\nETsJ+BhAa70NSFdK1Vf8asw6TZLcNp4Hrq+7QPjjpYFLm906JTNmcAZXneUn4XqZNKxHSPsP1Ne+\nhz0FgInDevC3G0d6zmxCPduwAXYun9iPaRcNJqNDEi/cMa7xGwvBA9eN4IIxvV2vn7ttbL3rnDeq\nF4//fjTTLhrMXVec7LGtV2dGT+i2AAARLElEQVScwaO/GxlkbTjrF8f5nX7+ab09rgAuOr0PAO8/\n9kvXtLuuODnggdsuSInp4RtO5dHfjeT528fy+O9H8fBvT/WYP/2SIYw+sUvQuIN5+pYxPtNibDY6\npLXFZrPx1LTT6JhWV33Vq0sqT007jaemneaxzgPXeV4MX3uO4p93jgvpBOXt8on96u2t8fQtY7hs\nwvE+03vYkwFIS04I6QRw/mm9GT4gcAJObtv4k9jDN4wkJSk+6DLXnXsCv5syiOduG8vM6WMYPTij\n2a76p044ntl/msCdV5zk8/3VF3djhPJJZgDr3F7nmtOK3Ka9pJTqDawA/hziOh7S05OIi2v8h2y3\ne5YGe3YL/LyZp24bR2pSAlXVNQCMO6U7g/t2pLC0Ev1TPt9vO+Ra9varhvPr8wZyw98W0aNzCgcO\nl/jd5iUT+1FyrIrPV+4F4PopJzJqSAYd0trSNiHOFeNbD53D1Q8sACAx0fcLtacn8upfJ/Pcu+tZ\ntGafz/wJw3pw11XDg30UvrFN6MdHS3e5Xo89uTvL
f8xy7S83vyzgun/+zQjs9lTs9lSGnpDB5NP6\nsC+nmD49O/gsO+PaEQzs04HfPPQlAJ07Gwn43M7+z+t2eypD+3Vi464jrve29IcDANx6+cmcNbIX\ni77f71r+zBE9ue1XpwAw85117M4yfk43XDyUGy4e6rHtMcN6EB8Xy7b9BazenOOa/tStY1G9OnD+\nXZ94LJ+WnOB6n97uuHIYs+b84Io5oaQi0Mfl1xsPnM215mfSt1dH5j5xPh8u3snO/QV8tyWHi8Yf\n79qv3Z7KZZMG8NLcTQBMu+wkVB/fK8xfDOnm8XrKuH6u0v2VkxVzFuqA8Qw/oTPrth92vb5myolc\nM+VEj8/khT9N5JYnl7he9+vTiX59OvH+0t0A3HP1L8gvLmfSiJ7M/ngTU8/oz8OvrCbn6DEum9Sf\nE3p1IC0lgb++tIqKSuM4mzC8B1edN8jjKnBAz/bEx8WyZc9RAGb/5Syuun9+wNgvntCPuUt38dxd\nE7h15lKPeXZ7KtdfMJiiY1WMPDGDf3640TVvxKAu3Hf9SGJjfE9B7z02hdpaBxf+aZ5r2pxHzuXK\n//ON46N/TOGSez9zvf7g8SlUVtXw3eaDPPvuj67p54/ty6fL9wBGoUPvy2dov07YbDa6ZvhWv52s\nuvj97TVFY06T3p/O/cACIA+jtH5pCOv4yM8/1ohQDHZ7Krm5xZ7bK6jb3sM3nMrseVs5kFtCt07J\nlJdWUF5qHKCv3DsRm1uxZdLJ3VjYLY3/fb0TgNzcYmxuy/328cWAUQLIKypn3spMACYP647NZmNY\nv450SGtLu+QEcDgoLizDPTL3L/CUfh1Z/P1+enROYcd+4w61Wy8dSm5uMeeP7kVhcTlrthkH4eN/\nGE1K23gS28T6vFenf905HoDPvs2k1uFg/up9pCUnMGVUT3458jhWbc5h9ZYcfjXxeL7flsPU8ccz\nYmAXftiRy+vztwNw2cTjeX+JcQDbgNOGdvPYX1KsjRO6p5GbW0xcrI3qGgeThvfg12f2x2azUVNR\nN6JMoDjdnT44g427jjCgRzs6t2vrmn5y3w7k5hYzqHc6WzPzAchIb+vaZlVltc9+3D/bI0dKiIuN\n4TeTFeOGdOWJOesB43eW66eUdOWk/gHjHdyzHVNO682gXunk5hYHHTWnXUoChSWVXDyuL3OXGQd3\ntdtncuRIMTE2G1ecpTh8uIiLT+9Np/aJHvse1q8jl0/sx8hBXUhPSfCJa9gAO7m5xTx4/QiOFpVz\nUr9OlBSV4Sx2pJgl0cQ2sZRVGIl1YK90tv1kfI7TLxrs+h3369HOZ/v/vnsC8bGeh6z3Mn06J3NC\njzSOlZRz9ZlGNdXtU4eyfONBzjylu+tK9i9XD+eBV9dw9eQBnDGsB8dKyimurbthZ8avhzF32R5X\ncq84VsFtU4fy7AcbSU9tQ35x3Yn0xD4dOH9UT84f1ROANvGxVFTVMGlYDy4/o58rxgd/N9on3uqq\nGvKO+i+Y+VNWWsEtlwzh01WZ9OmaxtL1WQzp25G8vFKP5YrMPHNSH8/Cjvvvs7iojG7t23LkiO/+\nrzl3IG/O30avTkkhHS/+BDophJLcszFK3U7dgIPOF1rrN5x/K6W+AIbUt04kpSbFU3ysirTkukvv\nHvYU1xm7k1sCATwSu9PkEccx8ZRuxMXGBFxu3EndWLYh22d+n66h1z45q1Mqq2p456sdnDGsB907\nGZe5KYnx/OHCwazZZhyEyW3jSKrnktU5aMml443L5zGDu9I+pY0rvjFDujJmSFcAXrxjvMd7ySsq\np/9x7RnYK52fcopJSYx3bSeQ2NgYqmtqiI2x+f0cQzFc2XnsplF0Tk+kvKKaH3fmcoFZzQJG9cq+\nQyUUlFQwpG9dCfaSccdTUFLJFRP7eWzvqrMGsDur0PV9t0mI5YRe6Y2Kzclms3HJuL6u187L+OED\n7PTr0Y53F+/i
xD4dOHdkT/r3aMeO/YWonu05nHfMp33B5rVdf6OIxcXGcM7IngHjudpsv+nZJZWe\nXXwP7FMHdubg0VJGn5jBff/5DoBfKLsruYPxOb29aAe/P/9En/Wdifn803rz6apMjyqb/j3asfNA\nod9qyM7pST6/meM6p/hUFcXGxPDAdSNcx6j3T+ekfp14+d6JxLgVpmbdMobUJM/qtBgzBFuM/2rR\nC0/vwycr9pr7qP/3ee+vT+Ef76x3vR42wM6wAXYcDgfnjexJx3ZG1dmU03rz2apMn/XbpyRQUFIJ\nwMRTuvPlmn1cNLavz3LuLj9zAOOHZhATgbuYQknuC4GHgH8rpYYB2VrrYgClVDvgPeB8rXUlMB74\nAMgKtE6kPXLjSA4eKaVz+0T+eu0vXD8Ah1nBHepHGB+giuiZP55OeZVRGho1qAtbM/M4M0DdcCCz\nbhnD0aIK12V0Qnws15070O+yT007jdyCMpLbNrxOrpt5ogiF+4/wDxcObvC+3M364+kcKw9tTEib\nzUZGByPBJbWNZ8bVw33m98pIpReeSSw9tY1HPb7TpOE9mDQ8cDuJs5ljwHHtXVdLxn5CChcwktPL\n90wkJsZGrcNB145JDDiuvav67USzFHfDlLqGw+S2cZSWVzf6JAhGsl2z/TBpScF7WcTGxHDJOM8k\nm9ExmYtO7+M60U0a3oMzzKtNpweuG0FVdV2p2l+T0IyrhlFd4yDGT/VGQ9TXqO5Mdi/eMY4Ymy3o\naGu2AEe1Z3KvP6auAY4Xm81Gp/Z1nTXGDMngs1WZpHldAT41bYwrzzh/I6F835FI7BBCctdar1JK\nrVNKrQJqgelKqeuAQq31XLO0vlopVQasBz7QWju814lI9H6kJSWQ1tP48fft5laKNn+pTTm4wKib\ndW41IT62UYmwXUob2qWE1ue7Q1pbOqS1rX/BVqRdcoJRLdWKnNCzPdv3FWA3D9IZVw2j1uHgxn8s\nqWdN/5zJLcZmY+jxnepd/qnpYyg3654b6+Jxfbl4XPCSoLe7f3Uyq7ccQplXZe68jwXvhDvh5G6s\n2nyQ6VNP9lgnPi68yejUgV2YtzLTdUXiLlgvoVsvHcqcr3Zy9qn1F65COe7TkhK44/KT6Noh+JjM\nXdKTuOfKU3wKT8Zvom4/Tc01TRVSnbvWeobXpA1u854Fng1hnRaVap5lU/w0YorGaZeUwOHKMtpa\nYAzbu351MqVl1R7VdZEqMfnTJj6WNvHN/zkN6t2BQb19G79D0SGtLU9NG+O3TSucunVKdlXDNITq\nmc6DXr2ZAjl7RGhX1+5Vf8E0taqvOfxs7lC9/ryBTBzWncsmBq9HFqG7/fKTmHByN84+NXD9cGsR\nGxPjkdidBvc1El93s8uqaBmRPtF2t4deRRktWvbOiGbUIa0t10xWLR1GVMnokMS157TcTSDhcOul\nQ8kvrnBV14jo8rspg9iVVehqD/k5+fm9YyHcxMXGSGKPYqMHZzB6cEb9C0ahn021jBBC/JxIchdC\niCgkyV0IIaKQJHchhIhCktyFECIKSXIXQogoJMldCCGikCR3IYSIQjZHJIbdFkII0aKk5C6EEFFI\nkrsQQkQhSe5CCBGFJLkLIUQUkuQuhBBRSJK7EEJEIUnuQggRhSw/WIdSahYwCmMI7Nu01mtbMJYn\ngLEYn+vfgbXAm0AscBC4RmtdoZS6CrgdY/Dw2VrrV5RS8cDrQC+gBrhea70nwvEmApuBR4CvW2us\nZgz3ANXA/cDG1hirUioFeANIB9oADwE5wL8wfp8btdY3m8v+CbjMnP6Q1voLpVQ74B2gHVAC/Fpr\nnReBOAcDnwCztNYvKKWOo4mfp1LqJH/vM0KxvgbEA1XA1VrrnNYQq7943aafDSzQWtvM1xGP19Il\nd6XUeKC/1no0cAPwXAvGMhEYbMZyDvAM8DDwotZ6LLAL+K1SKhkjQZ0JTADuUE
p1AH4NFGitTwce\nxTg5RNpfAWfyaJWxKqU6Ag8ApwNTgAtba6zAdYDWWk8EpmIMHP8MRqFjDNBOKXWuUqoP8Cu39/S0\nUioW42Bfasb6EXBvuAM0P6fnMU7mTuH4PH3eZ4Ri/RtGMhwPzAXubA2xBokXpVRb4M8YJ06aK15L\nJ3dgEvAxgNZ6G5CulEproViWYZTEAAqAZIwvbp457VOML3MksFZrXai1LgNWAmMw3stcc9mvzGkR\no5Q6ARgEfG5Oaq2xngl8pbUu1lof1Frf1IpjPQJ0NP9Oxzhx9nG7mnTGOhGYr7Wu1FrnAj9hfBfu\nsTqXDbcK4Dwg223aBJrweSqlEvD/PiMR6zTgQ/PvXIzPuzXEGihegL8ALwKV5utmidfqyT0D4wt2\nyjWnNTutdY3WutR8eQPwBZCsta4wpx0GuuIbs890rXUt4DC/2EiZCdzp9rq1xtobSFJKzVNKLVdK\nTWqtsWqt/wf0VErtwjjZ3w3kB4spyHTntHDHWG0mFHdN+jzNaf7eZ9hj1VqXaq1rzCud6RjVWC0e\na6B4lVIDgJO01u+7TW6WeK2e3L3ZWjoApdSFGMn9Fq9ZgWJr6PQmU0pdC3yrtd7bwH03e6zmtjsC\nl2BUe7zmtb9WE6tS6mpgn9a6H3AG8FYTYmqp33I4Ps+Ixm4m9jeBxVrrr/0s0mpiBWbhWYjyJyLx\nWj25Z+NZUu+GWa/VEsxGk/uAc7XWhUCJ2WgJ0B0jXu+YfaabDSs2rXUlkfFL4EKl1GrgRuD/WnGs\nh4BVZqloN1AMFLfSWMcAXwJorTcAiUCnYDEFme6c1hya9N1jHHMd/SwbKa8BO7XWD5mvW2WsSqnu\nwAnA2+ax1lUp9U1zxWv15L4Qo+EKpdQwIFtrXdwSgZg9HZ4Eprj1cPgKuNT8+1JgAfAdMEIp1d7s\nXTEGWI7xXpx19ucDSyIVq9b6Cq31CK31KOBljN4yrTJWc19nKKVizMbVlFYc6y6M+lSUUr0wTkTb\nlFKnm/MvMWNdDPxSKZWglOqGccBu9YrV+b6aQ5M+T611FbDdz/sMO7OXSaXW+gG3ya0yVq11ltb6\neK31KPNYO2g2BDdLvJZ/5K9S6nFgHEaXoulmiakl4rgJeBDY4Tb5NxjJsy1Go9n1WusqpdRU4E8Y\ndWrPa63fNi81Xwb6YzTMXKe13t8McT8IZGKUON9ojbEqpX6PUdUFRm+Jta0xVvNAfRXogtEd9v8w\nukL+G6Mg9Z3W+k5z2T8CV5mx/lVr/bW5/lsYJbUCjG5+hWGOcThGe0tvjK6EWWYcr9OEz1MpNcjf\n+4xArJ2BcqDIXGyr1npaS8caJN5LnIU9pVSm1rq3+XfE47V8chdCCOHL6tUyQggh/JDkLoQQUUiS\nuxBCRCFJ7kIIEYUkuQshRBSS5C6EEFFIkrsQQkSh/wecT9/Avx2ZGwAAAABJRU5ErkJggg==\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "stream",
          "text": [
            "Model took 7.3842 mins (0.1231 hrs) to finish training with best train accuracy of 89.0625%\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "bUXii0BC2nhZ",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 6. Test"
      ]
    },
    {
      "metadata": {
        "id": "q3Qlsqzo2nha",
        "colab_type": "code",
        "outputId": "6dffc841-769c-40b1-e470-0efcde353ce1",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 402
        }
      },
      "cell_type": "code",
      "source": [
        "# Evaluate the trained model on the MNIST test set.\n",
        "# Gradients are disabled: no weights are updated during evaluation,\n",
        "# which saves memory and compute.\n",
        "model.eval()\n",
        "with torch.no_grad():\n",
        "    correct = 0\n",
        "    total = 0\n",
        "    \n",
        "    for img, label in mnist_test_dataloader:\n",
        "        # Drop the singleton channel dim so the RNN sees (batch, 28, 28),\n",
        "        # i.e. each image row is one timestep of the sequence.\n",
        "        img = torch.squeeze(img).to(device)\n",
        "        label = label.to(device)\n",
        "        \n",
        "        # Forward pass\n",
        "        out = model(img)\n",
        "        \n",
        "        # Predicted class = argmax over the class dimension\n",
        "        _, predicted = torch.max(out.data, 1)\n",
        "        total += label.size(0)\n",
        "        correct += (predicted == label).sum().item()\n",
        "\n",
        "    # Overall accuracy across the whole test set\n",
        "    print('Test Accuracy: {:.4f} %'.format(100 * correct / total))\n",
        "\n",
        "    # Show 4 images from the last batch with target vs. prediction.\n",
        "    # Tensors must be moved to CPU before plotting with matplotlib.\n",
        "    fig, axes = plt.subplots(nrows=2, ncols=2)\n",
        "    img_cpu = img.cpu()\n",
        "    label_cpu = label.cpu()\n",
        "    print(\"Shapes: img {}, label {}, predicted {}\".format(img_cpu.size(), label_cpu.size(), predicted.size()))\n",
        "    print(\"Label: {}\".format(label))\n",
        "    print(\"Predicted: {}\".format(predicted))\n",
        "    for i, ax in enumerate(axes.flat):\n",
        "      ax.imshow(img_cpu[i])\n",
        "      # .item() converts the 0-dim tensors to plain Python ints so the\n",
        "      # titles read 'Target: 2' instead of 'Target: tensor(2)'.\n",
        "      ax.set_title('Target: {} - Prediction: {}'.format(label_cpu[i].item(), predicted[i].item()))\n",
        "      ax.set_xticks([])\n",
        "      ax.set_yticks([])\n",
        "    plt.tight_layout()"
      ],
      "execution_count": 141,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:34: UserWarning: Implicit dimension choice for log_softmax has been deprecated. Change the call to include dim=X as an argument.\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "Test Accuracy: 82.5300 %\n",
            "Shapes: img torch.Size([16, 28, 28]), label torch.Size([16]), predicted torch.Size([16])\n",
            "Label: tensor([2, 0, 0, 6, 2, 4, 9, 6, 2, 2, 7, 7, 0, 4, 8, 9], device='cuda:0')\n",
            "Predicted: tensor([2, 0, 0, 6, 2, 1, 9, 8, 2, 2, 7, 7, 0, 4, 5, 9], device='cuda:0')\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAVEAAAEYCAYAAADlIcXmAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAHlJJREFUeJzt3Xu8VHW9//EXAookchOOmpmK8gHp\ngp3QRLAItTRJH15AMysLDnCQ1MLwWtSjTFMpf5qKmrfwgXgv4eEtT4meyKSoOCofBJFCFDRuAt64\n/P5Ya9d8v3szs2d/Z29m9n4/Hw8f7vfMmrW+s5jvZ77znTVrtdu2bRsiItI0O+3oBoiI1DIVURGR\nBCqiIiIJVERFRBKoiIqIJFARFRFJ0KHYnWZ2AzAsj32AFcDbeR7k7m81Y9sws/5AT3d/psRyHYGf\nAcPJ3hh+A0x09y1lbKsD8D6wGNiSr2ctMNndf9u0Z/CvdU8B9nH30Wb2O+Acd/9rkeXHuPvN+d8l\nl09o11DgaqALsBE4t9S+lprqF+2AK4ATgG3Afe5+SZnbaov94hDgeqAnsAoY6+7Pb2/5okXU3ccX\nrPgV4Mst3MlOBjYDpbb5bWB/4GNAO+B3wFeA25qwzaHu/jqAmR0JPGhmfd39n01YVz3u/pli9+dv\nCFcANzdm+aYys12Bh4AT3f1pM/sicBfw4ebYXmtSQ/3iDOAI4KNk/eJpM5vn7g81YZttpV+0A2YC\n33L3WWZ2Elm/GLi9xxQtoo3YYH/gFqBHvq5L3H1mwbvXRcDX3N3M7AvANGA92ejnWqCvuy83s/HA\nOUAnshfGaOBY4HzgXTPrAVwIPA8Mcfc3o6b8FnjQ3d/L2/UcMCDluQG4+xwzWwYcZmaLgP8hKzwD\n3H14/mKaCnQD3gBOd/dXzKwzcAdwKLAUeKlgny0HTnH3P5jZWfk+2gbMBcaQjaK7mdlC4Bjg9wXL\nnwZcku/rV4HR7r7UzH4I7A7sS/ZGsgo4wd1Xmtk5QHd3nxI9vQ7545/O8zPAvma2m7tvSN13bVkV\n9YtTgdsK+sX0/LamFNF/aeX9YiDQ2d1n5c/1ATO72cwOcveXaEDqnOhUsuLVHxgL/MLM2hfcvyV/\noXQkGxWe5e4HkxW4TgBmNgz4LvAZstHkO8AUd38QeBiY6u7fcfct7t6vgRcK7v6su3u+vo7AUcCz\nic+tTkfg3fzv/wD+mL9QugK/As539wPJhv9358uNJvsocABwCvC5eKVmdiBwOTAU6Ad0ByYAXwfe\ny5/r3wuW3x+4Afiiu/cDHs9znVOBiWQfL9cCXwNw92saeKHg7m/l+7jOscALKqAVURX9AugLLCnI\nS8hea5XQKvsF9fcZwMsU2W+pRfQLwE/zv58GPkC2Q+vMyv/fD2jn7k/k+TqyjxcAI4AZ7v66u28D\nbgROakpj8qH4jWQ74f6mrCNa3/Fko4k/5DftDNQVnk8DSwvmhaYDA8xsb+BI4P78Bf4mMLuB1R8D\nPJ0/763ASLJRyPYcA/zG3V/O8y3AcDOr+zf8nbv/I9+H88nefRv7PAcCVwHjGvsYKapa+kVnsuJb\n5+28LUlaeb+I9xmU2G9JH+eB44CLzGwPYGt+W2FhXp3/vzuwpuD2Vwv+7gaMMLPjCh6/c7kNyd/V\n7yAbvp+S/wPEy9wF/GceP+3uKxtY1dNmVjeBvhQ41t03mhnAu+6+saDdln+8qLMJ6EX2AltXcPsa\noHe0nT3I3hkBcPd38jZu7yn2ItyHa8j+/XrkuXB7W4DCkc92mdkQspHC1wo+2kuaaukXG8lHtrnO\nQL1PGuoXgXifwXb2W50mF1Ez2wW4l+yLicfyLyo2bWfx9cBuBXmvgr9XAL9w9wua2pbcrWT/wCe6\n++aGFnD3Mxqxnn9NoJewAljg
7p+K7zCzNUDXgpt6NfD4N4FPFDymK/X/8QqtBA4pyD3J5tdWN7x4\nafkIdCYw0t3/t6nrkX+rsn6xEDiQ7DsDgIOAF+KF1C8Cdfusbvs7kU0F1NtvdVI+zncBdgHm5R+j\nzwHeI3xRFDbsA/moB7J5ojq/Ak4xs555o08ys0n5fe+TvbMVZWYjyZ7omdsroM1gLtkXMZ/M23Cg\nmd2R74u5wAlmtpOZ9Sabb4zNBo40s33zx9wMfJXsOXcws/jjw+PAMDOr+/Z8HPBoQyPuxshfHL8E\n/ksFtKKqpl8A9wBjzWxXM9ud7AuaGeU+oTLVdL9w978B6/OaAtlc7KKC6YJ6mlxE8zmNqcBfgT8B\nTjbh/QjRO0c+JP9vYLqZzQf+L79rm7s/B1xJ9nHhReCbwK/z+38NnG1md5tZezNbmH9Eio0lK6IL\n8mUWmtnNTX1ujZF/fBkJ3JC3+z7g3nzuZRrZ6GMp2ajkgQYev4xsnzxFtu/eBa4BlpN9KbbczA6N\nlh8HzMq3dzgwPl5vzMzOsex4vNgRwMHA1QX7bKGZfbyRu0AaUGX9YibZN+cLgHlkc6yPVO7Z1tcK\n+gXAacC3zewlsgJ+ZrF1tdsR5xPNh+hrgd0K5lJE2jT1i9rUYj/7NLP5ZnZyHkeRzZvohSJtmvpF\n7Uv9dr4c5wLXmtmPyd5tz2rBbYtUK/WLGrdDPs6LiLQWOouTiEiClI/zGsLWvnalF5EyqE/UvrL7\nhEaiIiIJVERFRBKoiIqIJFARFRFJoCIqIpJARVREJIGKqIhIAhVREZEEKqIiIglUREVEEqiIiogk\naMlT4bWoZ58Nr5g8ZsyYIHfp0iXIl19+eZAHDx5cb53t2zfq2m8i0oZoJCoikkBFVEQkgYqoiEiC\nlDPbV/W5E0eMGBHk2bNnB7ldu+KnDbzkkkvq3XbxxRcHeeedd25i66qGzidaWVXdJ0qZNWtWkCdM\nmBDkTp3qX/49/u6hW7fGXMm5qul8oiIiLUlFVEQkgYqoiEgCzYmWoU+fPkG+9NJLgzxq1Kgg18Cc\nqeZEK6uq+0TsjTfeCPIhhxwS5BUrVgS5oVpxww03BHncuHFB3rhxY5A3bdoU5F69ejWusS1Hc6Ii\nIi1JRVREJIGKqIhIglY7J3r99dcH+bvf/W6Q16xZk7yNeN+dcMIJQb766quDfMABByRvs8I0J1pZ\nVd0nVq1aFeTDDjssyMuWLSt7nTvtFI7DOnfuHOStW7cWzRdddFGQGzo+u4VpTlREpCWpiIqIJFAR\nFRFJ0GrnRGOrV68O8tq1a4suf+edd9a77dZbbw3y8uXLgxwfezpkyJAgP/nkk0Hu0GGHn85Vc6KV\nVdV94gc/+EGQp0yZsmMaUiCeU43btAPmSDUnKiLSklRERUQSqIiKiCRoM3OizWHy5MlBvvLKK4su\n/8c//jHIn/zkJyvepjJpTrSyqqpP/OQnPwlyfD7cLVu2tGRzGiWeI33iiSeCPGzYsOZuguZERURa\nkoqoiEgCFVERkQQ7/EDFWvaJT3wiyOVet+nRRx+teJtE6lx33XVBLjUHeuaZZwY5fn3G5x9tyLnn\nnhvkiRMnBvnoo48O8ssvvxzk+Lf1GzZsKLnNHU0jURGRBCqiIiIJVERFRBLoONEES5YsCXLfvn2L\nLh9fw2bevHkVb1OZdJxoZe3QPjFnzpwgDx8+PMil5kRfffXVIC9atCjIDR2jed555wX5xz/+cZDj\n64zF12SKr20f+/jHPx7k+fPnF12+AnScqIhIS1IRFRFJoCIqIpJAx4km2G+//YI8YMCAID///PMt\n2Bpp65566qkgl5oD3XfffYMcXx+pW7duQR4xYkS9dUyaNCnI8RxobMyYMUGeO3dukKdPnx7k1157\nrej6qoFGoiIiCVRERUQSqIiKiCTQnGiClStXBnnp0qVFlx81alRzNkfamPg6YVOnTi3r8fFv47
t2\n7RpkMwvyvffeW28dpeZAY/F1xbp3717W46uRRqIiIglUREVEEqiIiogk0JxogscffzzIGzduLLr8\nnnvu2ZzNkTbmscceC/K6deuKLn/66acHuU+fPkWX79SpU9MaVob4t/PXXnttkFevXh3k3//+90Ee\nPHhw8zSsDBqJiogkUBEVEUmgIioikqDVzIm+9dZbQb7sssuCHP8OeObMmUXXN27cuHq3PfLII0GO\nz99Y6hpLF1xwQZDj4/zi4/ROPfXUIO+6665F1y9STO/evYPcsWPHHdSSf4vbFNu8eXOQ43OeVgON\nREVEEqiIiogkUBEVEUmgIioikqDVfLEUXzDrtttuC3J8Qb5SXwKNHz++5DbLXefrr78e5PjA4sWL\nFwf5ox/9aJDjC92JyI6nkaiISAIVURGRBCqiIiIJWs2caKmTL1SDXXbZJcjxRbvOPvvsosuLFJo2\nbdqObkKye+65J8jlfs9QDTQSFRFJoCIqIpJARVREJEGrmRN95plnylr+8MMPD/KHPvShIMdzNY0R\nnyAkvhDYfvvtF+R99tmn7G2I1Jk3b96ObkLZ4hOKxCdZjudA45PyfPazn22ehiXQSFREJIGKqIhI\nAhVREZEENTsnGs8HrVy5sujy8fFnX/7yl4O8dOnSoss3Zp33339/kIcMGVJyHSKt2ZYtW4Icny/i\nzjvvLPr4iRMnBrlnz56VaVgFaSQqIpJARVREJIGKqIhIgpqdE40vstW+ffsgb926tejjJ0yYUPT+\nxvxmt3///kEeNGhQyceIVMro0aOD/LOf/azo8hs3bgxyfHHHzp07BznuU42xYsWKIF966aVBjs/z\nG9tpp3BcN2zYsLLb0NI0EhURSaAiKiKSQEVURCRBu8YcD7kdTX5gc4jnTubMmRPk5jhP4YIFC4J8\n8MEHJ6+zhVX/yRprS4v2iYcffjjIJ554YtiYMvv2uHHjgjx06NAgDx48uN5jbrnlliDfddddQX7l\nlVeKbrN3795BnjFjRpB3wJxo2X1CI1ERkQQqoiIiCVRERUQStJo50XguJp7fiY+RKzUnutdee9W7\n7cknnwyymZXTxGqkOdHK2qF9YvLkyUG+6qqrgpzQ15ssPu7zoIMOCnL8W/qjjjqq2dtUguZERURa\nkoqoiEgCFVERkQStZk409uyzzwZ57NixQV68eHGQ4/mjM844o946u3TpUqHWVQ3NiVZWVfWJAw88\nMMgvv/xyxbfRoUN4+o0+ffoE+brrrgvy8OHDK96GCtOcqIhIS1IRFRFJoCIqIpKg1c6JSqNoTrSy\nqrpPxNcAW716dZC///3vB7lHjx5Bjq93BHDqqacGuVu3bilNrAaaExURaUkqoiIiCVRERUQSaE60\nbdOcaGWpT9Q+zYmKiLQkFVERkQQqoiIiCVRERUQSqIiKiCRQERURSaAiKiKSQEVURCSBiqiISAIV\nURGRBCqiIiIJVERFRBKoiIqIJFARFRFJoCIqIpKgQ+lFtkvnohQJqU+0QRqJiogkUBEVEUmgIioi\nkkBFVEQkgYqoiEgCFVERkQQqoiIiCVRERUQSqIiKiCRQERURSaAiKiKSoOhv583sBmBYHvsAK4C3\n8zzI3d9qxrZhZv2Bnu7+TInl2gFXACcA24D73P2SMrfVAXgfWAxsIXuDWQtMdvffNqH5heueAuzj\n7qPN7HfAOe7+1yLLj3H3m/O/Sy6f0K5DgOuBnsAqYKy7P1/p7bQ2tdIv8mVPAS4nez3PA77u7hvK\n2FZb7BddgV8AhwKbgAvc/aHtLV+0iLr7+IIVvwJ8uTH/cBV0MrAZKLXNM4AjgI+SnQTiaTObV+yJ\nFzHU3V8HMLMjgQfNrK+7/7MJ66rH3T9T7H4z60j2hnBzY5ZvqvyNZybwLXefZWYnAXcBA5tje61J\nrfQLMzsQ+H/AEGBp/vdxwD1N2Gab6Be5nwHL3P0UMzsYuN
bMHnb3LQ0tnHIWp7p3xFuAHvm6LnH3\nmQXvXhcBX3N3M7MvANOA9cDVwLVAX3dfbmbjgXOATmQvjNHAscD5wLtm1gO4EHgeGOLub0ZNORW4\nzd3fy9s1Pb+tKUX0X9x9jpktAw4zs0XA/+TrHODuw/MX01SgG/AGcLq7v2JmnYE7yN7JlgIvFeyz\n5cAp7v4HMzsr30fbgLnAGOA3QDczWwgcA/y+YPnTgEvyff0qMNrdl5rZD4HdgX2Bj5GNKk9w95Vm\ndg7Q3d2nRE9vINDZ3Wflz/UBM7vZzA5y95eQJquifnEmcI+7v5zniZV4fq25X5jZrmS148P5c30B\nGF5sf6TOiU4FHnT3/sBY4Bdm1r7g/i35C6UjcBtwlrsfDAwge2FgZsOA7wKfAfYH3gGmuPuDwMPA\nVHf/jrtvcfd+DbxQAPoCSwryEqBf4nOr0xF4N//7P4A/5i+UrsCvgPPd/UCyj8V358uNJvuIfABw\nCvC5eKX5KOFyYGje1u7ABODrwHv5c/17wfL7AzcAX3T3fsDjea5zKlkn6UP2cetrAO5+TQMFFOrv\nM4CXqdx+a8uqpV98HNhsZr8xs0Vmdn1eJCqhtfYLA94C/svMXjCzZ/N/i+1KLaJfAH6a//008AGy\nHVpnVv7/fkA7d38iz9fx73MvjgBmuPvr7r4NuBE4qcx2dCZ7kdV5O29LEjM7nmw08Yf8pp2BB/O/\nPw0sLZgXmg4MMLO9gSOB+/MX+JvA7AZWfwzwdP68twIjyUYh23MM8JuCUcUtwHAzq/s3/J27/yPf\nh/PJ3n2LifcZVGi/SdX0i27AUcDpwCfy7U0ucx31tPJ+0Q3YA1iXv7H9AHjAzLpt7wFJH+fJ5lcu\nMrM9gK35bYWFeXX+/+7AmoLbX40aPcLMjit4/M5ltmMj+Tt4rjNQb/LczO4C/jOPn3b3lQ2s62kz\nq5tAXwoc6+4bzQzgXXffWNBuyz9e1NkE9CJ7ga0ruH0N0Dvazh5k74wAuPs7eRu39xx7Ee7DNWT/\nfj3yXLi9LUDhyKch8T6D7ew3KVu19It1wG/d/Q0AM7sROBeYUriQ+kVgHdkb2bR8+7PN7DWyKYjH\nG3pAk4uome0C3Auc6O6P5R8TNm1n8fXAbgV5r4K/VwC/cPcLmtoWYCFwIFD37ncQ8EK8kLuf0Yh1\n/WsCvYQVwAJ3/1R8h5mtAboW3NSrgce/STY6qHtMV+oXtUIrgUMKck+y+bXVDS9eUt0+q9v+TmQf\neertN2m8KusXywhfh1vy/wLqF4G/k71RfIDs3we2s9/qpHyc7wLsAszLv+k9B3iP8EVRZyHwATMb\nkuexBff9CjjFzHoCmNlJZjYpv+99sne2Uu4BxprZrma2O9lE9Ixyn1CZ5gL7mtknIZvLMbM78n0x\nFzjBzHYys95kXwbEZgNHmtm++WNuBr5K9pw7mFn8sfpxYJiZfTjP44BH8488ZXP3vwHrzWxkftPX\ngUUFH4ukaaqpX8wETjezvfM52a+TfUHTnGq9X/wTeBKYlLd/MPBBssPDGtTkIprPaUwF/gr8CXCy\nCe9HiN458iH5fwPTzWw+8H/5Xdvc/TngSrKPCy8C3wR+nd//a+BsM7vbzNqb2cL8I1JsJtk3hAvI\nnuwMd3+kqc+tMfKPLyOBG/J23wfcm8+9TCMbfSwlG5U80MDjl5Htk6fI9t27wDXAcuBZYLmZHRot\nPw6YlW/vcGB8vN6YmZ1j2fF4DTkN+LaZvUT2Qj2z9DOXYqqpX7j7/wI/IiteLwKv5etsNq2kX5wF\nDLHs8LXrgZHuvm47y9Ju27ZtpbZXcfkQfS2wW8Fcikibpn5Rm1rsZ59mNt/MTs7jKLJ5E71QpE1T\nv6h9qd/Ol+NcsiP/f0z2bntWC25bpFqpX9S4HfJxXkSktdBZnEREEqR8nNcQtva1K72IlEF9ovaV\n3Sc0EhURSaAiKiKSQE
VURCSBiqiISAIVURGRBCqiIiIJVERFRBK05M8+RaSVee+994J8+umnB7ln\nz55Bvummm5q9TS1NI1ERkQQqoiIiCVRERUQSaE5URJrs8ssvD/KDDz4Y5B/96Ect2ZwdQiNREZEE\nKqIiIglUREVEEqSc2b5Vnztx/fr19W4744zw8tyzZ88uuo5437ZrF56q8LHHHgvy0UcfXU4TK0Hn\nE62sVt0nnnvuuXq3HXfccUHetGlTkOfOnRvkj33sY5VvWGXpfKIiIi1JRVREJIGKqIhIAs2J5rZs\n2RLkkSNH1lvmoYcequg2u3btGuRvfOMbQZ48eXKQ99hjj4puH82JVlqr6hPx7+IPOeSQesu8+OKL\nQY6PC73wwgsr37DmpTlREZGWpCIqIpJARVREJEGbmRPdvHlzkK+99togX3bZZUFevXp1yXXGc5RD\nhgwpuo45c+aUXGeh/fffP8iLFy8u6/GNoDnRyqqpPlFKfO7PcePG1Vtm1113DfKSJUuCvOeee1a+\nYc1Lc6IiIi1JRVREJIGKqIhIgjZzPtFbb701yJMmTUpeZ3zuxMGDBwd5w4YNQZ41a1aQV61aFeTz\nzjuv6ONFmlN8XOgTTzxR8jHxdws1OAeaTCNREZEEKqIiIglUREVEErSa40Tj40CnTZsW5O985ztB\nfuedd4qu76tf/Wq9266++uogd+vWLcjx+UJLiY8j7dWrV5Dbt28f5Ph6Nt/61rfK2l4DdJxoZVVV\nnyhX/Dv4AQMGBHnMmDH1HnP99dcHOX7N1iAdJyoi0pJUREVEEqiIiogkaDXHicZzoN/85jfLevzE\niRODPGrUqHrLdO/evfyGJYjPcbpy5coW3b60bvFxod/73veCHP8ufvz48fXW0QrmQJNpJCoikkBF\nVEQkgYqoiEiCmp0Tvfvuu4McHwdaSnxuxCuuuCLIu+yyS9MaJlIjrrzyyiDfd999Re8fOHBgs7ep\nFmkkKiKSQEVURCSBiqiISIKanRNdtmxZkEv9Fr5Lly5B/tKXvhRkzYFKa/f6668HOT4XRHxcaNxH\npGEaiYqIJFARFRFJoCIqIpKgZuZEX3vttSDH10yK7bzzzkGOj4E74ogjKtMwkRrx05/+NMhr164N\n8lVXXRXkvfbaq+Q64/M7zJ8/P8g//OEPg9yzZ8+i9zdmm9VGI1ERkQQqoiIiCVRERUQS1Myc6OzZ\ns4O8ePHiosvHv/s96qijKt6mVH/+85+L3h8fuzp06NDmbI7UmPfffz/I8bk933zzzSDfdNNNQe7U\nqVOQTzvttKLbi+c/of53E2PHjg1yfA7e+Hju3XffPcjxvG0t0EhURCSBiqiISAIVURGRBDUzJ3rh\nhRfu6CZU3MUXX1z0/ni+6Pjjj2/O5kiN2bp1a5DjOdH4uM9169YFOb4O2d577110+YZer/F154cP\nHx7k22+/PcjxeYAvuuiiIJ9//vlF21SNNBIVEUmgIioikkBFVEQkgYqoiEiCqv1iKT6B7ObNm4su\nH38JM2DAgIq3KVU8Cf+nP/1pB7VEWoP4xxjr168P8o033lj08ZMmTSp6f3xwfvz6BTj55JODfMst\ntwS5Y8eORe8fNmxYkGvhi6SYRqIiIglUREVEEqiIiogkqNo50fgiWvF8T6xfv35BjudadoT4pLfx\niaG3bdtW9PGlDsYXKXTHHXcEecOGDUEeNGhQkPfcc88gx99DTJkyJcjxwfwAEyZMCHJ8Qcj4Nbxo\n0aIgxydlrkUaiYqIJFARFRFJoCIqIpKgaudEW4P4BA5PPfVU0eX32WefIJ900kkVb5O0XqtWrSp6\nf48ePYLcoUPY/WfMmBHkt99+O8iHHnpovXXGF3y85pprgnzFFVcEefz48UGOjzOtRRqJiogkUBEV\nEUmgIioikkBzos3ol7/8ZdH7e/fuHeRHH300yB/84Acr3iZpu5YtWxbk+KJxpc5PEc/x
AwwcODDI\nL774YpA///nPBzk+9rQ10EhURCSBiqiISAIVURGRBJoTLUP8W+QFCxYE+dxzzw3yX/7yl6LrO+yw\nw4Lcv3//hNZJW3fQQQcVvX/hwoVBPvzww4P80ksvFX28u9e7LT7/w9FHHx3kBx54IMidOnUquo1a\npJGoiEgCFVERkQQqoiIiCVrNnOiKFSuCvGTJkiD36dMnyK+++mqQS/2uHWDatGlBfuaZZ8ppIiNG\njAjy9OnTy3q8SDFf+cpXghz3ifjcnvGcfbt27ZLb8NZbbwX5b3/7W5Ab+v19rdNIVEQkgYqoiEgC\nFVERkQRVOye62267BTmer4mPT1u+fHmQP/e5zwX5Ix/5SJDjOdMXXnihSe0sFF9f5lOf+lSQ77zz\nziDHz1Gkki644IIgx3Om//jHP8paX0PngoivRR//Hn/jxo1lbaMWaSQqIpJARVREJIGKqIhIgnal\nrn1eRJMf2BTxtVluuummltw8UP+aRwcccECQzzrrrCD369ev2duUKP3AQCnUon1CmkXZfUIjURGR\nBCqiIiIJVERFRBLUzJxofA3ss88+O8i333570vp//vOfB3nQoEH1lunbt2+Q4+NCa5DmRCtLc6K1\nT3OiIiItSUVURCSBiqiISIKamROVZqE50cpSn6h9mhMVEWlJKqIiIglUREVEEqiIiogkUBEVEUmg\nIioikkBFVEQkgYqoiEgCFVERkQQqoiIiCVRERUQSqIiKiCRQERURSaAiKiKSQEVURCRBh4TH6lyU\nIiH1iTZII1ERkQQqoiIiCVRERUQSqIiKiCRQERURSaAiKiKS4P8DX+Lo5rfGvfQAAAAASUVORK5C\nYII=\n",
            "text/plain": [
              "<Figure size 432x288 with 4 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "metadata": {
        "id": "LYYZW0YSYH2f",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 7. Summary\n",
        "*469 steps per epoch with a batch size of 128, using GPU*\n",
        "\n",
        "<table align=\"left\">\n",
        "  <tr>\n",
        "    <td><b>Model</b></td><td><b>Epoch</b></td><td align=\"center\"><b>Train Acc (%)</b></td><td align=\"center\"><b>Test Acc (%)</b></td><td align=\"center\"><b>Training Time (min)</b></td>\n",
        "  </tr>\n",
        "  <tr>\n",
        "    <td rowspan=\"2\">RNN</td><td>10</td><td align=\"center\">81.25</td><td align=\"center\">75.06</td><td align=\"center\">2.23</td>\n",
        "  </tr>\n",
        "  <tr>\n",
        "    <td>30</td><td align=\"center\">83.59</td><td align=\"center\">75.14</td><td align=\"center\">6.90</td>\n",
        "  </tr>\n",
        "  <tr>\n",
        "    <td rowspan=\"2\">LSTM</td><td>10</td><td align=\"center\">94.53</td><td align=\"center\">90.04</td><td align=\"center\">2.55</td>\n",
        "  </tr>\n",
        "  <tr>\n",
        "    <td>30</td><td align=\"center\">95.31</td><td align=\"center\">90.10</td><td align=\"center\">7.62</td>\n",
        "  </tr>\n",
        "  <tr>\n",
        "    <td rowspan=\"2\">GRU</td><td>10</td><td align=\"center\">97.66</td><td align=\"center\">92.45</td><td align=\"center\">2.47</td>\n",
        "  </tr>\n",
        "  <tr>\n",
        "    <td>30</td><td align=\"center\">89.06</td><td align=\"center\">82.53</td><td align=\"center\">7.38</td>\n",
        "  </tr>\n",
        "</table>"
      ]
    }
  ]
}