{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Image Classification with MNIST Using a Petastorm Dataset and PyTorch\n",
    "\n",
    "In this notebook we will read a training dataset saved in the Petastorm format in the project's feature store and use that to train a Deep CNN defined in PyTorch to classify images of digits in the MNIST dataset.\n",
    "\n",
    "This notebook assumes that you have already created the training datasets in the feature store, which you can do by running this notebook: \n",
    "\n",
    "[Create Petastorm MNIST Dataset Notebook](PetastormMNIST_CreateDataset.ipynb)\n",
    "\n",
    "![Petastorm 7](./../images/petastorm7.png \"Petastorm 7\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Starting Spark application\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<table>\n",
       "<tr><th>ID</th><th>YARN Application ID</th><th>Kind</th><th>State</th><th>Spark UI</th><th>Driver log</th><th>Current session?</th></tr><tr><td>5</td><td>application_1559565096638_0007</td><td>pyspark</td><td>idle</td><td><a target=\"_blank\" href=\"http://hopsworks0.logicalclocks.com:8088/proxy/application_1559565096638_0007/\">Link</a></td><td><a target=\"_blank\" href=\"http://hopsworks0.logicalclocks.com:8042/node/containerlogs/container_e01_1559565096638_0007_01_000001/demo_featurestore_admin000__meb10000\">Link</a></td><td>✔</td></tr></table>"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "SparkSession available as 'spark'.\n"
     ]
    }
   ],
   "source": [
    "from hops import hdfs, featurestore, experiment, tensorboard\n",
    "import numpy as np\n",
    "import pydoop\n",
    "import json\n",
    "\n",
    "# Must import pyarrow before torch. See: https://github.com/uber/petastorm/blob/master/docs/troubleshoot.rst\n",
    "import pyarrow as pa\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.optim as optim\n",
    "from torchvision import transforms\n",
    "import torch.nn.functional as F\n",
    "from petastorm import make_reader, TransformSpec\n",
    "from petastorm.tf_utils import make_petastorm_dataset\n",
    "from petastorm.pytorch import DataLoader"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Constants"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Names of the training datasets as registered in the Hopsworks feature store\n",
     "TRAIN_DATASET_NAME = \"MNIST_train_petastorm\"\n",
     "TEST_DATASET_NAME = \"MNIST_test_petastorm\"\n",
     "BATCH_SIZE = 50  # mini-batch size used by the petastorm DataLoader\n",
     "NUM_EPOCHS = 5  # number of full passes over the training dataset\n",
     "LEARNING_RATE = 0.01  # SGD learning rate\n",
     "MOMENTUM = 0.001  # SGD momentum; NOTE(review): unusually low (0.5-0.9 is typical) -- confirm intended\n",
     "SEED = 1  # torch manual seed for reproducible weight initialization\n",
     "LOG_INTERVAL = 10  # print the training loss every LOG_INTERVAL batches\n",
     "READER_EPOCHS = 1  # epochs per petastorm reader; the outer loop in train_fn handles multi-epoch\n",
     "PROJECT_PATH = hdfs.project_path()  # root HDFS path of the Hopsworks project"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Step 1: Define The Model"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "class Net(nn.Module):\n",
    "    def __init__(self):\n",
    "        super(Net, self).__init__()\n",
    "        self.conv1 = nn.Conv2d(1, 10, kernel_size=5)\n",
    "        self.conv2 = nn.Conv2d(10, 20, kernel_size=5)\n",
    "        self.conv2_drop = nn.Dropout2d()\n",
    "        self.fc1 = nn.Linear(320, 50)\n",
    "        self.fc2 = nn.Linear(50, 10)\n",
    "\n",
    "    def forward(self, x):\n",
    "        x = F.relu(F.max_pool2d(self.conv1(x), 2))\n",
    "        x = F.relu(F.max_pool2d(self.conv2_drop(self.conv2(x)), 2))\n",
    "        x = x.view(-1, 320)\n",
    "        x = F.relu(self.fc1(x))\n",
    "        x = F.dropout(x, training=self.training)\n",
    "        x = self.fc2(x)\n",
    "        return F.log_softmax(x, dim=1)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Step 2: Define PyTorch Dataset Transformer\n",
    "\n",
     "Petastorm datasets can be read directly with PyTorch by using `make_reader` together with the `DataLoader` from `petastorm.pytorch`, as done in the training loop below"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "def _transform_row(mnist_row):\n",
    "    \"\"\"\n",
    "    Normalize images\n",
    "    \"\"\"\n",
    "    transform = transforms.Compose([\n",
    "        transforms.Lambda(lambda nd: nd.reshape(28, 28, 1)),\n",
    "        transforms.ToTensor(),\n",
    "        transforms.Normalize((0.1307,), (0.3081,))\n",
    "    ])\n",
    "    # In addition, the petastorm pytorch DataLoader does not distinguish the notion of\n",
    "    # data or target transform, but that actually gives the user more flexibility\n",
    "    # to make the desired partial transform, as shown here.\n",
    "    result_row = {\n",
    "        'image': transform(mnist_row['image']),\n",
    "        'digit': mnist_row['digit']\n",
    "    }\n",
    "\n",
    "    return result_row"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Step 3: Define Epoch Reader and Training Loop"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
     "def train_epoch(train_dataset_path, model, device, optimizer, epoch):\n",
     "    \"\"\"\n",
     "    Trains a single epoch of MNIST using PyTorch.\n",
     "\n",
     "    Opens a fresh petastorm reader over the training dataset, wraps it in the\n",
     "    petastorm DataLoader, and performs one optimizer step per mini-batch.\n",
     "\n",
     "    :param train_dataset_path: HDFS path to the petastorm training dataset\n",
     "    :param model: the network to train (updated in place)\n",
     "    :param device: torch device that batches are moved to\n",
     "    :param optimizer: optimizer over model.parameters()\n",
     "    :param epoch: 1-based epoch index, used for logging and echoed back\n",
     "    :return: (train_accuracy, train_loss, epoch)\n",
     "    \"\"\"\n",
     "    # READER_EPOCHS=1: the reader yields the dataset once; train_fn loops epochs.\n",
     "    with DataLoader(make_reader(train_dataset_path, num_epochs=READER_EPOCHS, hdfs_driver='libhdfs',\n",
     "                               transform_spec=TransformSpec(_transform_row)), \n",
     "                    batch_size=BATCH_SIZE) as train_loader:\n",
     "        correct = 0\n",
     "        count = 0  # number of samples seen this epoch\n",
     "        train_loss = 0\n",
     "        model.train()  # enable dropout layers\n",
     "        for batch_idx, row in enumerate(train_loader):\n",
     "            data, target = row['image'].to(device), row['digit'].to(device)\n",
     "            optimizer.zero_grad()\n",
     "            output = model(data)\n",
     "            loss = F.nll_loss(output, target)  # NLL on the model's log_softmax output\n",
     "            train_loss += loss.item()  # accumulates per-batch mean losses\n",
     "            pred = output.max(1, keepdim=True)[1]  # get the index of the max log-probability\n",
     "            correct += pred.eq(target.view_as(pred)).sum().item()\n",
     "            count += data.shape[0]\n",
     "            loss.backward()\n",
     "            optimizer.step()\n",
     "            if batch_idx % LOG_INTERVAL == 0:\n",
     "                print('Train Epoch: {} [{}]\\tLoss: {:.6f}'.format(\n",
     "                    epoch, batch_idx * len(data), loss.item()))\n",
     "        train_accuracy = correct / count\n",
     "        # NOTE(review): this divides a sum of per-batch MEAN losses by the sample\n",
     "        # count, so it is not an exact per-sample average -- confirm intended.\n",
     "        train_loss /= count\n",
     "        return train_accuracy, train_loss, epoch"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Step 4: Put it All Together in a Training Function"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
     "def train_fn():\n",
     "    \"\"\"\n",
     "    The training loop.\n",
     "\n",
     "    Trains the CNN for NUM_EPOCHS epochs on the petastorm training dataset\n",
     "    from the feature store, saves the trained weights to HDFS, and dumps the\n",
     "    per-epoch accuracy/loss history to HDFS as JSON.\n",
     "\n",
     "    :return: dict with keys 'acc', 'epoch', 'loss' -- one list entry per epoch\n",
     "    \"\"\"\n",
     "    # Setup Torch\n",
     "    use_cuda = torch.cuda.is_available()\n",
     "    torch.manual_seed(SEED)  # reproducible weight initialization\n",
     "    device = torch.device('cuda' if use_cuda else 'cpu')\n",
     "    # Create Model\n",
     "    model = Net().to(device)\n",
     "    # Define optimizer (hyperparameters come from the constants cell)\n",
     "    optimizer = optim.SGD(model.parameters(), lr=LEARNING_RATE, momentum=MOMENTUM)\n",
     "    # get dataset path from the featurestore\n",
     "    train_dataset_path = featurestore.get_training_dataset_path(TRAIN_DATASET_NAME)\n",
     "    train_history = {}\n",
     "    train_epochs = []\n",
     "    train_accuracies = []\n",
     "    train_losses = []\n",
     "    for epoch in range(1, NUM_EPOCHS + 1):\n",
     "        train_accuracy, train_loss, epoch = train_epoch(train_dataset_path, model, device, optimizer, epoch)\n",
     "        train_epochs.append(epoch)\n",
     "        train_accuracies.append(train_accuracy)\n",
     "        train_losses.append(train_loss)\n",
     "    torch.save(model.state_dict(), \"mnist_torch_ps.pt\") #PyTorch can't save to HDFS in the current version so save to local fs first\n",
     "    hdfs.copy_to_hdfs(\"mnist_torch_ps.pt\", hdfs.project_path() + \"mnist/\", overwrite=True) # copy from local fs to hdfs\n",
     "    train_history[\"acc\"] = train_accuracies\n",
     "    train_history[\"epoch\"] = train_epochs\n",
     "    train_history[\"loss\"] = train_losses\n",
     "    # save training history to HDFS\n",
     "    results_path = hdfs.project_path() + \"mnist/mnist_train_results_2.txt\"\n",
     "    hdfs.dump(json.dumps(train_history), results_path)\n",
     "    return train_history"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Step 5: Training Experiments\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train Epoch: 1 [0]\tLoss: 2.314652\n",
      "Train Epoch: 1 [500]\tLoss: 2.347810\n",
      "Train Epoch: 1 [1000]\tLoss: 2.322586\n",
      "Train Epoch: 1 [1500]\tLoss: 2.292396\n",
      "Train Epoch: 1 [2000]\tLoss: 2.293021\n",
      "Train Epoch: 1 [2500]\tLoss: 2.279352\n",
      "Train Epoch: 1 [3000]\tLoss: 2.323452\n",
      "Train Epoch: 1 [3500]\tLoss: 2.277730\n",
      "Train Epoch: 1 [4000]\tLoss: 2.245240\n",
      "Train Epoch: 1 [4500]\tLoss: 2.224981\n",
      "Train Epoch: 1 [5000]\tLoss: 2.203325\n",
      "Train Epoch: 1 [5500]\tLoss: 2.202740\n",
      "Train Epoch: 1 [6000]\tLoss: 2.194582\n",
      "Train Epoch: 1 [6500]\tLoss: 2.204894\n",
      "Train Epoch: 1 [7000]\tLoss: 2.185442\n",
      "Train Epoch: 1 [7500]\tLoss: 2.105492\n",
      "Train Epoch: 1 [8000]\tLoss: 2.085393\n",
      "Train Epoch: 1 [8500]\tLoss: 2.058237\n",
      "Train Epoch: 1 [9000]\tLoss: 1.887053\n",
      "Train Epoch: 1 [9500]\tLoss: 1.932784\n",
      "Train Epoch: 1 [10000]\tLoss: 1.965014\n",
      "Train Epoch: 1 [10500]\tLoss: 1.860895\n",
      "Train Epoch: 1 [11000]\tLoss: 1.852554\n",
      "Train Epoch: 1 [11500]\tLoss: 1.784856\n",
      "Train Epoch: 1 [12000]\tLoss: 1.768502\n",
      "Train Epoch: 1 [12500]\tLoss: 1.804160\n",
      "Train Epoch: 1 [13000]\tLoss: 1.605310\n",
      "Train Epoch: 1 [13500]\tLoss: 1.548712\n",
      "Train Epoch: 1 [14000]\tLoss: 1.947237\n",
      "Train Epoch: 1 [14500]\tLoss: 1.626681\n",
      "Train Epoch: 1 [15000]\tLoss: 1.342831\n",
      "Train Epoch: 1 [15500]\tLoss: 1.333472\n",
      "Train Epoch: 1 [16000]\tLoss: 1.877568\n",
      "Train Epoch: 1 [16500]\tLoss: 1.080007\n",
      "Train Epoch: 1 [17000]\tLoss: 1.264965\n",
      "Train Epoch: 1 [17500]\tLoss: 1.446953\n",
      "Train Epoch: 1 [18000]\tLoss: 1.330481\n",
      "Train Epoch: 1 [18500]\tLoss: 1.080801\n",
      "Train Epoch: 1 [19000]\tLoss: 1.403510\n",
      "Train Epoch: 1 [19500]\tLoss: 1.195393\n",
      "Train Epoch: 1 [20000]\tLoss: 1.365885\n",
      "Train Epoch: 1 [20500]\tLoss: 1.141244\n",
      "Train Epoch: 1 [21000]\tLoss: 1.269509\n",
      "Train Epoch: 1 [21500]\tLoss: 1.020391\n",
      "Train Epoch: 1 [22000]\tLoss: 0.693062\n",
      "Train Epoch: 1 [22500]\tLoss: 1.192552\n",
      "Train Epoch: 1 [23000]\tLoss: 0.936118\n",
      "Train Epoch: 1 [23500]\tLoss: 0.888021\n",
      "Train Epoch: 1 [24000]\tLoss: 0.941373\n",
      "Train Epoch: 1 [24500]\tLoss: 1.094744\n",
      "Train Epoch: 1 [25000]\tLoss: 1.032292\n",
      "Train Epoch: 1 [25500]\tLoss: 0.559550\n",
      "Train Epoch: 1 [26000]\tLoss: 0.673567\n",
      "Train Epoch: 1 [26500]\tLoss: 1.137646\n",
      "Train Epoch: 1 [27000]\tLoss: 0.789707\n",
      "Train Epoch: 1 [27500]\tLoss: 0.956055\n",
      "Train Epoch: 1 [28000]\tLoss: 0.666835\n",
      "Train Epoch: 1 [28500]\tLoss: 0.982934\n",
      "Train Epoch: 1 [29000]\tLoss: 1.015001\n",
      "Train Epoch: 1 [29500]\tLoss: 0.736728\n",
      "Train Epoch: 1 [30000]\tLoss: 1.037584\n",
      "Train Epoch: 1 [30500]\tLoss: 1.020220\n",
      "Train Epoch: 1 [31000]\tLoss: 0.876992\n",
      "Train Epoch: 1 [31500]\tLoss: 0.625908\n",
      "Train Epoch: 1 [32000]\tLoss: 0.625826\n",
      "Train Epoch: 1 [32500]\tLoss: 1.120969\n",
      "Train Epoch: 1 [33000]\tLoss: 0.839810\n",
      "Train Epoch: 1 [33500]\tLoss: 0.637523\n",
      "Train Epoch: 1 [34000]\tLoss: 0.992285\n",
      "Train Epoch: 1 [34500]\tLoss: 0.787687\n",
      "Train Epoch: 1 [35000]\tLoss: 0.548028\n",
      "Train Epoch: 1 [35500]\tLoss: 0.752950\n",
      "Train Epoch: 1 [36000]\tLoss: 0.689869\n",
      "Train Epoch: 1 [36500]\tLoss: 0.704910\n",
      "Train Epoch: 1 [37000]\tLoss: 0.796584\n",
      "Train Epoch: 1 [37500]\tLoss: 0.735694\n",
      "Train Epoch: 1 [38000]\tLoss: 0.624109\n",
      "Train Epoch: 1 [38500]\tLoss: 0.984630\n",
      "Train Epoch: 1 [39000]\tLoss: 0.569103\n",
      "Train Epoch: 1 [39500]\tLoss: 0.640644\n",
      "Train Epoch: 1 [40000]\tLoss: 0.607074\n",
      "Train Epoch: 1 [40500]\tLoss: 0.739015\n",
      "Train Epoch: 1 [41000]\tLoss: 0.766151\n",
      "Train Epoch: 1 [41500]\tLoss: 0.862180\n",
      "Train Epoch: 1 [42000]\tLoss: 0.739657\n",
      "Train Epoch: 1 [42500]\tLoss: 1.137352\n",
      "Train Epoch: 1 [43000]\tLoss: 0.796514\n",
      "Train Epoch: 1 [43500]\tLoss: 0.710710\n",
      "Train Epoch: 1 [44000]\tLoss: 0.712428\n",
      "Train Epoch: 1 [44500]\tLoss: 0.391899\n",
      "Train Epoch: 1 [45000]\tLoss: 1.157633\n",
      "Train Epoch: 1 [45500]\tLoss: 0.730997\n",
      "Train Epoch: 1 [46000]\tLoss: 0.670208\n",
      "Train Epoch: 1 [46500]\tLoss: 0.374244\n",
      "Train Epoch: 1 [47000]\tLoss: 0.732776\n",
      "Train Epoch: 1 [47500]\tLoss: 0.569183\n",
      "Train Epoch: 1 [48000]\tLoss: 0.548214\n",
      "Train Epoch: 1 [48500]\tLoss: 0.403679\n",
      "Train Epoch: 1 [49000]\tLoss: 0.787867\n",
      "Train Epoch: 1 [49500]\tLoss: 1.213032\n",
      "Train Epoch: 1 [50000]\tLoss: 0.563721\n",
      "Train Epoch: 1 [50500]\tLoss: 0.636813\n",
      "Train Epoch: 1 [51000]\tLoss: 0.361427\n",
      "Train Epoch: 1 [51500]\tLoss: 0.660716\n",
      "Train Epoch: 1 [52000]\tLoss: 0.645396\n",
      "Train Epoch: 1 [52500]\tLoss: 0.442736\n",
      "Train Epoch: 1 [53000]\tLoss: 0.717546\n",
      "Train Epoch: 1 [53500]\tLoss: 0.478140\n",
      "Train Epoch: 1 [54000]\tLoss: 0.745808\n",
      "Train Epoch: 1 [54500]\tLoss: 0.644925\n",
      "Train Epoch: 1 [55000]\tLoss: 0.611227\n",
      "Train Epoch: 1 [55500]\tLoss: 0.378807\n",
      "Train Epoch: 1 [56000]\tLoss: 0.307739\n",
      "Train Epoch: 1 [56500]\tLoss: 0.439195\n",
      "Train Epoch: 1 [57000]\tLoss: 0.538999\n",
      "Train Epoch: 1 [57500]\tLoss: 0.548627\n",
      "Train Epoch: 1 [58000]\tLoss: 0.398077\n",
      "Train Epoch: 1 [58500]\tLoss: 0.208606\n",
      "Train Epoch: 1 [59000]\tLoss: 0.234980\n",
      "Train Epoch: 1 [59500]\tLoss: 0.204218\n",
      "Train Epoch: 2 [0]\tLoss: 0.619840\n",
      "Train Epoch: 2 [500]\tLoss: 0.921175\n",
      "Train Epoch: 2 [1000]\tLoss: 0.684286\n",
      "Train Epoch: 2 [1500]\tLoss: 0.542011\n",
      "Train Epoch: 2 [2000]\tLoss: 0.584727\n",
      "Train Epoch: 2 [2500]\tLoss: 0.379518\n",
      "Train Epoch: 2 [3000]\tLoss: 0.635102\n",
      "Train Epoch: 2 [3500]\tLoss: 0.733626\n",
      "Train Epoch: 2 [4000]\tLoss: 0.479817\n",
      "Train Epoch: 2 [4500]\tLoss: 0.456036\n",
      "Train Epoch: 2 [5000]\tLoss: 0.487749\n",
      "Train Epoch: 2 [5500]\tLoss: 0.430104\n",
      "Train Epoch: 2 [6000]\tLoss: 0.443401\n",
      "Train Epoch: 2 [6500]\tLoss: 0.415634\n",
      "Train Epoch: 2 [7000]\tLoss: 0.771553\n",
      "Train Epoch: 2 [7500]\tLoss: 0.656121\n",
      "Train Epoch: 2 [8000]\tLoss: 0.439045\n",
      "Train Epoch: 2 [8500]\tLoss: 0.459105\n",
      "Train Epoch: 2 [9000]\tLoss: 0.281455\n",
      "Train Epoch: 2 [9500]\tLoss: 0.423440\n",
      "Train Epoch: 2 [10000]\tLoss: 0.484109\n",
      "Train Epoch: 2 [10500]\tLoss: 0.401370\n",
      "Train Epoch: 2 [11000]\tLoss: 0.377011\n",
      "Train Epoch: 2 [11500]\tLoss: 0.473158\n",
      "Train Epoch: 2 [12000]\tLoss: 0.452099\n",
      "Train Epoch: 2 [12500]\tLoss: 0.653331\n",
      "Train Epoch: 2 [13000]\tLoss: 0.849055\n",
      "Train Epoch: 2 [13500]\tLoss: 0.393340\n",
      "Train Epoch: 2 [14000]\tLoss: 0.442612\n",
      "Train Epoch: 2 [14500]\tLoss: 0.826222\n",
      "Train Epoch: 2 [15000]\tLoss: 0.287240\n",
      "Train Epoch: 2 [15500]\tLoss: 0.426907\n",
      "Train Epoch: 2 [16000]\tLoss: 0.820387\n",
      "Train Epoch: 2 [16500]\tLoss: 0.240167\n",
      "Train Epoch: 2 [17000]\tLoss: 0.405147\n",
      "Train Epoch: 2 [17500]\tLoss: 0.438141\n",
      "Train Epoch: 2 [18000]\tLoss: 0.593009\n",
      "Train Epoch: 2 [18500]\tLoss: 0.417196\n",
      "Train Epoch: 2 [19000]\tLoss: 0.630604\n",
      "Train Epoch: 2 [19500]\tLoss: 0.521333\n",
      "Train Epoch: 2 [20000]\tLoss: 0.536920\n",
      "Train Epoch: 2 [20500]\tLoss: 0.481749\n",
      "Train Epoch: 2 [21000]\tLoss: 0.507058\n",
      "Train Epoch: 2 [21500]\tLoss: 0.347641\n",
      "Train Epoch: 2 [22000]\tLoss: 0.416561\n",
      "Train Epoch: 2 [22500]\tLoss: 0.681721\n",
      "Train Epoch: 2 [23000]\tLoss: 0.390809\n",
      "Train Epoch: 2 [23500]\tLoss: 0.470198\n",
      "Train Epoch: 2 [24000]\tLoss: 0.319788\n",
      "Train Epoch: 2 [24500]\tLoss: 0.532139\n",
      "Train Epoch: 2 [25000]\tLoss: 0.263668\n",
      "Train Epoch: 2 [25500]\tLoss: 0.347382\n",
      "Train Epoch: 2 [26000]\tLoss: 0.229244\n",
      "Train Epoch: 2 [26500]\tLoss: 0.555477\n",
      "Train Epoch: 2 [27000]\tLoss: 0.410140\n",
      "Train Epoch: 2 [27500]\tLoss: 0.489020\n",
      "Train Epoch: 2 [28000]\tLoss: 0.201792\n",
      "Train Epoch: 2 [28500]\tLoss: 0.378409\n",
      "Train Epoch: 2 [29000]\tLoss: 0.453885\n",
      "Train Epoch: 2 [29500]\tLoss: 0.473300\n",
      "Train Epoch: 2 [30000]\tLoss: 0.365118\n",
      "Train Epoch: 2 [30500]\tLoss: 0.646152\n",
      "Train Epoch: 2 [31000]\tLoss: 0.393904\n",
      "Train Epoch: 2 [31500]\tLoss: 0.398466\n",
      "Train Epoch: 2 [32000]\tLoss: 0.477276\n",
      "Train Epoch: 2 [32500]\tLoss: 0.446803\n",
      "Train Epoch: 2 [33000]\tLoss: 0.560624\n",
      "Train Epoch: 2 [33500]\tLoss: 0.417403\n",
      "Train Epoch: 2 [34000]\tLoss: 0.631800\n",
      "Train Epoch: 2 [34500]\tLoss: 0.557998\n",
      "Train Epoch: 2 [35000]\tLoss: 0.267541\n",
      "Train Epoch: 2 [35500]\tLoss: 0.443195\n",
      "Train Epoch: 2 [36000]\tLoss: 0.415995\n",
      "Train Epoch: 2 [36500]\tLoss: 0.371574\n",
      "Train Epoch: 2 [37000]\tLoss: 0.360189\n",
      "Train Epoch: 2 [37500]\tLoss: 0.459716\n",
      "Train Epoch: 2 [38000]\tLoss: 0.319102\n",
      "Train Epoch: 2 [38500]\tLoss: 0.649184\n",
      "Train Epoch: 2 [39000]\tLoss: 0.424727\n",
      "Train Epoch: 2 [39500]\tLoss: 0.355261\n",
      "Train Epoch: 2 [40000]\tLoss: 0.220223\n",
      "Train Epoch: 2 [40500]\tLoss: 0.275426\n",
      "Train Epoch: 2 [41000]\tLoss: 0.353535\n",
      "Train Epoch: 2 [41500]\tLoss: 0.612338\n",
      "Train Epoch: 2 [42000]\tLoss: 0.554872\n",
      "Train Epoch: 2 [42500]\tLoss: 0.561576\n",
      "Train Epoch: 2 [43000]\tLoss: 0.488499\n",
      "Train Epoch: 2 [43500]\tLoss: 0.346328\n",
      "Train Epoch: 2 [44000]\tLoss: 0.443777\n",
      "Train Epoch: 2 [44500]\tLoss: 0.259883\n",
      "Train Epoch: 2 [45000]\tLoss: 0.837269\n",
      "Train Epoch: 2 [45500]\tLoss: 0.550828\n",
      "Train Epoch: 2 [46000]\tLoss: 0.448741\n",
      "Train Epoch: 2 [46500]\tLoss: 0.356512\n",
      "Train Epoch: 2 [47000]\tLoss: 0.438028\n",
      "Train Epoch: 2 [47500]\tLoss: 0.487593\n",
      "Train Epoch: 2 [48000]\tLoss: 0.528074\n",
      "Train Epoch: 2 [48500]\tLoss: 0.432210\n",
      "Train Epoch: 2 [49000]\tLoss: 0.591643\n",
      "Train Epoch: 2 [49500]\tLoss: 0.895546\n",
      "Train Epoch: 2 [50000]\tLoss: 0.341296\n",
      "Train Epoch: 2 [50500]\tLoss: 0.450499\n",
      "Train Epoch: 2 [51000]\tLoss: 0.331676\n",
      "Train Epoch: 2 [51500]\tLoss: 0.358942\n",
      "Train Epoch: 2 [52000]\tLoss: 0.478027\n",
      "Train Epoch: 2 [52500]\tLoss: 0.291974\n",
      "Train Epoch: 2 [53000]\tLoss: 0.347549\n",
      "Train Epoch: 2 [53500]\tLoss: 0.456278\n",
      "Train Epoch: 2 [54000]\tLoss: 0.427167\n",
      "Train Epoch: 2 [54500]\tLoss: 0.381366\n",
      "Train Epoch: 2 [55000]\tLoss: 0.341925\n",
      "Train Epoch: 2 [55500]\tLoss: 0.308532\n",
      "Train Epoch: 2 [56000]\tLoss: 0.388050\n",
      "Train Epoch: 2 [56500]\tLoss: 0.248812\n",
      "Train Epoch: 2 [57000]\tLoss: 0.539491\n",
      "Train Epoch: 2 [57500]\tLoss: 0.430806\n",
      "Train Epoch: 2 [58000]\tLoss: 0.325792\n",
      "Train Epoch: 2 [58500]\tLoss: 0.312470\n",
      "Train Epoch: 2 [59000]\tLoss: 0.223431\n",
      "Train Epoch: 2 [59500]\tLoss: 0.183124\n",
      "Train Epoch: 3 [0]\tLoss: 0.397888\n",
      "Train Epoch: 3 [500]\tLoss: 0.555410\n",
      "Train Epoch: 3 [1000]\tLoss: 0.443689\n",
      "Train Epoch: 3 [1500]\tLoss: 0.307681\n",
      "Train Epoch: 3 [2000]\tLoss: 0.215819\n",
      "Train Epoch: 3 [2500]\tLoss: 0.211418\n",
      "Train Epoch: 3 [3000]\tLoss: 0.777273\n",
      "Train Epoch: 3 [3500]\tLoss: 0.410696\n",
      "Train Epoch: 3 [4000]\tLoss: 0.226901\n",
      "Train Epoch: 3 [4500]\tLoss: 0.313698\n",
      "Train Epoch: 3 [5000]\tLoss: 0.605175\n",
      "Train Epoch: 3 [5500]\tLoss: 0.420811\n",
      "Train Epoch: 3 [6000]\tLoss: 0.205940\n",
      "Train Epoch: 3 [6500]\tLoss: 0.297538\n",
      "Train Epoch: 3 [7000]\tLoss: 0.664845\n",
      "Train Epoch: 3 [7500]\tLoss: 0.447547\n",
      "Train Epoch: 3 [8000]\tLoss: 0.220270\n",
      "Train Epoch: 3 [8500]\tLoss: 0.261112\n",
      "Train Epoch: 3 [9000]\tLoss: 0.186899\n",
      "Train Epoch: 3 [9500]\tLoss: 0.525807\n",
      "Train Epoch: 3 [10000]\tLoss: 0.617036\n",
      "Train Epoch: 3 [10500]\tLoss: 0.286154\n",
      "Train Epoch: 3 [11000]\tLoss: 0.492432\n",
      "Train Epoch: 3 [11500]\tLoss: 0.285027\n",
      "Train Epoch: 3 [12000]\tLoss: 0.316035\n",
      "Train Epoch: 3 [12500]\tLoss: 0.421649\n",
      "Train Epoch: 3 [13000]\tLoss: 0.587274\n",
      "Train Epoch: 3 [13500]\tLoss: 0.430303\n",
      "Train Epoch: 3 [14000]\tLoss: 0.438531\n",
      "Train Epoch: 3 [14500]\tLoss: 0.509462\n",
      "Train Epoch: 3 [15000]\tLoss: 0.217851\n",
      "Train Epoch: 3 [15500]\tLoss: 0.403349\n",
      "Train Epoch: 3 [16000]\tLoss: 0.863393\n",
      "Train Epoch: 3 [16500]\tLoss: 0.206019\n",
      "Train Epoch: 3 [17000]\tLoss: 0.234085\n",
      "Train Epoch: 3 [17500]\tLoss: 0.575927\n",
      "Train Epoch: 3 [18000]\tLoss: 0.492653\n",
      "Train Epoch: 3 [18500]\tLoss: 0.219705\n",
      "Train Epoch: 3 [19000]\tLoss: 0.336940\n",
      "Train Epoch: 3 [19500]\tLoss: 0.392986\n",
      "Train Epoch: 3 [20000]\tLoss: 0.474515\n",
      "Train Epoch: 3 [20500]\tLoss: 0.258947\n",
      "Train Epoch: 3 [21000]\tLoss: 0.401852\n",
      "Train Epoch: 3 [21500]\tLoss: 0.286817\n",
      "Train Epoch: 3 [22000]\tLoss: 0.292514\n",
      "Train Epoch: 3 [22500]\tLoss: 0.725641\n",
      "Train Epoch: 3 [23000]\tLoss: 0.338796\n",
      "Train Epoch: 3 [23500]\tLoss: 0.197620\n",
      "Train Epoch: 3 [24000]\tLoss: 0.442460\n",
      "Train Epoch: 3 [24500]\tLoss: 0.421835\n",
      "Train Epoch: 3 [25000]\tLoss: 0.236234\n",
      "Train Epoch: 3 [25500]\tLoss: 0.247695\n",
      "Train Epoch: 3 [26000]\tLoss: 0.258222\n",
      "Train Epoch: 3 [26500]\tLoss: 0.361936\n",
      "Train Epoch: 3 [27000]\tLoss: 0.323021\n",
      "Train Epoch: 3 [27500]\tLoss: 0.471466\n",
      "Train Epoch: 3 [28000]\tLoss: 0.200719\n",
      "Train Epoch: 3 [28500]\tLoss: 0.297158\n",
      "Train Epoch: 3 [29000]\tLoss: 0.280326\n",
      "Train Epoch: 3 [29500]\tLoss: 0.293862\n",
      "Train Epoch: 3 [30000]\tLoss: 0.487289\n",
      "Train Epoch: 3 [30500]\tLoss: 0.551118\n",
      "Train Epoch: 3 [31000]\tLoss: 0.415547\n",
      "Train Epoch: 3 [31500]\tLoss: 0.312819\n",
      "Train Epoch: 3 [32000]\tLoss: 0.506050\n",
      "Train Epoch: 3 [32500]\tLoss: 0.422329\n",
      "Train Epoch: 3 [33000]\tLoss: 0.222236\n",
      "Train Epoch: 3 [33500]\tLoss: 0.331535\n",
      "Train Epoch: 3 [34000]\tLoss: 0.417262\n",
      "Train Epoch: 3 [34500]\tLoss: 0.442178\n",
      "Train Epoch: 3 [35000]\tLoss: 0.221713\n",
      "Train Epoch: 3 [35500]\tLoss: 0.363654\n",
      "Train Epoch: 3 [36000]\tLoss: 0.242190\n",
      "Train Epoch: 3 [36500]\tLoss: 0.296701\n",
      "Train Epoch: 3 [37000]\tLoss: 0.310814\n",
      "Train Epoch: 3 [37500]\tLoss: 0.572967\n",
      "Train Epoch: 3 [38000]\tLoss: 0.249382\n",
      "Train Epoch: 3 [38500]\tLoss: 0.587125\n",
      "Train Epoch: 3 [39000]\tLoss: 0.365271\n",
      "Train Epoch: 3 [39500]\tLoss: 0.248488\n",
      "Train Epoch: 3 [40000]\tLoss: 0.363927\n",
      "Train Epoch: 3 [40500]\tLoss: 0.199263\n",
      "Train Epoch: 3 [41000]\tLoss: 0.386913\n",
      "Train Epoch: 3 [41500]\tLoss: 0.411311\n",
      "Train Epoch: 3 [42000]\tLoss: 0.266194\n",
      "Train Epoch: 3 [42500]\tLoss: 0.563311\n",
      "Train Epoch: 3 [43000]\tLoss: 0.434032\n",
      "Train Epoch: 3 [43500]\tLoss: 0.246812\n",
      "Train Epoch: 3 [44000]\tLoss: 0.250274\n",
      "Train Epoch: 3 [44500]\tLoss: 0.149181\n",
      "Train Epoch: 3 [45000]\tLoss: 0.370156\n",
      "Train Epoch: 3 [45500]\tLoss: 0.643493\n",
      "Train Epoch: 3 [46000]\tLoss: 0.490385\n",
      "Train Epoch: 3 [46500]\tLoss: 0.178775\n",
      "Train Epoch: 3 [47000]\tLoss: 0.392861\n",
      "Train Epoch: 3 [47500]\tLoss: 0.305933\n",
      "Train Epoch: 3 [48000]\tLoss: 0.324681\n",
      "Train Epoch: 3 [48500]\tLoss: 0.264139\n",
      "Train Epoch: 3 [49000]\tLoss: 0.695150\n",
      "Train Epoch: 3 [49500]\tLoss: 0.764703\n",
      "Train Epoch: 3 [50000]\tLoss: 0.305670\n",
      "Train Epoch: 3 [50500]\tLoss: 0.280864\n",
      "Train Epoch: 3 [51000]\tLoss: 0.212112\n",
      "Train Epoch: 3 [51500]\tLoss: 0.292480\n",
      "Train Epoch: 3 [52000]\tLoss: 0.236343\n",
      "Train Epoch: 3 [52500]\tLoss: 0.195253\n",
      "Train Epoch: 3 [53000]\tLoss: 0.413919\n",
      "Train Epoch: 3 [53500]\tLoss: 0.381807\n",
      "Train Epoch: 3 [54000]\tLoss: 0.504153\n",
      "Train Epoch: 3 [54500]\tLoss: 0.175783\n",
      "Train Epoch: 3 [55000]\tLoss: 0.308307\n",
      "Train Epoch: 3 [55500]\tLoss: 0.337251\n",
      "Train Epoch: 3 [56000]\tLoss: 0.313177\n",
      "Train Epoch: 3 [56500]\tLoss: 0.215633\n",
      "Train Epoch: 3 [57000]\tLoss: 0.300737\n",
      "Train Epoch: 3 [57500]\tLoss: 0.594361\n",
      "Train Epoch: 3 [58000]\tLoss: 0.208251\n",
      "Train Epoch: 3 [58500]\tLoss: 0.135319\n",
      "Train Epoch: 3 [59000]\tLoss: 0.083631\n",
      "Train Epoch: 3 [59500]\tLoss: 0.179061\n",
      "Train Epoch: 4 [0]\tLoss: 0.291782\n",
      "Train Epoch: 4 [500]\tLoss: 0.415687\n",
      "Train Epoch: 4 [1000]\tLoss: 0.373633\n",
      "Train Epoch: 4 [1500]\tLoss: 0.545070\n",
      "Train Epoch: 4 [2000]\tLoss: 0.206357\n",
      "Train Epoch: 4 [2500]\tLoss: 0.328217\n",
      "Train Epoch: 4 [3000]\tLoss: 0.373664\n",
      "Train Epoch: 4 [3500]\tLoss: 0.254306\n",
      "Train Epoch: 4 [4000]\tLoss: 0.184824\n",
      "Train Epoch: 4 [4500]\tLoss: 0.516478\n",
      "Train Epoch: 4 [5000]\tLoss: 0.333210\n",
      "Train Epoch: 4 [5500]\tLoss: 0.325951\n",
      "Train Epoch: 4 [6000]\tLoss: 0.139906\n",
      "Train Epoch: 4 [6500]\tLoss: 0.202069\n",
      "Train Epoch: 4 [7000]\tLoss: 0.388184\n",
      "Train Epoch: 4 [7500]\tLoss: 0.275762\n",
      "Train Epoch: 4 [8000]\tLoss: 0.231508\n",
      "Train Epoch: 4 [8500]\tLoss: 0.275232\n",
      "Train Epoch: 4 [9000]\tLoss: 0.183261\n",
      "Train Epoch: 4 [9500]\tLoss: 0.203526\n",
      "Train Epoch: 4 [10000]\tLoss: 0.358410\n",
      "Train Epoch: 4 [10500]\tLoss: 0.132772\n",
      "Train Epoch: 4 [11000]\tLoss: 0.205823\n",
      "Train Epoch: 4 [11500]\tLoss: 0.382235\n",
      "Train Epoch: 4 [12000]\tLoss: 0.154178\n",
      "Train Epoch: 4 [12500]\tLoss: 0.204589\n",
      "Train Epoch: 4 [13000]\tLoss: 0.500733\n",
      "Train Epoch: 4 [13500]\tLoss: 0.350327\n",
      "Train Epoch: 4 [14000]\tLoss: 0.297533\n",
      "Train Epoch: 4 [14500]\tLoss: 0.592561\n",
      "Train Epoch: 4 [15000]\tLoss: 0.287886\n",
      "Train Epoch: 4 [15500]\tLoss: 0.277174\n",
      "Train Epoch: 4 [16000]\tLoss: 0.565333\n",
      "Train Epoch: 4 [16500]\tLoss: 0.221692\n",
      "Train Epoch: 4 [17000]\tLoss: 0.417540\n",
      "Train Epoch: 4 [17500]\tLoss: 0.306073\n",
      "Train Epoch: 4 [18000]\tLoss: 0.510064\n",
      "Train Epoch: 4 [18500]\tLoss: 0.324666\n",
      "Train Epoch: 4 [19000]\tLoss: 0.168929\n",
      "Train Epoch: 4 [19500]\tLoss: 0.324342\n",
      "Train Epoch: 4 [20000]\tLoss: 0.418429\n",
      "Train Epoch: 4 [20500]\tLoss: 0.155318\n",
      "Train Epoch: 4 [21000]\tLoss: 0.362205\n",
      "Train Epoch: 4 [21500]\tLoss: 0.187151\n",
      "Train Epoch: 4 [22000]\tLoss: 0.253433\n",
      "Train Epoch: 4 [22500]\tLoss: 0.506044\n",
      "Train Epoch: 4 [23000]\tLoss: 0.246175\n",
      "Train Epoch: 4 [23500]\tLoss: 0.312927\n",
      "Train Epoch: 4 [24000]\tLoss: 0.311217\n",
      "Train Epoch: 4 [24500]\tLoss: 0.287164\n",
      "Train Epoch: 4 [25000]\tLoss: 0.347461\n",
      "Train Epoch: 4 [25500]\tLoss: 0.211034\n",
      "Train Epoch: 4 [26000]\tLoss: 0.196277\n",
      "Train Epoch: 4 [26500]\tLoss: 0.374076\n",
      "Train Epoch: 4 [27000]\tLoss: 0.225920\n",
      "Train Epoch: 4 [27500]\tLoss: 0.425831\n",
      "Train Epoch: 4 [28000]\tLoss: 0.101450\n",
      "Train Epoch: 4 [28500]\tLoss: 0.190879\n",
      "Train Epoch: 4 [29000]\tLoss: 0.303654\n",
      "Train Epoch: 4 [29500]\tLoss: 0.356354\n",
      "Train Epoch: 4 [30000]\tLoss: 0.469682\n",
      "Train Epoch: 4 [30500]\tLoss: 0.534364\n",
      "Train Epoch: 4 [31000]\tLoss: 0.340332\n",
      "Train Epoch: 4 [31500]\tLoss: 0.221132\n",
      "Train Epoch: 4 [32000]\tLoss: 0.608599\n",
      "Train Epoch: 4 [32500]\tLoss: 0.270502\n",
      "Train Epoch: 4 [33000]\tLoss: 0.207109\n",
      "Train Epoch: 4 [33500]\tLoss: 0.379733\n",
      "Train Epoch: 4 [34000]\tLoss: 0.542799\n",
      "Train Epoch: 4 [34500]\tLoss: 0.407311\n",
      "Train Epoch: 4 [35000]\tLoss: 0.126173\n",
      "Train Epoch: 4 [35500]\tLoss: 0.266643\n",
      "Train Epoch: 4 [36000]\tLoss: 0.312281\n",
      "Train Epoch: 4 [36500]\tLoss: 0.229092\n",
      "Train Epoch: 4 [37000]\tLoss: 0.242022\n",
      "Train Epoch: 4 [37500]\tLoss: 0.511313\n",
      "Train Epoch: 4 [38000]\tLoss: 0.458181\n",
      "Train Epoch: 4 [38500]\tLoss: 0.323650\n",
      "Train Epoch: 4 [39000]\tLoss: 0.227028\n",
      "Train Epoch: 4 [39500]\tLoss: 0.127028\n",
      "Train Epoch: 4 [40000]\tLoss: 0.328208\n",
      "Train Epoch: 4 [40500]\tLoss: 0.147009\n",
      "Train Epoch: 4 [41000]\tLoss: 0.242505\n",
      "Train Epoch: 4 [41500]\tLoss: 0.413382\n",
      "Train Epoch: 4 [42000]\tLoss: 0.246739\n",
      "Train Epoch: 4 [42500]\tLoss: 0.605354\n",
      "Train Epoch: 4 [43000]\tLoss: 0.431116\n",
      "Train Epoch: 4 [43500]\tLoss: 0.368285\n",
      "Train Epoch: 4 [44000]\tLoss: 0.317723\n",
      "Train Epoch: 4 [44500]\tLoss: 0.246383\n",
      "Train Epoch: 4 [45000]\tLoss: 0.426887\n",
      "Train Epoch: 4 [45500]\tLoss: 0.427944\n",
      "Train Epoch: 4 [46000]\tLoss: 0.411599\n",
      "Train Epoch: 4 [46500]\tLoss: 0.163703\n",
      "Train Epoch: 4 [47000]\tLoss: 0.485943\n",
      "Train Epoch: 4 [47500]\tLoss: 0.235072\n",
      "Train Epoch: 4 [48000]\tLoss: 0.207498\n",
      "Train Epoch: 4 [48500]\tLoss: 0.280768\n",
      "Train Epoch: 4 [49000]\tLoss: 0.490604\n",
      "Train Epoch: 4 [49500]\tLoss: 0.519480\n",
      "Train Epoch: 4 [50000]\tLoss: 0.311114\n",
      "Train Epoch: 4 [50500]\tLoss: 0.331554\n",
      "Train Epoch: 4 [51000]\tLoss: 0.130298\n",
      "Train Epoch: 4 [51500]\tLoss: 0.207090\n",
      "Train Epoch: 4 [52000]\tLoss: 0.220545\n",
      "Train Epoch: 4 [52500]\tLoss: 0.121829\n",
      "Train Epoch: 4 [53000]\tLoss: 0.272601\n",
      "Train Epoch: 4 [53500]\tLoss: 0.160430\n",
      "Train Epoch: 4 [54000]\tLoss: 0.313159\n",
      "Train Epoch: 4 [54500]\tLoss: 0.225195\n",
      "Train Epoch: 4 [55000]\tLoss: 0.216868\n",
      "Train Epoch: 4 [55500]\tLoss: 0.275215\n",
      "Train Epoch: 4 [56000]\tLoss: 0.230752\n",
      "Train Epoch: 4 [56500]\tLoss: 0.159089\n",
      "Train Epoch: 4 [57000]\tLoss: 0.404584\n",
      "Train Epoch: 4 [57500]\tLoss: 0.318453\n",
      "Train Epoch: 4 [58000]\tLoss: 0.224510\n",
      "Train Epoch: 4 [58500]\tLoss: 0.106110\n",
      "Train Epoch: 4 [59000]\tLoss: 0.036654\n",
      "Train Epoch: 4 [59500]\tLoss: 0.100747\n",
      "Train Epoch: 5 [0]\tLoss: 0.284882\n",
      "Train Epoch: 5 [500]\tLoss: 0.390197\n",
      "Train Epoch: 5 [1000]\tLoss: 0.331643\n",
      "Train Epoch: 5 [1500]\tLoss: 0.351815\n",
      "Train Epoch: 5 [2000]\tLoss: 0.244909\n",
      "Train Epoch: 5 [2500]\tLoss: 0.162648\n",
      "Train Epoch: 5 [3000]\tLoss: 0.433158\n",
      "Train Epoch: 5 [3500]\tLoss: 0.524262\n",
      "Train Epoch: 5 [4000]\tLoss: 0.173844\n",
      "Train Epoch: 5 [4500]\tLoss: 0.191874\n",
      "Train Epoch: 5 [5000]\tLoss: 0.333540\n",
      "Train Epoch: 5 [5500]\tLoss: 0.373463\n",
      "Train Epoch: 5 [6000]\tLoss: 0.252792\n",
      "Train Epoch: 5 [6500]\tLoss: 0.144528\n",
      "Train Epoch: 5 [7000]\tLoss: 0.582535\n",
      "Train Epoch: 5 [7500]\tLoss: 0.435696\n",
      "Train Epoch: 5 [8000]\tLoss: 0.204081\n",
      "Train Epoch: 5 [8500]\tLoss: 0.265917\n",
      "Train Epoch: 5 [9000]\tLoss: 0.112789\n",
      "Train Epoch: 5 [9500]\tLoss: 0.301323\n",
      "Train Epoch: 5 [10000]\tLoss: 0.371870\n",
      "Train Epoch: 5 [10500]\tLoss: 0.161700\n",
      "Train Epoch: 5 [11000]\tLoss: 0.376175\n",
      "Train Epoch: 5 [11500]\tLoss: 0.223167\n",
      "Train Epoch: 5 [12000]\tLoss: 0.188436\n",
      "Train Epoch: 5 [12500]\tLoss: 0.447232\n",
      "Train Epoch: 5 [13000]\tLoss: 0.519080\n",
      "Train Epoch: 5 [13500]\tLoss: 0.290447\n",
      "Train Epoch: 5 [14000]\tLoss: 0.204044\n",
      "Train Epoch: 5 [14500]\tLoss: 0.357989\n",
      "Train Epoch: 5 [15000]\tLoss: 0.116156\n",
      "Train Epoch: 5 [15500]\tLoss: 0.335184\n",
      "Train Epoch: 5 [16000]\tLoss: 0.509289\n",
      "Train Epoch: 5 [16500]\tLoss: 0.115550\n",
      "Train Epoch: 5 [17000]\tLoss: 0.390196\n",
      "Train Epoch: 5 [17500]\tLoss: 0.417954\n",
      "Train Epoch: 5 [18000]\tLoss: 0.431091\n",
      "Train Epoch: 5 [18500]\tLoss: 0.259086\n",
      "Train Epoch: 5 [19000]\tLoss: 0.195512\n",
      "Train Epoch: 5 [19500]\tLoss: 0.338307\n",
      "Train Epoch: 5 [20000]\tLoss: 0.413318\n",
      "Train Epoch: 5 [20500]\tLoss: 0.163657\n",
      "Train Epoch: 5 [21000]\tLoss: 0.299917\n",
      "Train Epoch: 5 [21500]\tLoss: 0.114913\n",
      "Train Epoch: 5 [22000]\tLoss: 0.213001\n",
      "Train Epoch: 5 [22500]\tLoss: 0.385346\n",
      "Train Epoch: 5 [23000]\tLoss: 0.329177\n",
      "Train Epoch: 5 [23500]\tLoss: 0.115336\n",
      "Train Epoch: 5 [24000]\tLoss: 0.235521\n",
      "Train Epoch: 5 [24500]\tLoss: 0.274558\n",
      "Train Epoch: 5 [25000]\tLoss: 0.175308\n",
      "Train Epoch: 5 [25500]\tLoss: 0.246120\n",
      "Train Epoch: 5 [26000]\tLoss: 0.328373\n",
      "Train Epoch: 5 [26500]\tLoss: 0.371750\n",
      "Train Epoch: 5 [27000]\tLoss: 0.152148\n",
      "Train Epoch: 5 [27500]\tLoss: 0.418375\n",
      "Train Epoch: 5 [28000]\tLoss: 0.224202\n",
      "Train Epoch: 5 [28500]\tLoss: 0.318796\n",
      "Train Epoch: 5 [29000]\tLoss: 0.311048\n",
      "Train Epoch: 5 [29500]\tLoss: 0.172224\n",
      "Train Epoch: 5 [30000]\tLoss: 0.423829\n",
      "Train Epoch: 5 [30500]\tLoss: 0.317994\n",
      "Train Epoch: 5 [31000]\tLoss: 0.153521\n",
      "Train Epoch: 5 [31500]\tLoss: 0.124865\n",
      "Train Epoch: 5 [32000]\tLoss: 0.393087\n",
      "Train Epoch: 5 [32500]\tLoss: 0.224512\n",
      "Train Epoch: 5 [33000]\tLoss: 0.209728\n",
      "Train Epoch: 5 [33500]\tLoss: 0.255618\n",
      "Train Epoch: 5 [34000]\tLoss: 0.393016\n",
      "Train Epoch: 5 [34500]\tLoss: 0.412754\n",
      "Train Epoch: 5 [35000]\tLoss: 0.282987\n",
      "Train Epoch: 5 [35500]\tLoss: 0.250728\n",
      "Train Epoch: 5 [36000]\tLoss: 0.275412\n",
      "Train Epoch: 5 [36500]\tLoss: 0.197065\n",
      "Train Epoch: 5 [37000]\tLoss: 0.229326\n",
      "Train Epoch: 5 [37500]\tLoss: 0.368085\n",
      "Train Epoch: 5 [38000]\tLoss: 0.205134\n",
      "Train Epoch: 5 [38500]\tLoss: 0.415392\n",
      "Train Epoch: 5 [39000]\tLoss: 0.180657\n",
      "Train Epoch: 5 [39500]\tLoss: 0.289081\n",
      "Train Epoch: 5 [40000]\tLoss: 0.257063\n",
      "Train Epoch: 5 [40500]\tLoss: 0.251602\n",
      "Train Epoch: 5 [41000]\tLoss: 0.261884\n",
      "Train Epoch: 5 [41500]\tLoss: 0.236813\n",
      "Train Epoch: 5 [42000]\tLoss: 0.235541\n",
      "Train Epoch: 5 [42500]\tLoss: 0.457901\n",
      "Train Epoch: 5 [43000]\tLoss: 0.466899\n",
      "Train Epoch: 5 [43500]\tLoss: 0.336740\n",
      "Train Epoch: 5 [44000]\tLoss: 0.208274\n",
      "Train Epoch: 5 [44500]\tLoss: 0.285346\n",
      "Train Epoch: 5 [45000]\tLoss: 0.455281\n",
      "Train Epoch: 5 [45500]\tLoss: 0.257271\n",
      "Train Epoch: 5 [46000]\tLoss: 0.429283\n",
      "Train Epoch: 5 [46500]\tLoss: 0.123630\n",
      "Train Epoch: 5 [47000]\tLoss: 0.352957\n",
      "Train Epoch: 5 [47500]\tLoss: 0.292780\n",
      "Train Epoch: 5 [48000]\tLoss: 0.125774\n",
      "Train Epoch: 5 [48500]\tLoss: 0.182802\n",
      "Train Epoch: 5 [49000]\tLoss: 0.458743\n",
      "Train Epoch: 5 [49500]\tLoss: 0.889245\n",
      "Train Epoch: 5 [50000]\tLoss: 0.150561\n",
      "Train Epoch: 5 [50500]\tLoss: 0.261878\n",
      "Train Epoch: 5 [51000]\tLoss: 0.090729\n",
      "Train Epoch: 5 [51500]\tLoss: 0.241817\n",
      "Train Epoch: 5 [52000]\tLoss: 0.232490\n",
      "Train Epoch: 5 [52500]\tLoss: 0.164470\n",
      "Train Epoch: 5 [53000]\tLoss: 0.271147\n",
      "Train Epoch: 5 [53500]\tLoss: 0.233940\n",
      "Train Epoch: 5 [54000]\tLoss: 0.266719\n",
      "Train Epoch: 5 [54500]\tLoss: 0.243603\n",
      "Train Epoch: 5 [55000]\tLoss: 0.209635\n",
      "Train Epoch: 5 [55500]\tLoss: 0.339845\n",
      "Train Epoch: 5 [56000]\tLoss: 0.206854\n",
      "Train Epoch: 5 [56500]\tLoss: 0.090572\n",
      "Train Epoch: 5 [57000]\tLoss: 0.266986\n",
      "Train Epoch: 5 [57500]\tLoss: 0.284211\n",
      "Train Epoch: 5 [58000]\tLoss: 0.169153\n",
      "Train Epoch: 5 [58500]\tLoss: 0.062349\n",
      "Train Epoch: 5 [59000]\tLoss: 0.129352\n",
      "Train Epoch: 5 [59500]\tLoss: 0.048305\n",
      "Started copying local path mnist_torch_ps.pt to hdfs path hdfs://10.0.2.15:8020/Projects/demo_featurestore_admin000/mnist//mnist_torch_ps.pt\n",
      "\n",
      "Finished copying"
     ]
    }
   ],
   "source": [
    "train_history = train_fn()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Step 6: Plot Training Results\n",
    "\n",
    "Inside the `train_fn` function we saved the training history to HDFS, so we can now read it back in `%%local` mode and plot it."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [],
   "source": [
    "%%local\n",
    "import json\n",
    "from hops import hdfs\n",
    "import matplotlib.pyplot as plt\n",
    "from pylab import rcParams\n",
    "results_path = hdfs.project_path() + \"mnist/mnist_train_results_2.txt\"\n",
    "results = json.loads(hdfs.load(results_path))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Plot Loss/Epoch During Training"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x7f976b6cea90>]"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZUAAAEWCAYAAACufwpNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAgAElEQVR4nO3deXxeZZ3//9e7SZN0SVKapmnpXhKEIogQWQZQoMAAKnWUr4IooCBuiI5f/Q46Xx3Hcean83VcWAZlU2CURdzqSqFsspNCWVqWpqWlLW2S7htpm/bz++OcpHdD0qTtfefO8n4+Hvcj97nOdZ/zOadNPvd1XeecSxGBmZlZNgzKdwBmZtZ/OKmYmVnWOKmYmVnWOKmYmVnWOKmYmVnWOKmYmVnWOKmY9QBJP5f0nXzH0ZdJulHS17Nd17LLScU6JWmxpNPyHUe2SfqWpO2SNmW81uU7rr2R/ttskzSqXfmzkkLS5HT55+nyMRl1qiVFxvKDki7NWP66pNfS87JM0p1p+byM87VDUnPG8m5/wCX9JGPdtnbn+y/7cswRcWlE/Ee261p2OalYvyapsJNVd0bE8IzXiB4NLDteA85vXZB0ODC0g3prgG61kiRdBHwcOC0ihgO1wGyAiDis9XwBfwMuzzh/u/0Bj4jPZNT9D3Y/32d1sN/O/p2sj3FSsX0i6VOS6iWtkTRT0oFpuST9UFKjpA2SXpD09nTd2ZLmS9ooabmkr3Sy7YslPSrpGknrJb0saXrG+nJJN0lakW7nO5IK2n32h5JWA9/ah2MLSVdIWiRplaT/J2lQum6QpP8raUl6jLdKKs/47ImSHpO0TtJSSRdnbPoASX9Kj/9JSQftbWzt3AZcmLF8EXBrB/VuAY6Q9J5ubPNdwD0RsRAgIlZGxPX7GedbtLaWJH1C0uvArPTc3i1pZXr+HpR0aMZn/kfSt9L3p6Wttf8jqUnSG5Iu3Me6lem/ywZJT0n6D0kPZvuYBwonFdtrkk4F/j/gw8BYYAlwR7r6DODdwMFAeVpndbruJuDTEVEKvB24fw+7ORZYCIwC/gX4jaSR6bqfAy1ANfDOdJ+XtvvsIqAK+Pd9PMx/IPmWfhQwA/hkWn5x+joFmAoMB64BkDQJ+AtwNVAJHAnMzdjmecC/AgcA9fsRW6sngDJJh6ZJ9Tzgfzqot4WktdCd/T0BXCjpq5JqW5N1Dr0bOAR4b7r8R6AGGAO8SJI4OzMeGAIcCHwGuE5S2T7UvQ5YR/L/5ZMkydn2kZOK7YsLgJsj4pmI2Ap8DTheST/+dqCU5A+FIuKliFiRfm47ME1SWUSsjYhn9rCPRuBHEbE9Iu4EXgHeK6kKOBv4UkRsjohG4Ickf1BbvRERV0dES0S82cn2P5x+G259PdBu/fciYk1EvA78iF3dTBcAP4iIRRGxKT3289Lum48C90XE7WncqyMiM6n8NiKeiogW4BckSWd/tbZWTgdeApZ3Uu+nwERJb+l6yhQR/wN8Afh74CGgUdI/ZSHOzvxLRGyJiDcjYmdE/DwiNkZEM0kr82hJwzr5bDPwnfRczwS2knyZ6XZdSYOBDwDfTGPoKpFZF5xUbF8cSNI6ASD947oaGBcR95N8c7+W5A/S9RnfCD9EkhCWSHpI0vF72Mfy2P1pp0vS/U4CBgMrWhMCyR/M0Rl1l3bjGO6KiBEZr1Parc/cRuu+od2xp+8LSb7lTiBpXXVmZcb7LSStnLfQ7oPcXV3BdBtJMruYjru+AEiT/7+lrz2KiF9ExGnACJJv9f8m6e+7+tw+ajvPkgok/Wfa7biBpDUHSWu1I6siYkfGcqfndA91q4ACdv/37s7/H+uEk4rtizdI/rgDkH6TrCD9lhwRV0XE0cA0km+OX03Ln46IGSQJ4HfAXXvYxzhJyliemO53Kcm3zFEZCaEsIg7LqJuNR29P6GDf0O7Y03UtQEMa2/6Ok+w2
yN3VFUwRsYRkwP5s4DddbPpnJInig92MY3tE/Ap4nqS7MuvafXG4kOQ4TiXpOq1Oy9X+c1nUAOwk6R5rNaGTutYNTirWlcGSSjJehcDtwCckHSmpmKS//smIWCzpXZKOTbsVNpN0O+yUVCTpAknlEbEd2EDyy9yZ0cAVkgZL+l/AocCf0660WcB/SSpLB3cP6uYg9N74qqQDJE0AvgjcmZbfDvyjpCmSMq9sau3SOk3ShyUVSqqQlI0urq5cApwaEZv3VCmN8V+ATruzlFzo8F5Jpem5PQs4DHgyqxF3rJTkC8NqkqvY9nfMqUvp/8XfAf8qaYikw4CP5Xq//ZmTinXlz8CbGa9vRcR9wDeAXwMrSL6dt45plAE3AGtJuoZWA/8vXfdxYHHatfEZkvGJzjxJMmC7iuSPy7kR0TrgfyFQBMxP93M3yQUDe+Mj2v0+lU2SMrvQfg/MIRlo/xPJRQYAN5N0OT1M0kJoJhmDIB1/ORv43ySX8c4F3rGXce21iFgYEXXdrH47yb9ZZzYAXwdeJxm8/k/gsxHxyP5F2S0/I2kJvgHMAx7rgX0CfJakpd2QxnA7SXKzfSBP0mW9TXoZ7qURcWKe9h9ATUTUd1nZ+h1J/wWMiIhL8h1LX+SWipkNaJKmSTpcieOATwC/zXdcfZXvYjWzga6MZDxsLEkX2Hcj4o/5DanvcveXmZlljbu/zMwsawZ099eoUaNi8uTJ+Q7DzKxPmTNnzqqIqOxo3YBOKpMnT6aurrtXYpqZGYCkJZ2tc/eXmZlljZOKmZllTU6TiqQzJb2iZN6NKztYXyzpznT9k9o1W93pkuYomYtjTvqodSQNTec9eFnJLHTfzdjWxelcCXPT16Xt92dmZrmVs6SSzsNwLXAWyYMFz5c0rV21S4C1EVFN8vjy76Xlq4D3R8ThJHMbZD6K+vsRcQjJPBontHuU950RcWT6ujH7R2VmZnuSy5bKMUB9Ou/ENpJJnGa0qzODZFY6SJ7fNF2SIuLZiGh9Kuw8YIik4nTehQcA0m0+w+5PFzUzszzKZVIZx+7zEixLyzqskz5BdT3Jg90yfQhonQyqjaQRwPtJ589urSvpeSVTkvrx1WZmPaxXD9Snj6H+HvDpduWtj1+/KiIWpcV/ACZHxBHAvexqAbXf5mWS6iTVNTU15S54M7MBKJdJZTm7T3YznrdOddpWJ00U5aTzmUsaT/JQtwsjov1setcDCyLiR60F6dStra2ZG4GjOwoqIq6PiNqIqK2s7PDenS69uHw93/vry/gRN2Zmu8tlUnkaqEknMyoimW9jZrs6M0kG4gHOBe6PiEi7tv4EXBkRj2Z+QNJ3SJLPl9qVZ86ncQ7JfN058czra7nuwYU8vnB115XNzAaQnCWVdIzkcuAekj/wd0XEPEnflnROWu0moEJSPfBloPWy48tJphL9ZsYlwqPT1ss/k1xN9ky7S4evSC8zfg64gmTO7pz4cO0EqsqK+fHsBbnahZlZnzSgn1JcW1sb+/qYlp89+hr/+of53HHZcRw3tf21BWZm/ZekORFR29G6Xj1Q35udf8xEKkuL+fF9bq2YmbVyUtlHJYML+Mx7DuLxRat56rU1+Q7HzKxXcFLZDx89ZiKjhhdzlcdWzMwAJ5X9MqSogE+/eyqP1K9izhK3VszMnFT20wXHTaRiWBE/nl2f71DMzPLOSWU/DS0q5FPvnsrDrzbx7Otr8x2OmVleOalkwcePm8TIYUW+b8XMBjwnlSwYVlzIpSdN4cFXmnhu6bp8h2NmljdOKlly4fGTGTF0sK8EM7MBzUklS4YXF3LpiVOY/XIjLyxbn+9wzMzywkkliy78u8mUlRRy1f1urZjZwOSkkkVlJYO55MSp3Du/gXlvuLViZgOPk0qWXXzCZEpLCj22YmYDkpNKlpUPGcwnTpjCPfMaeGnFhnyHY2bWo5xUcuCSE6ZQWlzI1R5bMbMBxkklB8qHDubiEybz5xdW8srKjfkOx8ys
xzip5MgnT5jCsKICt1bMbEBxUsmRA4YVcdHfTeZPL6xgQYNbK2Y2MDip5NClJ01lyOACrr7fTzA2s4HBSSWHRg4r4uPHT+IPz79BfeOmfIdjZpZzTio5dtlJUykpLODaB9xaMbP+L6dJRdKZkl6RVC/pyg7WF0u6M13/pKTJafnpkuZIeiH9eWrGZ45Oy+slXSVJaflISfdKWpD+PCCXx9ZdFcOL+fjxk/j93OUsanJrxcz6t5wlFUkFwLXAWcA04HxJ09pVuwRYGxHVwA+B76Xlq4D3R8ThwEXAbRmfuQ74FFCTvs5My68EZkdEDTA7Xe4VPnXSVIoKB3HtAwvzHYqZWU7lsqVyDFAfEYsiYhtwBzCjXZ0ZwC3p+7uB6ZIUEc9GxBtp+TxgSNqqGQuURcQTERHArcAHOtjWLRnleVdZWswFx07id3OXs2T15nyHY2aWM7lMKuOApRnLy9KyDutERAuwHqhoV+dDwDMRsTWtv6yTbVZFxIr0/UqgqqOgJF0mqU5SXVNT094d0X749LunUjhIXOMrwcysH+vVA/WSDiPpEvv03nwubcVEJ+uuj4jaiKitrKzMQpTdM7qshPOPmchvnl3O0jVbemy/ZmY9KZdJZTkwIWN5fFrWYR1JhUA5sDpdHg/8FrgwIhZm1B/fyTYb0u4x0p+NWTuSLPnsyQdRMEi+EszM+q1cJpWngRpJUyQVAecBM9vVmUkyEA9wLnB/RISkEcCfgCsj4tHWymn31gZJx6VXfV0I/L6DbV2UUd5rVJWVcP67JnD3nGVurZhZv5SzpJKOkVwO3AO8BNwVEfMkfVvSOWm1m4AKSfXAl9l1xdblQDXwTUlz09fodN3ngBuBemAh8Je0/LvA6ZIWAKely73OZ04+iEES1z3kK8HMrP9RMvwwMNXW1kZdXV2P7/f//u4F7nx6KQ9+9RTGjRjS4/s3M9sfkuZERG1H63r1QH1/9dmTqwG47kGPrZhZ/+KkkgfjRgzh3KMncNfTy1ix/s18h2NmljVOKnnyuZMPYmcEP3nQYytm1n84qeTJhJFDOffo8dz+9FIaNjTnOxwzs6xwUsmjz51czY6dwU98JZiZ9RNOKnk0sWIoH3znOH755Os0urViZv2Ak0qeXX5qNS07g58+vCjfoZiZ7TcnlTybVDGMGUceyC+eXELTxq35DsfMbL84qfQCXzi1hm0tO7nhb26tmFnf5qTSC0wZNYwZR47jtseXsGqTWytm1nc5qfQSnz+lmuaWHdz4t9fyHYqZ2T5zUuklqkcP5/1HHMitjy9mzeZt+Q7HzGyfOKn0Il84tZo3t+/gRo+tmFkf5aTSi9RUlXL24WO55bHFrNvi1oqZ9T1OKr3MFafWsHnbDm56xGMrZtb3OKn0Mm8bU8rZh4/h548uZv2W7fkOx8xsrzip9EJfOLWGjVtbuPlRt1bMrG9xUumFDh1bxt8fVsXNj77G+jfdWjGzvsNJpZe6YnoNG5tb+Pmji/MdiplZtzmp9FKHHVjOaYdWcdMji9jY7NaKmfUNOU0qks6U9IqkeklXdrC+WNKd6fonJU1OyyskPSBpk6RrMuqXSpqb8Vol6UfpuoslNWWsuzSXx9YTvji9hg3NLdzy2OJ8h2Jm1i05SyqSCoBrgbOAacD5kqa1q3YJsDYiqoEfAt9Ly5uBbwBfyawcERsj4sjWF7AE+E1GlTsz1t+Y/aPqWYePL2f6IaO58ZHX2LS1Jd/hmJl1KZctlWOA+ohYFBHbgDuAGe3qzABuSd/fDUyXpIjYHBGPkCSXDkk6GBgN/C37ofceV0yvYd2W7dz6+OJ8h2Jm1qVcJpVxwNKM5WVpWYd1IqIFWA9UdHP755G0TCKj7EOSnpd0t6QJHX1I0mWS6iTVNTU1dXNX+fOOCSM4+W2V3PDwIja7tWJmvVxfHqg/D7g9Y/kPwOSIOAK4l10toN1ExPURURsRtZWVlT0Q5v67YnoNa7ds57YnluQ7FDOzPcplUlkO
ZLYWxqdlHdaRVAiUA6u72rCkdwCFETGntSwiVkdE62QkNwJH73vovctREw/gpJpR3PDwIrZsc2vFzHqvXCaVp4EaSVMkFZG0LGa2qzMTuCh9fy5wf7vurM6cz+6tFCSNzVg8B3hpn6Lupb50Wg2rN2/jF0+8nu9QzMw6VZirDUdEi6TLgXuAAuDmiJgn6dtAXUTMBG4CbpNUD6whSTwASFoMlAFFkj4AnBER89PVHwbObrfLKySdA7Sk27o4V8eWD0dPGsmJ1aP46cML+dhxkxhSVJDvkMzM3kLdaxj0T7W1tVFXV5fvMLrtqdfW8OGfPs433jeNS06cku9wzGyAkjQnImo7WteXB+oHnGOmjOT4qRX85KGFNG/fke9wzMzewkmlj/niaTU0bdzK7U95bMXMeh8nlT7muKkVHDNlpFsrZtYrOan0QV+aXkPDhq3cVbe068pmZj3ISaUPOv6gCt41+QCue3AhW1vcWjGz3sNJpQ+SxBXTa1ixvplf1S3LdzhmZm2cVPqoE6tHcdTEEVz34EK2tezMdzhmZoCTSp8liS+edjDL173J3XPcWjGz3sFJpQ97d80o3jFhBNc+UM/2HW6tmFn+Oan0YZL40vQalq97k98849aKmeWfk0ofd/LbKjlifDnXuLViZr2Ak0ofJ4krTq1h6Zo3+d2z7WcWMDPrWU4q/cD0Q0fz9nFlXPNAPS1urZhZHjmp9AOtrZUlq7fw+7lv5DscMxvAnFT6idOnVXHo2KS1smPnwJ3OwMzyy0mln5DEF6dX89qqzfzhObdWzCw/nFT6kTOmjeGQMaVcdf8Ct1bMLC+cVPqRQYPEF06tYVHTZv70wop8h2NmA5CTSj9z1tvHcHDVcK6evYCdbq2YWQ9zUulnWlsrCxo38ecX3Voxs57lpNIPnX34WA6qHMbVs+vdWjGzHpXTpCLpTEmvSKqXdGUH64sl3Zmuf1LS5LS8QtIDkjZJuqbdZx5Mtzk3fY3e07YGooJByXwrrzRs5J55K/MdjpkNIDlLKpIKgGuBs4BpwPmSprWrdgmwNiKqgR8C30vLm4FvAF/pZPMXRMSR6auxi20NSO874kCmjhrGjz22YmY9KJctlWOA+ohYFBHbgDuAGe3qzABuSd/fDUyXpIjYHBGPkCSX7upwW/seft9WMEhcfmo1L6/cyL0vNeQ7HDMbIHKZVMYBSzOWl6VlHdaJiBZgPVDRjW3/LO36+kZG4ujWtiRdJqlOUl1TU9PeHE+fc847DmRyxVCumr2ACLdWzCz3+uJA/QURcThwUvr6+N58OCKuj4jaiKitrKzMSYC9RWHBIC4/tYZ5b2zgvpcau/6Amdl+ymVSWQ5MyFgen5Z1WEdSIVAOrN7TRiNiefpzI/BLkm62fdrWQPCBIw9k4ki3VsysZ+QyqTwN1EiaIqkIOA+Y2a7OTOCi9P25wP2xh798kgoljUrfDwbeB7y4L9saKAoLBnH5KdW8sHw9D7zi1oqZ5VbOkko6rnE5cA/wEnBXRMyT9G1J56TVbgIqJNUDXwbaLjuWtBj4AXCxpGXplWPFwD2SngfmkrRObuhqWwPdPxw1jvEHDOHH97m1Yma5pe78kZF0ELAsIrZKOhk4Arg1ItblOL6cqq2tjbq6unyH0SNuf+p1vvabF/j5J97FyW8bne9wzKwPkzQnImo7WtfdlsqvgR2SqoHrScYufpml+KwHfOio8YwbMYQfe2zFzHKou0llZ9qd9Q/A1RHxVWBs7sKybCsqHMTnTjmIZ19fx98WrMp3OGbWT3U3qWyXdD7JQPgf07LBuQnJcuXco8cztrzErRUzy5nuJpVPAMcD/x4Rr0maAtyWu7AsF4oLC/jcyQcxZ8laHls44K+2NrMc6FZSiYj5EXFFRNwu6QCgNCIG9LO1+qoPv2sCY8pKfCWYmeVEt5JK+mTgMkkjgWeAGyT9ILehWS4UFxbwmfdM5anFa3hi0Zp8h2Nm/Ux3u7/KI2ID8EGSS4mP
BU7LXViWS+cdM5HRpcX8ePar+Q7FzPqZ7iaVQkljgQ+za6De+qiSwQV85j0H8cSiNTy5yGMrZpY93U0q3ya5M35hRDwtaSqwIHdhWa599NiJjBpezFX3+5/RzLKnuwP1v4qIIyLis+nyooj4UG5Ds1xKWitTebR+NXWLPbZiZtnR3YH68ZJ+K6kxff1a0vhcB2e5dcGxkxg1vIgfz3Zrxcyyo7vdXz8jeQrwgenrD2mZ9WFDigr41ElT+duCVcxZsjbf4ZhZP9DdpFIZET+LiJb09XOgf89wNUB87LhJjBxWxFVurZhZFnQ3qayW9DFJBenrY3gCrH5hWHEhl540hYdebWLu0j790Gkz6wW6m1Q+SXI58UpgBckkWBfnKCbrYRceP5kRQwe7tWJm+627V38tiYhzIqIyIkZHxAcAX/3VTwwvLuRTJ03l/pcbeX6ZWytmtu/2Z+bHL2ctCsu7C4+fRPkQt1bMbP/sT1JR1qKwvCstGcwlJ07hvpcaeXH5+nyHY2Z91P4kFT/itp+5+ITJlJYUurViZvtsj0lF0kZJGzp4bSS5X8X6kbKSwXzyhCnMmt/A/Dc25DscM+uD9phUIqI0Iso6eJVGRGFXG5d0pqRXJNVLurKD9cWS7kzXPylpclpeIekBSZskXZNRf6ikP0l6WdI8Sd/NWHexpCZJc9PXpXtzIizxyROmUFpcyNV+JpiZ7YP96f7aI0kFwLXAWcA04HxJ09pVuwRYGxHVwA+B1om/moFvAF/pYNPfj4hDgHcCJ0g6K2PdnRFxZPq6MYuHM2CUDx3MJ06YzF9eXMnLK91aMbO9k7OkAhwD1KcPn9wG3AHMaFdnBnBL+v5uYLokRcTmiHiEJLm0iYgtEfFA+n4byYRhfgZZln3yxCkMLy7k6tn1+Q7FzPqYXCaVccDSjOVlaVmHdSKiBVgPVHRn45JGAO8HZmcUf0jS85LuljShk89dJqlOUl1TU1P3jmSAGTG0iIv+bhJ/fnEFrzZszHc4ZtaH5DKp5IykQuB24KqIWJQW/wGYHBFHAPeyqwW0m4i4PiJqI6K2stKPL+vMpSdOZcjgAq6+360VM+u+XCaV5UBma2F8WtZhnTRRlNO9Z4pdDyyIiB+1FkTE6ojYmi7eCBy9j3EbcMCwIi48fjJ/fP4N6hvdWjGz7sllUnkaqJE0RVIRcB7J4/MzzQQuSt+fC9wfEXu8/0XSd0iSz5falY/NWDwHeGk/YjfgUydNoaSwgGvcWjGzbspZUknHSC4nmYb4JeCuiJgn6duSzkmr3QRUSKoneexL22XHkhYDPwAulrRM0rR0YrB/Jrma7Jl2lw5fkV5m/BxwBX7g5X6rGF7MhcdPYuZzb7CoaVO+wzGzPkBdNAz6tdra2qirq8t3GL1a08atnPSf93P228fyg48cme9wzKwXkDQnImo7WtcnB+qt51SWFvOxYyfxu7nLWbxqc77DMbNezknFunTZe6YyuGAQ1zzgsRUz2zMnFevS6NISPnrsRH777HKWrHZrxcw656Ri3fKZ9xxEwSDx3w8szHcoZtaLOalYt1SVlfDRYyby62eWsXTNlnyHY2a9lJOKddun3zOVQRL//aDHVsysY04q1m1jy4fwkXdN4O45y1i21q0VM3srJxXbK589+SAArnvQYytm9lZOKrZXDhwxhP9VO4G76pbyxro38x2OmfUyTiq21z6XtlZ+8pBbK2a2OycV22vjDxjKuUeP546nlrJyfXPXHzCzAcNJxfbJ506uZmeEWytmthsnFdsnE0YO5YNHjeOXT71O4wa3Vsws4aRi++zzp1SzY2fwk4cWdV3ZzAYEJxXbZ5MqhvGBI8fxiyeX0LjRrRUzc1Kx/XT5qdVs37GTGx52a8XMnFRsP00ZlbRWbntiCas2bc13OGaWZ04qtt8+f2o121rcWjEzJxXLgoMqh/P+dxzIrY8vYbVbK2YDmpOKZcUXTq2muWUHNz7yWr5DMbM8ymlSkXSmpFck
1Uu6soP1xZLuTNc/KWlyWl4h6QFJmyRd0+4zR0t6If3MVZKUlo+UdK+kBenPA3J5bLa76tGlvPfwsdz62GLWbt6W73DMLE9yllQkFQDXAmcB04DzJU1rV+0SYG1EVAM/BL6XljcD3wC+0sGmrwM+BdSkrzPT8iuB2RFRA8xOl60HXTG9hi3bd3CTWytmA1YuWyrHAPURsSgitgF3ADPa1ZkB3JK+vxuYLkkRsTkiHiFJLm0kjQXKIuKJiAjgVuADHWzrloxy6yEHV5Vy9tvH8vPHFrNui1srZgNRLpPKOGBpxvKytKzDOhHRAqwHKrrY5rJOtlkVESvS9yuBqn0L2/bHF6ZXs2lrCze7tWI2IPXLgfq0FRMdrZN0maQ6SXVNTU09HFn/d8iYMs48bAw/e3Qx69/cnu9wzKyH5TKpLAcmZCyPT8s6rCOpECgHVnexzfGdbLMh7R5r7SZr7GgDEXF9RNRGRG1lZWU3D8X2xhemV7Nxaws/e9StFbOBJpdJ5WmgRtIUSUXAecDMdnVmAhel788F7k9bGR1Ku7c2SDouverrQuD3HWzrooxy62GHHVjO6dOquPmR19jQ7NaK2UCSs6SSjpFcDtwDvATcFRHzJH1b0jlptZuACkn1wJfJuGJL0mLgB8DFkpZlXDn2OeBGoB5YCPwlLf8ucLqkBcBp6bLlyRen17ChuYVbHl2c71DMrAdpDw2Dfq+2tjbq6uryHUa/dektT/P04rU88k+nUFoyON/hmFmWSJoTEbUdreuXA/XWO1wxvYb1b27n1seX5DsUM+shTiqWM0eMH8Epb6vkhr8tYtPWlnyHY2Y9wEnFcuqK6TWs27Kd29xaMRsQCvMdgPVv75x4AO8+uJKfPryQ4cUFnDatirHlQ/IdlpnliAfqPVCfc6+s3MhnfzGHRU2bAThifDmnH1rFGYeN4eCq4aTPBDWzPmJPA/VOKk4qPaa+cROz5q/k3vkNPPv6OgAmjhzKGdOqOH1aFbWTR1IwyAnGrLdzUumEk0r+NG5o5r6XGpk1fyWP1a9m246djBxWxKmHjOaMaVWcVFPJkKKCfIdpZh1wUumEk0rvsGlrCw+90sS981cy++VGNja3UDJ4ECfVVHL6tCqmHzKaiuHF+VJXzk0AABDjSURBVA7TzFJ7SioeqLe8G15cyHuPGMt7jxjL9h07eeq1Ncyal3ST3Tu/gUGC2kkjOT3tJps8ali+QzazTril4pZKrxURzHtjA7PS5PLSig0AHFw1nDOmjeH0aVUcPq6cQR6HMetR7v7qhJNK37J0zRbund/ArPkreXrxWnbsDMaUlXDatNGcMW0Mx02toKjQt16Z5ZqTSiecVPqutZu3cf/Ljdw7v4GHXm3ize07KC0u5ORDRnP6tCpOflslZX7emFlOOKl0wkmlf2jevoNH61cxa14Ds19uYNWmbQwuEMdNreCMaVW+4dIsy5xUOuGk0v/s2BnMXbqWWfMamDW/gddW7brhMrkfxjdcmu0vJ5VOOKn0bxHBwqZNzJrfwKx5DcxdmtxwOaliaNsd/UdPOsA3XJrtJSeVTjipDCwNG5q576XkSrLMGy6np+MwvuHSrHucVDrhpDJwtd5wOWv+Su73DZdme8U3P5q10/6GyycXreHe+Svb7olpveHyjMOSGy4nVfiGS7PucEvFLRXLkHnD5ax5K3l55UYA3lZVyunTqjjjsOSGSw/020Dm7q9OOKlYV5au2ZK2Xlby1Gtr2Bkwpqyk7ZExvuHSBiInlU44qdjeaL3hctb8lTz86irfcGkDVt6SiqQzgR8DBcCNEfHdduuLgVuBo4HVwEciYnG67mvAJcAO4IqIuEfS24A7MzYxFfhmRPxI0reATwFN6bqvR8Sf9xSfk4rtq+btO3hkwSrund/AfS81sHpzxg2Xh43h9EOrGFNeku8wzXIiL0lFUgHwKnA6sAx4Gjg/IuZn1PkccEREfEbSecA/RMRHJE0DbgeOAQ4E7gMOjogd
7ba/HDg2IpakSWVTRHy/uzE6qVg27NgZPPv62rZxmMWrtwDwjvHl6TjMGGpG+4ZL6z/ydfXXMUB9RCxKg7gDmAHMz6gzA/hW+v5u4Bolv3kzgDsiYivwmqT6dHuPZ3x2OrAwIpbk8BjMulQwSNROHknt5JF87axD0hkukzv6vz/rVb4/61UmVQxtu6PfN1xaf5bLpDIOWJqxvAw4trM6EdEiaT1QkZY/0e6z49p99jyS1kymyyVdCNQB/zsi1rYPStJlwGUAEydO3JvjMeuSJGqqSqmpKuXzp1S33XA5a14Dtzy2hBv+9ppvuLR+rU/epyKpCDgH+FpG8XXAvwGR/vwv4JPtPxsR1wPXQ9L9lfNgbUCrKivhgmMnccGxk9jYvJ2HXm3i3vkN/HXeSn41Z1nbDZdnTKti+qFVjBxWlO+QzfZLLpPKcmBCxvL4tKyjOsskFQLlJAP2XX32LOCZiGhoLch8L+kG4I9ZOAazrCktGcz7jjiQ9x1xINta0hku57eb4XLyyLYEM7liqMdhrM/J5UB9IclA/XSShPA08NGImJdR5/PA4RkD9R+MiA9LOgz4JbsG6mcDNa0D9en4zD0R8bOMbY2NiBXp+38kGcA/b08xeqDeeoOI4MXlG9ru6G+94XJ4cSEHjR5OTeurajg1o0sZN2KIZ7u0vMrnJcVnAz8iuaT45oj4d0nfBuoiYqakEuA24J3AGuC8jIH9fybpvmoBvhQRf0nLhwGvA1MjYn3Gvm4DjiTp/loMfLo1yXTGScV6o9dXb+HhBU0saNjIgsZNLGjcRNPGrW3rSwYPonp0kmCq2xJOKRNHDvUFANYjfPNjJ5xUrK9Yt2Ub9Y2bqE+TzILGTdQ3bOSN9c1tdYoKBzF11LDkQoGM1s2kimEMLvBd/5Y9fqCkWR83YmhR22XLmTY2b2dh02YWNGxsSzhzl67lD8+90VancJCYMmoYNVXDqR5d2pZspowaRnGhrzyz7HJSMevDSksGc+SEERw5YcRu5Vu2tbCoaTMLGjeyoCFJNi+t2MhfX1zJzrRzYpBgUsWwjC60pEvtoMrhvszZ9pmTilk/NLSokLePK+ft48p3K2/evoPXVm1u6z5r7Up74OVGWtJsI8H4A4ZQk7ZqqtMxm+rRwxle7D8Ztmf+H2I2gJQMLuDQsWUcOrZst/JtLTtZsjpJNknLJulOe2TBKrbt2NlW78DyEqrbjdlUV5ZSPtQP0rSEk4qZUVQ4qO1JABy+q7xlx06Wrn2z7Uq0ZNxmI794cjXN23clm9GlxW3dZ5lXpPlmzoHHScXMOlVYMIgpo4YxZdQwzjhsV/nOncHydW/uNmazoHETv6pbyuZtbc99pWJYUdp9NpzqyuFtV6ZVlhb7xs5+yknFzPbaoEFiwsihTBg5lFMPqWorjwhWrG9Ou9F2XZE2c+4bbGhuaatXVlLYlmBax2xqRg9nbHmJk00f5/tUfJ+KWc5FBE0bt7Ylm7Z7bRo3sWbztrZ6fopA3+D7VMwsryQxuqyE0WUlnFA9ard1qzdtbWvRtI7ZPPxqE3fPWdZWx08R6DucVMwsryqGF1MxvJhjp1bsVr5+y3bqm3Yfs3ly0Wp+++yuZ8u2PkWgevRwDhwxhKqyEqrKipOfpSWMLiumZLDvuelJTipm1iuVDx3M0ZNGcvSkrp8i8MLy9cya38C2lp1v2c6IoYPbEkxVWQlj0sQzuqykbXnU8CIK/SibrHBSMbM+pbOnCEQE69/cTsOGrTRsaGblhmYaNzS3LTdsaGZBwyaaNm1lx87dx5IlGDW8mKqyYsak3XRVpRmtnjQRHTC0yGM7XXBSMbN+QRIjhhYxYmgRbxtT2mm9HTuD1Zu30rhhKyvXN9OwMUk8jWkiWr6umWdfX8fqjAsIWg0uEKPbJZvRaSKqymgBlRYXDtir2JxUzGxAKRiUJIbRpSVveYxNpm0tO2nMSDgNG5pp2LiVhjQRLWjcxCP1q9iYcal0q6FFBUnCKS3e
fZwnI/lUlZX0y/EeJxUzsw4UFQ5i/AFDGX/A0D3W27y1hcaNu7rYGjK63Bo3bOW5ZetYub6ZrR2M95QPGdyWYEaXljCmfNf7qrJixpSXMGp4cZ+ausBJxcxsPwwrLmRKcSFTRg3rtE5EsOHNlrSrrZmV65vbJaKt1DeuonFjx+M9FcPajfeUvbUFNLKXjPc4qZiZ5ZgkyocOpnzoYA6u6ny8Z+fOYPXmbW9t8WxMEtGK9c08t2wdqza9dbyncJCS7rbyXRcZjG672m3XcllJbsd7nFTMzHqJQYNEZWkxlaXFXY73NG1q7WJrbnfF21YWNm3isYWrdns0TqshgwuoKivmH08/mBlHjsv6MTipmJn1MUWFgxg3YgjjRgzZY70t21pobL2kemN6hdv65H3FsOKcxOakYmbWTw0tKmTyqEIm72G8J9tyekmBpDMlvSKpXtKVHawvlnRnuv5JSZMz1n0tLX9F0t9nlC+W9IKkuZLqMspHSrpX0oL05wG5PDYzM3urnCUVSQXAtcBZwDTgfEnT2lW7BFgbEdXAD4HvpZ+dBpwHHAacCfx3ur1Wp0TEke2eknklMDsiaoDZ6bKZmfWgXLZUjgHqI2JRRGwD7gBmtKszA7glfX83MF3JZQkzgDsiYmtEvAbUp9vbk8xt3QJ8IAvHYGZmeyGXSWUcsDRjeVla1mGdiGgB1gMVXXw2gFmS5ki6LKNOVUSsSN+vBKrogKTLJNVJqmtqatr7ozIzs071nds0dzkxIo4i6Vb7vKR3t68QycxjHc4+FhHXR0RtRNRWVlbmOFQzs4Ell0llOTAhY3l8WtZhHUmFQDmwek+fjYjWn43Ab9nVLdYgaWy6rbFAYxaPxczMuiGXSeVpoEbSFElFJAPvM9vVmQlclL4/F7g/bWXMBM5Lrw6bAtQAT0kaJqkUQNIw4AzgxQ62dRHw+xwdl5mZdSJn96lERIuky4F7gALg5oiYJ+nbQF1EzARuAm6TVA+sIUk8pPXuAuYDLcDnI2KHpCrgt+kjBgqBX0bEX9Ndfhe4S9IlwBLgw7k6NjMz65iShsHAJKmJJAHti1HAqiyGky2Oa+84rr3XW2NzXHtnf+KaFBEdDkoP6KSyPyTVtbtPpldwXHvHce293hqb49o7uYqrL179ZWZmvZSTipmZZY2Tyr67Pt8BdMJx7R3Htfd6a2yOa+/kJC6PqZiZWda4pWJmZlnjpGJmZlnjpLIHkm6W1CjpxU7WS9JV6bwvz0s6qpfEdbKk9emcM3MlfbOH4pog6QFJ8yXNk/TFDur0+DnrZlw9fs4klUh6StJzaVz/2kGdTuccynNcF0tqyjhfl+Y6rox9F0h6VtIfO1jX4+erm3Hl83x1OAdVxvrs/k5GhF+dvIB3A0cBL3ay/mzgL4CA44Ane0lcJwN/zMP5Ggsclb4vBV4FpuX7nHUzrh4/Z+k5GJ6+Hww8CRzXrs7ngJ+k788D7uwlcV0MXNPT/8fSfX8Z+GVH/175OF/djCuf52sxMGoP67P6O+mWyh5ExMMkj4/pzAzg1kg8AYxofahlnuPKi4hYERHPpO83Ai/x1ukOevycdTOuHpeeg03p4uD01f7Kmc7mHMp3XHkhaTzwXuDGTqr0+PnqZly9WVZ/J51U9k935ozJl+PT7ou/SDqsp3eedju8k+Rbbqa8nrM9xAV5OGdpl8lckqdq3xsRnZ6v2H3OoXzHBfChtLvkbkkTOlifCz8C/g+ws5P1eTlf3YgL8nO+oPM5qFpl9XfSSaV/eobk2TzvAK4GfteTO5c0HPg18KWI2NCT+96TLuLKyzmLiB0RcSTJ9A7HSHp7T+y3K92I6w/A5Ig4AriXXa2DnJH0PqAxIubkel97o5tx9fj5ytDlHFTZ5KSyf7ozZ0yPi4gNrd0XEfFnYLCkUT2xb0mDSf5w/yIiftNBlbycs67iyuc5S/e5DngAOLPdqs7mHMprXBGxOiK2pos3Akf3QDgnAOdIWkwyPfmpkv6n
XZ18nK8u48rT+Wrdd2dzULXK6u+kk8r+mQlcmF49cRywPnZNaZw3ksa09iNLOobk3znnf4jSfd4EvBQRP+ikWo+fs+7ElY9zJqlS0oj0/RDgdODldtU6m3Mor3G163M/h2ScKqci4msRMT4iJpMMwt8fER9rV63Hz1d34srH+Ur3u6c5qFpl9XcyZ/Op9AeSbie5KmiUpGXAv5AMWhIRPwH+THLlRD2wBfhEL4nrXOCzklqAN4Hzcv2LlToB+DjwQtofD/B1YGJGbPk4Z92JKx/nbCxwi6QCkiR2V0T8Ud2Yc6gXxHWFpHNI5jtaQ3J1U170gvPVnbjydb46nINK0mcgN7+TfkyLmZlljbu/zMwsa5xUzMwsa5xUzMwsa5xUzMwsa5xUzMwsa5xUzHJI0o6MJ9POlXRlFrc9WZ08qdosX3yfilluvZk+7sRsQHBLxSwP0jku/jOd5+IpSdVp+WRJ96cPHpwtaWJaXiXpt+kDL5+T9Hfppgok3aBk3pNZ6R3wZnnjpGKWW0PadX99JGPd+og4HLiG5Cm3kDzM8pb0wYO/AK5Ky68CHkofeHkUMC8trwGujYjDgHXAh3J8PGZ75DvqzXJI0qaIGN5B+WLg1IhYlD7scmVEVEhaBYyNiO1p+YqIGCWpCRif8VDC1sf43xsRNenyPwGDI+I7uT8ys465pWKWP9HJ+72xNeP9DjxOannmpGKWPx/J+Pl4+v4xdj0E8QLgb+n72cBnoW0CrfKeCtJsb/hbjVluDcl4MjLAXyOi9bLiAyQ9T9LaOD8t+wLwM0lfBZrY9cTYLwLXS7qEpEXyWSDv0yyYtecxFbM8SMdUaiNiVb5jMcsmd3+ZmVnWuKViZmZZ45aKmZlljZOKmZlljZOKmZlljZOKmZlljZOKmZllzf8P56lF8BGEeVwAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "%%local\n",
    "%matplotlib inline\n",
    "y = results[\"loss\"]  # loss value recorded at each epoch\n",
    "x = list(range(1, len(y)+1))  # epoch numbers (1-indexed)\n",
    "plt.title(\"Loss per Epoch - MNIST Training\")\n",
    "plt.xlabel(\"Epoch\")\n",
    "plt.ylabel(\"Loss\")\n",
    "plt.plot(x,y)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Plot Accuracy/Epoch During Training"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[<matplotlib.lines.Line2D at 0x7f976af9f320>]"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEWCAYAAAB8LwAVAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjAsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+17YcXAAAgAElEQVR4nO3de3xV9Znv8c9DLoSEW0ggaICEq4JovcR7VUCwap3S1mmLVq2ODqMVp2OnM6edV6ft2Dk9Mz0znXaKl1q1Wq2ltp46TNWpKGBR0QJeKqJAuBMEQkIgJCHX5/yxVnATdshOyM7aSb7v12u/si6/tdazV/Zez/r9fmutbe6OiIhIWwOiDkBERFKTEoSIiMSlBCEiInEpQYiISFxKECIiEpcShIiIxKUEIdJDzGyGme2MOo7eLNyH73V3WYlPCaKXMrPlZrbfzAZGHUtvZGbFZuZmdqjN6wtRx5YoM/tO+B6+0mb6V8Lp3wnHZ4Tj97Up94qZ3RwO32xmr8TM+7iZvWZmB8ys0sxeNbNzzewfYvbVYTNrjhl/r836L4mZVxNnf4/r7Ht29+Xuflp3l5X4lCB6ITMrBi4BHPhUD287vSe31x06iHm4uw+Oef2qxwLrHhuAm9pM+1I4PVYNcGP42TkuMxsK/A74MTACKAT+Cah39++17ivgdmBlzL476mDs7itiyrbOi93f29tsd4CZ6ZiUQvTP6J1uAl4HHiU4GBxhZoPM7N/NbFt49veKmQ0K57WeFVaZ2Y6Ys8flZnZbzDrank26md1pZhuBjeG0H4XrOGhma8zskpjyaeGZ5iYzqw7njzWze83s39vEu9jM7o73JsPt/rWZbTazfWb2f2MPIGb2F2b2fliT+r2ZFR0v5s4ws0fN7AEzWxK+h5fbrP8iM1sV7uNVZnZRzLwRZvYzM9sVxvZMm3X/rZntNbMPzeyWzsbWxiog28xOC9d9GpAVTo9VRfB5+XYC65wC4O6/dPdmd69z9xfc/U8nGOsxws/nd81sJUESG2dmt4X/1+rwMxT72ZxtZltjxnea2VfN7N3wf/FLC2vVnSkbzv+Gme02szIz+8vwM1Tc3e+5N1GC6J1uAn4Rvj5hZgUx8/4NOAe4iODs7++BlvDg9jzBWeFI4Ezg7U5s89PA+cC0cHxVuI4RwJPAr80sK5z3VeA64GpgKPAXQC3wGHBd60HezPKB2eHy7fkMUAKcDcwN14WZzQX+Afhs+H5WAL/sIObO+iLwXSCfYF/9Itz2COBZ4D+BPOAHwLNmlhcu9ziQTXDWPAr4j5h1jgaGEZyV3wrca2a5XYyv1eN8VIv4Ujgez/8GrjWzUzpY3wag2cweM7OruiG+jtxI8H8dCuwE9gCfDMf/EvixmZ1xnOU/D8wBJhB89m/sbFkzuwa4C5hJkCBndf3t9CHurlcvegEfBxqB/HD8A+DucHgAUAd8LM5y3wB+2846lwO3xYzfDLwSM+7ArA7i2t+6XWA9MLedcu8Dc8LhBcBzx1mnA1fGjH8ZeCkcfh64NWbeAIIkVJRIzEBxWKaqzWtqOP9RYFFM+cFAMzCW4KDyxzbrWxnut5OAFiA3zjZnhP+f9Jhpe4ELuvhZ+A7wBDAO2A5khH/HhtO/E7PdneHw94FfhcOvADe38z+fGu6DnUATsBgoaLP9o5bpINbW/Z3eZvorwLc6WPZ3wJ3h8Gxga8y8ncC8mPEfAAu7UPbnwHdj5p0axlvc1e9qX3ipBtH7fAl4wd33heNP8lEzUz5B88KmOMuNbWd6onbEjpjZ18JmgANmVkVwVpyfwLYeA24Ih2+g/bPdeNvdBpwcDhcBPwqby6qASsAIzszjxtyOfHcfHvN6P97y7n4o3MbJ4Wtbm/VsC7c9Fqh09/3tbK/C3ZtixmsJks9R7OgO3uNeieNBW34p8D1go7sf733/K0Gt82MdrPN9d7/Z3ccA0wne8w+P
t8wJaPvZusbM3rCgc7wKuIKPPlvx7I4Zjrs/Eyh7cps4Evns9Hm9rsOxPwv7Ej4PpJlZ6wd9IDA8/MK/CxwGJgLvtFl8B3BeO6uuIWgSaTU6Tpkjj/0N+xv+HrgceM/dW8xsP8EBunVbE4G1cdbzBLA2jHcq8EycMrHGAq0HyHHArpht/G93/8Vxlj3RRxWPbR0ws8EEzWm7wldRm7LjgP8J4xphZsPdvaqrG3b3FRz/QNfWz4FHgOP2abh7hZn9kKDpLNFYPjCzR4G/6kQ8nRH72RoE/AaYBzzr7o1m9js++mwly4fAmJjxse0V7E9Ug+hdPk3QzDGNoP3/TIKD7ArgJndvIThI/MDMTg47iy8MO+J+Acw2s8+bWbqZ5ZnZmeF63wY+a2bZZjaJoG38eIYQNDuUA+lm9i2C9uJWDwHfNbPJFjijtX3e3XcS9F88Djzt7nUdbOvvzCzXzMYCXwFarzJ6APhGTOfsMDP7XAfr6qyrLejYzyQ4oL4enp0/B0wxs+vDffkFgv/J79z9Q4Lmr/vCuDPM7NJujiueXxGcaT+VQNkfEPRRTY0308xODTvSx4TjYwn6lF7vpliPZyCQSfDZag77Bi7vge0+BdxqZqeYWTbwjz2wzZSnBNG7fAn4mbtvd/fdrS9gIfBFCy7n/BpBTWIVQZPIvwIDwmaIq4G/Dae/DbQ2M/wH0EDQOfgYYWfscfye4Gx5A0HTymGOrpL/gOAL9wJwEHgYGBQz/zHgdDpuXgL4L2BNGO+z4bpw99+G722RmR0kqK1clcD62qqyo6/N/2rMvCcJrvqpJOjQvCHcdgVwDcG+rCCoTV0T0+x3I0E/0QcEfQx/04W4OsWDK41eTCDh4u4HCfoiRrRTpJqgc/8NM6shSAxrCd5vUoW1rruB3xLs9z8n6INI9nb/G7gf+APBVW+vhrPqk73tVGZhh4xIjwnPqJ8g6FBu9wNoZg5MdvfSHgvuo20/StCx+82e3rZEz8xOB94EBoY1835JNQjpUWaWQdBU9NDxkoNITzOzz5hZZngZ878A/9WfkwMoQUgPMrOpBJeSnkTyrogR6ao7gX0EV4QdDsf7NTUxiYhIXKpBiIhIXH3mPoj8/HwvLi6OOgwRkV5lzZo1+9x9ZLx5fSZBFBcXs3r16qjDEBHpVcys7VMBjlATk4iIxKUEISIicSlBiIhIXEoQIiISlxKEiIjEpQQhIiJxKUGIiEhcfeY+CBGRvu5wYzP7DtVTcaiBipp69h1qoOJQA8MGZXD9+eO6fXtKECIiEWlucfbXBgf5ikP17KsJ/h6dAD76W9PQHHc9Z48brgQhIpLK3J1D9U3HnOFXHKqnoqbhqLP/ikMNVNY2EO95qWkDjBE5meTlZJI/eCBnj8smb/BA8gZnkp8T/M0bPJC8nEzyBmeSnZmcQ7kShIjIcTQ0tVDZenCPOcPfFx7kjxz0wxpAQ1P8n5AYmpVOfniQn5A/mHOLg4N8/uBM8sKDfuvwsEEZDBiQ7J/h7pgShIj0Ky0tzoG6xqPP8GOac2LP8Pcdqufg4aa468lMH0B+TuaRM/spBUOCA3x4kM8fMvBIDSA3J4OB6Wk9/E5PnBKEiPR6dQ3Ncc/w91UffbCvqGmgsqaB5pZj23XMYET2Rwf4aScPDc74Y5JA7Nn+4IHpmEV/lp9MShAiknKamluoPNJ5G/8Mf1/MmX5tO523OZlpRw7uY3KzOXPs8CMJIDjgDzwyPiInk7QUaNZJJUoQIhKJqtoGtlXUsrWihu0VtWytqGVbRQ3bKmspr66Pu0x6a+dt2HY/Pj+n3TP8vJyBDMrsfc06qUQJQkSSwt0pr65nW2UtW/fVsK2ilm2VYRKoqOVAXeNR5UcPzWJcXjYzTxnJ6GGDGHnUlTpBQhialRqdt/2FEoSIdFlzi7Orqi48+IdJoKL1by11jR81/aQNMAqHD6IoL5s/+9hJFI3IoSgvm+L8HMbmZutsPwUpQYjIcdU3NbNzf91R
B/7W4R37a2ls/qjDNzN9AONGZFOcl81FE/Mpzs8Ox3MozB1ERpqe7tObJDVBmNmVwI+ANOAhd/+XNvOLgEeAkUAlcIO77wznfQn4Zlj0n939sWTGKtKf1TY0HXXg3xozvOtA3VE3cw0emM64EdmcetIQrjhtNMV52YzLC5LA6KFZagLqQ5KWIMwsDbgXmAPsBFaZ2WJ3XxdT7N+An7v7Y2Y2C/g/wI1mNgL4NlACOLAmXHZ/suIV6etiO4WPqgnE6RQekZPJuBHZnFucy7i8MRTnZVOUFzQJ5eVk9vnLOyWQzBrEeUCpu28GMLNFwFwgNkFMA74aDi8DngmHPwEscffKcNklwJXAL5MYr0iv1topHHv2n2incOvBvzgvh3F52QzNyojoXUgqSWaCKAR2xIzvBM5vU+Yd4LMEzVCfAYaYWV47yxa23YCZzQfmA4wb1/0PqhJJNSfSKVyclxP0B6hTWBIUdSf114CFZnYz8AegDIh/x0sc7v4g8CBASUlJnEdeifQ+6hSWVJHMBFEGjI0ZHxNOO8LddxHUIDCzwcC17l5lZmXAjDbLLk9irCI9qqudwp+YPpqiER/1B6hTWJIpmQliFTDZzMYTJIZ5wPWxBcwsH6h09xbgGwRXNAH8HviemeWG41eE80V6ldqGJl7ZuI8Pdld32ClclBd0ChfljaFIncKSApKWINy9ycwWEBzs04BH3P09M7sHWO3uiwlqCf/HzJygienOcNlKM/suQZIBuKe1w1ok1e09eJiXPtjLknV7eKV035HHP48emkWROoWlFzGP92sVvVBJSYmvXr066jCkH3J3Nuw5xIvv7+GFdXt4Z0cVAGNHDGLO1NHMnjaKs8bmqlNYUpKZrXH3knjzou6kFumVmppb+OPWSl5ct5cX39/D9spaAD42djh/94lTmD21gCkFg9U0JL2aEoRIgqoPN/LyhnJeXLeHZevLOVDXSGb6AD4+KZ/bL5vI7KmjGDU0K+owRbqNEoTIceyqquOlsOno9c0VNDY7I3IymTOtgNlTC7h0Sn7Sfg9YJGr6ZIvEcHfe23WQF9/fw5J1e3hv10EAxufncMvF45kzrYCzx+Xqh2WkX1CCkH6voamFN7ZUsGTdHl5ct4ddBw5jBueMy+XrV53K7KkFTBo1OOowRXqcEoT0SwdqG1m+YS8vrNvDH9aXU13fRFbGAC6dPJK/mTOFWaeOIn/wwKjDFImUEoT0Gzsqa4Nawvt7+OOWSppanPzBA/nkGScxZ1oBF0/KJytDl6KKtFKCkD6rpcV5t+zAkaTwwe5qACaPGsz8Sycwe1oBZ44ZrkdViLRDCUL6lMONzazcVMEL6/bw0vt72FtdzwCDc4tH8M1PTmX21AKK83OiDlOkV1CCkF6vsqaBpR/s5cV1e/jDxnJqG5rJyUzjslNGMntqATNPGUVuTmbUYYr0OkoQ0itt2VfDknW7eXHdXlZvq6TFg2cdffbsQmZPLeDCiXkMTFd/gsiJUIKQXqG5xXl7x36WrNvLknW72VReA8DUk4ayYOYk5kwbzfTCoXq0hUg3UoKQlFXX0MyKjeW8+P4eln6wl32HGkgfYFwwIY8bLyhi9rQCxuRmRx2mSJ+lBCEppby6npfeD646WrFxH/VNLQwZmM6MU0cxZ1oBl00ZybBBejS2SE9QgpBIuTulew+xJHy0xds7qnCHwuGDuO68ccyZVsC5xSPITNdPZ4r0NCUI6XFNzS2s3rafF9ftYcn7e9hWETwq+4wxw7h79hRmTy1g6klD1J8gEjElCOkRh+qbWLGhnCXr9rB0/V6qahvJTBvAhRPz+MtLJnD51FGcNGxQ1GGKSAwlCEma3QcOH3kq6spNFTQ0tzA8O4NZp4xi9rQCLp0yksED9REUSVX6dkq3cXfe/7CaF8NO5j/tPABAUV42N10YXHVUUpRLepr6E0R6AyUIOSGNzS38cUslS9YFNYWyqjrM4MzwpzevmBY8Klv9CSK9jxKEdNrB
w40sXx/0Jyxfv5fqw00MTB/AJZPzuWvWJGZNHcWoIfrpTZHeTglCOuXg4UZm//vL7K2uJy8nk6umj2b21AI+Plk/vSnS1+gbLZ3y89e2sre6np/eVMKsU0fppzdF+jAlCElYTX0TD7+yhZmnjGTOtIKowxGRJEvq5SRmdqWZrTezUjP7epz548xsmZm9ZWZ/MrOrw+nFZlZnZm+HrweSGack5sk3trO/tpEFsyZHHYqI9ICk1SDMLA24F5gD7ARWmdlid18XU+ybwFPufr+ZTQOeA4rDeZvc/cxkxSedc7ixmQdXbOaiiXmcU5QbdTgi0gOSWYM4Dyh1983u3gAsAua2KePA0HB4GLArifHICfj16h2UV9ezYNakqEMRkR6SzARRCOyIGd8ZTov1HeAGM9tJUHu4K2be+LDp6WUzuyTeBsxsvpmtNrPV5eXl3Ri6xGpoauGBlzdzTlEuF07IizocEekhUd/Seh3wqLuPAa4GHjezAcCHwDh3Pwv4KvCkmQ1tu7C7P+juJe5eMnLkyB4NvD955q0yyqrqWDBzkm54E+lHkpkgyoCxMeNjwmmxbgWeAnD3lUAWkO/u9e5eEU5fA2wCpiQxVmlHU3ML9y0v5bSThzLjFCVhkf4kmQliFTDZzMabWSYwD1jcpsx24HIAM5tKkCDKzWxk2MmNmU0AJgObkxirtOPZdz9ka0Utd81S7UGkv0naVUzu3mRmC4DfA2nAI+7+npndA6x298XA3wI/NbO7CTqsb3Z3N7NLgXvMrBFoAW5398pkxSrxtbQ4C5eWMnnUYK6YNjrqcESkhyX1Rjl3f46g8zl22rdihtcBF8dZ7mng6WTGJh17Yd1uNu49xI/mnckA3TEt0u9E3UktKcrdWbislOK8bD55+klRhyMiEVCCkLiWbyhnbdlBvjxjkn6/QaSf0jdfjuHu/PiljRQOH8Snz2p764qI9BdKEHKMlZsreHN7FX912QQy0/UREemv9O2XYyxcWsrIIQP5fMnYjguLSJ+lBCFHWbOtktc2VTD/kglkZaRFHY6IREgJQo6ycGkpudkZXH/+uKhDEZGIKUHIEWvLDrBsfTm3fnw8OQP1W1Ii/Z0ShBxx77JShmSlc9NFxVGHIiIpQAlCANiwp5rn1+7m5ouKGZqVEXU4IpIClCAEgPuWlZKdmcYtF4+POhQRSRFKEMLWfTUsfmcXN1xQxIiczKjDEZEUoQQh3L98E+lpA7jt46o9iMhHlCD6ubKqOp5+cyfzzh3LqKFZUYcjIilECaKf+8nLmwD4q8smRhyJiKQaJYh+bG/1YRat2sG1Z4+hcPigqMMRkRSjBNGPPbRiC03NLdwxQ7UHETmWEkQ/VVnTwBOvb+NTHzuZ4vycqMMRkRSkBNFP/ezVLdQ2NHPnzElRhyIiKUoJoh86UNfIo69u5arpo5lcMCTqcEQkRSlB9EOPr9xKdX2Tag8iclxKEP1MTX0TD7+yhZmnjGR64bCowxGRFKYE0c88+cZ29tc2smDW5KhDEZEUpwTRjxxubObBFZu5aGIe5xTlRh2OiKQ4JYh+5Nerd1BeXc+CWep7EJGOJTVBmNmVZrbezErN7Otx5o8zs2Vm9paZ/cnMro6Z941wufVm9olkxtkfNDS18MDLmzmnKJcLJ+RFHY6I9AJJSxBmlgbcC1wFTAOuM7NpbYp9E3jK3c8C5gH3hctOC8dPA64E7gvXJ130zFtllFXVsWDWJMws6nBEpBfoMEGY2V1m1pUG6/OAUnff7O4NwCJgbpsyDgwNh4cBu8LhucAid6939y1Aabg+6YKm5hbuW17K9MKhzJgyMupwRKSXSKQGUQCsMrOnwiajRE8/C4EdMeM7w2mxvgPcYGY7geeAuzqxLGY238xWm9nq8vLyBMPqf55990O2VtSyYOZk1R5EJGEdJgh3/yYwGXgYuBnYaGbfM7PueMLbdcCj7j4GuBp43MwSbvZy9wfdvcTdS0aO1JlxPC0tzsKlpUwp
GMwV0wqiDkdEepGEDsbu7sDu8NUE5AK/MbPvH2exMmBszPiYcFqsW4Gnwm2sBLKA/ASXlQS8sG43G/ce4s6ZkxgwQLUHEUlcIn0QXzGzNcD3gVeB0939DuAc4NrjLLoKmGxm480sk6DTeXGbMtuBy8PtTCVIEOVhuXlmNtDMxhPUYP7YqXcmuDsLl5VSnJfNJ08/KepwRKSXSU+gzAjgs+6+LXaiu7eY2TXtLeTuTWa2APg9kAY84u7vmdk9wGp3Xwz8LfBTM7uboMP65rC28p6ZPQWsI6ix3OnuzV15g/3Z8g3lrC07yPevPYP0NN3yIiKdY8Hx+DgFzC4A3nP36nB8KDDV3d/ogfgSVlJS4qtXr446jJTh7lx7/2vsOVjPsq/NIDNdCUJEjmVma9y9JN68RI4a9wOHYsYPhdMkha3cXMGb26u4/bIJSg4i0iWJHDnMY6oZ7t5CYk1TEqGFS0sZOWQgnysZ23FhEZE4EkkQm83sr80sI3x9Bdic7MCk69Zsq+S1TRX81aUTyMrQDegi0jWJJIjbgYsILjPdCZwPzE9mUHJiFi4tJTc7g+vPHxd1KCLSi3XYVOTuewkuUZVeYG3ZAZatL+drV0whO1MtgSLSdR0eQcwsi+CGttMI7lMAwN3/IolxSRfdu6yUIVnp3HRRcdShiEgvl0gT0+PAaOATwMsEdzVXJzMo6ZoNe6p5fu1ubr6omKFZGVGHIyK9XCIJYpK7/yNQ4+6PAZ8k6IeQFHPfslKyM9O45eLxUYciIn1AIgmiMfxbZWbTCR7LPSp5IUlXbN1Xw+J3dnHDBUWMyMmMOhwR6QMS6cV8MPw9iG8SPCNpMPCPSY1KOu3+5ZtITxvAbZeo9iAi3eO4CSJ89PZBd98P/AGY0CNRSaeUVdXx9Js7+eL54xg1JKvjBUREEnDcJqbwrum/76FYpIt+8vImzGD+Zd3xEx0iIoFE+iBeNLOvmdlYMxvR+kp6ZJKQvdWHWbRqB589awyFwwdFHY6I9CGJ9EF8Ifx7Z8w0R81NKeGhFVtoam7hjhmqPYhI90rkTmr1eqaoypoGnnh9G5/62MkU5+dEHY6I9DGJ3El9U7zp7v7z7g9HOuNnr26htqGZO2dOijoUEemDEmliOjdmOIvgJ0LfBJQgInSgrpFHX93KVdNHM7lgSNThiEgflEgT012x42Y2HFiUtIgkIY+v3Ep1fZNqDyKSNF35qbEaQP0SEaqpb+LhV7Yw69RRTC8cFnU4ItJHJdIH8d8EVy1BkFCmAU8lMyg5viff2M7+2kbVHkQkqRLpg/i3mOEmYJu770xSPNKBw43NPLhiMxdNzOOcotyowxGRPiyRBLEd+NDdDwOY2SAzK3b3rUmNTOL69eodlFfX86N5Z0Ydioj0cYn0QfwaaIkZbw6nSQ9raGrhgZc3c05RLhdOyIs6HBHp4xJJEOnu3tA6Eg7redIReOatMsqq6lgwaxJmFnU4ItLHJZIgys3sU60jZjYX2JfIys3sSjNbb2alZvb1OPP/w8zeDl8bzKwqZl5zzLzFiWyvL2tqbuG+5aVMLxzKjCkjow5HRPqBRPogbgd+YWYLw/GdQNy7q2OZWRpwLzAnXGaVmS1293WtZdz97pjydwFnxayizt3V0B569t0P2VpRywM3nKPag4j0iERulNsEXGBmg8PxQwmu+zyg1N03A5jZImAusK6d8tcB305w3f1KS4uzcGkpUwoGc8W0gqjDEZF+osMmJjP7npkNd/dD7n7IzHLN7J8TWHchsCNmfGc4Ld42ighuvlsaMznLzFab2etm9ul2lpsfllldXl6eQEi90wvrdrNx7yHunDmJAQNUexCRnpFIH8RV7n6kbyD8dbmruzmOecBv3L05ZlqRu5cA1wM/NLNjnmft7g+6e4m7l4wc2Tfb5d2dhctKGZ+fwzVnnBx1OCLSjySSINLMbGDriJkNAgYep3yrMmBszPiYcFo884Bfxk5w97Lw72ZgOUf3T/Qb
yzeUs7bsIHdcNpE01R5EpAclkiB+AbxkZrea2W3AEuCxBJZbBUw2s/FmlkmQBI65GsnMTgVygZUx03Jbk5KZ5QMX037fRZ/l7vz4pY0UDh/Ep8+K2zonIpI0iXRS/6uZvQPMJngm0++BogSWazKzBWH5NOARd3/PzO4BVrt7a7KYByxyd49ZfCrwEzNrIUhi/xJ79VN/sXJzBW9ur+K7c08jM70rz1UUEem6RC5zBdhDkBw+B2wBnk5kIXd/DniuzbRvtRn/TpzlXgNOTzC2Pmvh0lJGDhnI50rGdlxYRKSbtZsgzGwKwaWn1xHcGPcrwNx9Zg/F1q+t2VbJa5sq+OYnp5KVkRZ1OCLSDx2vBvEBsAK4xt1LAczs7uOUl260cGkpudkZXH/+uKhDEZF+6ngN258FPgSWmdlPzexyQJfR9IC1ZQdYtr6c2y6ZQHZmoq2AIiLdq90E4e7PuPs84FRgGfA3wCgzu9/MruipAPuje5eVMiQrnRsv7PBaABGRpOnw0hh3r3H3J939zwjuZXgL+F9Jj6yf2rCnmufX7ubmi4oZmpURdTgi0o916tpJd98f3r18ebIC6u/uW1ZKdmYat1ysn/0WkWjp4voUsnVfDYvf2cUNFxQxIkc/uSEi0VKCSCH3L99EetoAbrtEtQcRiZ4SRIooq6rj6Td3ct25Yxk1JCvqcERElCBSxU9e3oQZzL/smIfWiohEQgkiBeytPsyiVTu49uwxFA4fFHU4IiKAEkRKeGjFFpqaW7hjhmoPIpI6lCAiVlnTwBOvb2PumYUU5eVEHY6IyBFKEBH72atbqG1o5suqPYhIilGCiNCBukYefXUrV00fzeSCIVGHIyJyFCWICD2+civV9U3cOXNS1KGIiBxDCSIiNfVNPPzKFmadOorphcOiDkdE5BhKEBF58o3t7K9tVO1BRFKWEkQEDjc28+CKzVw8KY9zinKjDkdEJC4liAj8evUOyqvrWTBzctShiIi0SwmihzU0tfDAy5spKcrlggkjog5HRKRdShA97Jm3yiirquPOWZMw0y+4ikjqUoLoQU3NLdy3vJTphUOZMWVk1OGIiByXEkQPevbdD9laUcuCmZNVexCRlJfUBGFmV5rZejMrNbOvx5n/H5F+PWgAAA0SSURBVGb2dvjaYGZVMfO+ZGYbw9eXkhlnT2hpcRYuLWVKwWCumFYQdTgiIh1KT9aKzSwNuBeYA+wEVpnZYndf11rG3e+OKX8XcFY4PAL4NlACOLAmXHZ/suJNthfW7Wbj3kP8aN6ZDBig2oOIpL5k1iDOA0rdfbO7NwCLgLnHKX8d8Mtw+BPAEnevDJPCEuDKJMaaVO7OwmWljM/P4ZozTo46HBGRhCQzQRQCO2LGd4bTjmFmRcB4YGlnl+0Nlm8oZ23ZQe6YMZE01R5EpJdIlU7qecBv3L25MwuZ2XwzW21mq8vLy5MU2olxd3780kYKhw/iM2f12hwnIv1QMhNEGTA2ZnxMOC2eeXzUvJTwsu7+oLuXuHvJyJGpednoys0VvLm9ittnTCQjLVXysYhIx5J5xFoFTDaz8WaWSZAEFrctZGanArnAypjJvweuMLNcM8sFrgin9ToLl5YyashAPnfOmKhDERHplKQlCHdvAhYQHNjfB55y9/fM7B4z+1RM0XnAInf3mGUrge8SJJlVwD3htF5lzbZKXttUwfxLJ5CVkRZ1OCIinZK0y1wB3P054Lk2077VZvw77Sz7CPBI0oLrAQuXlpKbncH154+LOhQRkU5To3iSrC07wLL15dx2yQSyM5Oah0VEkkIJIknuXVbKkKx0brywKOpQRES6RAkiCTbsqeb5tbu55aJihmZlRB2OiEiXKEEkwX3LSsnOTOOWi8dHHYqISJcpQXSzrftqWPzOLm68oIjcnMyowxER6TIliG52//JNpKcN4NZLVHsQkd5NCaIblVXV8fSbO7nu3LGMGpIVdTgiIidECaIb/eTlTZjB/MsmRh2KiMgJ
U4LoJnurD7No1Q6uPXsMhcMHRR2OiMgJU4LoJg+t2EJTcwt3zFDtQUT6BiWIblBZ08ATr29j7pmFFOXlRB2OiEi3UILoBj97dQt1jc18WbUHEelDlCBO0IG6Rh59dStXTR/N5IIhUYcjItJtlCBO0OMrt1Jd38SdMydFHYqISLdSgjgBNfVNPPzKFmadOorTTh4WdTgiIt1KCeIEPPnGdvbXNqr2ICJ9khJEFx1ubObBFZu5eFIe5xTlRh2OiEi3U4Lool+v3kF5dT0LZk6OOhQRkaRQguiChqYWHnh5MyVFuVwwYUTU4YiIJIUSRBc881YZZVV1LJg1CTOLOhwRkaRQguikpuYW7lteyumFw7hsysiowxERSRoliE569t0P2VpRq9qDiPR5ShCd0NLiLFxaypSCwcyZWhB1OCIiSaUE0QkvrNvNxr2HuHPmJAYMUO1BRPo2JYgEuTsLl5UyPj+Ha844OepwRESSLqkJwsyuNLP1ZlZqZl9vp8znzWydmb1nZk/GTG82s7fD1+JkxpmI5RvKWVt2kDtmTCRNtQcR6QfSk7ViM0sD7gXmADuBVWa22N3XxZSZDHwDuNjd95vZqJhV1Ln7mcmKrzPcnR+/tJHC4YP4zFmFUYcjItIjklmDOA8odffN7t4ALALmtinzl8C97r4fwN33JjGeLlu5uYI3t1dx+4yJZKSpVU5E+odkHu0KgR0x4zvDabGmAFPM7FUze93MroyZl2Vmq8Ppn463ATObH5ZZXV5e3r3Rx1i4tJRRQwbyuXPGJG0bIiKpJurT4XRgMjADuA74qZkND+cVuXsJcD3wQzM75ufa3P1Bdy9x95KRI5Nz09qabZW8tqmC+ZdOICsjLSnbEBFJRclMEGXA2JjxMeG0WDuBxe7e6O5bgA0ECQN3Lwv/bgaWA2clMdZ2LVxayoicTK4/f1wUmxcRiUwyE8QqYLKZjTezTGAe0PZqpGcIag+YWT5Bk9NmM8s1s4Ex0y8G1tHD1pYdYNn6cm79+HiyM5PWny8ikpKSdtRz9yYzWwD8HkgDHnH398zsHmC1uy8O511hZuuAZuDv3L3CzC4CfmJmLQRJ7F9ir37qKfcuK2VIVjo3XljU05sWEYlcUk+L3f054Lk2074VM+zAV8NXbJnXgNOTGVtHNuyp5vm1u/nrWZMYmpURZSgiIpGIupM6Zd23rJTszDRuuXh81KGIiERCCSKOrftqWPzOLm68oIjcnMyowxERiYQSRBz3L99ERtoAbr1EtQcR6b+UINooq6rj6Td3ct154xg1JCvqcEREIqME0cZPXt6EGcy/dELUoYiIREoJIsbe6sMsWrWDa88ew8nDB0UdjohIpJQgYjy0YgtNzS3cMeOYp3qIiPQ7ShChypoGnnh9G3PPLKQoLyfqcEREIqcEEfrZq1uoa2zmy6o9iIgAShAAHKhr5NFXt3LV9NFMLhgSdTgiIilBCQJ4fOVWquubuHPmpKhDERFJGf0+QdTUN/HwK1u4/NRRnHbysKjDERFJGf3+GdaH6pu4cGIet12i+x5ERGL1+wRRMDSL+754TtRhiIiknH7fxCQiIvEpQYiISFxKECIiEpcShIiIxKUEISIicSlBiIhIXEoQIiISlxKEiIjEZe4edQzdwszKgW0nsIp8YF83hdOdFFfnKK7OUVyd0xfjKnL3kfFm9JkEcaLMbLW7l0QdR1uKq3MUV+cors7pb3GpiUlEROJSghARkbiUID7yYNQBtENxdY7i6hzF1Tn9Ki71QYiISFyqQYiISFxKECIiEle/ShBm9oiZ7TWzte3MNzP7TzMrNbM/mdnZKRLXDDM7YGZvh69v9VBcY81smZmtM7P3zOwrccr0+D5LMK4e32dmlmVmfzSzd8K4/ilOmYFm9qtwf71hZsUpEtfNZlYes79uS3ZcMdtOM7O3zOx3ceb1+P5KIKYo99VWM3s33O7qOPO79/vo7v3mBVwKnA2sbWf+1cDzgAEXAG+kSFwz
gN9FsL9OAs4Oh4cAG4BpUe+zBOPq8X0W7oPB4XAG8AZwQZsyXwYeCIfnAb9KkbhuBhb29Gcs3PZXgSfj/b+i2F8JxBTlvtoK5B9nfrd+H/tVDcLd/wBUHqfIXODnHngdGG5mJ6VAXJFw9w/d/c1wuBp4HyhsU6zH91mCcfW4cB8cCkczwlfbq0DmAo+Fw78BLjczS4G4ImFmY4BPAg+1U6TH91cCMaWybv0+9qsEkYBCYEfM+E5S4MATujBsInjezE7r6Y2HVfuzCM4+Y0W6z44TF0Swz8KmibeBvcASd293f7l7E3AAyEuBuACuDZslfmNmY5MdU+iHwN8DLe3Mj2J/dRQTRLOvIEjsL5jZGjObH2d+t34flSB6hzcJnpfyMeDHwDM9uXEzGww8DfyNux/syW0fTwdxRbLP3L3Z3c8ExgDnmdn0nthuRxKI67+BYnc/A1jCR2ftSWNm1wB73X1NsreVqARj6vF9FePj7n42cBVwp5ldmsyNKUEcrQyIPRsYE06LlLsfbG0icPfngAwzy++JbZtZBsFB+Bfu/v/iFIlkn3UUV5T7LNxmFbAMuLLNrCP7y8zSgWFARdRxuXuFu9eHow8B5/RAOBcDnzKzrcAiYJaZPdGmTE/vrw5jimhftW67LPy7F/gtcF6bIt36fVSCONpi4KbwSoALgAPu/mHUQZnZ6NZ2VzM7j+D/lvSDSrjNh4H33f0H7RTr8X2WSFxR7DMzG2lmw8PhQcAc4IM2xRYDXwqH/xxY6mHvYpRxtWmn/hRBv05Sufs33H2MuxcTdEAvdfcb2hTr0f2VSExR7KtwuzlmNqR1GLgCaHvlY7d+H9O7HG0vZGa/JLi6Jd/MdgLfJuiww90fAJ4juAqgFKgFbkmRuP4cuMPMmoA6YF6yDyqhi4EbgXfD9muAfwDGxcQWxT5LJK4o9tlJwGNmlkaQkJ5y99+Z2T3AandfTJDYHjezUoILE+YlOaZE4/prM/sU0BTGdXMPxBVXCuyvjmKKal8VAL8Nz3vSgSfd/X/M7HZIzvdRj9oQEZG41MQkIiJxKUGIiEhcShAiIhKXEoSIiMSlBCEiInEpQYh0gpk1xzzF820z+3o3rrvY2nmir0gU+tV9ECLdoC58ZIVIn6cahEg3CJ/T//3wWf1/NLNJ4fRiM1saPtjtJTMbF04vMLPfhg8TfMfMLgpXlWZmP7XgdxteCO98FomEEoRI5wxq08T0hZh5B9z9dGAhwRNBIXhQ4GPhg91+AfxnOP0/gZfDhwmeDbwXTp8M3OvupwFVwLVJfj8i7dKd1CKdYGaH3H1wnOlbgVnuvjl8kOBud88zs33ASe7eGE7/0N3zzawcGBPz0LfWR5cvcffJ4fj/AjLc/Z+T/85EjqUahEj38XaGO6M+ZrgZ9RNKhJQgRLrPF2L+rgyHX+OjB8x9EVgRDr8E3AFHfsxnWE8FKZIonZ2IdM6gmCfIAvyPu7de6pprZn8iqAVcF067C/iZmf0dUM5HT9f8CvCgmd1KUFO4A4j80fIisdQHIdINwj6IEnffF3UsIt1FTUwiIhKXahAiIhKXahAiIhKXEoSIiMSlBCEiInEpQYiISFxKECIiEtf/B0ICYLNPQkFCAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "%%local\n",
    "%matplotlib inline\n",
    "# Plot the per-epoch training accuracy collected from the training run.\n",
    "# NOTE(review): `results` is assumed to hold the metrics returned by the\n",
    "# experiment in an earlier cell -- confirm it is in scope on a fresh run.\n",
    "y = results[\"acc\"]  # accuracy value recorded for each epoch\n",
    "x = list(range(1, len(y)+1))  # epoch numbers, 1-based\n",
    "plt.title(\"Accuracy per Epoch - MNIST Training\")\n",
    "plt.xlabel(\"Epoch\")\n",
    "plt.ylabel(\"Accuracy\")\n",
    "plt.plot(x,y)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Step 7: Evaluation Using Trained Model and Test Dataset\n",
    "\n",
    "Inside the train_fn function we saved the trained model to HDFS in the ps format. We can load the weights of this model and use them for serving predictions or for evaluation; in this example we will evaluate the model against the test set. "
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Load Model Weights"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "model_path_hdfs = hdfs.project_path() + \"mnist/\" + \"mnist_torch_ps.pt\""
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Currently PyTorch HDFS support is limited. To get around this we can download the ps model to the local file system and load it from there using `torch.load`. "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "File hdfs://10.0.2.15:8020/Projects/demo_featurestore_admin000/mnist/mnist_torch_ps.pt is already localized, skipping download..."
     ]
    }
   ],
   "source": [
    "local_path = hdfs.copy_to_local(model_path_hdfs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Re-create the network architecture and restore the trained weights.\n",
    "# NOTE(review): Net is assumed to be the same class definition used during\n",
    "# training -- load_state_dict fails if the architectures do not match.\n",
    "model = Net()\n",
    "model.load_state_dict(torch.load(local_path))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "def test_epoch(test_dataset_path, model, device, epoch):\n",
    "    \"\"\"\n",
    "    Evaluates the model on one full pass over the MNIST test dataset.\n",
    "\n",
    "    Args:\n",
    "        test_dataset_path: petastorm URL of the test dataset\n",
    "        model: trained PyTorch model to evaluate\n",
    "        device: torch.device to run the evaluation on\n",
    "        epoch: epoch number, used to label the printed summary so that\n",
    "               repeated evaluation passes can be told apart\n",
    "    \"\"\"\n",
    "    with DataLoader(make_reader(test_dataset_path, num_epochs=READER_EPOCHS, hdfs_driver='libhdfs',\n",
    "                               transform_spec=TransformSpec(_transform_row)), \n",
    "                    batch_size=BATCH_SIZE) as test_loader:\n",
    "        model.eval()  # switch layers such as dropout/batch-norm to eval mode\n",
    "        test_loss = 0\n",
    "        correct = 0\n",
    "        count = 0\n",
    "        with torch.no_grad():  # no gradients needed for evaluation\n",
    "            for row in test_loader:\n",
    "                data, target = row['image'].to(device), row['digit'].to(device)\n",
    "                output = model(data)\n",
    "                test_loss += F.nll_loss(output, target, reduction='sum').item()  # sum up batch loss\n",
    "                pred = output.max(1, keepdim=True)[1]  # index of the max log-probability\n",
    "                correct += pred.eq(target.view_as(pred)).sum().item()\n",
    "                count += data.shape[0]  # track total samples for averaging\n",
    "\n",
    "        test_loss /= count\n",
    "        print('\\nTest set (epoch {}): Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\\n'.format(\n",
    "            epoch, test_loss, correct, count, 100. * correct / count))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [],
   "source": [
    "def test_fn(model):\n",
    "    \"\"\"\n",
    "    Runs NUM_EPOCHS evaluation passes of the given model over the MNIST\n",
    "    test dataset stored in the feature store.\n",
    "    \"\"\"\n",
    "    # Seed torch, then pick the GPU when one is available, CPU otherwise\n",
    "    torch.manual_seed(SEED)\n",
    "    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n",
    "    # Resolve the petastorm path of the test dataset in the feature store\n",
    "    dataset_path = featurestore.get_training_dataset_path(TEST_DATASET_NAME)\n",
    "    for epoch in range(1, NUM_EPOCHS + 1):\n",
    "        test_epoch(dataset_path, model, device, epoch)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Test set: Average loss: 0.0953, Accuracy: 9689/10000 (97%)\n",
      "\n",
      "\n",
      "Test set: Average loss: 0.0953, Accuracy: 9689/10000 (97%)\n",
      "\n",
      "\n",
      "Test set: Average loss: 0.0953, Accuracy: 9689/10000 (97%)\n",
      "\n",
      "\n",
      "Test set: Average loss: 0.0953, Accuracy: 9689/10000 (97%)\n",
      "\n",
      "\n",
      "Test set: Average loss: 0.0953, Accuracy: 9689/10000 (97%)"
     ]
    }
   ],
   "source": [
    "test_fn(model)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "PySpark",
   "language": "",
   "name": "pysparkkernel"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "python",
    "version": 2
   },
   "mimetype": "text/x-python",
   "name": "pyspark",
   "pygments_lexer": "python2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}