{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Reimplementation of Block Coordinate Descent (BCD) Algorithm for Training DNNs (10-layer MLP) for MNIST in PyTorch"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Import libraries"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "PyTorch Version: 1.0.0\n",
      "Torchvision Version: 0.2.1\n",
      "GPU is available? True\n"
     ]
    }
   ],
   "source": [
    "\"\"\"\n",
    "5 runs, seed = 5, 10, 15, 8, 19; \n",
    "validation accuracies: 0.8839, 0.8695, 0.8564, 0.8713, 0.8617\n",
    "\"\"\"\n",
    "from __future__ import print_function, division\n",
    "import numpy as np\n",
    "import pandas as pd\n",
    "import torch\n",
    "import torch.nn as nn\n",
    "import torch.nn.functional as F\n",
    "from torch.autograd import Variable\n",
    "import torch.optim as optim\n",
    "import torchvision\n",
    "from torchvision import datasets, models, transforms, utils\n",
    "from torch.utils.data import Dataset, DataLoader\n",
    "import matplotlib.pyplot as plt\n",
    "import time\n",
    "import os\n",
    "import copy\n",
    "\n",
    "print(\"PyTorch Version:\", torch.__version__)\n",
    "print(\"Torchvision Version:\", torchvision.__version__)\n",
    "print(\"GPU is available?\", torch.cuda.is_available())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Read in MNIST dataset"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "dtype = torch.float\n",
    "# device = torch.device(\"cpu\") # swap in this line to force CPU execution\n",
    "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") # GPU when available, else CPU\n",
    "\n",
    "# ToTensor already scales pixels to [0, 1]; Normalize((0,), (1,)) is an\n",
    "# identity transform kept as a placeholder for other mean/std choices.\n",
    "ts = transforms.Compose([transforms.ToTensor(), \n",
    "                             transforms.Normalize((0,), (1,))])\n",
    "# Download (if needed) and load the MNIST train/test splits from ../data\n",
    "mnist_trainset = datasets.MNIST('../data', train=True, download=True, transform=ts)\n",
    "mnist_testset = datasets.MNIST(root='../data', train=False, download=True, transform=ts)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Data manipulation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Manipulate train set\n",
    "# Image height/width read from the first sample (28 x 28 for MNIST)\n",
    "x_d1 = mnist_trainset[0][0].size()[1]\n",
    "x_d2 = mnist_trainset[0][0].size()[2]\n",
    "# N = number of training samples; NOTE(review): the x_d3 alias is not used below -- confirm it can be dropped\n",
    "N = x_d3 = len(mnist_trainset)\n",
    "K = 10\n",
    "# Flatten each image into a row, then transpose so that columns index\n",
    "# samples: x_train ends up with shape (x_d1*x_d2, N).\n",
    "x_train = torch.empty((N,x_d1*x_d2), device=device)\n",
    "y_train = torch.empty(N, dtype=torch.long)\n",
    "for i in range(N): \n",
    "     x_train[i,:] = torch.reshape(mnist_trainset[i][0], (1, x_d1*x_d2))\n",
    "     y_train[i] = mnist_trainset[i][1]\n",
    "x_train = torch.t(x_train)\n",
    "# One-hot encode the labels as a (K, N) matrix for the squared-loss objective\n",
    "y_one_hot = torch.zeros(N, K).scatter_(1, torch.reshape(y_train, (N, 1)), 1)\n",
    "y_one_hot = torch.t(y_one_hot).to(device=device)\n",
    "y_train = y_train.to(device=device)\n",
    "\n",
    "# Manipulate test set (same layout as the train set above)\n",
    "N_test = x_d3_test = len(mnist_testset)\n",
    "x_test = torch.empty((N_test,x_d1*x_d2), device=device)\n",
    "y_test = torch.empty(N_test, dtype=torch.long)\n",
    "for i in range(N_test): \n",
    "     x_test[i,:] = torch.reshape(mnist_testset[i][0], (1, x_d1*x_d2))\n",
    "     y_test[i] = mnist_testset[i][1]\n",
    "x_test = torch.t(x_test)\n",
    "y_test_one_hot = torch.zeros(N_test, K).scatter_(1, torch.reshape(y_test, (N_test, 1)), 1)\n",
    "y_test_one_hot = torch.t(y_test_one_hot).to(device=device)\n",
    "y_test = y_test.to(device=device)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Main algorithm (Jinshan's Algorithm in Zeng et al (2018))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Define parameter initialization and forward pass"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Initialization of parameters\n",
    "torch.manual_seed(5)\n",
    "def initialize(dim_in, dim_out):\n",
    "    \"\"\"Return (W, b) for one affine layer on the notebook-global device:\n",
    "    W ~ 0.01 * N(0, 1) with shape (dim_out, dim_in), b a (dim_out, 1)\n",
    "    column filled with 0.1.\"\"\"\n",
    "    weight = torch.randn(dim_out, dim_in, device=device) * 0.01\n",
    "    bias = torch.full((dim_out, 1), 0.1, device=device)\n",
    "    return weight, bias\n",
    "\n",
    "# Forward pass\n",
    "def feed_forward(weight, bias, activation, dim = N):\n",
    "    \"\"\"One layer of the forward pass: U = weight @ activation + bias\n",
    "    (bias tiled across dim columns), V = ReLU(U).  Returns (U, V).\"\"\"\n",
    "    pre_act = torch.addmm(bias.repeat(1, dim), weight, activation)\n",
    "    post_act = torch.relu(pre_act)\n",
    "    return pre_act, post_act"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Define functions for updating blocks"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "def updateV_js(U1,U2,W,b,rho,gamma): \n",
    "    _, d = W.size()\n",
    "    I = torch.eye(d, device=device)\n",
    "    U1 = nn.ReLU()(U1)\n",
    "    _, col_U2 = U2.size()\n",
    "    Vstar = torch.mm(torch.inverse(rho*(torch.mm(torch.t(W),W)) + gamma*I), \\\n",
    "                     rho*torch.mm(torch.t(W),U2-b.repeat(1,col_U2)) + gamma*U1)\n",
    "    return Vstar"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "def updateWb_js(U, V, W, b, alpha, rho): \n",
    "    d,N = V.size()\n",
    "    I = torch.eye(d, device=device)\n",
    "    _, col_U = U.size()\n",
    "    Wstar = torch.mm(alpha*W + rho*torch.mm(U - b.repeat(1,col_U),torch.t(V)),\\\n",
    "                     torch.inverse(alpha*I + rho*(torch.mm(V,torch.t(V)))))\n",
    "    bstar = (alpha*b+rho*torch.sum(U-torch.mm(W,V), dim=1).reshape(b.size()))/(rho*N + alpha)\n",
    "    return Wstar, bstar"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Define the proximal operator of the ReLU activation function"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "def relu_prox(a, b, gamma, d, N):\n",
    "    val = torch.empty(d,N, device=device)\n",
    "    x = (a+gamma*b)/(1+gamma)\n",
    "    y = torch.min(b,torch.zeros(d,N, device=device))\n",
    "\n",
    "    val = torch.where(a+gamma*b < 0, y, torch.zeros(d,N, device=device))\n",
    "    val = torch.where(((a+gamma*b >= 0) & (b >=0)) | ((a*(gamma-np.sqrt(gamma*(gamma+1))) <= gamma*b) & (b < 0)), x, val)\n",
    "    val = torch.where((-a <= gamma*b) & (gamma*b <= a*(gamma-np.sqrt(gamma*(gamma+1)))), b, val)\n",
    "    return val"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Define hyperparameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Penalty weights shared by every layer (the commented-out per-layer\n",
    "# aliases below were collapsed into single scalars).\n",
    "gamma = 1  # weight of the V-vs-relu(U) coupling term (see updateV_js)\n",
    "# gamma1 = gamma2 = gamma3 = gamma4 = gamma5 = gamma6 \\\n",
    "# = gamma7 = gamma8 = gamma9 = gamma10 = gamma11 = gamma\n",
    "\n",
    "rho = 1  # weight of the U-vs-(W V + b) coupling term (see updateWb_js)\n",
    "# rho1 = rho2 = rho3 = rho4 = rho5 = rho6 = rho7 = rho8 \n",
    "# = rho9 = rho10 = rho11 = rho \n",
    "\n",
    "\n",
    "alpha = 1  # proximal weight pulling each block toward its previous iterate\n",
    "# alpha1 = alpha2 = alpha3 = alpha4 = alpha5 = alpha6 = alpha7 \\\n",
    "# = alpha8 = alpha9 = alpha10 = alpha"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Define block update"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "def block_update(Wn, bn, Wn_1, bn_1, Vn, Un, Vn_1, Un_1, Vn_2, dn_1, alpha = alpha, gamma = gamma, rho = rho, dim = N):\n",
    "    \"\"\"One BCD sweep over the blocks attached to layer n.\n",
    "\n",
    "    Updates W(n), b(n); then V(n-1) using the *freshly updated* W(n), b(n)\n",
    "    (Gauss-Seidel order); then U(n-1) via the ReLU proximal operator.\n",
    "    dn_1 is the row dimension of U(n-1), dim the number of samples.\n",
    "    NOTE(review): the parameter Vn is never used in the body -- confirm it can be dropped.\n",
    "    NOTE(review): the defaults alpha/gamma/rho/dim bind the globals' values at\n",
    "    definition time, not at call time.\n",
    "    \"\"\"\n",
    "    # update W(n) and b(n)\n",
    "    Wn, bn = updateWb_js(Un, Vn_1, Wn, bn, alpha, rho)\n",
    "    # update V(n-1) using the new Wn, bn\n",
    "    Vn_1 = updateV_js(Un_1, Un, Wn, bn, rho, gamma)\n",
    "    # update U(n-1): prox of ReLU at a rho/alpha-weighted combination of the\n",
    "    # forward pre-activation and the previous U(n-1)\n",
    "    Un_1 = relu_prox(Vn_1, (rho*torch.addmm(bn_1.repeat(1,dim), Wn_1, Vn_2) + \\\n",
    "                            alpha*Un_1)/(rho + alpha), (rho + alpha)/gamma, dn_1, dim)\n",
    "    return Wn, bn, Vn_1, Un_1"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Define function for updates of three blocks simultaneously"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "def three_blocks_update(Wn, bn, Wn_1, bn_1, Wn_2, bn_2, Wn_3, bn_3, Vn, Un, Vn_1, Un_1, Vn_2, Un_2, Vn_3, Un_3, Vn_4, dn_1, dn_2, dn_3, alpha = alpha, gamma = gamma, rho = rho, dim = N): \n",
    "    \"\"\"Simultaneous (Jacobi-style) BCD update of three consecutive layers.\n",
    "\n",
    "    Every *_new value is computed from the *input* (old) blocks only --\n",
    "    e.g. Vn_1_new uses the old Wn, bn -- unlike block_update, which feeds\n",
    "    freshly updated values forward.  dn_1/dn_2/dn_3 are the row dimensions\n",
    "    of U(n-1)/U(n-2)/U(n-3); dim is the number of samples.\n",
    "    NOTE(review): the parameter Vn is never used in the body -- confirm it can be dropped.\n",
    "    NOTE(review): the defaults alpha/gamma/rho/dim bind the globals' values at\n",
    "    definition time, not at call time.\n",
    "    \"\"\"\n",
    "    # update W(n) and b(n)\n",
    "    Wn_new, bn_new = updateWb_js(Un, Vn_1, Wn, bn, alpha, rho)\n",
    "    # update V(n-1)\n",
    "    Vn_1_new = updateV_js(Un_1, Un, Wn, bn, rho, gamma)\n",
    "    # update U(n-1)\n",
    "    Un_1_new = relu_prox(Vn_1, (rho*torch.addmm(bn_1.repeat(1,dim), Wn_1, Vn_2) + \\\n",
    "                            alpha*Un_1)/(rho + alpha), (rho + alpha)/gamma, dn_1, dim)\n",
    "    \n",
    "    # update W(n-1) and b(n-1)\n",
    "    Wn_1_new, bn_1_new = updateWb_js(Un_1, Vn_2, Wn_1, bn_1, alpha, rho)\n",
    "    # update V(n-2)\n",
    "    Vn_2_new = updateV_js(Un_2, Un_1, Wn_1, bn_1, rho, gamma)\n",
    "    # update U(n-2)\n",
    "    Un_2_new = relu_prox(Vn_2, (rho*torch.addmm(bn_2.repeat(1,dim), Wn_2, Vn_3) + \\\n",
    "                            alpha*Un_2)/(rho + alpha), (rho + alpha)/gamma, dn_2, dim)\n",
    "    \n",
    "    # update W(n-2) and b(n-2)\n",
    "    Wn_2_new, bn_2_new = updateWb_js(Un_2, Vn_3, Wn_2, bn_2, alpha, rho)\n",
    "    # update V(n-3)\n",
    "    Vn_3_new = updateV_js(Un_3, Un_2, Wn_2, bn_2, rho, gamma)\n",
    "    # update U(n-3)\n",
    "    Un_3_new = relu_prox(Vn_3, (rho*torch.addmm(bn_3.repeat(1,dim), Wn_3, Vn_4) + \\\n",
    "                            alpha*Un_3)/(rho + alpha), (rho + alpha)/gamma, dn_3, dim)\n",
    "    \n",
    "    return Wn_new, bn_new, Vn_1_new, Un_1_new, Wn_1_new, bn_1_new, Vn_2_new, Un_2_new, Wn_2_new, bn_2_new, Vn_3_new, Un_3_new"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Define loss computation of layers"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "def compute_loss(weight, bias, activation, preactivation, rho = rho):\n",
    "    \"\"\"rho/2 * ||weight @ activation + bias 1^T - preactivation||_F^2\n",
    "    as a numpy scalar.\n",
    "\n",
    "    The column count is read from `activation` instead of the hard-coded\n",
    "    global N, so the same helper also works for test-set-sized inputs.\n",
    "    \"\"\"\n",
    "    n_cols = activation.size(1)\n",
    "    loss = rho/2*torch.pow(torch.dist(torch.addmm(bias.repeat(1, n_cols), \\\n",
    "                                                  weight, activation), preactivation, 2), 2).cpu().numpy()\n",
    "    return loss"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Parameter initialization"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Layers: input + 10 hidden + output (11 affine maps; the earlier\n",
    "# \"3 hidden\" note was stale)\n",
    "d0 = x_d1*x_d2\n",
    "d1 = d2 = d3 = d4 = d5 = d6 \\\n",
    "= d7 = d8 = d9 = d10 = 620 # 600\n",
    "d11 = K \n",
    "\n",
    "\n",
    "# Weight/bias initialization for each affine map\n",
    "W1, b1 = initialize(d0, d1)\n",
    "W2, b2 = initialize(d1, d2)\n",
    "W3, b3 = initialize(d2, d3)\n",
    "W4, b4 = initialize(d3, d4)\n",
    "W5, b5 = initialize(d4, d5)\n",
    "W6, b6 = initialize(d5, d6)\n",
    "W7, b7 = initialize(d6, d7)\n",
    "W8, b8 = initialize(d7, d8)\n",
    "W9, b9 = initialize(d8, d9)\n",
    "W10, b10 = initialize(d9, d10)\n",
    "W11, b11 = initialize(d10, d11)\n",
    "\n",
    "\n",
    "# Initial forward pass fills the auxiliary variables: U = pre-activations,\n",
    "# V = post-ReLU activations.  The output layer is linear, so V11 = U11.\n",
    "U1, V1 = feed_forward(W1, b1, x_train)\n",
    "U2, V2 = feed_forward(W2, b2, V1)\n",
    "U3, V3 = feed_forward(W3, b3, V2)\n",
    "# U4 = torch.addmm(b4.repeat(1, N), W4, V3)\n",
    "# V4 = U4\n",
    "U4, V4 = feed_forward(W4, b4, V3)\n",
    "U5, V5 = feed_forward(W5, b5, V4)\n",
    "U6, V6 = feed_forward(W6, b6, V5)\n",
    "U7, V7 = feed_forward(W7, b7, V6)\n",
    "U8, V8 = feed_forward(W8, b8, V7)\n",
    "U9, V9 = feed_forward(W9, b9, V8)\n",
    "U10, V10 = feed_forward(W10, b10, V9)\n",
    "U11 = torch.addmm(b11.repeat(1, N), W11, V10)\n",
    "V11 = U11\n",
    "\n",
    "# Per-epoch histories\n",
    "niter = 300\n",
    "loss1 = np.empty(niter)  # squared output-fit loss -- NOTE(review): inferred from the sq_loss log column; confirm\n",
    "loss2 = np.empty(niter)  # total objective -- NOTE(review): inferred from the tot_loss log column; confirm\n",
    "# layer1 = np.empty(niter)\n",
    "# layer2 = np.empty(niter)\n",
    "# layer3 = np.empty(niter)\n",
    "# layer4 = np.empty(niter)\n",
    "# layer11 = np.empty(niter)\n",
    "# layer21 = np.empty(niter)\n",
    "# layer31 = np.empty(niter)\n",
    "# layer41 = np.empty(niter)\n",
    "accuracy_train = np.empty(niter)\n",
    "accuracy_test = np.empty(niter)\n",
    "time1 = np.empty(niter)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Training algorithm"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 60000 samples, validate on 10000 samples\n",
      "Epoch 1 / 300 \n",
      " - time (s): 1.9227125644683838 - sq_loss: 12110.6416015625 - tot_loss: 12759.522286938503 - acc: 0.09863333333333334 - val_acc: 0.0958\n",
      "Epoch 2 / 300 \n",
      " - time (s): 1.790919303894043 - sq_loss: 6812.234375 - tot_loss: 6822.50573406741 - acc: 0.10218333333333333 - val_acc: 0.101\n",
      "Epoch 3 / 300 \n",
      " - time (s): 1.796729564666748 - sq_loss: 4670.6591796875 - tot_loss: 4677.528311962262 - acc: 0.09863333333333334 - val_acc: 0.0958\n",
      "Epoch 4 / 300 \n",
      " - time (s): 1.7888429164886475 - sq_loss: 2997.572998046875 - tot_loss: 3003.5206896346062 - acc: 0.10218333333333333 - val_acc: 0.101\n",
      "Epoch 5 / 300 \n",
      " - time (s): 1.8666350841522217 - sq_loss: 1887.41162109375 - tot_loss: 1893.1961169689894 - acc: 0.09736666666666667 - val_acc: 0.0982\n",
      "Epoch 6 / 300 \n",
      " - time (s): 1.9635353088378906 - sq_loss: 1181.7332763671875 - tot_loss: 1187.5601520705968 - acc: 0.10218333333333333 - val_acc: 0.101\n",
      "Epoch 7 / 300 \n",
      " - time (s): 2.013420581817627 - sq_loss: 738.4261474609375 - tot_loss: 743.1299314480275 - acc: 0.09915 - val_acc: 0.1009\n",
      "Epoch 8 / 300 \n",
      " - time (s): 1.8075573444366455 - sq_loss: 461.00067138671875 - tot_loss: 465.1832609931007 - acc: 0.11236666666666667 - val_acc: 0.1135\n",
      "Epoch 9 / 300 \n",
      " - time (s): 1.799774169921875 - sq_loss: 287.6617126464844 - tot_loss: 291.74197488697246 - acc: 0.19376666666666667 - val_acc: 0.195\n",
      "Epoch 10 / 300 \n",
      " - time (s): 1.8849461078643799 - sq_loss: 179.4442901611328 - tot_loss: 183.48969017737545 - acc: 0.09915 - val_acc: 0.1009\n",
      "Epoch 11 / 300 \n",
      " - time (s): 2.048137664794922 - sq_loss: 111.9168930053711 - tot_loss: 115.95837438874878 - acc: 0.10218333333333333 - val_acc: 0.101\n",
      "Epoch 12 / 300 \n",
      " - time (s): 1.9549353122711182 - sq_loss: 69.79440307617188 - tot_loss: 73.68341962946579 - acc: 0.11236666666666667 - val_acc: 0.1135\n",
      "Epoch 13 / 300 \n",
      " - time (s): 1.9240520000457764 - sq_loss: 43.52358627319336 - tot_loss: 47.34082779515302 - acc: 0.10218333333333333 - val_acc: 0.101\n",
      "Epoch 14 / 300 \n",
      " - time (s): 1.8597002029418945 - sq_loss: 27.13995361328125 - tot_loss: 30.908643131901044 - acc: 0.09751666666666667 - val_acc: 0.0974\n",
      "Epoch 15 / 300 \n",
      " - time (s): 1.829394817352295 - sq_loss: 16.924423217773438 - tot_loss: 20.596736938110553 - acc: 0.1022 - val_acc: 0.101\n",
      "Epoch 16 / 300 \n",
      " - time (s): 1.8327584266662598 - sq_loss: 10.555015563964844 - tot_loss: 14.087422077340307 - acc: 0.14065 - val_acc: 0.1444\n",
      "Epoch 17 / 300 \n",
      " - time (s): 1.8023433685302734 - sq_loss: 6.583117485046387 - tot_loss: 10.423219974865788 - acc: 0.10441666666666667 - val_acc: 0.1028\n",
      "Epoch 18 / 300 \n",
      " - time (s): 1.8259098529815674 - sq_loss: 4.106605529785156 - tot_loss: 7.6121925653278595 - acc: 0.11236666666666667 - val_acc: 0.1135\n",
      "Epoch 19 / 300 \n",
      " - time (s): 1.8801143169403076 - sq_loss: 2.562115430831909 - tot_loss: 5.562518767423171 - acc: 0.2816166666666667 - val_acc: 0.2793\n",
      "Epoch 20 / 300 \n",
      " - time (s): 1.912268877029419 - sq_loss: 1.598874807357788 - tot_loss: 5.011455852709332 - acc: 0.11636666666666666 - val_acc: 0.1221\n",
      "Epoch 21 / 300 \n",
      " - time (s): 1.9048411846160889 - sq_loss: 0.9981869459152222 - tot_loss: 4.321339416765113 - acc: 0.25145 - val_acc: 0.2485\n",
      "Epoch 22 / 300 \n",
      " - time (s): 1.9256765842437744 - sq_loss: 0.6233453750610352 - tot_loss: 4.345168142874172 - acc: 0.2991333333333333 - val_acc: 0.2986\n",
      "Epoch 23 / 300 \n",
      " - time (s): 1.882256031036377 - sq_loss: 0.38937193155288696 - tot_loss: 3.6065803519668407 - acc: 0.1639 - val_acc: 0.1656\n",
      "Epoch 24 / 300 \n",
      " - time (s): 1.8272509574890137 - sq_loss: 0.24332422018051147 - tot_loss: 3.9425870393351943 - acc: 0.11501666666666667 - val_acc: 0.1149\n",
      "Epoch 25 / 300 \n",
      " - time (s): 1.8635907173156738 - sq_loss: 0.15216799080371857 - tot_loss: 3.3313615672886954 - acc: 0.2035 - val_acc: 0.2097\n",
      "Epoch 26 / 300 \n",
      " - time (s): 1.8273921012878418 - sq_loss: 0.09519749879837036 - tot_loss: 3.4342471476993524 - acc: 0.13978333333333334 - val_acc: 0.1405\n",
      "Epoch 27 / 300 \n",
      " - time (s): 1.8981950283050537 - sq_loss: 0.05960185080766678 - tot_loss: 3.8888547867536545 - acc: 0.3085833333333333 - val_acc: 0.3089\n",
      "Epoch 28 / 300 \n",
      " - time (s): 1.8207197189331055 - sq_loss: 0.03737415000796318 - tot_loss: 3.4847660906089004 - acc: 0.12476666666666666 - val_acc: 0.1211\n",
      "Epoch 29 / 300 \n",
      " - time (s): 1.8481383323669434 - sq_loss: 0.02343750186264515 - tot_loss: 4.51741780701559 - acc: 0.38288333333333335 - val_acc: 0.3902\n",
      "Epoch 30 / 300 \n",
      " - time (s): 1.876483678817749 - sq_loss: 0.014726446010172367 - tot_loss: 3.426854531513527 - acc: 0.2937666666666667 - val_acc: 0.3004\n",
      "Epoch 31 / 300 \n",
      " - time (s): 1.799320936203003 - sq_loss: 0.009282632730901241 - tot_loss: 4.120151142182294 - acc: 0.43878333333333336 - val_acc: 0.4446\n",
      "Epoch 32 / 300 \n",
      " - time (s): 1.810344934463501 - sq_loss: 0.005873255431652069 - tot_loss: 3.4131062422238756 - acc: 0.4621 - val_acc: 0.475\n",
      "Epoch 33 / 300 \n",
      " - time (s): 1.7914981842041016 - sq_loss: 0.003719714470207691 - tot_loss: 3.78807573965787 - acc: 0.44105 - val_acc: 0.45\n",
      "Epoch 34 / 300 \n",
      " - time (s): 1.80106520652771 - sq_loss: 0.0023627793416380882 - tot_loss: 3.4746507060845033 - acc: 0.38411666666666666 - val_acc: 0.3944\n",
      "Epoch 35 / 300 \n",
      " - time (s): 1.8162548542022705 - sq_loss: 0.0014980543637648225 - tot_loss: 3.9722294988196154 - acc: 0.37365 - val_acc: 0.3866\n",
      "Epoch 36 / 300 \n",
      " - time (s): 1.8187432289123535 - sq_loss: 0.0009670500294305384 - tot_loss: 3.300994048768189 - acc: 0.5089666666666667 - val_acc: 0.5257\n",
      "Epoch 37 / 300 \n",
      " - time (s): 1.948378562927246 - sq_loss: 0.000619674741756171 - tot_loss: 3.039932961240993 - acc: 0.56555 - val_acc: 0.5737\n",
      "Epoch 38 / 300 \n",
      " - time (s): 2.015716791152954 - sq_loss: 0.00039225025102496147 - tot_loss: 3.1945748995203758 - acc: 0.3919666666666667 - val_acc: 0.4102\n",
      "Epoch 39 / 300 \n",
      " - time (s): 1.835542917251587 - sq_loss: 0.0002537703257985413 - tot_loss: 3.147423869160775 - acc: 0.64655 - val_acc: 0.656\n",
      "Epoch 40 / 300 \n",
      " - time (s): 1.81858491897583 - sq_loss: 0.0001661595597397536 - tot_loss: 4.444220380243678 - acc: 0.3371 - val_acc: 0.3433\n",
      "Epoch 41 / 300 \n",
      " - time (s): 1.824434757232666 - sq_loss: 0.00011046299914596602 - tot_loss: 2.9750367833694327 - acc: 0.5950166666666666 - val_acc: 0.6064\n",
      "Epoch 42 / 300 \n",
      " - time (s): 1.8936481475830078 - sq_loss: 7.189739699242637e-05 - tot_loss: 3.065599774803559 - acc: 0.5512333333333334 - val_acc: 0.5624\n",
      "Epoch 43 / 300 \n",
      " - time (s): 1.8691039085388184 - sq_loss: 5.922486889176071e-05 - tot_loss: 3.039611797503312 - acc: 0.58375 - val_acc: 0.597\n",
      "Epoch 44 / 300 \n",
      " - time (s): 1.8494200706481934 - sq_loss: 3.5524164559319615e-05 - tot_loss: 3.7529815387388226 - acc: 0.5227166666666667 - val_acc: 0.525\n",
      "Epoch 45 / 300 \n",
      " - time (s): 1.8236546516418457 - sq_loss: 2.4527282221242785e-05 - tot_loss: 3.195220692548901 - acc: 0.7595166666666666 - val_acc: 0.7703\n",
      "Epoch 46 / 300 \n",
      " - time (s): 1.8705954551696777 - sq_loss: 1.986991446756292e-05 - tot_loss: 3.5426096088212944 - acc: 0.64755 - val_acc: 0.6567\n",
      "Epoch 47 / 300 \n",
      " - time (s): 1.919614553451538 - sq_loss: 1.3720753486268222e-05 - tot_loss: 3.6388338553315407 - acc: 0.6197666666666667 - val_acc: 0.6382\n",
      "Epoch 48 / 300 \n",
      " - time (s): 1.9364752769470215 - sq_loss: 1.4463600564340595e-05 - tot_loss: 3.3446159989043736 - acc: 0.6114333333333334 - val_acc: 0.628\n",
      "Epoch 49 / 300 \n",
      " - time (s): 1.9395248889923096 - sq_loss: 1.1965164958382957e-05 - tot_loss: 3.1611606525138995 - acc: 0.7271 - val_acc: 0.733\n",
      "Epoch 50 / 300 \n",
      " - time (s): 1.8952536582946777 - sq_loss: 6.673103143839398e-06 - tot_loss: 3.6304577957257607 - acc: 0.5734666666666667 - val_acc: 0.5821\n",
      "Epoch 51 / 300 \n",
      " - time (s): 1.955686092376709 - sq_loss: 5.051719199400395e-06 - tot_loss: 3.5276116077675397 - acc: 0.66795 - val_acc: 0.6769\n",
      "Epoch 52 / 300 \n",
      " - time (s): 1.889514684677124 - sq_loss: 3.914626631740248e-06 - tot_loss: 3.474648424491079 - acc: 0.5888833333333333 - val_acc: 0.5923\n",
      "Epoch 53 / 300 \n",
      " - time (s): 1.8557515144348145 - sq_loss: 3.2376108265452785e-06 - tot_loss: 3.562292145170204 - acc: 0.6182666666666666 - val_acc: 0.6202\n",
      "Epoch 54 / 300 \n",
      " - time (s): 1.9367101192474365 - sq_loss: 4.320020707382355e-06 - tot_loss: 3.007165585565417 - acc: 0.72405 - val_acc: 0.7296\n",
      "Epoch 55 / 300 \n",
      " - time (s): 1.9532277584075928 - sq_loss: 3.4823228816094343e-06 - tot_loss: 3.0102258367246577 - acc: 0.7408333333333333 - val_acc: 0.752\n",
      "Epoch 56 / 300 \n",
      " - time (s): 1.8726587295532227 - sq_loss: 4.191106654616306e-06 - tot_loss: 3.051810138257224 - acc: 0.6921333333333334 - val_acc: 0.7018\n",
      "Epoch 57 / 300 \n",
      " - time (s): 1.9005463123321533 - sq_loss: 1.2220061762491241e-05 - tot_loss: 2.9678615251523297 - acc: 0.5681333333333334 - val_acc: 0.5704\n",
      "Epoch 58 / 300 \n",
      " - time (s): 1.855621337890625 - sq_loss: 1.9359481484571006e-06 - tot_loss: 3.357422555437097 - acc: 0.65615 - val_acc: 0.6633\n",
      "Epoch 59 / 300 \n",
      " - time (s): 1.8908803462982178 - sq_loss: 3.138397232760326e-06 - tot_loss: 3.041428086406313 - acc: 0.7082 - val_acc: 0.7144\n",
      "Epoch 60 / 300 \n",
      " - time (s): 1.8576734066009521 - sq_loss: 2.7365272217139136e-06 - tot_loss: 3.5728923465280786 - acc: 0.65105 - val_acc: 0.6559\n",
      "Epoch 61 / 300 \n",
      " - time (s): 1.8464040756225586 - sq_loss: 5.066856829216704e-06 - tot_loss: 3.363462357823664 - acc: 0.74645 - val_acc: 0.7538\n",
      "Epoch 62 / 300 \n",
      " - time (s): 1.8238990306854248 - sq_loss: 9.505354682914913e-06 - tot_loss: 3.6050391057051456 - acc: 0.6895333333333333 - val_acc: 0.7061\n",
      "Epoch 63 / 300 \n",
      " - time (s): 2.011706829071045 - sq_loss: 5.5038540267560165e-06 - tot_loss: 3.3945817903218085 - acc: 0.7394 - val_acc: 0.7455\n",
      "Epoch 64 / 300 \n",
      " - time (s): 1.9704346656799316 - sq_loss: 5.183151642995654e-06 - tot_loss: 3.1293364243970245 - acc: 0.7255166666666667 - val_acc: 0.7346\n",
      "Epoch 65 / 300 \n",
      " - time (s): 2.0342164039611816 - sq_loss: 1.0461219062563032e-05 - tot_loss: 3.410314812339493 - acc: 0.7520333333333333 - val_acc: 0.7591\n",
      "Epoch 66 / 300 \n",
      " - time (s): 1.9547805786132812 - sq_loss: 2.500857135601109e-06 - tot_loss: 3.327481849360538 - acc: 0.7146 - val_acc: 0.7221\n",
      "Epoch 67 / 300 \n",
      " - time (s): 1.9121019840240479 - sq_loss: 2.2295369035418844e-06 - tot_loss: 3.1168842727290667 - acc: 0.7416666666666667 - val_acc: 0.7499\n",
      "Epoch 68 / 300 \n",
      " - time (s): 1.8256502151489258 - sq_loss: 4.887909199169371e-06 - tot_loss: 3.513922302249739 - acc: 0.7510333333333333 - val_acc: 0.7564\n",
      "Epoch 69 / 300 \n",
      " - time (s): 1.8391242027282715 - sq_loss: 7.144638857425889e-06 - tot_loss: 3.423971611187426 - acc: 0.7360833333333333 - val_acc: 0.7502\n",
      "Epoch 70 / 300 \n",
      " - time (s): 1.8769567012786865 - sq_loss: 1.7424257521270192e-06 - tot_loss: 3.192711546428086 - acc: 0.7611166666666667 - val_acc: 0.7702\n",
      "Epoch 71 / 300 \n",
      " - time (s): 1.8912091255187988 - sq_loss: 1.8051445067612804e-06 - tot_loss: 3.4628489859545653 - acc: 0.7331166666666666 - val_acc: 0.7424\n",
      "Epoch 72 / 300 \n",
      " - time (s): 1.8482799530029297 - sq_loss: 2.0403463167895097e-06 - tot_loss: 3.4373898822600495 - acc: 0.7454166666666666 - val_acc: 0.7571\n",
      "Epoch 73 / 300 \n",
      " - time (s): 1.894685983657837 - sq_loss: 1.4488197166429018e-06 - tot_loss: 2.8943080264936043 - acc: 0.7926166666666666 - val_acc: 0.8006\n",
      "Epoch 74 / 300 \n",
      " - time (s): 1.8918428421020508 - sq_loss: 1.148690625996096e-05 - tot_loss: 2.9474526378098744 - acc: 0.7610166666666667 - val_acc: 0.7725\n",
      "Epoch 75 / 300 \n",
      " - time (s): 1.824730396270752 - sq_loss: 8.832056664687116e-06 - tot_loss: 3.3104112317223553 - acc: 0.7417833333333334 - val_acc: 0.7569\n",
      "Epoch 76 / 300 \n",
      " - time (s): 1.9150149822235107 - sq_loss: 3.009697820743895e-06 - tot_loss: 4.093584107545894 - acc: 0.7647333333333334 - val_acc: 0.7728\n",
      "Epoch 77 / 300 \n",
      " - time (s): 2.024549722671509 - sq_loss: 2.7325997962179827e-06 - tot_loss: 3.233076671331446 - acc: 0.7804666666666666 - val_acc: 0.7955\n",
      "Epoch 78 / 300 \n",
      " - time (s): 1.8760385513305664 - sq_loss: 1.2744078503601486e-06 - tot_loss: 3.022502504240265 - acc: 0.7884 - val_acc: 0.8003\n",
      "Epoch 79 / 300 \n",
      " - time (s): 1.8265538215637207 - sq_loss: 1.1968550097662956e-06 - tot_loss: 3.205297705160774 - acc: 0.7759666666666667 - val_acc: 0.7913\n",
      "Epoch 80 / 300 \n",
      " - time (s): 1.8363838195800781 - sq_loss: 3.317811888337019e-06 - tot_loss: 4.307815995966621 - acc: 0.7188166666666667 - val_acc: 0.7295\n",
      "Epoch 81 / 300 \n",
      " - time (s): 1.8174221515655518 - sq_loss: 1.3392029813985573e-06 - tot_loss: 3.1596496677568666 - acc: 0.77635 - val_acc: 0.7909\n",
      "Epoch 82 / 300 \n",
      " - time (s): 1.8374438285827637 - sq_loss: 2.5406429813301656e-06 - tot_loss: 3.387823101466438 - acc: 0.7740333333333334 - val_acc: 0.7849\n",
      "Epoch 83 / 300 \n",
      " - time (s): 1.8398900032043457 - sq_loss: 6.830616712250048e-06 - tot_loss: 2.96811050977567 - acc: 0.69535 - val_acc: 0.6983\n",
      "Epoch 84 / 300 \n",
      " - time (s): 1.9078481197357178 - sq_loss: 3.7532536225626245e-06 - tot_loss: 3.076290400216749 - acc: 0.76975 - val_acc: 0.7795\n",
      "Epoch 85 / 300 \n",
      " - time (s): 1.8426320552825928 - sq_loss: 1.813915696402546e-05 - tot_loss: 3.135159510629819 - acc: 0.7642333333333333 - val_acc: 0.7744\n",
      "Epoch 86 / 300 \n",
      " - time (s): 1.881835699081421 - sq_loss: 1.2789139873348176e-05 - tot_loss: 3.297041971780345 - acc: 0.7430833333333333 - val_acc: 0.7508\n",
      "Epoch 87 / 300 \n",
      " - time (s): 1.9057154655456543 - sq_loss: 5.076953129901085e-06 - tot_loss: 3.195223936502771 - acc: 0.7864333333333333 - val_acc: 0.7964\n",
      "Epoch 88 / 300 \n",
      " - time (s): 1.8916661739349365 - sq_loss: 5.922613581788028e-06 - tot_loss: 3.5511946917254136 - acc: 0.7406166666666667 - val_acc: 0.75\n",
      "Epoch 89 / 300 \n",
      " - time (s): 1.9333827495574951 - sq_loss: 5.433571459434461e-06 - tot_loss: 2.97636763482933 - acc: 0.79865 - val_acc: 0.8067\n",
      "Epoch 90 / 300 \n",
      " - time (s): 1.9531211853027344 - sq_loss: 2.398304786765948e-06 - tot_loss: 3.0643671511097637 - acc: 0.7498666666666667 - val_acc: 0.7597\n",
      "Epoch 91 / 300 \n",
      " - time (s): 1.9100420475006104 - sq_loss: 2.6483010060474044e-06 - tot_loss: 4.106564617444519 - acc: 0.8289333333333333 - val_acc: 0.8392\n",
      "Epoch 92 / 300 \n",
      " - time (s): 1.8516790866851807 - sq_loss: 1.7090636674765847e-06 - tot_loss: 2.9828305370386943 - acc: 0.7966 - val_acc: 0.8046\n",
      "Epoch 93 / 300 \n",
      " - time (s): 1.8998770713806152 - sq_loss: 5.502097337739542e-06 - tot_loss: 3.0355965044182085 - acc: 0.7865833333333333 - val_acc: 0.7955\n",
      "Epoch 94 / 300 \n",
      " - time (s): 1.8097994327545166 - sq_loss: 1.588987515788176e-06 - tot_loss: 3.3936171904845196 - acc: 0.8102166666666667 - val_acc: 0.8223\n",
      "Epoch 95 / 300 \n",
      " - time (s): 1.956174612045288 - sq_loss: 6.441430286940886e-06 - tot_loss: 3.290393855847924 - acc: 0.8006333333333333 - val_acc: 0.8084\n",
      "Epoch 96 / 300 \n",
      " - time (s): 1.9762227535247803 - sq_loss: 1.1791307770181447e-06 - tot_loss: 2.7964814905553794 - acc: 0.80285 - val_acc: 0.8112\n",
      "Epoch 97 / 300 \n",
      " - time (s): 1.9788618087768555 - sq_loss: 3.1688050512457266e-06 - tot_loss: 2.803683271135924 - acc: 0.8069166666666666 - val_acc: 0.8185\n",
      "Epoch 98 / 300 \n",
      " - time (s): 1.8017594814300537 - sq_loss: 6.018945441610413e-06 - tot_loss: 3.123860814852833 - acc: 0.7836666666666666 - val_acc: 0.793\n",
      "Epoch 99 / 300 \n",
      " - time (s): 1.8036553859710693 - sq_loss: 3.697031161209452e-06 - tot_loss: 3.1841797011591098 - acc: 0.8058833333333333 - val_acc: 0.8166\n",
      "Epoch 100 / 300 \n",
      " - time (s): 1.806023120880127 - sq_loss: 1.1210933053007466e-06 - tot_loss: 3.1602853241193998 - acc: 0.8291166666666666 - val_acc: 0.8381\n",
      "Epoch 101 / 300 \n",
      " - time (s): 1.8101928234100342 - sq_loss: 4.077325229445705e-06 - tot_loss: 2.7597479145283614 - acc: 0.8124666666666667 - val_acc: 0.8222\n",
      "Epoch 102 / 300 \n",
      " - time (s): 1.8179359436035156 - sq_loss: 1.7227936041308567e-05 - tot_loss: 3.0315223210845943 - acc: 0.7776833333333333 - val_acc: 0.7847\n",
      "Epoch 103 / 300 \n",
      " - time (s): 1.8238370418548584 - sq_loss: 4.259694105712697e-05 - tot_loss: 2.8553550907708996 - acc: 0.8009666666666667 - val_acc: 0.8105\n",
      "Epoch 104 / 300 \n",
      " - time (s): 1.8039100170135498 - sq_loss: 3.2578493119217455e-05 - tot_loss: 3.1454164955794113 - acc: 0.8268166666666666 - val_acc: 0.8341\n",
      "Epoch 105 / 300 \n",
      " - time (s): 1.8084814548492432 - sq_loss: 1.2630792298296e-05 - tot_loss: 3.2388715651677558 - acc: 0.78695 - val_acc: 0.7948\n",
      "Epoch 106 / 300 \n",
      " - time (s): 1.804246425628662 - sq_loss: 1.0262348268952337e-06 - tot_loss: 3.139950981382867 - acc: 0.7912333333333333 - val_acc: 0.8014\n",
      "Epoch 107 / 300 \n",
      " - time (s): 1.796442985534668 - sq_loss: 1.8092825484927744e-05 - tot_loss: 2.7489648902919726 - acc: 0.78765 - val_acc: 0.7961\n",
      "Epoch 108 / 300 \n",
      " - time (s): 1.8055274486541748 - sq_loss: 6.022356501489412e-06 - tot_loss: 3.1576202002788705 - acc: 0.8200166666666666 - val_acc: 0.8307\n",
      "Epoch 109 / 300 \n",
      " - time (s): 1.821540355682373 - sq_loss: 1.3230593367552501e-06 - tot_loss: 3.032166674565019 - acc: 0.78735 - val_acc: 0.7986\n",
      "Epoch 110 / 300 \n",
      " - time (s): 1.8087584972381592 - sq_loss: 1.8620249875311856e-06 - tot_loss: 3.0305137165388487 - acc: 0.8165 - val_acc: 0.8237\n",
      "Epoch 111 / 300 \n",
      " - time (s): 1.8482768535614014 - sq_loss: 3.8851767385494895e-06 - tot_loss: 2.6527941454960455 - acc: 0.8065833333333333 - val_acc: 0.8176\n",
      "Epoch 112 / 300 \n",
      " - time (s): 1.8185088634490967 - sq_loss: 3.782464091273141e-06 - tot_loss: 3.3100035466989084 - acc: 0.7821166666666667 - val_acc: 0.7913\n",
      "Epoch 113 / 300 \n",
      " - time (s): 1.8020563125610352 - sq_loss: 5.560966656048549e-06 - tot_loss: 3.4173517345011533 - acc: 0.7823166666666667 - val_acc: 0.7909\n",
      "Epoch 114 / 300 \n",
      " - time (s): 1.8282723426818848 - sq_loss: 1.0316156249245978e-06 - tot_loss: 3.0517200951611585 - acc: 0.8229833333333333 - val_acc: 0.8333\n",
      "Epoch 115 / 300 \n",
      " - time (s): 1.8033161163330078 - sq_loss: 1.0089365787280258e-06 - tot_loss: 2.922809041965138 - acc: 0.8259 - val_acc: 0.8385\n",
      "Epoch 116 / 300 \n",
      " - time (s): 1.8264224529266357 - sq_loss: 1.175962893285032e-06 - tot_loss: 3.033729643001493 - acc: 0.83175 - val_acc: 0.838\n",
      "Epoch 117 / 300 \n",
      " - time (s): 1.807361364364624 - sq_loss: 1.651043135098007e-06 - tot_loss: 2.882767252492272 - acc: 0.8248833333333333 - val_acc: 0.8328\n",
      "Epoch 118 / 300 \n",
      " - time (s): 1.7938625812530518 - sq_loss: 4.525315489445347e-06 - tot_loss: 2.7835523096919133 - acc: 0.7933 - val_acc: 0.8054\n",
      "Epoch 119 / 300 \n",
      " - time (s): 1.8224208354949951 - sq_loss: 3.1219033189699985e-06 - tot_loss: 2.8498668466654635 - acc: 0.8158333333333333 - val_acc: 0.8265\n",
      "Epoch 120 / 300 \n",
      " - time (s): 1.8037614822387695 - sq_loss: 6.941811079741456e-06 - tot_loss: 2.8276440750032634 - acc: 0.8288 - val_acc: 0.84\n",
      "Epoch 121 / 300 \n",
      " - time (s): 1.8347439765930176 - sq_loss: 4.603313755069394e-06 - tot_loss: 2.843688395168101 - acc: 0.8325666666666667 - val_acc: 0.8429\n",
      "Epoch 122 / 300 \n",
      " - time (s): 1.810354232788086 - sq_loss: 6.184163794387132e-06 - tot_loss: 2.729835862348409 - acc: 0.8335 - val_acc: 0.8442\n",
      "Epoch 123 / 300 \n",
      " - time (s): 1.7958793640136719 - sq_loss: 2.630089056765428e-06 - tot_loss: 2.743406294668148 - acc: 0.8095166666666667 - val_acc: 0.8192\n",
      "Epoch 124 / 300 \n",
      " - time (s): 1.7957820892333984 - sq_loss: 1.3419885362964123e-06 - tot_loss: 3.3430892630130984 - acc: 0.83265 - val_acc: 0.8416\n",
      "Epoch 125 / 300 \n",
      " - time (s): 1.8134551048278809 - sq_loss: 4.159446689300239e-06 - tot_loss: 3.561043055744449 - acc: 0.8280166666666666 - val_acc: 0.8344\n",
      "Epoch 126 / 300 \n",
      " - time (s): 1.81925368309021 - sq_loss: 9.188255717162974e-06 - tot_loss: 3.038041777253966 - acc: 0.8420833333333333 - val_acc: 0.8532\n",
      "Epoch 127 / 300 \n",
      " - time (s): 2.9892830848693848 - sq_loss: 4.871459168498404e-06 - tot_loss: 3.1295109200018487 - acc: 0.8141666666666667 - val_acc: 0.8244\n",
      "Epoch 128 / 300 \n",
      " - time (s): 2.6022348403930664 - sq_loss: 3.3074861676141154e-06 - tot_loss: 3.060993110866093 - acc: 0.8102 - val_acc: 0.8227\n",
      "Epoch 129 / 300 \n",
      " - time (s): 2.548953056335449 - sq_loss: 7.690554411965422e-06 - tot_loss: 2.9726986156729254 - acc: 0.80305 - val_acc: 0.8163\n",
      "Epoch 130 / 300 \n",
      " - time (s): 2.5037055015563965 - sq_loss: 1.6400077811340452e-06 - tot_loss: 2.7543633365745563 - acc: 0.7864166666666667 - val_acc: 0.7966\n",
      "Epoch 131 / 300 \n",
      " - time (s): 2.557553768157959 - sq_loss: 1.1826798981928732e-05 - tot_loss: 3.231862379608174 - acc: 0.7804166666666666 - val_acc: 0.7917\n",
      "Epoch 132 / 300 \n",
      " - time (s): 2.591160297393799 - sq_loss: 6.5233562054345384e-06 - tot_loss: 3.330449787312318 - acc: 0.8086166666666667 - val_acc: 0.8189\n",
      "Epoch 133 / 300 \n",
      " - time (s): 2.618999719619751 - sq_loss: 1.4115123121882789e-05 - tot_loss: 2.782039883748439 - acc: 0.80785 - val_acc: 0.819\n",
      "Epoch 134 / 300 \n",
      " - time (s): 2.501441478729248 - sq_loss: 2.389424935245188e-06 - tot_loss: 3.2082729707731232 - acc: 0.82145 - val_acc: 0.8309\n",
      "Epoch 135 / 300 \n",
      " - time (s): 2.461033582687378 - sq_loss: 1.3191915058996528e-05 - tot_loss: 2.704604191272665 - acc: 0.8485166666666667 - val_acc: 0.8568\n",
      "Epoch 136 / 300 \n",
      " - time (s): 2.5158181190490723 - sq_loss: 7.5917182584817056e-06 - tot_loss: 3.2464312827546564 - acc: 0.8164666666666667 - val_acc: 0.8235\n",
      "Epoch 137 / 300 \n",
      " - time (s): 2.5381929874420166 - sq_loss: 7.3256414907518774e-06 - tot_loss: 2.7316160950995254 - acc: 0.8419166666666666 - val_acc: 0.851\n",
      "Epoch 138 / 300 \n",
      " - time (s): 2.467820882797241 - sq_loss: 3.6472517876973143e-06 - tot_loss: 3.0568454180281606 - acc: 0.83805 - val_acc: 0.8474\n",
      "Epoch 139 / 300 \n",
      " - time (s): 2.4780266284942627 - sq_loss: 3.839214514300693e-06 - tot_loss: 3.980285488251866 - acc: 0.8457 - val_acc: 0.8543\n",
      "Epoch 140 / 300 \n",
      " - time (s): 2.4812278747558594 - sq_loss: 1.444865688426944e-06 - tot_loss: 3.262079090942848 - acc: 0.7838166666666667 - val_acc: 0.7952\n",
      "Epoch 141 / 300 \n",
      " - time (s): 2.4890058040618896 - sq_loss: 3.0235009944590274e-06 - tot_loss: 3.1987395312139597 - acc: 0.8450166666666666 - val_acc: 0.8548\n",
      "Epoch 142 / 300 \n",
      " - time (s): 2.429107904434204 - sq_loss: 2.2956992324907333e-06 - tot_loss: 4.1022595688955334 - acc: 0.8144 - val_acc: 0.8231\n",
      "Epoch 143 / 300 \n",
      " - time (s): 2.4735352993011475 - sq_loss: 2.081622142213746e-06 - tot_loss: 3.0369960387904484 - acc: 0.8174166666666667 - val_acc: 0.8248\n",
      "Epoch 144 / 300 \n",
      " - time (s): 2.4676592350006104 - sq_loss: 1.3861115348845487e-06 - tot_loss: 3.414041699563086 - acc: 0.8106166666666667 - val_acc: 0.8222\n",
      "Epoch 145 / 300 \n",
      " - time (s): 2.5563716888427734 - sq_loss: 9.83149220701307e-06 - tot_loss: 3.3829405818105442 - acc: 0.8451833333333333 - val_acc: 0.8562\n",
      "Epoch 146 / 300 \n",
      " - time (s): 2.4945900440216064 - sq_loss: 1.4078784715820802e-06 - tot_loss: 3.232069422136874 - acc: 0.8437 - val_acc: 0.8527\n",
      "Epoch 147 / 300 \n",
      " - time (s): 2.462435007095337 - sq_loss: 1.5527468804066302e-06 - tot_loss: 3.5318415447247844 - acc: 0.8230166666666666 - val_acc: 0.8309\n",
      "Epoch 148 / 300 \n",
      " - time (s): 2.487290620803833 - sq_loss: 5.9393282754172105e-06 - tot_loss: 2.8580258524903 - acc: 0.8394333333333334 - val_acc: 0.8493\n",
      "Epoch 149 / 300 \n",
      " - time (s): 2.533536672592163 - sq_loss: 1.177738909063919e-06 - tot_loss: 2.926095211832603 - acc: 0.8481666666666666 - val_acc: 0.8567\n",
      "Epoch 150 / 300 \n",
      " - time (s): 2.439980983734131 - sq_loss: 2.233644181615091e-06 - tot_loss: 2.6621099351484645 - acc: 0.8422666666666667 - val_acc: 0.8532\n",
      "Epoch 151 / 300 \n",
      " - time (s): 2.5089173316955566 - sq_loss: 1.0497500397832482e-06 - tot_loss: 2.815168642166441 - acc: 0.7913833333333333 - val_acc: 0.798\n",
      "Epoch 152 / 300 \n",
      " - time (s): 2.508117198944092 - sq_loss: 1.9138105926685967e-06 - tot_loss: 3.547383433048708 - acc: 0.82855 - val_acc: 0.8363\n",
      "Epoch 153 / 300 \n",
      " - time (s): 2.500354766845703 - sq_loss: 3.5466755434754305e-06 - tot_loss: 3.1231927775615986 - acc: 0.8189666666666666 - val_acc: 0.827\n",
      "Epoch 154 / 300 \n",
      " - time (s): 2.3855504989624023 - sq_loss: 1.4743232895852998e-05 - tot_loss: 3.4735460599949874 - acc: 0.8241333333333334 - val_acc: 0.8347\n",
      "Epoch 155 / 300 \n",
      " - time (s): 2.497648239135742 - sq_loss: 1.030345515573572e-06 - tot_loss: 3.582785461509843 - acc: 0.8181166666666667 - val_acc: 0.8255\n",
      "Epoch 156 / 300 \n",
      " - time (s): 2.439666509628296 - sq_loss: 4.6253198888734914e-06 - tot_loss: 3.016199432416215 - acc: 0.8297333333333333 - val_acc: 0.8452\n",
      "Epoch 157 / 300 \n",
      " - time (s): 2.469794511795044 - sq_loss: 2.829861386999255e-06 - tot_loss: 3.0160261854412056 - acc: 0.8398833333333333 - val_acc: 0.8477\n",
      "Epoch 158 / 300 \n",
      " - time (s): 2.419973611831665 - sq_loss: 1.4548120361723704e-06 - tot_loss: 3.3565828342241275 - acc: 0.7846333333333333 - val_acc: 0.7949\n",
      "Epoch 159 / 300 \n",
      " - time (s): 2.4479784965515137 - sq_loss: 9.78012121777283e-06 - tot_loss: 3.453221211399068 - acc: 0.8347 - val_acc: 0.8442\n",
      "Epoch 160 / 300 \n",
      " - time (s): 2.422508478164673 - sq_loss: 1.517545683782373e-06 - tot_loss: 3.325734080615007 - acc: 0.8386333333333333 - val_acc: 0.8477\n",
      "Epoch 161 / 300 \n",
      " - time (s): 2.5051374435424805 - sq_loss: 1.195857976199477e-06 - tot_loss: 2.836160445159976 - acc: 0.8185166666666667 - val_acc: 0.8285\n",
      "Epoch 162 / 300 \n",
      " - time (s): 2.5360107421875 - sq_loss: 5.194577170186676e-06 - tot_loss: 3.222462974879818 - acc: 0.8455833333333334 - val_acc: 0.8548\n",
      "Epoch 163 / 300 \n",
      " - time (s): 2.4253411293029785 - sq_loss: 2.0482961190282367e-05 - tot_loss: 3.104486777088823 - acc: 0.8357 - val_acc: 0.8453\n",
      "Epoch 164 / 300 \n",
      " - time (s): 2.445221424102783 - sq_loss: 1.405209786753403e-05 - tot_loss: 3.10715010419608 - acc: 0.8232666666666667 - val_acc: 0.8346\n",
      "Epoch 165 / 300 \n",
      " - time (s): 2.422313928604126 - sq_loss: 1.7804327399062458e-06 - tot_loss: 3.761000173834418 - acc: 0.8062 - val_acc: 0.8154\n",
      "Epoch 166 / 300 \n",
      " - time (s): 2.447086811065674 - sq_loss: 1.8114847080141772e-06 - tot_loss: 3.5513283851228152 - acc: 0.8219166666666666 - val_acc: 0.833\n",
      "Epoch 167 / 300 \n",
      " - time (s): 2.4432523250579834 - sq_loss: 3.839080818579532e-06 - tot_loss: 3.3984745928619304 - acc: 0.8385 - val_acc: 0.8484\n",
      "Epoch 168 / 300 \n",
      " - time (s): 2.4551191329956055 - sq_loss: 2.8546953672048403e-06 - tot_loss: 3.0695684415611595 - acc: 0.8414166666666667 - val_acc: 0.8513\n",
      "Epoch 169 / 300 \n",
      " - time (s): 2.4500632286071777 - sq_loss: 1.737862589834549e-06 - tot_loss: 2.827668209760418 - acc: 0.83675 - val_acc: 0.8463\n",
      "Epoch 170 / 300 \n",
      " - time (s): 2.4330663681030273 - sq_loss: 4.070209797646385e-06 - tot_loss: 2.8418697021870685 - acc: 0.8472833333333334 - val_acc: 0.8589\n",
      "Epoch 171 / 300 \n",
      " - time (s): 2.516554594039917 - sq_loss: 1.6245965070993407e-06 - tot_loss: 2.6331475074864557 - acc: 0.8239666666666666 - val_acc: 0.8317\n",
      "Epoch 172 / 300 \n",
      " - time (s): 2.517228126525879 - sq_loss: 1.1061746363338898e-06 - tot_loss: 3.369530713898598 - acc: 0.8028166666666666 - val_acc: 0.812\n",
      "Epoch 173 / 300 \n",
      " - time (s): 2.3996856212615967 - sq_loss: 2.0573611436702777e-06 - tot_loss: 3.0181193363573584 - acc: 0.8483833333333334 - val_acc: 0.8615\n",
      "Epoch 174 / 300 \n",
      " - time (s): 2.541806697845459 - sq_loss: 1.15629518404603e-06 - tot_loss: 3.118822672928218 - acc: 0.84675 - val_acc: 0.8564\n",
      "Epoch 175 / 300 \n",
      " - time (s): 2.462186098098755 - sq_loss: 3.052113243029453e-06 - tot_loss: 2.8440799750915176 - acc: 0.8416833333333333 - val_acc: 0.85\n",
      "Epoch 176 / 300 \n",
      " - time (s): 2.4776647090911865 - sq_loss: 2.1848652522749035e-06 - tot_loss: 2.872023212414888 - acc: 0.839 - val_acc: 0.8449\n",
      "Epoch 177 / 300 \n",
      " - time (s): 2.4833180904388428 - sq_loss: 5.4160536819836125e-06 - tot_loss: 2.7173575579963654 - acc: 0.8205333333333333 - val_acc: 0.832\n",
      "Epoch 178 / 300 \n",
      " - time (s): 2.4402568340301514 - sq_loss: 1.7716052980176755e-06 - tot_loss: 2.9007341863585907 - acc: 0.8519666666666666 - val_acc: 0.8621\n",
      "Epoch 179 / 300 \n",
      " - time (s): 2.5251691341400146 - sq_loss: 1.6811268324090634e-06 - tot_loss: 2.9564267472910615 - acc: 0.8269833333333333 - val_acc: 0.8346\n",
      "Epoch 180 / 300 \n",
      " - time (s): 2.4827158451080322 - sq_loss: 3.2633181490382412e-06 - tot_loss: 2.7596655745221597 - acc: 0.8446833333333333 - val_acc: 0.8554\n",
      "Epoch 181 / 300 \n",
      " - time (s): 2.4885082244873047 - sq_loss: 1.2912067859360832e-06 - tot_loss: 2.9165547256742457 - acc: 0.8303333333333334 - val_acc: 0.8386\n",
      "Epoch 182 / 300 \n",
      " - time (s): 2.4863405227661133 - sq_loss: 9.382161806570366e-06 - tot_loss: 2.8907698280982004 - acc: 0.85175 - val_acc: 0.866\n",
      "Epoch 183 / 300 \n",
      " - time (s): 2.520087242126465 - sq_loss: 7.815847311576363e-06 - tot_loss: 2.7868219631045577 - acc: 0.84625 - val_acc: 0.8571\n",
      "Epoch 184 / 300 \n",
      " - time (s): 2.3998119831085205 - sq_loss: 3.9758251659804955e-06 - tot_loss: 2.645420273658601 - acc: 0.8368 - val_acc: 0.8489\n",
      "Epoch 185 / 300 \n",
      " - time (s): 2.504829168319702 - sq_loss: 1.135349248215789e-05 - tot_loss: 2.8740796330066587 - acc: 0.80975 - val_acc: 0.8179\n",
      "Epoch 186 / 300 \n",
      " - time (s): 2.4811770915985107 - sq_loss: 1.1697703712343355e-06 - tot_loss: 2.92277499441127 - acc: 0.8585333333333334 - val_acc: 0.8702\n",
      "Epoch 187 / 300 \n",
      " - time (s): 2.4371414184570312 - sq_loss: 1.6810841998449177e-06 - tot_loss: 3.148621663971994 - acc: 0.81835 - val_acc: 0.8291\n",
      "Epoch 188 / 300 \n",
      " - time (s): 2.5076260566711426 - sq_loss: 2.793906332954066e-06 - tot_loss: 2.8239984534770883 - acc: 0.8447 - val_acc: 0.8513\n",
      "Epoch 189 / 300 \n",
      " - time (s): 2.476515769958496 - sq_loss: 2.6179125143244164e-06 - tot_loss: 2.7152409151542543 - acc: 0.84955 - val_acc: 0.8609\n",
      "Epoch 190 / 300 \n",
      " - time (s): 2.405531167984009 - sq_loss: 2.1710961846110877e-06 - tot_loss: 2.653751754166933 - acc: 0.8396833333333333 - val_acc: 0.8539\n",
      "Epoch 191 / 300 \n",
      " - time (s): 2.439551591873169 - sq_loss: 1.2148289897595532e-05 - tot_loss: 2.6834885810694686 - acc: 0.8404 - val_acc: 0.8516\n",
      "Epoch 192 / 300 \n",
      " - time (s): 2.463599443435669 - sq_loss: 7.185596132330829e-06 - tot_loss: 2.84468068900469 - acc: 0.8145833333333333 - val_acc: 0.8226\n",
      "Epoch 193 / 300 \n",
      " - time (s): 2.468998670578003 - sq_loss: 1.4580652987206122e-06 - tot_loss: 2.854881373863236 - acc: 0.82525 - val_acc: 0.8362\n",
      "Epoch 194 / 300 \n",
      " - time (s): 2.430445909500122 - sq_loss: 3.668764293252025e-06 - tot_loss: 2.5499932679022095 - acc: 0.8566666666666667 - val_acc: 0.8704\n",
      "Epoch 195 / 300 \n",
      " - time (s): 2.439234972000122 - sq_loss: 2.8118520276620984e-06 - tot_loss: 2.8835655462607974 - acc: 0.8464166666666667 - val_acc: 0.8561\n",
      "Epoch 196 / 300 \n",
      " - time (s): 2.508056402206421 - sq_loss: 8.706598237040453e-06 - tot_loss: 2.7561192186567496 - acc: 0.8417833333333333 - val_acc: 0.8554\n",
      "Epoch 197 / 300 \n",
      " - time (s): 2.457829475402832 - sq_loss: 1.559750671731308e-06 - tot_loss: 2.786351778795506 - acc: 0.8359333333333333 - val_acc: 0.8473\n",
      "Epoch 198 / 300 \n",
      " - time (s): 2.4976117610931396 - sq_loss: 1.5488377584915725e-06 - tot_loss: 2.8498159928795985 - acc: 0.8380833333333333 - val_acc: 0.8511\n",
      "Epoch 199 / 300 \n",
      " - time (s): 2.4248244762420654 - sq_loss: 5.133465037943097e-06 - tot_loss: 2.8367411547610573 - acc: 0.8525833333333334 - val_acc: 0.8631\n",
      "Epoch 200 / 300 \n",
      " - time (s): 2.5184385776519775 - sq_loss: 1.9462172531348187e-06 - tot_loss: 2.869037305609254 - acc: 0.8542333333333333 - val_acc: 0.8638\n",
      "Epoch 201 / 300 \n",
      " - time (s): 2.513230562210083 - sq_loss: 1.469964786338096e-06 - tot_loss: 2.9281255443414693 - acc: 0.84245 - val_acc: 0.8538\n",
      "Epoch 202 / 300 \n",
      " - time (s): 2.55281138420105 - sq_loss: 1.2424969781932305e-06 - tot_loss: 2.9934983535603124 - acc: 0.8354333333333334 - val_acc: 0.8442\n",
      "Epoch 203 / 300 \n",
      " - time (s): 2.53326678276062 - sq_loss: 2.2693188839184586e-06 - tot_loss: 3.2314256518188813 - acc: 0.7497166666666667 - val_acc: 0.7556\n",
      "Epoch 204 / 300 \n",
      " - time (s): 2.4967620372772217 - sq_loss: 1.1009985882992623e-06 - tot_loss: 3.2973974645781254 - acc: 0.84475 - val_acc: 0.8536\n",
      "Epoch 205 / 300 \n",
      " - time (s): 2.5081071853637695 - sq_loss: 1.2018537063340773e-06 - tot_loss: 3.270438522522568 - acc: 0.8530833333333333 - val_acc: 0.863\n",
      "Epoch 206 / 300 \n",
      " - time (s): 2.5059525966644287 - sq_loss: 2.0871555079793325e-06 - tot_loss: 2.891873649368563 - acc: 0.8571166666666666 - val_acc: 0.8641\n",
      "Epoch 207 / 300 \n",
      " - time (s): 2.5544304847717285 - sq_loss: 4.760589945362881e-06 - tot_loss: 2.8727329738212575 - acc: 0.8502 - val_acc: 0.8584\n",
      "Epoch 208 / 300 \n",
      " - time (s): 2.4289379119873047 - sq_loss: 1.0904168448178098e-05 - tot_loss: 2.8730434395592965 - acc: 0.8495333333333334 - val_acc: 0.8612\n",
      "Epoch 209 / 300 \n",
      " - time (s): 2.511141538619995 - sq_loss: 9.796142421691911e-07 - tot_loss: 2.5577360662884985 - acc: 0.8556166666666667 - val_acc: 0.8665\n",
      "Epoch 210 / 300 \n",
      " - time (s): 2.4544241428375244 - sq_loss: 1.74165279531735e-06 - tot_loss: 2.9040201772527325 - acc: 0.8519 - val_acc: 0.8624\n",
      "Epoch 211 / 300 \n",
      " - time (s): 2.496307134628296 - sq_loss: 2.241387846879661e-06 - tot_loss: 2.8832468385226093 - acc: 0.86145 - val_acc: 0.8733\n",
      "Epoch 212 / 300 \n",
      " - time (s): 2.5281851291656494 - sq_loss: 1.175665147457039e-06 - tot_loss: 3.0478834198743243 - acc: 0.8438833333333333 - val_acc: 0.8508\n",
      "Epoch 213 / 300 \n",
      " - time (s): 2.5204274654388428 - sq_loss: 2.1596742953988723e-06 - tot_loss: 2.8333834636505344 - acc: 0.8592333333333333 - val_acc: 0.8718\n",
      "Epoch 214 / 300 \n",
      " - time (s): 2.4600205421447754 - sq_loss: 1.8444328588884673e-06 - tot_loss: 3.2267453647831417 - acc: 0.8406333333333333 - val_acc: 0.8499\n",
      "Epoch 215 / 300 \n",
      " - time (s): 2.5127906799316406 - sq_loss: 2.072428287647199e-06 - tot_loss: 3.0868706002247563 - acc: 0.8480833333333333 - val_acc: 0.8583\n",
      "Epoch 216 / 300 \n",
      " - time (s): 2.594301462173462 - sq_loss: 3.6318740512797376e-06 - tot_loss: 3.017245789589424 - acc: 0.8640166666666667 - val_acc: 0.8717\n",
      "Epoch 217 / 300 \n",
      " - time (s): 2.521214723587036 - sq_loss: 8.976563549367711e-06 - tot_loss: 3.0010480391538294 - acc: 0.8386666666666667 - val_acc: 0.8476\n",
      "Epoch 218 / 300 \n",
      " - time (s): 2.437291383743286 - sq_loss: 1.893950866360683e-05 - tot_loss: 2.548079858681376 - acc: 0.8506 - val_acc: 0.8628\n",
      "Epoch 219 / 300 \n",
      " - time (s): 2.4665064811706543 - sq_loss: 5.5238328968698625e-06 - tot_loss: 3.169683563085073 - acc: 0.8426833333333333 - val_acc: 0.8532\n",
      "Epoch 220 / 300 \n",
      " - time (s): 2.53236722946167 - sq_loss: 1.557545033392671e-06 - tot_loss: 2.955667462770066 - acc: 0.8511833333333333 - val_acc: 0.8615\n",
      "Epoch 221 / 300 \n",
      " - time (s): 2.4769251346588135 - sq_loss: 4.054993951285724e-06 - tot_loss: 2.8596751271870744 - acc: 0.8464166666666667 - val_acc: 0.8541\n",
      "Epoch 222 / 300 \n",
      " - time (s): 2.48337459564209 - sq_loss: 1.283123765460914e-06 - tot_loss: 3.0506003245513966 - acc: 0.8409 - val_acc: 0.8501\n",
      "Epoch 223 / 300 \n",
      " - time (s): 2.461284637451172 - sq_loss: 1.1033660030079773e-06 - tot_loss: 2.865909069739928 - acc: 0.8502833333333333 - val_acc: 0.8616\n",
      "Epoch 224 / 300 \n",
      " - time (s): 2.4825313091278076 - sq_loss: 1.0407891295471927e-06 - tot_loss: 2.743810270192398 - acc: 0.8659 - val_acc: 0.8767\n",
      "Epoch 225 / 300 \n",
      " - time (s): 2.5768582820892334 - sq_loss: 1.726200025586877e-06 - tot_loss: 3.1212562237715247 - acc: 0.8386666666666667 - val_acc: 0.8479\n",
      "Epoch 226 / 300 \n",
      " - time (s): 2.5212504863739014 - sq_loss: 7.2039833867165726e-06 - tot_loss: 3.0584641269811073 - acc: 0.8655833333333334 - val_acc: 0.8792\n",
      "Epoch 227 / 300 \n",
      " - time (s): 2.4883320331573486 - sq_loss: 4.308738425606862e-06 - tot_loss: 3.004433384488948 - acc: 0.8466333333333333 - val_acc: 0.8574\n",
      "Epoch 228 / 300 \n",
      " - time (s): 2.718275785446167 - sq_loss: 5.32217518411926e-06 - tot_loss: 3.03005272612927 - acc: 0.8422666666666667 - val_acc: 0.8506\n",
      "Epoch 229 / 300 \n",
      " - time (s): 2.5063631534576416 - sq_loss: 2.3481397875002585e-06 - tot_loss: 2.8631633092945776 - acc: 0.8516333333333334 - val_acc: 0.8651\n",
      "Epoch 230 / 300 \n",
      " - time (s): 2.556518077850342 - sq_loss: 1.4042838074601605e-06 - tot_loss: 2.734862779000082 - acc: 0.82015 - val_acc: 0.8272\n",
      "Epoch 231 / 300 \n",
      " - time (s): 2.5187759399414062 - sq_loss: 5.116244665259728e-06 - tot_loss: 2.93988760653383 - acc: 0.8388166666666667 - val_acc: 0.8468\n",
      "Epoch 232 / 300 \n",
      " - time (s): 2.5091402530670166 - sq_loss: 4.823098606721032e-06 - tot_loss: 3.4424333947654304 - acc: 0.83305 - val_acc: 0.8408\n",
      "Epoch 233 / 300 \n",
      " - time (s): 2.5346834659576416 - sq_loss: 1.1708018519129837e-06 - tot_loss: 3.3159837203986626 - acc: 0.8241666666666667 - val_acc: 0.8332\n",
      "Epoch 234 / 300 \n",
      " - time (s): 2.375378131866455 - sq_loss: 4.181979875284014e-06 - tot_loss: 3.0754316950255998 - acc: 0.79495 - val_acc: 0.8004\n",
      "Epoch 235 / 300 \n",
      " - time (s): 2.4344332218170166 - sq_loss: 1.35193681671808e-06 - tot_loss: 3.0409235219324273 - acc: 0.84365 - val_acc: 0.8528\n",
      "Epoch 236 / 300 \n",
      " - time (s): 2.4433088302612305 - sq_loss: 1.268792857445078e-06 - tot_loss: 2.89096877539896 - acc: 0.8669666666666667 - val_acc: 0.878\n",
      "Epoch 237 / 300 \n",
      " - time (s): 2.394953727722168 - sq_loss: 2.6463953872735146e-06 - tot_loss: 3.096070801037513 - acc: 0.8372166666666667 - val_acc: 0.8456\n",
      "Epoch 238 / 300 \n",
      " - time (s): 2.4301199913024902 - sq_loss: 1.5189528994596913e-06 - tot_loss: 3.1687201871449133 - acc: 0.8556833333333334 - val_acc: 0.8652\n",
      "Epoch 239 / 300 \n",
      " - time (s): 2.475121021270752 - sq_loss: 1.8052072846330702e-05 - tot_loss: 3.125786402641097 - acc: 0.8490166666666666 - val_acc: 0.86\n",
      "Epoch 240 / 300 \n",
      " - time (s): 2.4322509765625 - sq_loss: 2.7688995032804087e-05 - tot_loss: 3.132448493190168 - acc: 0.8605 - val_acc: 0.873\n",
      "Epoch 241 / 300 \n",
      " - time (s): 2.4336798191070557 - sq_loss: 8.791249456407968e-06 - tot_loss: 3.315731248237171 - acc: 0.8542666666666666 - val_acc: 0.8649\n",
      "Epoch 242 / 300 \n",
      " - time (s): 2.4979124069213867 - sq_loss: 5.017005150875775e-06 - tot_loss: 2.8790358039254897 - acc: 0.8682 - val_acc: 0.8768\n",
      "Epoch 243 / 300 \n",
      " - time (s): 2.438079595565796 - sq_loss: 2.271652647323208e-06 - tot_loss: 3.0103385174120376 - acc: 0.8512666666666666 - val_acc: 0.861\n",
      "Epoch 244 / 300 \n",
      " - time (s): 2.4218852519989014 - sq_loss: 2.7363594199414365e-06 - tot_loss: 2.736474397769598 - acc: 0.86445 - val_acc: 0.8736\n",
      "Epoch 245 / 300 \n",
      " - time (s): 2.5086474418640137 - sq_loss: 3.918897164112423e-06 - tot_loss: 2.6247727313466385 - acc: 0.8377333333333333 - val_acc: 0.8454\n",
      "Epoch 246 / 300 \n",
      " - time (s): 2.399156093597412 - sq_loss: 1.2574330412462587e-06 - tot_loss: 2.878381142714943 - acc: 0.8535333333333334 - val_acc: 0.8624\n",
      "Epoch 247 / 300 \n",
      " - time (s): 2.4139392375946045 - sq_loss: 3.1877523269940866e-06 - tot_loss: 3.244381392893729 - acc: 0.8619166666666667 - val_acc: 0.8719\n",
      "Epoch 248 / 300 \n",
      " - time (s): 2.4487385749816895 - sq_loss: 2.9083896151860245e-05 - tot_loss: 3.042569808620101 - acc: 0.86665 - val_acc: 0.8754\n",
      "Epoch 249 / 300 \n",
      " - time (s): 2.3685927391052246 - sq_loss: 1.4141389328869991e-05 - tot_loss: 2.5995192497339303 - acc: 0.8388166666666667 - val_acc: 0.8462\n",
      "Epoch 250 / 300 \n",
      " - time (s): 2.404097080230713 - sq_loss: 1.6991714801406488e-05 - tot_loss: 2.9925361745463306 - acc: 0.8628333333333333 - val_acc: 0.8738\n",
      "Epoch 251 / 300 \n",
      " - time (s): 2.501697540283203 - sq_loss: 3.3581347906874726e-06 - tot_loss: 2.7709586976809533 - acc: 0.84075 - val_acc: 0.8515\n",
      "Epoch 252 / 300 \n",
      " - time (s): 2.4545235633850098 - sq_loss: 1.309352455791668e-06 - tot_loss: 3.2297131538218764 - acc: 0.856 - val_acc: 0.8683\n",
      "Epoch 253 / 300 \n",
      " - time (s): 2.4875528812408447 - sq_loss: 1.3404564924712759e-06 - tot_loss: 2.8599506026616837 - acc: 0.8654666666666667 - val_acc: 0.8792\n",
      "Epoch 254 / 300 \n",
      " - time (s): 2.4520716667175293 - sq_loss: 5.387841156334616e-06 - tot_loss: 2.86156399174979 - acc: 0.8660166666666667 - val_acc: 0.8759\n",
      "Epoch 255 / 300 \n",
      " - time (s): 2.4269003868103027 - sq_loss: 2.4833025236148387e-06 - tot_loss: 3.0002332754556846 - acc: 0.8491666666666666 - val_acc: 0.8571\n",
      "Epoch 256 / 300 \n",
      " - time (s): 2.433061361312866 - sq_loss: 2.187642394346767e-06 - tot_loss: 3.0051255120035876 - acc: 0.8512 - val_acc: 0.8632\n",
      "Epoch 257 / 300 \n",
      " - time (s): 2.483613967895508 - sq_loss: 4.6266095523606054e-06 - tot_loss: 3.061755736943269 - acc: 0.8591666666666666 - val_acc: 0.8715\n",
      "Epoch 258 / 300 \n",
      " - time (s): 2.4448933601379395 - sq_loss: 2.820438339767861e-06 - tot_loss: 3.061389321210072 - acc: 0.8554666666666667 - val_acc: 0.8657\n",
      "Epoch 259 / 300 \n",
      " - time (s): 2.418632984161377 - sq_loss: 1.4773318980587646e-05 - tot_loss: 2.9537264090677127 - acc: 0.8198 - val_acc: 0.8335\n",
      "Epoch 260 / 300 \n",
      " - time (s): 2.409559965133667 - sq_loss: 1.108067408495117e-05 - tot_loss: 2.6349486128328863 - acc: 0.83855 - val_acc: 0.8497\n",
      "Epoch 261 / 300 \n",
      " - time (s): 2.4901416301727295 - sq_loss: 6.085951554268831e-06 - tot_loss: 2.719395790391445 - acc: 0.8503333333333334 - val_acc: 0.8606\n",
      "Epoch 262 / 300 \n",
      " - time (s): 2.4353978633880615 - sq_loss: 4.687547971116146e-06 - tot_loss: 2.496800248814452 - acc: 0.8532333333333333 - val_acc: 0.8645\n",
      "Epoch 263 / 300 \n",
      " - time (s): 2.5141189098358154 - sq_loss: 4.15714885093621e-06 - tot_loss: 2.762874070572707 - acc: 0.8376333333333333 - val_acc: 0.8478\n",
      "Epoch 264 / 300 \n",
      " - time (s): 2.423234701156616 - sq_loss: 1.664553019509185e-05 - tot_loss: 2.717812788405354 - acc: 0.8619333333333333 - val_acc: 0.8756\n",
      "Epoch 265 / 300 \n",
      " - time (s): 2.482832908630371 - sq_loss: 9.59468525252305e-06 - tot_loss: 3.012692760119535 - acc: 0.8503 - val_acc: 0.8608\n",
      "Epoch 266 / 300 \n",
      " - time (s): 2.477932929992676 - sq_loss: 1.4335923879116308e-06 - tot_loss: 2.895134753052389 - acc: 0.8616166666666667 - val_acc: 0.8737\n",
      "Epoch 267 / 300 \n",
      " - time (s): 2.5019924640655518 - sq_loss: 8.244457603723276e-06 - tot_loss: 2.994405564198132 - acc: 0.8592666666666666 - val_acc: 0.8726\n",
      "Epoch 268 / 300 \n",
      " - time (s): 2.4344024658203125 - sq_loss: 1.1359637028363068e-05 - tot_loss: 2.7398806136907297 - acc: 0.8491166666666666 - val_acc: 0.8593\n",
      "Epoch 269 / 300 \n",
      " - time (s): 2.4625062942504883 - sq_loss: 1.1810081559815444e-05 - tot_loss: 2.7002432555691485 - acc: 0.8637 - val_acc: 0.8753\n",
      "Epoch 270 / 300 \n",
      " - time (s): 2.4952006340026855 - sq_loss: 1.050144146574894e-05 - tot_loss: 2.8068643115957457 - acc: 0.8382166666666667 - val_acc: 0.8474\n",
      "Epoch 271 / 300 \n",
      " - time (s): 2.4036827087402344 - sq_loss: 1.5035786418593489e-05 - tot_loss: 2.617407067666136 - acc: 0.8699333333333333 - val_acc: 0.8774\n",
      "Epoch 272 / 300 \n",
      " - time (s): 2.4847049713134766 - sq_loss: 2.0075955035281368e-05 - tot_loss: 2.5313005369807797 - acc: 0.8637833333333333 - val_acc: 0.8711\n",
      "Epoch 273 / 300 \n",
      " - time (s): 2.4429023265838623 - sq_loss: 1.892108775791712e-05 - tot_loss: 2.7862907475100656 - acc: 0.8584333333333334 - val_acc: 0.8687\n",
      "Epoch 274 / 300 \n",
      " - time (s): 2.4157299995422363 - sq_loss: 2.2082822397351265e-05 - tot_loss: 2.89130594100061 - acc: 0.84985 - val_acc: 0.858\n",
      "Epoch 275 / 300 \n",
      " - time (s): 2.3441481590270996 - sq_loss: 2.575477765276446e-06 - tot_loss: 2.887477697102213 - acc: 0.8534833333333334 - val_acc: 0.8631\n",
      "Epoch 276 / 300 \n",
      " - time (s): 2.406661033630371 - sq_loss: 1.94347080650914e-06 - tot_loss: 3.2566478428532264 - acc: 0.8465666666666667 - val_acc: 0.8583\n",
      "Epoch 277 / 300 \n",
      " - time (s): 2.3858962059020996 - sq_loss: 1.6643464277876774e-06 - tot_loss: 3.027716522032506 - acc: 0.8478833333333333 - val_acc: 0.8554\n",
      "Epoch 278 / 300 \n",
      " - time (s): 2.433565855026245 - sq_loss: 5.249730293144239e-06 - tot_loss: 2.9053657056306292 - acc: 0.8623333333333333 - val_acc: 0.8738\n",
      "Epoch 279 / 300 \n",
      " - time (s): 2.4894959926605225 - sq_loss: 1.736141257424606e-06 - tot_loss: 2.6744083158587273 - acc: 0.8475666666666667 - val_acc: 0.8616\n",
      "Epoch 280 / 300 \n",
      " - time (s): 2.494828939437866 - sq_loss: 1.73574187556369e-06 - tot_loss: 2.886600266214714 - acc: 0.8690833333333333 - val_acc: 0.8776\n",
      "Epoch 281 / 300 \n",
      " - time (s): 2.484049081802368 - sq_loss: 3.297859166195849e-06 - tot_loss: 2.9702640251475714 - acc: 0.8687 - val_acc: 0.879\n",
      "Epoch 282 / 300 \n",
      " - time (s): 2.3959860801696777 - sq_loss: 1.4168293773764162e-06 - tot_loss: 2.8192049273624207 - acc: 0.8563166666666666 - val_acc: 0.8678\n",
      "Epoch 283 / 300 \n",
      " - time (s): 2.418924570083618 - sq_loss: 1.2276086636120453e-06 - tot_loss: 2.7319065975698322 - acc: 0.8671 - val_acc: 0.8754\n",
      "Epoch 284 / 300 \n",
      " - time (s): 2.6239824295043945 - sq_loss: 1.3719546814172645e-06 - tot_loss: 2.9397453952188926 - acc: 0.84935 - val_acc: 0.8603\n",
      "Epoch 285 / 300 \n",
      " - time (s): 2.5159852504730225 - sq_loss: 3.125151806671056e-06 - tot_loss: 3.134662318506116 - acc: 0.84205 - val_acc: 0.8538\n",
      "Epoch 286 / 300 \n",
      " - time (s): 2.5275990962982178 - sq_loss: 9.936263268173207e-06 - tot_loss: 3.28161427048326 - acc: 0.8574166666666667 - val_acc: 0.8715\n",
      "Epoch 287 / 300 \n",
      " - time (s): 2.479308605194092 - sq_loss: 1.675802536738047e-06 - tot_loss: 3.1160719126168033 - acc: 0.8342833333333334 - val_acc: 0.8426\n",
      "Epoch 288 / 300 \n",
      " - time (s): 2.453917980194092 - sq_loss: 1.1552502883205307e-06 - tot_loss: 2.741012693560265 - acc: 0.8646 - val_acc: 0.8769\n",
      "Epoch 289 / 300 \n",
      " - time (s): 2.4314334392547607 - sq_loss: 1.5929235814837739e-06 - tot_loss: 2.7259291678910813 - acc: 0.8603 - val_acc: 0.8734\n",
      "Epoch 290 / 300 \n",
      " - time (s): 2.420208692550659 - sq_loss: 2.7290857360640075e-06 - tot_loss: 2.9118996793308725 - acc: 0.8609833333333333 - val_acc: 0.8714\n",
      "Epoch 291 / 300 \n",
      " - time (s): 2.4563958644866943 - sq_loss: 1.2891288179162075e-06 - tot_loss: 2.8852793818915643 - acc: 0.8502 - val_acc: 0.8628\n",
      "Epoch 292 / 300 \n",
      " - time (s): 2.4183011054992676 - sq_loss: 4.6867730816302355e-06 - tot_loss: 2.81812342296098 - acc: 0.8654166666666666 - val_acc: 0.8752\n",
      "Epoch 293 / 300 \n",
      " - time (s): 2.456385374069214 - sq_loss: 1.4929137250874192e-06 - tot_loss: 3.1634664147959484 - acc: 0.86005 - val_acc: 0.8678\n",
      "Epoch 294 / 300 \n",
      " - time (s): 2.48943829536438 - sq_loss: 2.380797013756819e-06 - tot_loss: 2.7098769299936976 - acc: 0.8317666666666667 - val_acc: 0.8421\n",
      "Epoch 295 / 300 \n",
      " - time (s): 2.4246153831481934 - sq_loss: 2.6823436201084405e-06 - tot_loss: 2.886252990074354 - acc: 0.8646333333333334 - val_acc: 0.8738\n",
      "Epoch 296 / 300 \n",
      " - time (s): 2.4332528114318848 - sq_loss: 1.2290261111047585e-06 - tot_loss: 2.6172082150164897 - acc: 0.8680166666666667 - val_acc: 0.8808\n",
      "Epoch 297 / 300 \n",
      " - time (s): 2.467092275619507 - sq_loss: 2.0119850887567736e-06 - tot_loss: 3.0446136625405416 - acc: 0.8706333333333334 - val_acc: 0.8799\n",
      "Epoch 298 / 300 \n",
      " - time (s): 2.4788765907287598 - sq_loss: 1.974358383449726e-06 - tot_loss: 2.9172555745808495 - acc: 0.8684333333333333 - val_acc: 0.8745\n",
      "Epoch 299 / 300 \n",
      " - time (s): 2.4506125450134277 - sq_loss: 2.3575860268465476e-06 - tot_loss: 2.8693175178352703 - acc: 0.8578166666666667 - val_acc: 0.8691\n",
      "Epoch 300 / 300 \n",
      " - time (s): 2.438969612121582 - sq_loss: 1.4946592727937968e-06 - tot_loss: 2.683251003475789 - acc: 0.8607333333333334 - val_acc: 0.8731\n"
     ]
    }
   ],
   "source": [
    "# Iterations\n",
    "print('Train on', N, 'samples, validate on', N_test, 'samples')\n",
    "for k in range(niter):\n",
    "    start = time.time()\n",
    "    '''\n",
    "    # update V4\n",
    "    V4 = (y_one_hot + gamma*U4 + alpha*V4)/(1 + gamma + alpha)\n",
    "    \n",
    "    # update U4 \n",
    "    U4 = (gamma*V4 + rho*(torch.mm(W4,V3) + b4.repeat(1,N)))/(gamma + rho)\n",
    "    '''\n",
    "    # update V11\n",
    "    V11 = (y_one_hot + gamma*U11 + alpha*V11)/(1 + gamma + alpha)\n",
    "    \n",
    "    # update U11\n",
    "    U11 = (gamma*V11 + rho*(torch.mm(W11, V10) + b11.repeat(1,N)))/(gamma + rho)\n",
    "    \n",
    "    '''\n",
    "    for i in range(1):\n",
    "        # update W11, b11, V10 and U10; update W10, b10, V9 and U9; update W9, b9, V8 and U8\n",
    "        W11, b11, V10, U10, W10, b10, V9, U9, W9, b9, V8, U8 = \\\n",
    "        three_blocks_update(W11, b11, W10, b10, W9, b9, W8, b8, V11, U11, V10, U10, V9, U9, V8, U8, V7, d10, d9, d8)\n",
    "    \n",
    "    for i in range(2):\n",
    "        # update W8, b8, V7 and U7; update W7, b7, V6 and U6; update W6, b6, V5 and U5\n",
    "        W8, b8, V7, U7, W7, b7, V6, U6, W6, b6, V5, U5 = \\\n",
    "        three_blocks_update(W8, b8, W7, b7, W6, b6, W5, b5, V8, U8, V7, U7, V6, U6, V5, U5, V4, d7, d6, d5)\n",
    "    \n",
    "    for i in range(3):\n",
    "        # update W5, b5, V4 and U4; update W4, b4, V3 and U3; update W3, b3, V2 and U2\n",
    "        W5, b5, V4, U4, W4, b4, V3, U3, W3, b3, V2, U2 = \\\n",
    "        three_blocks_update(W5, b5, W4, b4, W3, b3, W2, b2, V5, U5, V4, U4, V3, U3, V2, U2, V1, d4, d3, d2)\n",
    "    '''    \n",
    "    \n",
    "    # update W11, b11, V10 and U10\n",
    "    W11, b11, V10, U10 = block_update(W11, b11, W10, b10, V11, U11, V10, U10, V9, d10)\n",
    "    \n",
    "    # update W10, b10, V9 and U9\n",
    "    W10, b10, V9, U9 = block_update(W10, b10, W9, b9, V10, U10, V9, U9, V8, d9)\n",
    "    \n",
    "    # update W9, b9, V8 and U8\n",
    "    W9, b9, V8, U8 = block_update(W9, b9, W8, b8, V9, U9, V8, U8, V7, d8)\n",
    "    \n",
    "    # update W8, b8, V7 and U7\n",
    "    W8, b8, V7, U7 = block_update(W8, b8, W7, b7, V8, U8, V7, U7, V6, d7)\n",
    "    \n",
    "    # update W7, b7, V6 and U6\n",
    "    W7, b7, V6, U6 = block_update(W7, b7, W6, b6, V7, U7, V6, U6, V5, d6)\n",
    "    \n",
    "    # update W6, b6, V5 and U5\n",
    "    W6, b6, V5, U5 = block_update(W6, b6, W5, b5, V6, U6, V5, U5, V4, d5)\n",
    "    \n",
    "    \n",
    "    # update W5, b5, V4 and U4\n",
    "    W5, b5, V4, U4 = block_update(W5, b5, W4, b4, V5, U5, V4, U4, V3, d4)\n",
    "    \n",
    "    # update W4, b4, V3 and U3\n",
    "    W4, b4, V3, U3 = block_update(W4, b4, W3, b3, V4, U4, V3, U3, V2, d3)\n",
    "    \n",
    "    # update W3, b3, V2 and U2\n",
    "    W3, b3, V2, U2 = block_update(W3, b3, W2, b2, V3, U3, V2, U2, V1, d2)\n",
    "    \n",
    "    '''\n",
    "    for i in range(3):\n",
    "        # update W5, b5, V4 and U4; update W4, b4, V3 and U3; update W3, b3, V2 and U2\n",
    "        W5, b5, V4, U4, W4, b4, V3, U3, W3, b3, V2, U2 = \\\n",
    "        three_blocks_update(W5, b5, W4, b4, W3, b3, W2, b2, V5, U5, V4, U4, V3, U3, V2, U2, V1, d4, d3, d2)\n",
    "    '''\n",
    "    \n",
    "    # update W2, b2, V1 and U1\n",
    "    W2, b2, V1, U1 = block_update(W2, b2, W1, b1, V2, U2, V1, U1, x_train, d1)\n",
    "        \n",
    "    # update W1 and b1\n",
    "    W1, b1 = updateWb_js(U1, x_train, W1, b1, alpha, rho)\n",
    "\n",
    "    # compute updated training activations\n",
    "    _, a1_train = feed_forward(W1, b1, x_train)\n",
    "    _, a2_train = feed_forward(W2, b2, a1_train)\n",
    "    _, a3_train = feed_forward(W3, b3, a2_train)\n",
    "    _, a4_train = feed_forward(W4, b4, a3_train)\n",
    "    _, a5_train = feed_forward(W5, b5, a4_train)\n",
    "    _, a6_train = feed_forward(W6, b6, a5_train)\n",
    "    _, a7_train = feed_forward(W7, b7, a6_train)\n",
    "    _, a8_train = feed_forward(W8, b8, a7_train)\n",
    "    _, a9_train = feed_forward(W9, b9, a8_train)\n",
    "    _, a10_train = feed_forward(W10, b10, a9_train)\n",
    "    \n",
    "    \n",
    "    # training prediction\n",
    "    pred = torch.argmax(torch.addmm(b11.repeat(1, N), W11, a10_train), dim=0)\n",
    "    # pred = torch.argmax(torch.addmm(b4.repeat(1, N), W4, a3_train), dim=0)\n",
    "    \n",
    "    # compute test activations\n",
    "    _, a1_test = feed_forward(W1, b1, x_test, N_test)\n",
    "    _, a2_test = feed_forward(W2, b2, a1_test, N_test)\n",
    "    _, a3_test = feed_forward(W3, b3, a2_test, N_test)\n",
    "    _, a4_test = feed_forward(W4, b4, a3_test, N_test)\n",
    "    _, a5_test = feed_forward(W5, b5, a4_test, N_test)\n",
    "    _, a6_test = feed_forward(W6, b6, a5_test, N_test)\n",
    "    _, a7_test = feed_forward(W7, b7, a6_test, N_test)\n",
    "    _, a8_test = feed_forward(W8, b8, a7_test, N_test)\n",
    "    _, a9_test = feed_forward(W9, b9, a8_test, N_test)\n",
    "    _, a10_test = feed_forward(W10, b10, a9_test, N_test)\n",
    "    \n",
    "    # test/validation prediction\n",
    "    pred_test = torch.argmax(torch.addmm(b11.repeat(1, N_test), W11, a10_test), dim=0)\n",
    "    # pred_test = torch.argmax(torch.addmm(b4.repeat(1, N_test), W4, a3_test), dim=0)\n",
    "    \n",
    "    # compute training loss\n",
    "    loss1[k] = gamma/2*torch.pow(torch.dist(V11,y_one_hot,2),2).cpu().numpy()\n",
    "    # loss1[k] = gamma/2*torch.pow(torch.dist(V4,y_one_hot,2),2).cpu().numpy()\n",
    "    loss2[k] = loss1[k] \\\n",
    "    + compute_loss(W1, b1, x_train, U1) \\\n",
    "    + compute_loss(W2, b2, V1, U2) \\\n",
    "    + compute_loss(W3, b3, V2, U3) \\\n",
    "    + compute_loss(W4, b4, V3, U4) \\\n",
    "    + compute_loss(W5, b5, V4, U5) \\\n",
    "    + compute_loss(W6, b6, V5, U6) \\\n",
    "    + compute_loss(W7, b7, V6, U7) \\\n",
    "    + compute_loss(W8, b8, V7, U8) \\\n",
    "    + compute_loss(W9, b9, V8, U9) \\\n",
    "    + compute_loss(W10, b10, V9, U10) \\\n",
    "    + compute_loss(W11, b11, V10, U11) \n",
    "    \n",
    "    # compute training accuracy\n",
    "    correct_train = pred == y_train\n",
    "    accuracy_train[k] = np.mean(correct_train.cpu().numpy())\n",
    "    \n",
    "    # compute validation accuracy\n",
    "    correct_test = pred_test == y_test\n",
    "    accuracy_test[k] = np.mean(correct_test.cpu().numpy())\n",
    "    \n",
    "    # compute training time\n",
    "    stop = time.time()\n",
    "    duration = stop - start\n",
    "    time1[k] = duration\n",
    "    \n",
    "    # print results\n",
    "    print('Epoch', k + 1, '/', niter, '\\n', \n",
    "          '-', 'time (s):', time1[k], '-', 'sq_loss:', loss1[k], '-', 'tot_loss:', loss2[k], \n",
    "          '-', 'acc:', accuracy_train[k], '-', 'val_acc:', accuracy_test[k])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Visualization of training results"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "Text(0.5,1,'validation accuracy')"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYcAAAEICAYAAAC0+DhzAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAG2RJREFUeJzt3X+U1fWd3/Hn696ZYUAQVCZEAReMxBSz2UgpcevW3dWNoE0Xd+vmYNuVpnQ57Zo22+420aatbhKzMW1i46kxpZEGjUd02eRIWlzDqqlnz67o+CP+AH9M/AWIMAKCiPwYePeP72fwOvfXzL0z3Bm+r8c5c+Z7P9/P9973lzvMaz6f7+feq4jAzMysVKHVBZiZ2ejjcDAzszIOBzMzK+NwMDOzMg4HMzMr43AwM7MyDgczQNL3JP3n4e47xBpmSQpJbcN932ZDJb/OwcY6Sa8C/zIi/qrVtTRD0izgFaA9IvpaW43lnUcOdsLzX+JmQ+dwsDFN0h3AmcBPJO2T9MWS6Zllkl4HHkx9/1zSm5L2SHpY0rkl9/MDSV9L278haYukP5a0Q9I2SZ9rsO9pkn4iaa+kxyR9TdJfD/LczpC0VtIuST2S/qBk3wJJ3el+t0v6dmrvlPRDSTslvZ0ec1pT/8iWSw4HG9Mi4veB14F/FBETI+KbJbt/Hfg7wMJ0+z5gDvAh4Angzhp3/WFgMjAdWAbcIumUBvreAryb+ixNX4O1GtgCnAFcAXxd0kVp33eA70TEycBHgHtS+9JUy0zgNOBfAe8N4THNAIeDndiuj4h3I+I9gIhYGRHvRMRB4HrgVyRNrnLsYeArEXE4ItYB+4BzhtJXUhH4x8B1EbE/IjYCqwZTuKSZwAXAlyLiQEQ8BXwfuKrkMc+WNDUi9kXEIyXtpwFnR8SRiHg8IvYO5jHNSjkc7ES2uX9DUlHSNyT9QtJe4NW0a2qVY3cOuCi8H5g4xL5dQFtpHQO2azkD2BUR75S0vUY2OoFshPJR4Pk0dfSZ1H4HcD+wWtIbkr4pqX2Qj2l2jMPBTgTVltyVtv8TYDHwW2TTLrNSu0auLHqBPmBGSdvMQR77BnCqpEklbWcCWwEi4qWIuJJsiuxGYI2kk9Lo5U8jYi7w94HP8P5ow2zQHA52ItgOnFWnzyTgILATmAB8faSLiogjwI+A6yVNkPQxBvmLOiI2A38D/Fm6yPwJstHCDwEk/TNJXRFxFHg7HXZU0m9K+uU0pbWXbJrp6PCemeWBw8FOBH8G/Ke0OudPqvS5nWxaZiuwEXikSr/h9nmykcqbZFM+d5GF1GBcSTbCeQP4Mdm1i/7XciwCnpO0j+zi9JJ0beXDwBqyYNgE/L/0uGZD4hfBmR1Hkm4EPhwRQ1m1ZHbceeRgNoIkfUzSJ5RZQDY19ONW12VWj185ajayJpFNJZ1Bdm3kW8C9La3IbBA8rWRmZmU8rWRmZmXG7LTS1KlTY9asWa0uw8xsTHn88cffioiuev3GbDjMmjWL7u7uVpdhZjamSHptMP08rWRmZmUcDmZmVsbhYGZmZRwOZmZWxuFgZmZlHA5mZlbG4WBmZmVyFw6r/uZVfvLzN1pdhpnZqJa7cPjhI6+x7pltrS7DzGxUy104FAviyFG/2aCZWS25C4eChLPBzKy2/IVDAY76bcrNzGrKXTgU5WklM7N6chcOhYI8cjAzqyN34eCRg5lZfbkLh4JXK5mZ1ZW7cChKeFbJzKy2uuEgaaWkHZKeLWn7r5Kel/S0pB9LmlKy71pJPZJekLSwpH1RauuRdE1J+2xJG1L73ZI6hvMEByoU4IjTwcyspsGMHH4ALBrQth74eER8AngRuBZA0lxgCXBuOua7koqSisAtwKXAXODK1BfgRuCmiDgb2A0sa+qM6ij4moOZWV11wyEiHgZ2DWj7aUT0pZuPADPS9mJgdUQcjIhXgB5gQfrqiYiXI+IQ
sBpYLEnARcCadPwq4PImz6mmolcrmZnVNRzXHP4FcF/ang5sLtm3JbVVaz8NeLskaPrbK5K0XFK3pO7e3t6GivVqJTOz+poKB0lfBvqAO4ennNoiYkVEzI+I+V1dXQ3dh1crmZnV19bogZL+OfAZ4OKIY/M0W4GZJd1mpDaqtO8EpkhqS6OH0v4joihPK5mZ1dPQyEHSIuCLwG9HxP6SXWuBJZLGSZoNzAEeBR4D5qSVSR1kF63XplB5CLgiHb8UuLexUxmc7L2VRvIRzMzGvsEsZb0L+FvgHElbJC0D/gcwCVgv6SlJ3wOIiOeAe4CNwF8CV0fEkTQq+DxwP7AJuCf1BfgS8O8l9ZBdg7htWM9wgILEUaeDmVlNdaeVIuLKCs1Vf4FHxA3ADRXa1wHrKrS/TLaa6bgoFuTXOZiZ1ZHLV0j7grSZWW25C4dCwdNKZmb15C4civK0kplZPbkLB69WMjOrL3/h4NVKZmZ15S4cvFrJzKy+3IWD35XVzKy+3IVD0auVzMzqymU4eFrJzKy23IWD5NVKZmb15C4cil6tZGZWV/7CwdNKZmZ15S4cChIREA4IM7OqchcOxYIAvJzVzKyG/IaDRw5mZlXlLhyUZQPOBjOz6nIXDkV5WsnMrJ78hYOnlczM6spdOBTSyMGvdTAzqy534eDVSmZm9eUuHAqeVjIzqyt/4eDVSmZmddUNB0krJe2Q9GxJ26mS1kt6KX0/JbVL0s2SeiQ9LWleyTFLU/+XJC0taf+7kp5Jx9ws9S82HRlerWRmVt9gRg4/ABYNaLsGeCAi5gAPpNsAlwJz0tdy4FbIwgS4DvgUsAC4rj9QUp8/KDlu4GMNq4KvOZiZ1VU3HCLiYWDXgObFwKq0vQq4vKT99sg8AkyRdDqwEFgfEbsiYjewHliU9p0cEY9E9mZHt5fc14joHzkc9bySmVlVjV5zmBYR29L2m8C0tD0d2FzSb0tqq9W+pUJ7RZKWS+qW1N3b29tQ4V6tZGZWX9MXpNNf/MflN21ErIiI+RExv6urq6H76J9W8sjBzKy6RsNhe5oSIn3fkdq3AjNL+s1IbbXaZ1RoHzHvTyuN5KOYmY1tjYbDWqB/xdFS4N6S9qvSqqXzgT1p+ul+4BJJp6QL0ZcA96d9eyWdn1YpXVVyXyOifymrp5XMzKprq9dB0l3AbwBTJW0hW3X0DeAeScuA14DPpu7rgMuAHmA/8DmAiNgl6avAY6nfVyKi/yL3H5KtiBoP3Je+RoxXK5mZ1Vc3HCLiyiq7Lq7QN4Crq9zPSmBlhfZu4OP16hguXq1kZlZf7l4h7dVKZmb15S4cvFrJzKy+3IWDVyuZmdWXu3DwaiUzs/ryFw4Ff9iPmVk9uQsHf0yomVl9uQuHgt+y28ysrtyFQ9GrlczM6spfOPSvVjra4kLMzEax3IVD/+fM+ZqDmVl1uQuHolcrmZnVldtw8MjBzKy63IWDVyuZmdWXu3DwaiUzs/ryFw5erWRmVlfuwsGrlczM6stdOHi1kplZfbkNB48czMyqy104FOSRg5lZPbkLB39MqJlZffkLB38SnJlZXbkLB6Uz9usczMyqayocJP07Sc9JelbSXZI6Jc2WtEFSj6S7JXWkvuPS7Z60f1bJ/Vyb2l+QtLC5U6qt6FdIm5nV1XA4SJoO/FtgfkR8HCgCS4AbgZsi4mxgN7AsHbIM2J3ab0r9kDQ3HXcusAj4rqRio3XV49VKZmb1NTut1AaMl9QGTAC2ARcBa9L+VcDlaXtxuk3af7EkpfbVEXEwIl4BeoAFTdZVlVcrmZnV13A4RMRW4L8Br5OFwh7gceDtiOhL3bYA09P2dGBzOrYv9T+ttL3CMR8gabmkbkndvb29DdX9/mqlhg43M8uFZqaVTiH7q382cAZwEtm00IiJiBURMT8i5nd1dTV0HykbfEHazKyGZqaVfgt4JSJ6I+Iw8CPgAmBKmmYCmAFsTdtbgZkAaf9kYGdpe4Vjhp0kJIeD
mVktzYTD68D5kiakawcXAxuBh4ArUp+lwL1pe226Tdr/YEREal+SVjPNBuYAjzZRV11FyauVzMxqaKvfpbKI2CBpDfAE0Ac8CawA/i+wWtLXUttt6ZDbgDsk9QC7yFYoERHPSbqHLFj6gKsj4kijdQ1GoSCvVjIzq6HhcACIiOuA6wY0v0yF1UYRcQD4vSr3cwNwQzO1DEVR8molM7MacvcKachWLHm1kplZdbkMh4IvSJuZ1ZTPcCjI4WBmVkMuw8GrlczMastlOHjkYGZWWy7DwSMHM7Pa8hkOXq1kZlZTLsOhUPBqJTOzWvIZDvI1BzOzWnIZDr7mYGZWWy7DwauVzMxqy2U4eORgZlZbPsOhIPqOOBzMzKrJZTiMay9wyGtZzcyqymc4tBU42OdwMDOrJqfhUHQ4mJnVkNNwKHDw8Ih+2JyZ2ZiWy3DoaCtwyCMHM7OqchkOnlYyM6stn+HQXuBgn6eVzMyqyWc4eLWSmVlNOQ0HTyuZmdXSVDhImiJpjaTnJW2S9KuSTpW0XtJL6fspqa8k3SypR9LTkuaV3M/S1P8lSUubPal6xqUL0uH3VzIzq6jZkcN3gL+MiI8BvwJsAq4BHoiIOcAD6TbApcCc9LUcuBVA0qnAdcCngAXAdf2BMlI62rLT9ujBzKyyhsNB0mTgQuA2gIg4FBFvA4uBVanbKuDytL0YuD0yjwBTJJ0OLATWR8SuiNgNrAcWNVrXYIxzOJiZ1dTMyGE20Av8b0lPSvq+pJOAaRGxLfV5E5iWtqcDm0uO35LaqrWXkbRcUrek7t7e3oYLH9deBPBrHczMqmgmHNqAecCtEXEe8C7vTyEBENmk/rBN7EfEioiYHxHzu7q6Gr6f90cOXs5qZlZJM+GwBdgSERvS7TVkYbE9TReRvu9I+7cCM0uOn5HaqrWPGE8rmZnV1nA4RMSbwGZJ56Smi4GNwFqgf8XRUuDetL0WuCqtWjof2JOmn+4HLpF0SroQfUlqGzHHwuGww8HMrJK2Jo//N8CdkjqAl4HPkQXOPZKWAa8Bn0191wGXAT3A/tSXiNgl6avAY6nfVyJiV5N11TSuLbvm4GklM7PKmgqHiHgKmF9h18UV+gZwdZX7WQmsbKaWofC0kplZbfl8hXR7dtperWRmVlk+w+HYtJLDwcyskpyGg5eympnVkstw6PBqJTOzmnIZDp5WMjOrLafh4GklM7Na8hkOXq1kZlZTLsOho+jXOZiZ1ZLLcGgrFigW5GklM7MqchkOkD5H2quVzMwqync4eFrJzKyiHIdD0dNKZmZV5Dcc2gterWRmVkV+w8HTSmZmVeU2HDraChw47GklM7NKchsO49uLHPBqJTOzivIbDh1tvOeRg5lZRfkNh3ZPK5mZVZPjcCh65GBmVkV+w6GjyHuHHA5mZpXkNhw6PXIwM6sqt+Ewvt0jBzOzapoOB0lFSU9K+j/p9mxJGyT1SLpbUkdqH5du96T9s0ru49rU/oKkhc3WNBjj24v0HQ0OH/FyVjOzgYZj5PAFYFPJ7RuBmyLibGA3sCy1LwN2p/abUj8kzQWWAOcCi4DvSioOQ101je/IHsJTS2Zm5ZoKB0kzgH8IfD/dFnARsCZ1WQVcnrYXp9uk/Ren/ouB1RFxMCJeAXqABc3UNRid7Vk4HPDUkplZmWZHDv8d+CLQPzdzGvB2RPSl21uA6Wl7OrAZIO3fk/ofa69wzAdIWi6pW1J3b29vU4WPb/fIwcysmobDQdJngB0R8fgw1lNTRKyIiPkRMb+rq6up+/K0kplZdW1NHHsB8NuSLgM6gZOB7wBTJLWl0cEMYGvqvxWYCWyR1AZMBnaWtPcrPWbEHBs5eFrJzKxMwyOHiLg2ImZExCyyC8oPRsQ/BR4CrkjdlgL3pu216TZp/4MREal9SVrNNBuYAzzaaF2D1elpJTOzqpoZOVTzJWC1pK8BTwK3pfbbgDsk9QC7yAKFiHhO0j3ARqAPuDoiRvw3dv+0kt9f
ycys3LCEQ0T8DPhZ2n6ZCquNIuIA8HtVjr8BuGE4ahms96eV/DoHM7OBcv0KafC0kplZJbkNh86O7NQdDmZm5XIbDhM6shk1vwjOzKxcbsOhs80jBzOzanIbDm3FAh3FgsPBzKyC3IYDQGd7wS+CMzOrINfh4E+DMzOrLN/h4E+DMzOrKNfh4I8KNTOrLNfhMKGjyP5DffU7mpnlTK7DYWJnO/sOOBzMzAbKdThM6mzjHYeDmVmZXIfDyZ1t7HU4mJmVyXU4TOps550Dh1tdhpnZqJPrcDi5s42DfUc52OcVS2ZmpXIdDpM62wF83cHMbICch0P2zqwOBzOzD8p5OPSPHHzdwcysVM7DwSMHM7NKHA545GBmNlCuw+HkNK3k1zqYmX2QwwFPK5mZDdRwOEiaKekhSRslPSfpC6n9VEnrJb2Uvp+S2iXpZkk9kp6WNK/kvpam/i9JWtr8aQ3ORE8rmZlV1MzIoQ/444iYC5wPXC1pLnAN8EBEzAEeSLcBLgXmpK/lwK2QhQlwHfApYAFwXX+gjLRiQZzUUWTvex45mJmVajgcImJbRDyRtt8BNgHTgcXAqtRtFXB52l4M3B6ZR4Apkk4HFgLrI2JXROwG1gOLGq1rqPwWGmZm5YblmoOkWcB5wAZgWkRsS7veBKal7enA5pLDtqS2au2VHme5pG5J3b29vcNRut+Z1cysgqbDQdJE4C+AP4qIvaX7IiKAaPYxSu5vRUTMj4j5XV1dw3KfJ49vZ69HDmZmH9BUOEhqJwuGOyPiR6l5e5ouIn3fkdq3AjNLDp+R2qq1HxenndTBzn2HjtfDmZmNCc2sVhJwG7ApIr5dsmst0L/iaClwb0n7VWnV0vnAnjT9dD9wiaRT0oXoS1LbcdE1aRy9+w4er4czMxsT2po49gLg94FnJD2V2v4j8A3gHknLgNeAz6Z964DLgB5gP/A5gIjYJemrwGOp31ciYlcTdQ1J16Rx7Hr3EIePHKW9mOuXfZiZHdNwOETEXwOqsvviCv0DuLrKfa0EVjZaSzOmThwHwK53DzHt5M5WlGBmNurk/k/lrklZOPS+46klM7N+DgeHg5lZGYfDRIeDmdlADof+kYNXLJmZHZP7cOhsLzJpXJtHDmZmJXIfDpBe6+BwMDM7xuFAFg7b9x5odRlmZqOGwwH4pdMm8OrO/a0uw8xs1HA4ALOnTuStfQf91t1mZonDAZg99SQAXn3LowczM3A4AHBWVxYOL7+1r8WVmJmNDg4H4MxTJyDBK2+92+pSzMxGBYcD2Wsdzpg83uFgZpY4HJKPfGgiL273tJKZGTgcjpl35hSef3OvPzLUzAyHwzF/b9apRMATr+1udSlmZi3ncEjOO3MKxYJ47NXj9iF0ZmajlsMhmdDRxsfPOJm//cXOVpdiZtZyDocSl5z7YZ54/W1e91tpmFnOORxK/O686RQEax7f3OpSzMxayuFQ4vTJ47nwo13cueF19uz3qiUzyy+HwwD/YeE57N5/iK+v20REtLocM7OWGDXhIGmRpBck9Ui6plV1nHvGZJZf+BHu7t7MN+57nsNHjraqFDOzlmlrdQEAkorALcCngS3AY5LWRsTGVtTzxYXnsPfAYf7nwy/z043b+d3zpvPJM6fw0WmTmDy+nc72YivKMjM7bkZFOAALgJ6IeBlA0mpgMdCScCgUxNd/55f5zXM+xK0/6+Fb61/8wP5xbQUmjmujUBBFiWIh+yoIJNW9//o9htrR8sA/DtZv3Rf+AePaRvaP1NESDtOB0iVCW4BPDewkaTmwHODMM88c8aI+PXcan547jT37D/PsG3t45a132fPeYfa+d5h9B/s4GsGRo8GRoxzbrmewVzF8vcNK+afBSuk4/KkwWsJhUCJiBbACYP78+cft/8vkCe1ccPZULjh76vF6SDOzlhotF6S3AjNLbs9IbWZm1gKjJRweA+ZImi2pA1gCrG1xTWZmuTUqppUiok/S54H7gSKwMiKea3FZ
Zma5NSrCASAi1gHrWl2HmZmNnmklMzMbRRwOZmZWxuFgZmZlHA5mZlZGY/WVuJJ6gdcaOHQq8NYwl9MqPpfR6UQ5lxPlPMDnUuqXIqKrXqcxGw6NktQdEfNbXcdw8LmMTifKuZwo5wE+l0Z4WsnMzMo4HMzMrEwew2FFqwsYRj6X0elEOZcT5TzA5zJkubvmYGZm9eVx5GBmZnU4HMzMrExuwkHSIkkvSOqRdE2r6xkqSa9KekbSU5K6U9upktZLeil9P6XVdVYiaaWkHZKeLWmrWLsyN6fn6WlJ81pXebkq53K9pK3puXlK0mUl+65N5/KCpIWtqboySTMlPSRpo6TnJH0htY+556bGuYy550ZSp6RHJf08ncufpvbZkjakmu9OH2+ApHHpdk/aP2tYComIE/6L7G3AfwGcBXQAPwfmtrquIZ7Dq8DUAW3fBK5J29cAN7a6ziq1XwjMA56tVztwGXAf2Ucmnw9saHX9gziX64E/qdB3bvpZGwfMTj+DxVafQ0l9pwPz0vYk4MVU85h7bmqcy5h7btK/78S03Q5sSP/e9wBLUvv3gH+dtv8Q+F7aXgLcPRx15GXksADoiYiXI+IQsBpY3OKahsNiYFXaXgVc3sJaqoqIh4FdA5qr1b4YuD0yjwBTJJ1+fCqtr8q5VLMYWB0RByPiFaCH7GdxVIiIbRHxRNp+B9hE9nnuY+65qXEu1Yza5yb9++5LN9vTVwAXAWtS+8Dnpf/5WgNcLKnpD5nOSzhMBzaX3N5C7R+c0SiAn0p6XNLy1DYtIral7TeBaa0prSHVah+rz9Xn01TLypLpvTFzLmkq4jyyv1LH9HMz4FxgDD43koqSngJ2AOvJRjZvR0Rf6lJa77FzSfv3AKc1W0NewuFE8GsRMQ+4FLha0oWlOyMbU47JdcljufbkVuAjwCeBbcC3WlvO0EiaCPwF8EcRsbd031h7biqcy5h8biLiSER8EphBNqL52PGuIS/hsBWYWXJ7RmobMyJia/q+A/gx2Q/M9v5hffq+o3UVDlm12sfccxUR29N/5qPA/+L96YlRfy6S2sl+md4ZET9KzWPyual0LmP5uQGIiLeBh4BfJZvG6//0ztJ6j51L2j8Z2NnsY+clHB4D5qSr/R1kF23WtrimQZN0kqRJ/dvAJcCzZOewNHVbCtzbmgobUq32tcBVaWXM+cCekimOUWnAvPvvkD03kJ3LkrSaZDYwB3j0eNdXTZqXvg3YFBHfLtk15p6baucyFp8bSV2SpqTt8cCnya6hPARckboNfF76n68rgAfTiK85rb4yf7y+yFZavEg2d/flVtczxNrPIltZ8XPguf76yeYVHwBeAv4KOLXVtVap/y6yIf1hsrnSZdVqJ1upcUt6np4B5re6/kGcyx2p1qfTf9TTS/p/OZ3LC8Clra5/wLn8GtmU0dPAU+nrsrH43NQ4lzH33ACfAJ5MNT8L/JfUfhZZgPUAfw6MS+2d6XZP2n/WcNTht88wM7MyeZlWMjOzIXA4mJlZGYeDmZmVcTiYmVkZh4OZmZVxOJiZWRmHg5mZlfn/n8OtGb6WvysAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAEICAYAAACktLTqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJztnXd4XNW1t9+l0Yx6l9yL3MENXDA2vWMIF5OE5AIhCSmQBqTfQG7CTQjp9eZCQkhCCCRACORLHDAhYDC92IBxxb3JllWsXkczs78/TtGZ0Uga2bJHI6/3eebxOfvsObPOHOs366y99tpijEFRFEUZXqQl2wBFURRl8FFxVxRFGYaouCuKogxDVNwVRVGGISruiqIowxAVd0VRlGGIirtyTBCRc0SkwrO/UUTOSaTvYXzW3SLyzcN9v6IMB9KTbYByfGKMmTUY5xGR64BPGmPO8Jz704NxbkVJZdRzV5QUQUTUGVMSRsVdSRgR+ZqIPBrT9r8i8kt7+2MisllEmkVkp4h8qo9z7RaRC+ztLBG5T0TqRWQTcEpM31tEZId93k0i8l67/UTgbmCJiLSISIPdfp+I3OF5//Uisl1E6kRkuYiM8RwzIvJpEdkmIg0icpeISC82LxKRV+1+lSJyp4gEPMdnicjT9udUicjX7XafiHzdcw1vish4ESm3Pz/dc45VIvJJe/s6EXlZRH4uIoeAb4nIFBF5VkQOiUitiPxZRAo97x8vIn8TkRq7z50iErBtmuPpN0JE2kSkrLd7pKQ2Ku7KQHgYuFRE8sASLeCDwIP28WrgMiAf+BjwcxGZn8B5/weYYr8uBj4ac3wHcCZQAHwb+JOIjDbGbAY+DbxqjMk1xhTGvA8ROQ/4vm3naGCPfR1eLsP6QZlr97u4FzvDwBeBUmAJcD7wWftz8oBngH8BY4CpwEr7fV8CrgYuxfpuPg609fWFeDgV2AmMBL4LiH09Y4ATgfHAt2wbfMDj9jWWA2OBh40xQfuar/Wc92pgpTGmJkE7lFTDGKMvfSX8Al4CPmJvXwjs6KPv34HP29vnABWeY7uBC+ztncBSz7EbvH3jnHctsMzevg54Keb4fcAd9vbvgR95juUCXUC5vW+AMzzHHwFuSfC7+ALw/+ztq4G3e+m3xbE3pr3c/vx0T9sqrDEE59r29mPDFc7nYv3g1HjP5+l3KrAXEHt/DfDBZP9/0tfRe6nnrgyUB7GEDOAaur12ROQSEXnNDgE0YHmqpQmccwywz7O/x3tQRD4iImvtcEgDMDvB8zrnds9njGkBDmF5tQ4HPdttWD8APRCR6SLyuIgcFJEm4HseO8ZjPWHEo69j/eH9XhCRkSLysIjst234U4wNe4wxodiTGGNex7q2c0TkBKwni+WHaZOSAqi4KwPlr1gCMQ54L7a4i0gG8BjwE2CksUIkK7DCCP1RiSVMDhOcDRGZCPwWuBEosc+7wXPe/sqaHgAmes6XA5QA+xOwK5ZfA+8C04wx+cDXPXbsAyb38r59WCGnWFrtf7M9baNi+sRe3/fstjm2DdfG2DChj4HXP9r9Pww8aozp6KWfMgxQcVcGhLFitKuAPwC7jBX3BggAGVhhgZCIXAJclOBpHwFuFZEi+0fjJs+xHCwxqwFr0BbLc3eoAsZ5BzZjeAj4mIicbP8AfQ943RizO0HbvOQBTUCL7f1+xnPscWC0iHxBRDJEJE9ETrWP/Q74johME4u5IlJif5f7gWvtQdePE/9HINaGFqBRRMYCX/UcewPrh/IHIpIjIpkicrrn+J+wfpCvBe4/jOtXUggVd+VweBC4AE9IxhjTDNyMJdT1WCGbRB/7v40VOtkF/Bt4wHPeTcBPgVexhHwO8LLnvc8CG4GDIlIbe2JjzDPAN7GeKiqxxPOqBO2K5StY19WM9TTxF8/nNGONQfwHVphnG3CuffhnWN/Lv7F+HH4PZNnHrscS
6EPALOCVfmz4NjAfaASeAP7msSFsf/5UrPh6BfCfnuP7gLewfixfHMB1KymIM7iiKMpxgIjcCxwwxnwj2bYoRxedFKEoxwkiUg68D5iXXEuUY4GGZRTlOEBEvoM1EP1jY8yuZNujHH00LKMoijIMUc9dURRlGJK0mHtpaakpLy9P1scriqKkJG+++WatMabfmkBJE/fy8nLWrFmTrI9XFEVJSURkT/+9EgzLiMhSEdliV9a7Jc7xiSKyUkTW2VXtxg3UYEVRFGXw6Ffc7UpzdwGXADOBq0VkZky3nwD3G2PmArdjVa1TFEVRkkQinvsiYLsxZqfpLh26LKbPTKyZggDPxTmuKIqiHEMSEfexRFemqyC6oh7AO1iTI8CqXZEnIiWxJxKRG0RkjYisqanRMtKKoihHi8FKhfwKcLaIvA2cjVUMKRzbyRhzjzFmoTFmYVmZLgCjKIpytEgkW2Y/0eVYxxFTLtUYcwDbcxeRXOD9xpiGwTJSURRFGRiJeO6rgWkiMskuq3oVMdX+RKRURJxz3QrcO7hmKoqiKAOhX3G3V3W5EXgK2Aw8YozZKCK3i8jldrdzgC0ispXutR4VRTkOMMbQFY4cs887lp/lcLhlWkLhCH98ZTdPbTxIOHJsS70kFHM3xqwwxkw3xkwxxnzXbrvNGLPc3n7UGDPN7vNJY0zn0TRaUZTkU98a5KmNB3lyw0EWfOdpmjq6+ux/qKWTG+5fQ21LtDxEIoa/rtlHc5z3d3SFo4Q1GIpw/k+f55rfvkZ9a7BfG9uCIVo6o1cdDIUjdIUjvL7zEPe+1HcNtW1VzSz9xQssuOMZGtqCPa6xpTPEH17eRavnM4Kh7h+fB17bw/8s38inHniTO57Y1K+9g4mW/FUUhRe21tDSGeKs6WV0doUpyc2goytMWzBMcU78Ra5+/fwO7nlhJx9YMI6mjhAb9zexZEqPJDmXl3cc4t+bqlh28ljeM3c0oXCE+1/dw6iCTL766Dqqmzupbw3y4SUTmViSwz/fOcBND73ND98/hx01rVw8axR1rUH21rWxt66Nnz69hTuumENbMERmuo+0tJ4rOn7+4bW0dob44MLxvLKjlg+dOpFH36xgU2UTVU0dVNS3M6k0h3NPGBH1vrf21vOPt/cTDBu2VbcQjhg+dt9qNh1o4sX/OpcR+Zk0tnexaks13/7nJn61age/+fACAr403verV/j9dQs5aXwhP396K2dOK2VSaQ5/eHk38ycU8fL2Wj519hQmleYc2U3rBxV3RRnCtAfD3PjgW9x66YlMHdFz3W5jDNffv4YrF4xj6ezRh/UZkYjh1r+tJ2IMS6aUsOlAE//6wlnc/vgmXt5ey/NfPTfu+17cZi189dJ269/NlX2L+/bqFgAq6tsAWLWlhtsf30RGuhVAuOu57bQFw1Q2dXDL0hP4wl/WAvDUxiqefbeaxrYuGtu7KM0NMGNUHmt219MZCnPp/77IhJIc7rvulB4Cv2F/I1VNHWytaqG2pZNVW6wU7Ia2LsYVWYthffGRtdy+bDaXnzTGfd9Dr+/lr29WkB3wcda0Uirq23l7b4N73UU5fj71wJtcvcha7repvYv3//oV5k8oIhiO8N0nNvPNy2bS1BHik2dOZvHkYtbua+Cmh94GYMHEoqMu7loVUlGGEI3tXXzmT2+yr84SwJ21Lax8t5qXtsWfF1Lf1sUzm6v59J/eimrvCkfYVtVMWzA6JBGJGDbsb+RgYwfGGG77xwauuuc19je0U9nYwQtba9hS1UxTRxf/2nCQPYfa4oY/apo72VzZBEBlo7XO9qbKJhrbuvjjK7vjxqh32OK+q7aVO5/dxl/ftKbPdNphjLaglT2dnibc/+puAMYUZPLGrjoA1u5r4Nl3q7ls7hgWTCxma1UzD7y6h92H2nhhaw1/fr275Mq2qma2VjVT2dhBxEBtSydnTC2lurmT6uZOguEIuw61cvb0MiYWZ/PlR9ZS1xokYsfF397X4Nq0
ZEoJVy4YhwhkB3y8tL2W57fU0BU2rNxcTUGWnzXfuICSnAze3FNPcU6Adw828/Bq6/omleSQke7jzqvnc+LofH7wvjl8YKE3AfHooOKuKMeIysZ2fvDku1GCW93UwaceWENjmxXLXb2rjic3HOR/V24DoL7Vaq9u7sQYKzbtjVkfaGh3tzu6LHE0xnDlr1/hwp+/wNceWx9lwyfvX8Nl//cSV979Cr94Zhv3v7qHN3bXucdrW4IYAw+/sZc6W9S/+Y8NXPnrV3hzTz13PbcdYwyv7OixXC2bK5u4+eG3+Z/lG9lkC78Xx3P/+9r9/OTfW3lqYxULJxYxuiCTjy6Z6PbbfaiNv6zex9JZozh1cokbM99S1UwwHOGMqaXMG19IxMAdT2zmpHEFzBiZxzObqwF47t1qLvz5C1z08xfcc6YJfOeK2fg8nr0xcPb0Mn545Vy6woYP/uZVzvrxc2yubHJtBVgyuZRPnDGJZ750NuefOJKXtteyenc9APsb2hlbmEVepp9PnDEJgM+eY61xvmpLNWkCowszAZhQks2Tnz+Tq2xv/2ij4q4MO1ZurqKhrf/Bth01LRzyCOX6ikbePdhTlI6EpzdVcd5PV9EeDPPEukrufn4Hn3rgTdezfWl7LU9trOKN3XV0hsLsqm0F4O9v72d/Qzt19nVUN3fy6s5DfPXRdXzz7xvc8+/3iPuK9ZUYY3h7XwPvVDQyoTibJ9dXUt1kedbrKizP9z9OGkNlYwf/u3Ibl8wexd3Xzufbl8+KsvsXz2xztx9fV8maPfVc94c3+PFTW1i1pYZ/bThIaW6A0QWWcGX609hW1cLzW60njNqW6O8/FI6419bR1T3g+MkzJ/HqrefzpQtnMKk0h7GFWbyzr4GmjhCXnzyGiSXZUecRgVPKizlpfKHb9s3LZjK5LIeK+jZC4Qhfe2xd1HtOGlfAJbNHM6k0h1PKi8jL6I5GTyjO5oRR+cwZW8D26hb2N7Rzyf9aa4ffeO5U3jNnNDPH5JPuS2NKWS7nTC+jprkz6sdrTKEV3vnY6eV86z9m8uElEynOCdDcEWJ0QRZ+X3JkVsVdSRm6whF+9+JO10ONR3VzB5/44xoefGMvYGUufO7Bt1i1pZrr/vAGj75Z4fZ9710vs+COZ9hc2URda5Brfvsal//fy6zcXJWwTQ++vpcr7no5bppbKBzhoTf2srOmlR01LW7Y4cVttSx/5wAAe+3wy5PrK5n9P0/x+PpKsvw+AH77wk43JFLd3Mn9r1hhhyc3HOSBV3cTiRj213eL+5ceeYef/HsLT6yrJOBL485r5hGKGDc88MdX9pAT8PHd987mZx88ie++dzZ3XTOfpbNH8xFbkBzagmEunjWSXI8QNneEyEhP4ztPbGKlHR6ZXGbFjd87byxBT4pilR2qcdhX304wHKEw2w/A9JG5PP/Vc7h41igACrL9PPeVc7jm1G6v9pTyYlfc022Pe8bIPAqy/RTnBDhnRhk3nTeVheXFjCvKoqK+nRe311Ld3MnVi7rDHn++fjF3fWg+AD++8iQe/tRicgLWd+yc/7/fcyKfOnsyD3z8VPd9nzp7Mnd9aH6Ut3/5yd1xead5bKHzA+fjutMnkZHuY4r9vThx/WSgA6pKyvD8lhrueGIzowuyeM/c+IOHzuP07tpWLr/zJUJhw6bKJhragryy4xD5mX6uXDCOrnCEpg7rcf9nT29lQnE2rcEQpbkZ/P6lXZx/4sge5w6GIoQjhqyAj3DE0NEV5r//vh5jrEdw5z2N7V189a/v8PquOtptQd9V20pda5C8jHSmjMjl9n9u4rQppew9ZIn74+sq6Qob3tnXwLwJhUwuzeXh1Xv5oB2b3VbVTHVzJ9edVs6myia++Y+NZKT7ONDQTqY/jae/eDZ3PLGJ3724i5yMdM6aXsrccYWcOa2Uh97Yy2fPmcI7FQ2cPrWU/Ew/y06OLg8lIpwwKo9t1S3UNFtPMzeeO42qpk7W7mvgipPH
cKChg8+cM4Xr719DKGK4/OQxPPpmBS9vP8Q1iyayv8GK2QMcbIoWd+dH7PQppTyxvpK54wqZWNJzQNEZZJw6IpfinIDb55TyYt7aW8/iyd0Dtvd9bJG7Pa4om85QhN++sJOCLD9fvfgE/rJ6HyW5GVE/UOOLLTGfWJLDpsomd3/x5BL33G98/XwqGzvIy/T3sM/vS+Pxm87g/57dRmcowqotNa7n7mXqiFxW7653z58M1HNXhiwtnSHWVzS6+2vtQa7dh1p7fc+OGuvY67vqWFfR6D4+Z/nTMQb22ZkaBz2e5aYDTfxj7X4umTOaueMKqW/rmW+9encd07/xJB+593WqmjpY8v2VXH7nS4wvsv5473tlt9v3N8/v4JnNVYjgerO7als51BqkNC+DH75/Lq3BEJ978C122qEKr9c7qTSHj51eTkdXhH+stTz8ysYOwhHDRTNH8pcbFpOfmc66/Q0caLRivuOLs7n1khMJRQxpAl+9+AQArl08kcrGDla+W017MExuZu/+3NeWnsCPr5zLndfM43PnTmHOuAJmjcknLyOdH115Eo98egnnnjCC3310IZ8+ewrzxhdy4qg8Ar40JpRk84P3zeErF02nMNvPys1VnP/TVWyvbgZwn0DmjiuI+jeWco+YA0y0xXHGqDwe/fRpfPGC6XHf53jIr+w4xMWzRlKcE+Dk8YWcMCovbv+pI3IZW5hFpv2U5GVEfmZU2CeW2WML+M2HF3Li6HwAxsbxzqeU5UbZlQzUc1eGHJGI4Z4Xd/KDJ98F4KHrF/P81hpetlPu9vQl7rbnvsf2iL94wXQeWbOPvXXWe/bVWWEMJ1Z9+tQSXt5+CIAlk0tYu6+BjQcaY0/Lj5/aAsDq3fX84pltbtaF2I/mL26rZV9dG+OLs3lxWy0LJxZz23/M5JnNVfxl9T7bc++kOMdK4/v25bN6DHY6TCrJcdMeG9ujf2hOGJ2PiFBemsOeQ200dYRcz7G8NIeHrl/M2KIsxtpt558wgtLcAE9tOEh7V5jsQE8xc/AK2mVzrfDDly+awUdPKyeQ3u0HnjNjBOfMsPLC//OUCZw5rYyCLD8FWX5uPG8aK9Yf5B37R/mzf36LJz9/ljs4e8HMkazaUhP3yQhgclkOi8qLucIOfxTnBLjpvKlcPGsUs8fG/0EAy3N3OG1KKQB3X7sAeqa+A3DrpSe4Nh0u5XZIZ3RB7+I+vkg9d2WIct5PVvG5B9/qs88/1u7nty/sPKzzP7elmsvvfImbHnrbnaH45IaD/ODJd13P6BfPbOXu53ewfr8lGLtt4Y6H4wk7fGjxBCYUZ7thgdqWTjq6wm6WyQUekZk3oZCibD8NbV1UN3W4A5FhO30QrDirdxDTGPjyhdMRgcfeqqChLciGA42cNrWE2WML+MIF05lSlsvO2lYOtQQpyrbi2stOHuvGbJ0BPickUV6aQ6bfx8j8jKhrGZmf4cbFJxRns+dQG/vr210hB1g0qThqP92Xxsj8TJo6umgLhtx4fqIU5wSYPjK+9wsQSE+jPCZfe5Q9yAqwtaqFx96soL4tSJpYP1wP3bA4ykYvmX4fj3x6CafaIRIR4csXzehT2CHaQz51suX1j8jPZEReZtz+owuymDWm73P2x9LZo/nShdM5Kc5TyKJJxXxgwTjOmp686rcq7kqf7Kxt5Yl1lX32uffl3fz8ma1uzY/n3q3mrue293vuvYfauPnBt6lrDbJifSVf/es6jDH8a6OVifHPG08n05/Gmj317nt8adKv5+5403mZ6ZTkBCjM9kdlaFTUt7kDkY64Zwd8zBiZR2F2gPauMOf+ZBWLvreSHzz5Lh+/bzVtwTCzxuQTMVY8f/HkYneQ74xppZw+pZTH3qrg1R2HMAZOn1rqft6k0hx21bRQ1xqkxBbnTL/P9e5Om2oJ2c3nT+Xm86dxnj1bckJx9GCi82MHVviior6N2pbOfuO62QEfzR0hOroiZAWO/sO6I+7vmzfWmqX5
zFYqGzsoyg7EnUU6GORkpFOcE6C8JDuuJ300KMjyc/P500iPkw2Tk5HOjz9wEmV5GXHeeWxQcVeOiHDEsPVgM23BMOsqrJj4n17bw69X7XD7/GtDZVzvf8WGSpo7Qzx0/WJuPm8a/9p4kG3VLazaUs25M0aQ7ktjcmku4YihLC+Djy6ZyMdPL6eqqbPH5ByA5o4u9je0M8f28iaV5iAiFGRFD4ztq2vnQGM7pbkBxhVlUZobYM7YAtJ9aa5n3WoPhN79/A43ve9s2wvbV9/GmIIsV2wnl+Vy4cyR7Ktr5+9r95Pl93GyJ8QxqTSHpo4Q1c2dFOd2Z6T85ynWYOlHlpRz03lTWTrL8gRzbE/eeaR3fgROGNUt7hNLsnESdE7rY1YoQHYgnXo7pbKvsMxgMSrfEveTxhfymbOnUNnYwaotNW6mzNHi8pPG8KFTJ/bf8ThBxV3pFe8sw94q2u2ta6PdTk18bac1GWZbdQstnd0Fm1ZuruaJdZW0dIbc7BGA6qZOcgI+xhdnc9Z0y9N9ZPU+mjtCbkx22khL2JZMLuHby2Yzd5wlmrtqe3rvz75rTWJ53zwrE8QJcxTEiMqfX9/LKzsOMbYwCxHhR1fO5euXngjQQ4ByM9IJ+NLICfhYMLHI/l6scMXpU0uZXJpDQZbfjVf/e1MVJ48vjMptnjWmW5RLPOmGnzhjEk994SxOn1rKly+aQVaM8Doe+fyJhYwtzHJ/XAA3FFKaG+Ckcb0P/gHkZPg4ZOedDzQsczg4YwBzxxUw3b5/tS2dvdaoGSy+dfksrj9r8lH9jFRCB1SVXvGGMmpbOnl8XSU5AV/UDLt33WwUH6/uOMTHT5/kZqRUNXWQW5bLgUYrBHL7PzfyyJoKXvrauYwryqa6ucN9bHWE+O9rrXVgltgx16mO1zraivvOGpOPL0245revc9/HTmHehCLXlsfXVTIqP5Mr5o3l9sc3ue+N9dyfsfPYJ82wxPK8E7rj7l5xv2jmSK5dPJHVu+tobO+iJLf7Ebs4N8D1Z07mpvOmAnDi6Dz8PqErbFhY3m0TEBUv9gqciDCjl2wO6Bb3CcU5vHzL3KhjThbJuTNG9BvqyPKnu5OhYn9AjgaXzhmFLw1OHl/olhYA3Kci5dig4q70irdU6v6Gdu59aRfhiOE/TxnPr1btwJcmtAXDpImVBfHGrkPsqGnBcfirmjqYUpbLgQZrYPKRNdYEolv/tp4HPnEqNc2d7oBXYXaAomw/tS1BxhVlud72NHswzxsCeeRTS7j5obf5r0fX8cTNZxKKRFh258tsr2nhY6dNojA7wJ8+eao7YFaYZYlKRnoad14zn2Aowhu7DnFOTCVAiBagK+aN5azpZe6g2F7PQG5JTgC/L8310DPSfcwcnc87FY2uh++Q48mzHoj3Ot4eJCzO6RnOKMvL4KsXz3AnAfVFTobPvSfHwnPPDqTz3nnjAGtsoSwvg5rmo++5K9GouCu94q1RvcOemg2Wd+2kBl40cySTSnOYVJLNE+sO8O7BZvc91U1WPRRvdglYaYN1rUFqWjo50RNHnlSaQ/3ehqgwxvknjuAH75vDWdO6QxILJhbx7ctn8cn71/DkhkomFGezrbqFy+aO5vqzrPoeTjocdHvuhdl+Lpxpeem9TYLyeu4j86MzLYo8IluS03Og7OTxhazf3xj1NOGQn5lOU0doQAI3e2wB558wglMn9YypiwifO3dqQufJ9gyiHouYeyzjirKoae6kUD33Y0pCMXcRWSoiW0Rku4jcEuf4BBF5TkTeFpF1InLp4JuqHGu8nvurOw6521/8yzvu9rqKRk4Ylc+ogiwiBl7ZUeum+B1s6uBQazBq8YJpdv72zpoWapo6o7IJJpVax7wpan5fGlctmhA1BRzgnBllZPl9rN3X4Mbfv3jh9LiZEo5gx4Zn4uH13L0pfWDF353MFe/AqMPnzp3KHz62KO7nXGGPA3hDO/2Rk5HO7687pUeq4UDxCvqx8Nxj
cdIe4z2BKEePfsVdRHzAXcAlwEzgahGZGdPtG1jL783DWmP1V4NtqHJs+e4Tm9z6LAAv2hOISm1xcgYtDzZ1cMKoPEYVWO0vbqtlclkuuRnpVDV1RFUtBNwwwsYDTTR3hqLE3alT4vXceyPdl8asMfmsr2hkV20rvjTpdcKII7aJiHum30emPw0RGBGTxiYiFNmed0kcD3xEfmbUoKeX2y6byRM3n9FrfvfRJErck+K5W/dFY+7HlkTCMouA7caYnQAi8jCwDPCuGWUA5y+yADgwmEYqA2fjgUZOHJUfNdhW1dTBiLwMRPrPNf7rmxVRBbqceiOP33QGORk+9tW187e3rcHPE0bnMyo/y+23cGIREWN4fWcdDfZU/hF5GVQ3d3L2jDLueWEnr++yngS84n7alBIml+YwP05YIx5zxhXw0Bt7Kc3NYHxRVtQsSi/d4p6YuBRmBQhFTNxqfsXZgcOKH1s/Rkc2aeZw8cb8kyHuY92xAxX3Y0kiYZmxwD7PfoXd5uVbwLUiUgGsAG4aFOuUw2J3bSvv+eVL/HvTQbft3xsPsvj7K/mVJ/88HtVNHbR0hmhs73KzZT68uDt3eFRBJnmZfiaX5biThSzPvTuEMW1ELsXZATZVNvH/7B+ARZOsWYPTR+ZRXprtpk16veN5E4p49ivnuN5xf5w0rpCOrghPbTrY56o2AwnLOP2dJ5F4xwK+tKhiVEMdr+ee7T/2djv1XZJZROt4ZLDy3K8G7jPGjAMuBR4QkR7nFpEbRGSNiKypqYm/soxy5DipiM7gZlNHF1/4y1qMgT+8vKvPkrnvv/sV7nh8E96FdK47vRzonikJVvhifFE2uRnpjCvKoijb73rOU0fm9VhI+IazJvPlC6dTkOVncmmuW9fjSGbwOR6+MfQZl87NSMfvk4Qn0Vwxb6yb7RFLcU6A4pxAQk8/QwXvgGoyPPdTyot58b/O7bOMgTL4JPIzvh/wrgk1zm7z8glgKYAx5lURyQRKgWpvJ2PMPcA9AAsXLow/K0Y5YpyVenbbA42bDjTRFgzzqbMn85vnd/Lkhkr++U4l75s/1i0QBVb98Yr6drfok0NZXgaPfWYJuRnR4nhKeTGN7V2u0I3Kz2RvXRvTRuTy/ffN4e0z3Z/jAAAgAElEQVS9DXzwlPFUNnQwY1SeOwFpyogc2Nh97sNlQkk2N5w1mXte2NlngSYR4Rf/OY/ZY/uP5QN8+uwpvR775JmT2N/Q0evxoUhOkmPuoF57MkhE3FcD00RkEpaoXwVcE9NnL3A+cJ+InAhkAuqaJwknPr7Lzst2Jhp9ZEk5v3l+Jxv2N/Hsu9U8+2418ycUuTMK61qtJdZ21rREnS/b72PBxOIen/PjK+fi/YUeVZBJRX0bk+zCV8578kdF/yhcc+pEOrsi5GamUzaA7JF43LL0BOZPKOLMaaV99ust9XGgLJhYzIIUm+GeleRsGSU59CvuxpiQiNwIPAX4gHuNMRtF5HZgjTFmOfBl4Lci8kWswdXrTLwVcpVjgiPuToGtLVXNFGX7GVOQSU7A51ZIBHjszQpuOn+a9T7b4/fOKsz0p8UtjAT0mBk5Y2QebcFQ3BrZXsYWZvGNy2ITrg6PtDRh6ez+J/IczzgDqoH0tB4ppcrwJaHRFWPMCqyBUm/bbZ7tTcDpg2uacrg44t7Q1kVDW5DNlc2cMCrfLaK1zyPuTR1dXPZ/L7JwYrFbjdDLQAYO//s9J7qVIZWhgzOgmowJTErySJ0hfyVhajyLPu+oaWVrVbO7XFt+rLi3h9iwv4kN+5vcHwUvOQMQdytHXAVkqOEMqGpI5vhCq0IeA17YWsNjnoWZjwbtwTDfW7GZpo4uapo73Zmg/950kLZgmJl2bZb8LL9bzha6B18Bnljfs257zjGo/60cXRyPPVmDqUpy0L/cY8BH7n0DgPcviJ9eNxg8v7WGe17Yyawx+dQ0d3LJnNEcag1y/yt7gO48c2+ud1G2n+o43jpYaY+h
iEmpfG4lPhl2rF099+ML9dyHCev3WwtlbK9uob6ti5F5mSyZUkJ7V5jRBZlMtNd7zPes6D66IIvq5g57O7qOyqiCTKuOeYYKQqojImT7fRpzP85QcU9B9hxq5ZrfvkajPbW/sb2LdXZu+urd1szP0ryAu0LPksklbi6647n7fUJJboBaexGHeROsHHRnok9RdoCyvIyEZ3UqQ5vsDB0POd7QZ+4hhJM9Gjv7samji39tOMgHFoxDRFi1pYZXdhxiw4FG6lqDfP7ht90l197aa3nwYwqymDoiF79PONeTBeMtopUTSHdXWJo/oYgV6w8yY2Qer++qozDbzzcvm0lJnOqHSuqRm5GuIbbjDL3bxxBjTJ/T1n/+zDZ+uXIbO753aVQ+8r/WH+S/HlvHovJiyktz2FZtlRXYWtXMXc9td4U94EsjGIrgSxMWlBeRn+nnlVvOp9Qj0AVZ1i3Pz/KT7Qm5nDS+EBFrUeZ3KhoozA64cXol9bnjijlHfQ1TZWih4n4MCYYjZKT3/mj8y5XbACvM4q2g12zXVd/f0M6Wqma2VlkzSB99s4LaliB3XDGbx96q4KRxhdz3ym7mTyh0Y+ux0/vzbc89P9MflQlTmpvBly6YzsLyYrrCERZP7nvRZSW1WNLPItrK8EPF/RjS0dW3uDvUtUaXlHUKff386a2s2VPvtm+yywpcMW8s1y6eyMrNVdz3ym7OnBa/pjhEh2W8nntOwOfOVFUhUJTURwdUjyGdfVRj9FZqrGuNrqjYFrQ8d2/ZALCqIZblZbix1AUTizhjainLTh5Db8TG3B0GMllJUZShj/5FH0Oc+ujNHV00d4Tcgl1gpTA61LVG5563B633eXPSswM+2oLhqDrmzsLQfRHludupcSI6NV1RhhvquR9DOkKWd/7Lldv40O9ejzq22Q6xQE/Pvb2rey3T9DThghNHcsGJ1kLPk0oGtr5mvtdzt731nEB6StUnVxSlf1TcjyFO6KW2JeguVuHgDbn09Ny7QzaLJ5fwu48udMsLDHTx5KLsAKW5GUwbmasFpRRlGKNhmWOIE5ZpD4YJhqKrJ7YHw+QEfIgIh2KEv80j7iPzrZmkzrJ2k0oHtghCID2NNd+4AIBnNlUBA6v8qChKaqB/1ccQx3Nv7woTjCmN294VJtPvIycjnfoYcW/v8oq7ldq4YGIRU0fkMi/BxaTj4WTL6GCqogw/9K/6GNLuEfdwxBCOGHeyUkdXhEy/j+KcgOu5VzV18M93DsT13CeX5fLMl84+InucbBmtH6Moww8V92OI47k7/wZDEbcMa0dXmEx/GsU5AaqarGJey9ce4LsrNlPiyXl3xH0wcERdy/oqyvAjoQFVEVkqIltEZLuI3BLn+M9FZK392ioiDYNvaurTacfcveLu0GGHZYpzAm5Ypq7N+tcbg3fCMoNBtuu5q7grynCj379qEfEBdwEXAhXAahFZbi+tB4Ax5oue/jcB846CrSmPkwrphGc6w2HA77ZlecIyxhjqWrpFfcbIPLrCESaX5Q6aPTkq7ooybEnEc18EbDfG7DTGBIGHgWV99L8aeGgwjBtuuAOq9qSkrrCJOpbp91GY7aczFKEzFHE9d4DFk4t59ivnDGoJXicklKsxd0UZdiQi7mOBfZ79CrutByIyEZgEPNvL8RtEZI2IrKmpqRmorSmLU+Cxo4+wTLs9oOrUnukMRaKyZrKOQlw8kJ7GmdNKmX8EGTeKogxNBlsxrgIeNcbELaJijLkHuAdg4cKFJl6f4UxHVxhjjBuW8Yp7pz2gmpFu/d52hsJRnvvRmmj0wCf6LlegKEpqkojnvh8Y79kfZ7fF4yo0JBNFJGLceusdXRG6wsZdICPac7di7q64d0WiZrHq+peKogyERMR9NTBNRCaJSABLwJfHdhKRE4Ai4NXBNTG1CUU8cfVQOGpCUjDcve3E3DNsEW8Lhmls764xoyvXK4oyEPoVd2NMCLgReArYDDxijNkoIreLyOWe
rlcBDxtnrTgFwPXSATqC4ajSvu3BiFs3pr0rTFag23Ovbu7A+02q564oykBIKOZujFkBrIhpuy1m/1uDZ9bwIRTx5LKHosX9/57dRk1zJyu/fLY1QzW9O+Ze2dgRdR4t7qUoykDQBOejTJTn3hWJCsvsqGmloS1Ipx17zwx0Z8sctMU9Iz2NTs9MVkVRlETQkr9Hmdhcdm/53sb2IKGIcWPrmek+AjGeu7MYh4ZlFEUZCCruR5lozz16QNUR/toWq357pidb5mBjOwDl9mIc2Vr/RVGUAaDifpSJirl3RaJi7g5OymNWII1Mv3VLamzBn1CS7R5TFEVJFFWMQeTnT2/l7ud3RLV5PfeWzpBbesDLIbuGTGZ6d8y93l5qb5RdBfJozFBVFGX4oooxiKzaWkNuho9Pnz3FbXNCL9NG5LK9poXq5o4e73PDMp5UyIa2IFl+H5fNHU1HKMyYgsEr9asoyvBHPfdBJBSORHnq0O25Lywvxhh4fWddj/c5JX29nntrMEx2wMeI/Ew+e85UXcBaUZQBoeI+iDirK3lxYu4LJhYhAq/uPNTjfU5p36yAjwx/9y3R9EdFUQ4XFfdBJBRH3J39omw/U8ty3bRHryPueu7+NAK+7luiKyQpinK4qLgPIuGIIRxTfMGpLZPuS2NhebHbnpne7ZUfarVi7ll+H2lpgt9nKb967oqiHC4q7oNIVzhCOBKdDROy1T49TXj//O4y+M5kJfBky9gTlZy4u5YcUBTlcFFxH0SsmHt0mxNz96UJCyZ2L4oRLe7dk5gAN2NGxV1RlMNFxX0QCUUMkV5i7ulpgojw0PWL+f775kTF1lvtkgTOBCZH3DW3XVGUw0XFfRCxYu6x2TLWvs9ea2/JlBKuXjTBFXAHEVzBd2q6Z2s9GUVRDhMV90GkK16eux1z9/uiv+pAjLiPzMt0c9m7PXcVd0VRDg8V90Gkrzx3x3N3iBX78tJsd1tj7oqiHCkJibuILBWRLSKyXURu6aXPB0Vkk4hsFJEHB9fM1MDJc99V28qOmha3DayYuxfHc3dCMU5pX++xnAyNuSuKcnj0qx4i4gPuAi4EKoDVIrLcGLPJ02cacCtwujGmXkRGHC2DhzKO537uT1YBsPsH73E9+VjP3RF1v08IhrtL+0J3KqTWcFcU5XBJxHNfBGw3xuw0xgSBh4FlMX2uB+4yxtQDGGOqB9fMoY8xJv6Aaj8xdydTprzUK+4allEU5chIRNzHAvs8+xV2m5fpwHQReVlEXhORpfFOJCI3iMgaEVlTU1NzeBYPUZzwizcV8tv/3MiX//oOEMdzjxlQ9YZlnPoyOqCqKMrhMlgDqunANOAc4GrgtyJSGNvJGHOPMWahMWZhWVnZIH300MAJv4Q84v7cu90PML3F3B0mFHsHVJ0ZqhpzVxTl8EhEPfYD4z374+w2LxXA68aYLmCXiGzFEvvVg2JlChDPc999qM3djvXcM+wwzeM3ncFbe+vd2amgYRlFUY6cRDz31cA0EZkkIgHgKmB5TJ+/Y3ntiEgpVphm5yDaOeQJ2XUHYmPuDukxMXe/L42M9DRmjy3gI0vKo45pnruiKEdKv+JujAkBNwJPAZuBR4wxG0XkdhG53O72FHBIRDYBzwFfNcb0LFw+jHE899g8d4fYsMzIgkzGFmbF7RtQz11RlCMkoaCuMWYFsCKm7TbPtgG+ZL+OO9qDYdrtrJdwxJCXmU5zRyiqT2xY5nPnTuETp0+Kez4n5q713BVFOVxUPQaBE2/7l+tth40hP9PfQ9xjPfcMz5J6sWhYRlGUI0XLDwwSwZAVczcGInHi7rGee1+MLcqiMNtPXqb+9iqKcnioehwhJo6QO0LvZSALXF9x8liWzh7Vq2evKIrSH+q5HyFNMeEX6Bb3sryMwzpnWppojruiKEeEivsR4qyi5KUzHOGqU8bzh+tOSYJFiqIoKu5HTF1rsEdbMBQhO5B+2J67oijKkaLifoTUtvQUd4B0n+iA
qKIoSUPF/QiJ57mDlR2jJXsVRUkWKu5HSLyYO3QviK0oipIMVNyPkEN9eO6KoijJQsX9COlN3GNnpCqKohxLVNyPkN7CMr40/WoVRUkeqkBHSG8Dquq5K4qSTDRX7wjp6ArHbXdi7o99ZkncWayKoihHExX3I6QzTh0ZsPLcARZMLD6W5iiKogAaljli4hUJA82WURQluSQk7iKyVES2iMh2EbklzvHrRKRGRNbar08OvqlDk85QJG58XWPuiqIkk37DMiLiA+4CLsRaCHu1iCw3xmyK6foXY8yNR8HGIU0wFCE3M52Gtq6ods2WURQlmSSiQIuA7caYncaYIPAwsOzompUaRCKGYDgSdzk8n2q7oihJJBEJGgvs8+xX2G2xvF9E1onIoyIyPt6JROQGEVkjImtqamoOw9yhRTBsxdvjFQhTz11RlGQyWAr0T6DcGDMXeBr4Y7xOxph7jDELjTELy8rKBumjk4cj7rkZPcVdY+6KoiSTRMR9P+D1xMfZbS7GmEPGGGeq5u+ABYNj3tCms8sW97ieu4q7oijJIxFxXw1ME5FJIhIArgKWezuIyGjP7uXA5sEzceiinruiKEOVfrNljDEhEbkReArwAfcaYzaKyO3AGmPMcuBmEbkcCAF1wHVH0eYhQ6c9OzV+zF3FXVGU5JHQDFVjzApgRUzbbZ7tW4FbB9e0ocnu2laKsgMUZPtdzz1etky6DqgqipJEVIEGyFX3vMYPn3oX0Ji7oihDFxX3AdDc0cXBpg42HWgC+om5+1TcFUVJHiruA2BfXTsA26tbMMa4nrvG3BVFGWqouA+AvXWtALR0hqhq6iQYtgZUczP8PfpqtoyiKMlExX0A7K1rc7e3V7e4FSFzMnw9+qrnrihKMlFxHwB769rw27H0bdXNbi33eGEZzZZRFCWZqAINgL117cwYlUd+Zjo7alpccXfCMl5vXT13RVGSiYr7ANhX18bE4hxG5mdS2xzsFnfbcw94SkFqzF1RlGSi4j4ADrV0UpaXQVF2gIb2YHfMPWDF3DP83V+neu6KoiQTFfcBEAxHyEhPoyDbT0NbF50hK1sm0+/D7xMy0j2eu+a5K4qSRFTcE8QYQzAUIZCeRlG2n/q2bs894EuzXh5x94mKu6IoyUPFPUFCEUPEWEJemB2wPfcIfp+Qlib409OiYu4allEUJZkkVDhModtLT0+j0OenMxShsb3LFXS/L42M9O58d02FVBQlmai4J4hX3DP9lohXN3WQYW8HfGnRA6oac1cUJYmoe5kgTpGwjHQfhVlWXnt1c6fruQdiwjKaCqkoSjJRcU+QqLBMdgCAg40d7iCq3ycE0tNwxlE15q4oSjJJSNxFZKmIbBGR7SJySx/93i8iRkQWDp6JQ4NOj7gX5XR77k76Y1YgnYz0NDdLRrNlFEVJJv3G3EXEB9wFXAhUAKtFZLkxZlNMvzzg88DrR8PQZONNeyzMCrjtjuf+rf+YSabfxwvbakkzhjT13BVFSSKJeO6LgO3GmJ3GmCDwMLAsTr/vAD8EOgbRviGDM2EpIz2NwuzuEr+O5z5vQhEnjs7HJ6KZMoqiJJ1EVGgssM+zX2G3uYjIfGC8MeaJvk4kIjeIyBoRWVNTUzNgY5NJbLZMpr97INVLeppovF1RlKRzxC6miKQBPwO+3F9fY8w9xpiFxpiFZWVlR/rRx5TubBnrKyu2B1W9ue0AaWmimTKKoiSdRMR9PzDesz/ObnPIA2YDq0RkN7AYWD6cBlUXf28lv1y5Dej21C+ePQqA/Q3tUX19aaI57oqiJJ1ExH01ME1EJolIALgKWO4cNMY0GmNKjTHlxphy4DXgcmPMmqNicRI42NTB6t31QLe4f/acqYBVc8aLTz13RVGGAP1myxhjQiJyI/AU4APuNcZsFJHbgTXGmOV9n2F44UxUKsvL4G+fPc0Nzzj4RDCq7YqiJJmEyg8YY1YAK2Labuul7zlHbtbQIdYz9w6gzp9Q1KO/NZiq
6q4oSnLR2jL9EInW9h7ZMbFopoyiKEMBFfd+CEUiUfux2TGxqLgrijIUUHHvh3CM657Rj+eeJiBaekBRlCSj4t4PseLurfwYD1+akKbirihKklFx7wevuKenSb81Y3xpaWhkRlGUZKPi3g8hj7j3N5gK4EvTipCKoiQfrXDVD+GBirtobRlFUZKPins/eD33/gZTwZmhql+roijJRcMy/RAOD8xzz8v0a/kBRVGSjop7P3jz3PvLlAH4wfvnHE1zFEVREkLFvR8ixuu59z2BCWB0QdbRNEdRFCUhNDjcDwPNllEURRkKqFr1Q8gTc89IICyjKIoyFFC16gdvKmSGX78uRVFSA1WrfogKy6jnrihKiqBq1Q8DncSkKIoyFEhIrURkqYhsEZHtInJLnOOfFpH1IrJWRF4SkZmDb+qx59l3q6hs7F4jVcVdUZRUod9USBHxAXcBFwIVwGoRWW6M2eTp9qAx5m67/+XAz4ClR8HeY4Yxhhvuf5OTxxe6bRqWURQlVUhErRYB240xO40xQeBhYJm3gzGmybObA8SsX5R6hCKGUMTQ0hly23RAVVGUVCERtRoL7PPsV9htUYjI50RkB/Aj4OZ4JxKRG0RkjYisqampORx7jwodXWHO/ckqXt5e67Z1ha2ZqZ2h7hmqYwuzj7ltiqIoh8OguaLGmLuMMVOArwHf6KXPPcaYhcaYhWVlZYP10UfMtqoWdtW28t0nNrttQVvU24NhAP722dO44azJSbFPURRloCQi7vuB8Z79cXZbbzwMXHEkRh1rnNBLbmb3EETQ9tzbuyxxzwmkaylfRVFShkTEfTUwTUQmiUgAuApY7u0gItM8u+8Btg2eiUcfV9wzusW9y56Z2mGLu46lKoqSSvSbLWOMCYnIjcBTgA+41xizUURuB9YYY5YDN4rIBUAXUA989GgaPdjUtwUByA50FwbrCkXH3H1ao11RlBQioaqQxpgVwIqYtts8258fZLuOOk0dXVzw0+f51YfmU9dqiXu05x6J6q812hVFSSWOW3e0uqmT6uZOdtS0uOLuXfw6GCPuGm9XFCWVOG7F3fHMg6GIK+5BT9qjdxvUc1cUJbVQcQ+buOLeFY6eh6Weu6IoqYSKeyjCIVvcO0PhHscddNFrRVFSieNWsZwsmK5whHpX3D1hmRhxV21XFCWVOG4lywm7eGPunV2esEyPmPtx+1UpipKCHLeK5Yh3S2fIncQUHZbRmLuiKKnL8SvudtilprnTbfOGYoLhcFR/zZZRFCWVOG7F3RHypo4uty06LNPtuYtE58AriqIMdY5fcbfDMs0dVkgm4EvrdUBVvXZFUVKN41bcnZi647nnZ6X3mgqp8XZFUVKN41jc7QFV23PPy/RHee5R4i4q7oqipBbHrbjHhmXyM9N7naGqnruiKKnG8SvuMYtx5GdFe+5eoU/XYu6KoqQYx61qxZYXyM/yE44YQm7NGY25K4qSuqi42+TbS+y5ZQlCmi2jKErqktBiHcOR2JK++Zl+wBL357dWcqCx3T2mnruiKKlGQp67iCwVkS0isl1Ebolz/EsisklE1onIShGZOPimDi6x5QXysyxxb2zv4rN/fosV6w+6x1TcFUVJNfoVdxHxAXcBlwAzgatFZGZMt7eBhcaYucCjwI8G29DBJrbqoyPuThExLyruiqKkGol47ouA7caYncaYIPAwsMzbwRjznDGmzd59DRg3uGYOPrFVH52Ye0NbT3HXmLuiKKlGIuI+Ftjn2a+w23rjE8CT8Q6IyA0iskZE1tTU1CRu5QA40NDODfevodWu9Ngbsdkw2QFL3Ovbunr09Wm5X0VRUoxBVS0RuRZYCPw43nFjzD3GmIXGmIVlZWWD+dEua/bU8+9NVWytau6znzdbJiM9jYx066tQz11RlOFAItky+4Hxnv1xdlsUInIB8N/A2caYztjjxwrHY3cmJ/VG0FP10Svuje3xPHcVd0VRUotEPPfVwDQRmSQiAeAqYLm3
g4jMA34DXG6MqR58MxPHFfdg3+Lu9dwz/T4CtrjXq+euKMowoF9xN8aEgBuBp4DNwCPGmI0icruIXG53+zGQC/xVRNaKyPJeTnfUcVZVautH3L157pbn7gPix9y1lruiKKlGQpOYjDErgBUxbbd5ti8YZLsOG0fUB+K5Z6T7yPD3jLmnCUSMeu6KoqQewy4NpNtz7ztbJkrc/d4B1W7P3QnVaMxdUZRUY9iJuxNzb+tvQDUcO6BqhWW84h6xu6jnrihKqjEMxT2xsEzQs+pSpt9HVsAS99qW7kSfiK3umueuKEqqMexUqzXBAdWuGM89J+DD75Oomu4hW9zVc1cUJdUYfuIeTFTcI2T5LW89I92HiFCcE4jb1+dTcVcUJbUYfuLu5rn3P6Cak+GIu/U1FGX3Iu66hqqiKCnGMBR3y2Pvz3PvDEXIybAyQZ00SMdzz/RHfy0allEUJdUYhuLee/mBg40dzP6fp1hX0WB57naxMCdTxhH3kfmZUe/TVEhFUVKNYSXuxpg+Y+7bqptp6Qyx8UATXWFDbi+e+4i8DAAml+UAkK4xd0VRUoxhJe4dXRE3Nz2euNc0W2mOBxs7CEcM2RndA6rQLe4FWX4e+8wS/vixRYB67oqipB7Dag3VFk8N93gDqo6472+w1kd1Y+7p0Z57ViCdBROL3Vmu6ZrnrihKijGsVMsR40B6WlzP3ZmgVFFvLRqVG4gWdydbJttOkfT7rPY0zZZRFCXFGFbi7njuZbkZcWeo9ua5Z9piXuJ67ta+kyWjMXdFUVKNYSXuThpkWV4GbV1hjDFRx2tsz31/vSXuYwozGZWfydQRuQAUxYi7iFCU7afAXjxbURQlVRhWMXcnU6Y0N4NwxBAMR9he3cKsMQVAt+fuDLoWZgd47evnu+93PHcnLAOw/MYzKMmNP7lJURRlqDKsPPfKhg7A8sgBHn+nkvf88iWe32otxl3T3BmV+eKkQjqU5GZwyexRLJlS4raNL852F89WFEVJFRISdxFZKiJbRGS7iNwS5/hZIvKWiIRE5MrBNzMxntlcxbiiLGaOzgfgkTX7AHhi3QGCoQj1bV1MsXPXAc6YVhr1fl+a8OtrF7CwvPjYGa0oinIU6FfcRcQH3AVcAswErhaRmTHd9gLXAQ8OtoG94WTGNHd0YYxhZ00LL22rZemsUW7M/PVddQA8vamKqibLq582Ig+wJirFeu6KoijDhUTUbRGw3RizE0BEHgaWAZucDsaY3faxSLwTDDbfW7GZB17dw0WzRvL4ukpmjy3gnX0NAFwyZxTNHd057lcvGs9Db+zjy399B4CLZo1kX30b31k2+1iYqiiKkhQSEfexwD7PfgVw6uF8mIjcANwAMGHChMM5BY+vO8A9L+wkPU34x9oDTC7NYcvBJr5wwTRmjs5n/oQiQhHDndfMw+9L44ITR2IMPLx6H8tOHsN75oxm2cljD+uzFUVRUoVjGpcwxtwD3AOwcOFC00/3uBRlB7hw5kg+f/40nlhfyefPn0Z6mpDu644w+X3CZXPHuPvff98cPn7GJKaW5ZKmpQQURTkOSETc9wPjPfvj7LakcPrUUk6fag2Ezh5bkNB7RITpI/OOplmKoihDikSyZVYD00RkkogEgKuA5UfXLEVRFOVI6FfcjTEh4EbgKWAz8IgxZqOI3C4ilwOIyCkiUgF8APiNiGw8mkYriqIofZNQzN0YswJYEdN2m2d7NVa4RlEURRkCDKsZqoqiKIqFiruiKMowRMVdURRlGKLiriiKMgxRcVcURRmGSOyCFsfsg0VqgD2H8dZSoHaQzUkWei1DE72WoYlei8VEY0xZf52SJu6Hi4isMcYsTLYdg4Fey9BEr2VootcyMDQsoyiKMgxRcVcURRmGpKK435NsAwYRvZahiV7L0ESvZQCkXMxdURRF6Z9U9NwVRVGUflBxVxRFGYaklLiLyFIR2SIi20XklmTbM1BEZLeIrBeRtSKyxm4rFpGnRWSb/W9R
su2Mh4jcKyLVIrLB0xbXdrH4pX2f1onI/ORZ3pNeruVbIrLfvjdrReRSz7Fb7WvZIiIXJ8fqnojIeKnGxskAAAOfSURBVBF5TkQ2ichGEfm83Z5y96WPa0nF+5IpIm+IyDv2tXzbbp8kIq/bNv/FXh8DEcmw97fbx8sHxRBjTEq8AB+wA5gMBIB3gJnJtmuA17AbKI1p+xFwi719C/DDZNvZi+1nAfOBDf3ZDlwKPAkIsBh4Pdn2J3At3wK+EqfvTPv/WgYwyf4/6Ev2Ndi2jQbm29t5wFbb3pS7L31cSyreFwFy7W0/8Lr9fT8CXGW33w18xt7+LHC3vX0V8JfBsCOVPPdFwHZjzE5jTBB4GFiWZJsGg2XAH+3tPwJXJNGWXjHGvADUxTT3Zvsy4H5j8RpQKCKjj42l/dPLtfTGMuBhY0ynMWYXsB3r/2LSMcZUGmPesrebsRbTGUsK3pc+rqU3hvJ9McaYFnvXb78McB7wqN0ee1+c+/UocL6IHPFiz6kk7mOBfZ79Cvq++UMRA/xbRN4UkRvstpHGmEp7+yAwMjmmHRa92Z6q9+pGO1xxryc8lhLXYj/Kz8PyElP6vsRcC6TgfRERn4isBaqBp7GeLBqMtbIdRNvrXot9vBEoOVIbUknchwNnGGPmA5cAnxORs7wHjfVclpK5qalsu82vgSnAyUAl8NPkmpM4IpILPAZ8wRjT5D2WavclzrWk5H0xxoSNMSdjrVC3CDjhWNuQSuK+Hxjv2R9nt6UMxpj99r/VwP/DuulVzqOx/W918iwcML3ZnnL3yhhTZf9BRoDf0v2IP6SvRUT8WGL4Z2PM3+zmlLwv8a4lVe+LgzGmAXgOWIIVBnOWNvXa616LfbwAOHSkn51K4r4amGaPOAewBh6WJ9mmhBGRHBHJc7aBi4ANWNfwUbvbR4F/JMfCw6I325cDH7GzMxYDjZ4wwZAkJvb8Xqx7A9a1XGVnNEwCpgFvHGv74mHHZX8PbDbG/MxzKOXuS2/XkqL3pUxECu3tLOBCrDGE54Ar7W6x98W5X1cCz9pPXEdGskeWBzgKfSnWKPoO4L+Tbc8AbZ+MNbr/DrDRsR8rtrYS2AY8AxQn29Ze7H8I67G4Cyte+InebMfKFrjLvk/rgYXJtj+Ba3nAtnWd/cc22tP/v+1r2QJckmz7PXadgRVyWQestV+XpuJ96eNaUvG+zAXetm3eANxmt0/G+gHaDvwVyLDbM+397fbxyYNhh5YfUBRFGYakUlhGURRFSRAVd0VRlGGIiruiKMowRMVdURRlGKLiriiKMgxRcVcURRmGqLgriqIMQ/4/xpkP3TrluYEAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plot training loss and validation accuracy across the niter BCD iterations.\n",
    "# Explicit fig/ax interface + axis labels so each figure stands alone;\n",
    "# plt.show() suppresses the stray Text repr that plt.title() would leave\n",
    "# as the cell's output.\n",
    "fig_loss, ax_loss = plt.subplots()\n",
    "ax_loss.plot(np.arange(1, niter + 1), loss2)\n",
    "ax_loss.set(title='training loss', xlabel='iteration', ylabel='loss')\n",
    "\n",
    "fig_acc, ax_acc = plt.subplots()\n",
    "ax_acc.plot(np.arange(1, niter + 1), accuracy_test)\n",
    "ax_acc.set(title='validation accuracy', xlabel='iteration', ylabel='accuracy')\n",
    "plt.show()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
