{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "collapsed": false
   },
   "source": [
    "# Tutorial on self-normalizing networks on the MNIST data set: convolutional neural networks\n",
    "\n",
    "*Author:* Guenter Klambauer, 2017\n",
    "\n",
     "Tested under Python 3.5 and TensorFlow 1.1\n",
    "\n",
    "Derived from: [Aymeric Damien](https://github.com/aymericdamien/TensorFlow-Examples/) "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting /tmp/data/train-images-idx3-ubyte.gz\n",
      "Extracting /tmp/data/train-labels-idx1-ubyte.gz\n",
      "Extracting /tmp/data/t10k-images-idx3-ubyte.gz\n",
      "Extracting /tmp/data/t10k-labels-idx1-ubyte.gz\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "\n",
    "from __future__ import absolute_import, division, print_function\n",
    "import numbers\n",
    "from tensorflow.contrib import layers\n",
    "from tensorflow.python.framework import ops\n",
    "from tensorflow.python.framework import tensor_shape\n",
    "from tensorflow.python.framework import tensor_util\n",
    "from tensorflow.python.ops import math_ops\n",
    "from tensorflow.python.ops import random_ops\n",
    "from tensorflow.python.ops import array_ops\n",
    "from tensorflow.python.layers import utils\n",
    "\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "from scipy.special import erf,erfc\n",
    "\n",
    "# Import MNIST data\n",
    "from tensorflow.examples.tutorials.mnist import input_data\n",
    "mnist = input_data.read_data_sets(\"/tmp/data/\", one_hot=True)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### (1) Definition of scaled exponential linear units (SELUs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def selu(x):\n",
    "    with ops.name_scope('elu') as scope:\n",
    "        alpha = 1.6732632423543772848170429916717\n",
    "        scale = 1.0507009873554804934193349852946\n",
    "        return scale*tf.where(x>=0.0, x, alpha*tf.nn.elu(x))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### (2) Definition of dropout variant for SNNs\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "def dropout_selu(x, rate, alpha= -1.7580993408473766, fixedPointMean=0.0, fixedPointVar=1.0, \n",
     "                 noise_shape=None, seed=None, name=None, training=False):\n",
     "    \"\"\"Dropout variant for SNNs (\"alpha dropout\").\n",
     "\n",
     "    With probability `rate`, inputs are set to `alpha` (the SELU negative\n",
     "    saturation value) instead of zero; the result is then rescaled by an\n",
     "    affine transform a*x + b so the activations keep mean `fixedPointMean`\n",
     "    and variance `fixedPointVar`. When `training` is False the input is\n",
     "    returned unchanged.\n",
     "    \"\"\"\n",
     "\n",
     "    def dropout_selu_impl(x, rate, alpha, noise_shape, seed, name):\n",
     "        keep_prob = 1.0 - rate\n",
     "        x = ops.convert_to_tensor(x, name=\"x\")\n",
     "        if isinstance(keep_prob, numbers.Real) and not 0 < keep_prob <= 1:\n",
     "            raise ValueError(\"keep_prob must be a scalar tensor or a float in the \"\n",
     "                                             \"range (0, 1], got %g\" % keep_prob)\n",
     "        keep_prob = ops.convert_to_tensor(keep_prob, dtype=x.dtype, name=\"keep_prob\")\n",
     "        keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())\n",
     "\n",
     "        alpha = ops.convert_to_tensor(alpha, dtype=x.dtype, name=\"alpha\")\n",
     "        keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())\n",
     "\n",
     "        # Nothing is dropped when keep_prob is statically 1: pass through.\n",
     "        if tensor_util.constant_value(keep_prob) == 1:\n",
     "            return x\n",
     "\n",
     "        noise_shape = noise_shape if noise_shape is not None else array_ops.shape(x)\n",
     "        # uniform draw in [keep_prob, 1 + keep_prob); floor yields 1 with\n",
     "        # probability keep_prob and 0 otherwise — the keep mask.\n",
     "        random_tensor = keep_prob\n",
     "        random_tensor += random_ops.random_uniform(noise_shape, seed=seed, dtype=x.dtype)\n",
     "        binary_tensor = math_ops.floor(random_tensor)\n",
     "        # Kept units pass through unchanged; dropped units become alpha.\n",
     "        ret = x * binary_tensor + alpha * (1-binary_tensor)\n",
     "\n",
     "        # Affine correction (a, b) restoring the fixed-point mean/variance.\n",
     "        a = tf.sqrt(fixedPointVar / (keep_prob *((1-keep_prob) * tf.pow(alpha-fixedPointMean,2) + fixedPointVar)))\n",
     "\n",
     "        b = fixedPointMean - a * (keep_prob * fixedPointMean + (1 - keep_prob) * alpha)\n",
     "        ret = a * ret + b\n",
     "        ret.set_shape(x.get_shape())\n",
     "        return ret\n",
     "\n",
     "    # Dropout only in training mode; identity op at inference time.\n",
     "    with ops.name_scope(name, \"dropout\", [x]) as name:\n",
     "        return utils.smart_cond(training,\n",
     "            lambda: dropout_selu_impl(x, rate, alpha, noise_shape, seed, name),\n",
     "            lambda: array_ops.identity(x))\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### (3) Scale input to zero mean and unit variance"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# Fit a standardizer on the training images: SNN inputs are later\n",
     "# transformed to zero mean and unit variance with this scaler.\n",
     "scaler = StandardScaler().fit(mnist.train.images)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Training parameters\n",
     "learning_rate = 0.025\n",
     "training_iters = 50\n",
     "batch_size = 128\n",
     "display_step = 1\n",
     "\n",
     "# Network Parameters\n",
     "n_input = 784 # MNIST data input (img shape: 28*28)\n",
     "n_classes = 10 # MNIST total classes (0-9 digits)\n",
     "keep_prob_ReLU = 0.5 # Dropout, probability to keep units\n",
     "dropout_prob_SNN = 0.05 # Dropout, probability to dropout units\n",
     "\n",
     "# tf Graph input\n",
     "x = tf.placeholder(tf.float32, [None, n_input])\n",
     "y = tf.placeholder(tf.float32, [None, n_classes])\n",
     "keep_prob = tf.placeholder(tf.float32) #dropout (keep probability for ReLU)\n",
     "dropout_prob =  tf.placeholder(tf.float32) #dropout (dropout probability for SNN)\n",
     "is_training = tf.placeholder(tf.bool) # True during training: enables dropout_selu"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Create some wrappers for simplicity\n",
     "def conv2d(x, W, b, strides=1):\n",
     "    # Conv2D wrapper, with bias and ReLU activation (baseline network)\n",
     "    x = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')\n",
     "    x = tf.nn.bias_add(x, b)\n",
     "    return tf.nn.relu(x)\n",
     "\n",
     "def conv2d_SNN(x, W, b, strides=1):\n",
     "    # Conv2D wrapper, with bias and SELU activation (SNN variant)\n",
     "    x = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')\n",
     "    x = tf.nn.bias_add(x, b)\n",
     "    return selu(x)\n",
     "\n",
     "def maxpool2d(x, k=2):\n",
     "    # MaxPool2D wrapper (k x k window and stride)\n",
     "    return tf.nn.max_pool(x, ksize=[1, k, k, 1], strides=[1, k, k, 1],\n",
     "                          padding='SAME')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Create model\n",
    "def conv_net_ReLU(x, weights, biases, keep_prob):\n",
    "    # Reshape input picture\n",
    "    x = tf.reshape(x, shape=[-1, 28, 28, 1])\n",
    "\n",
    "    # Convolution Layer\n",
    "    conv1 = conv2d(x, weights['wc1'], biases['bc1'])\n",
    "    # Max Pooling (down-sampling)\n",
    "    conv1 = maxpool2d(conv1, k=2)\n",
    "\n",
    "    # Convolution Layer\n",
    "    conv2 = conv2d(conv1, weights['wc2'], biases['bc2'])\n",
    "    # Max Pooling (down-sampling)\n",
    "    conv2 = maxpool2d(conv2, k=2)\n",
    "\n",
    "    # Fully connected layer\n",
    "    # Reshape conv2 output to fit fully connected layer input\n",
    "    fc1 = tf.reshape(conv2, [-1, weights['wd1'].get_shape().as_list()[0]])\n",
    "    fc1 = tf.add(tf.matmul(fc1, weights['wd1']), biases['bd1'])\n",
    "    fc1 = tf.nn.relu(fc1)\n",
    "    \n",
    "    # Apply Dropout\n",
    "    fc1 = tf.nn.dropout(fc1, keep_prob)\n",
    "\n",
    "    # Output, class prediction\n",
    "    out = tf.add(tf.matmul(fc1, weights['out']), biases['out'])\n",
    "    return out"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Create model\n",
    "def conv_net_SNN(x, weights, biases, dropout_prob, is_training):\n",
    "    # Reshape input picture\n",
    "    x = tf.reshape(x, shape=[-1, 28, 28, 1])\n",
    "\n",
    "    # Convolution Layer\n",
    "    conv1 = conv2d_SNN(x, weights['wc1'], biases['bc1'],)\n",
    "    # Max Pooling (down-sampling)\n",
    "    conv1 = maxpool2d(conv1, k=2)\n",
    "\n",
    "    # Convolution Layer\n",
    "    conv2 = conv2d_SNN(conv1, weights['wc2'], biases['bc2'])\n",
    "    # Max Pooling (down-sampling)\n",
    "    conv2 = maxpool2d(conv2, k=2)\n",
    "\n",
    "    # Fully connected layer\n",
    "    # Reshape conv2 output to fit fully connected layer input\n",
    "    fc1 = tf.reshape(conv2, [-1, weights['wd1'].get_shape().as_list()[0]])\n",
    "    fc1 = tf.add(tf.matmul(fc1, weights['wd1']), biases['bd1'])\n",
    "    fc1 = selu(fc1)\n",
    "    \n",
    "    # Apply Dropout\n",
    "    fc1 = dropout_selu(fc1, dropout_prob,training=is_training)\n",
    "\n",
    "    # Output, class prediction\n",
    "    out = tf.add(tf.matmul(fc1, weights['out']), biases['out'])\n",
    "    return out"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# RELU: Store layers weight & bias\n",
     "## Improved with MSRA (He) initialization: stddev = sqrt(2 / fan_in)\n",
     "\n",
     "weights = {\n",
     "    # 5x5 conv, 1 input, 32 outputs\n",
     "    'wc1': tf.Variable(tf.random_normal([5, 5, 1, 32],stddev=np.sqrt(2/25)) ),\n",
     "    # 5x5 conv, 32 inputs, 64 outputs\n",
     "    'wc2': tf.Variable(tf.random_normal([5, 5, 32, 64],stddev=np.sqrt(2/(25*32)))),\n",
     "    # fully connected, 7*7*64 inputs, 1024 outputs\n",
     "    'wd1': tf.Variable(tf.random_normal([7*7*64, 1024],stddev=np.sqrt(2/(7*7*64)))),\n",
     "    # 1024 inputs, 10 outputs (class prediction)\n",
     "    'out': tf.Variable(tf.random_normal([1024, n_classes],stddev=np.sqrt(2/(1024))))\n",
     "}\n",
     "\n",
     "# Biases start at zero (stddev=0 collapses the normal to a constant 0).\n",
     "biases = {\n",
     "    'bc1': tf.Variable(tf.random_normal([32],stddev=0)),\n",
     "    'bc2': tf.Variable(tf.random_normal([64],stddev=0)),\n",
     "    'bd1': tf.Variable(tf.random_normal([1024],stddev=0)),\n",
     "    'out': tf.Variable(tf.random_normal([n_classes],stddev=0))\n",
     "}"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### (4) Initialization with STDDEV of sqrt(1/n)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# SNN: Store layers weight & bias\n",
     "# SELU-appropriate initialization: stddev = sqrt(1 / fan_in)\n",
     "weights2 = {\n",
     "    # 5x5 conv, 1 input, 32 outputs\n",
     "    'wc1': tf.Variable(tf.random_normal([5, 5, 1, 32],stddev=np.sqrt(1/25)) ),\n",
     "    # 5x5 conv, 32 inputs, 64 outputs\n",
     "    'wc2': tf.Variable(tf.random_normal([5, 5, 32, 64],stddev=np.sqrt(1/(25*32)))),\n",
     "    # fully connected, 7*7*64 inputs, 1024 outputs\n",
     "    'wd1': tf.Variable(tf.random_normal([7*7*64, 1024],stddev=np.sqrt(1/(7*7*64)))),\n",
     "    # 1024 inputs, 10 outputs (class prediction)\n",
     "    'out': tf.Variable(tf.random_normal([1024, n_classes],stddev=np.sqrt(1/(1024))))\n",
     "}\n",
     "\n",
     "# Biases start at zero (stddev=0 collapses the normal to a constant 0).\n",
     "biases2 = {\n",
     "    'bc1': tf.Variable(tf.random_normal([32],stddev=0)),\n",
     "    'bc2': tf.Variable(tf.random_normal([64],stddev=0)),\n",
     "    'bd1': tf.Variable(tf.random_normal([1024],stddev=0)),\n",
     "    'out': tf.Variable(tf.random_normal([n_classes],stddev=0))\n",
     "}\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": false
   },
   "outputs": [],
   "source": [
     "# Construct both models on the same placeholders\n",
     "pred_ReLU = conv_net_ReLU(x, weights, biases, keep_prob)\n",
     "pred_SNN = conv_net_SNN(x, weights2, biases2, dropout_prob,is_training)\n",
     "\n",
     "# Define loss (softmax cross-entropy over logits) and optimizer\n",
     "cost_ReLU = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred_ReLU, labels=y))\n",
     "cost_SNN = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred_SNN, labels=y))\n",
     "\n",
     "# Plain SGD with the same learning rate so the comparison is fair.\n",
     "optimizer_ReLU = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost_ReLU)\n",
     "optimizer_SNN = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(cost_SNN)\n",
     "\n",
     "# Evaluate ReLU model: fraction of argmax predictions matching the labels\n",
     "correct_pred_ReLU = tf.equal(tf.argmax(pred_ReLU, 1), tf.argmax(y, 1))\n",
     "accuracy_ReLU = tf.reduce_mean(tf.cast(correct_pred_ReLU, tf.float32))\n",
     "\n",
     "# Evaluate SNN model the same way\n",
     "correct_pred_SNN = tf.equal(tf.argmax(pred_SNN, 1), tf.argmax(y, 1))\n",
     "accuracy_SNN = tf.reduce_mean(tf.cast(correct_pred_SNN, tf.float32))\n",
     "\n",
     "\n",
     "# Initializing the variables\n",
     "init = tf.global_variables_initializer()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
     "# Per-update training-loss histories, recorded for later comparison.\n",
     "training_loss_protocol_ReLU = []\n",
     "training_loss_protocol_SNN = []"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {
    "collapsed": false
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "RELU: Nbr of updates: 1, Minibatch Loss= 2.008754, Training Accuracy= 0.35156\n",
      "SNN: Nbr of updates: 1, Minibatch Loss= 2.451197, Training Accuracy= 0.39844\n",
      "RELU: Nbr of updates: 2, Minibatch Loss= 1.732718, Training Accuracy= 0.37500\n",
      "SNN: Nbr of updates: 2, Minibatch Loss= 2.160526, Training Accuracy= 0.65625\n",
      "RELU: Nbr of updates: 3, Minibatch Loss= 1.752255, Training Accuracy= 0.39844\n",
      "SNN: Nbr of updates: 3, Minibatch Loss= 1.601260, Training Accuracy= 0.53125\n",
      "RELU: Nbr of updates: 4, Minibatch Loss= 1.605729, Training Accuracy= 0.53906\n",
      "SNN: Nbr of updates: 4, Minibatch Loss= 0.978341, Training Accuracy= 0.70312\n",
      "RELU: Nbr of updates: 5, Minibatch Loss= 1.555425, Training Accuracy= 0.51562\n",
      "SNN: Nbr of updates: 5, Minibatch Loss= 0.645711, Training Accuracy= 0.82031\n",
      "RELU: Nbr of updates: 6, Minibatch Loss= 1.313229, Training Accuracy= 0.67969\n",
      "SNN: Nbr of updates: 6, Minibatch Loss= 0.401476, Training Accuracy= 0.90625\n",
      "RELU: Nbr of updates: 7, Minibatch Loss= 1.203895, Training Accuracy= 0.77344\n",
      "SNN: Nbr of updates: 7, Minibatch Loss= 0.453578, Training Accuracy= 0.92188\n",
      "RELU: Nbr of updates: 8, Minibatch Loss= 1.089910, Training Accuracy= 0.85938\n",
      "SNN: Nbr of updates: 8, Minibatch Loss= 0.297481, Training Accuracy= 0.95312\n",
      "RELU: Nbr of updates: 9, Minibatch Loss= 1.017870, Training Accuracy= 0.79688\n",
      "SNN: Nbr of updates: 9, Minibatch Loss= 0.365949, Training Accuracy= 0.91406\n",
      "RELU: Nbr of updates: 10, Minibatch Loss= 1.070305, Training Accuracy= 0.76562\n",
      "SNN: Nbr of updates: 10, Minibatch Loss= 0.405422, Training Accuracy= 0.90625\n",
      "RELU: Nbr of updates: 11, Minibatch Loss= 0.985618, Training Accuracy= 0.79688\n",
      "SNN: Nbr of updates: 11, Minibatch Loss= 0.460914, Training Accuracy= 0.88281\n",
      "RELU: Nbr of updates: 12, Minibatch Loss= 0.875668, Training Accuracy= 0.72656\n",
      "SNN: Nbr of updates: 12, Minibatch Loss= 0.349492, Training Accuracy= 0.90625\n",
      "RELU: Nbr of updates: 13, Minibatch Loss= 1.041480, Training Accuracy= 0.76562\n",
      "SNN: Nbr of updates: 13, Minibatch Loss= 0.436600, Training Accuracy= 0.89062\n",
      "RELU: Nbr of updates: 14, Minibatch Loss= 0.836483, Training Accuracy= 0.83594\n",
      "SNN: Nbr of updates: 14, Minibatch Loss= 0.356240, Training Accuracy= 0.92188\n",
      "RELU: Nbr of updates: 15, Minibatch Loss= 0.824995, Training Accuracy= 0.81250\n",
      "SNN: Nbr of updates: 15, Minibatch Loss= 0.407508, Training Accuracy= 0.87500\n",
      "RELU: Nbr of updates: 16, Minibatch Loss= 0.739613, Training Accuracy= 0.85156\n",
      "SNN: Nbr of updates: 16, Minibatch Loss= 0.289174, Training Accuracy= 0.92969\n",
      "RELU: Nbr of updates: 17, Minibatch Loss= 0.782138, Training Accuracy= 0.80469\n",
      "SNN: Nbr of updates: 17, Minibatch Loss= 0.314916, Training Accuracy= 0.91406\n",
      "RELU: Nbr of updates: 18, Minibatch Loss= 0.687675, Training Accuracy= 0.85156\n",
      "SNN: Nbr of updates: 18, Minibatch Loss= 0.243602, Training Accuracy= 0.94531\n",
      "RELU: Nbr of updates: 19, Minibatch Loss= 0.647239, Training Accuracy= 0.82812\n",
      "SNN: Nbr of updates: 19, Minibatch Loss= 0.205704, Training Accuracy= 0.96094\n",
      "RELU: Nbr of updates: 20, Minibatch Loss= 0.673955, Training Accuracy= 0.78906\n",
      "SNN: Nbr of updates: 20, Minibatch Loss= 0.293074, Training Accuracy= 0.92188\n",
      "RELU: Nbr of updates: 21, Minibatch Loss= 0.643871, Training Accuracy= 0.84375\n",
      "SNN: Nbr of updates: 21, Minibatch Loss= 0.305403, Training Accuracy= 0.92969\n",
      "RELU: Nbr of updates: 22, Minibatch Loss= 0.577555, Training Accuracy= 0.91406\n",
      "SNN: Nbr of updates: 22, Minibatch Loss= 0.225528, Training Accuracy= 0.96875\n",
      "RELU: Nbr of updates: 23, Minibatch Loss= 0.539012, Training Accuracy= 0.90625\n",
      "SNN: Nbr of updates: 23, Minibatch Loss= 0.207042, Training Accuracy= 0.96094\n",
      "RELU: Nbr of updates: 24, Minibatch Loss= 0.595193, Training Accuracy= 0.85938\n",
      "SNN: Nbr of updates: 24, Minibatch Loss= 0.297265, Training Accuracy= 0.89844\n",
      "RELU: Nbr of updates: 25, Minibatch Loss= 0.610190, Training Accuracy= 0.83594\n",
      "SNN: Nbr of updates: 25, Minibatch Loss= 0.255643, Training Accuracy= 0.95312\n",
      "RELU: Nbr of updates: 26, Minibatch Loss= 0.708689, Training Accuracy= 0.69531\n",
      "SNN: Nbr of updates: 26, Minibatch Loss= 0.161673, Training Accuracy= 0.98438\n",
      "RELU: Nbr of updates: 27, Minibatch Loss= 0.702952, Training Accuracy= 0.79688\n",
      "SNN: Nbr of updates: 27, Minibatch Loss= 0.215801, Training Accuracy= 0.94531\n",
      "RELU: Nbr of updates: 28, Minibatch Loss= 0.470672, Training Accuracy= 0.88281\n",
      "SNN: Nbr of updates: 28, Minibatch Loss= 0.269345, Training Accuracy= 0.91406\n",
      "RELU: Nbr of updates: 29, Minibatch Loss= 0.554051, Training Accuracy= 0.83594\n",
      "SNN: Nbr of updates: 29, Minibatch Loss= 0.296727, Training Accuracy= 0.92188\n",
      "RELU: Nbr of updates: 30, Minibatch Loss= 0.504638, Training Accuracy= 0.84375\n",
      "SNN: Nbr of updates: 30, Minibatch Loss= 0.227030, Training Accuracy= 0.93750\n",
      "RELU: Nbr of updates: 31, Minibatch Loss= 0.566984, Training Accuracy= 0.85938\n",
      "SNN: Nbr of updates: 31, Minibatch Loss= 0.212100, Training Accuracy= 0.96875\n",
      "RELU: Nbr of updates: 32, Minibatch Loss= 0.505076, Training Accuracy= 0.86719\n",
      "SNN: Nbr of updates: 32, Minibatch Loss= 0.224962, Training Accuracy= 0.92188\n",
      "RELU: Nbr of updates: 33, Minibatch Loss= 0.487980, Training Accuracy= 0.87500\n",
      "SNN: Nbr of updates: 33, Minibatch Loss= 0.192593, Training Accuracy= 0.96094\n",
      "RELU: Nbr of updates: 34, Minibatch Loss= 0.377008, Training Accuracy= 0.93750\n",
      "SNN: Nbr of updates: 34, Minibatch Loss= 0.164228, Training Accuracy= 0.96094\n",
      "RELU: Nbr of updates: 35, Minibatch Loss= 0.468827, Training Accuracy= 0.89062\n",
      "SNN: Nbr of updates: 35, Minibatch Loss= 0.222637, Training Accuracy= 0.92969\n",
      "RELU: Nbr of updates: 36, Minibatch Loss= 0.456475, Training Accuracy= 0.90625\n",
      "SNN: Nbr of updates: 36, Minibatch Loss= 0.223814, Training Accuracy= 0.92969\n",
      "RELU: Nbr of updates: 37, Minibatch Loss= 0.521786, Training Accuracy= 0.83594\n",
      "SNN: Nbr of updates: 37, Minibatch Loss= 0.289590, Training Accuracy= 0.91406\n",
      "RELU: Nbr of updates: 38, Minibatch Loss= 0.512233, Training Accuracy= 0.80469\n",
      "SNN: Nbr of updates: 38, Minibatch Loss= 0.254801, Training Accuracy= 0.92188\n",
      "RELU: Nbr of updates: 39, Minibatch Loss= 0.462405, Training Accuracy= 0.84375\n",
      "SNN: Nbr of updates: 39, Minibatch Loss= 0.192647, Training Accuracy= 0.95312\n",
      "RELU: Nbr of updates: 40, Minibatch Loss= 0.398073, Training Accuracy= 0.89844\n",
      "SNN: Nbr of updates: 40, Minibatch Loss= 0.127224, Training Accuracy= 0.97656\n",
      "RELU: Nbr of updates: 41, Minibatch Loss= 0.454393, Training Accuracy= 0.85156\n",
      "SNN: Nbr of updates: 41, Minibatch Loss= 0.204394, Training Accuracy= 0.92969\n",
      "RELU: Nbr of updates: 42, Minibatch Loss= 0.455688, Training Accuracy= 0.88281\n",
      "SNN: Nbr of updates: 42, Minibatch Loss= 0.198009, Training Accuracy= 0.95312\n",
      "RELU: Nbr of updates: 43, Minibatch Loss= 0.402138, Training Accuracy= 0.89062\n",
      "SNN: Nbr of updates: 43, Minibatch Loss= 0.170651, Training Accuracy= 0.96094\n",
      "RELU: Nbr of updates: 44, Minibatch Loss= 0.430634, Training Accuracy= 0.89062\n",
      "SNN: Nbr of updates: 44, Minibatch Loss= 0.216837, Training Accuracy= 0.96094\n",
      "RELU: Nbr of updates: 45, Minibatch Loss= 0.389273, Training Accuracy= 0.91406\n",
      "SNN: Nbr of updates: 45, Minibatch Loss= 0.180505, Training Accuracy= 0.96094\n",
      "RELU: Nbr of updates: 46, Minibatch Loss= 0.409469, Training Accuracy= 0.91406\n",
      "SNN: Nbr of updates: 46, Minibatch Loss= 0.193067, Training Accuracy= 0.94531\n",
      "RELU: Nbr of updates: 47, Minibatch Loss= 0.368824, Training Accuracy= 0.89062\n",
      "SNN: Nbr of updates: 47, Minibatch Loss= 0.158238, Training Accuracy= 0.97656\n",
      "RELU: Nbr of updates: 48, Minibatch Loss= 0.388534, Training Accuracy= 0.89844\n",
      "SNN: Nbr of updates: 48, Minibatch Loss= 0.229685, Training Accuracy= 0.93750\n",
      "RELU: Nbr of updates: 49, Minibatch Loss= 0.321354, Training Accuracy= 0.94531\n",
      "SNN: Nbr of updates: 49, Minibatch Loss= 0.143143, Training Accuracy= 0.96875\n",
      "RELU: Nbr of updates: 50, Minibatch Loss= 0.356414, Training Accuracy= 0.90625\n",
      "SNN: Nbr of updates: 50, Minibatch Loss= 0.160477, Training Accuracy= 0.96094\n",
      "Optimization Finished!\n",
      "\n",
      "ReLU: Testing Accuracy: 0.859375\n",
      "SNN: Testing Accuracy: 0.916016\n"
     ]
    }
   ],
   "source": [
     "# Launch the graph\n",
     "gpu_options = tf.GPUOptions(allow_growth=True)\n",
     "with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:\n",
     "    sess.run(init)\n",
     "    step = 0\n",
     "    # Keep training until reach max iterations\n",
     "    while step < training_iters:\n",
     "        batch_x, batch_y = mnist.train.next_batch(batch_size)\n",
     "        # The SNN consumes standardized (zero-mean, unit-variance) inputs.\n",
     "        batch_x_norm = scaler.transform(batch_x)\n",
     "        # Run optimization op (backprop) for both models on the same batch\n",
     "        sess.run(optimizer_ReLU, feed_dict={x: batch_x, y: batch_y,\n",
     "                                       keep_prob: keep_prob_ReLU})\n",
     "        sess.run(optimizer_SNN, feed_dict={x: batch_x_norm, y: batch_y,\n",
     "                                       dropout_prob: dropout_prob_SNN,is_training:True})\n",
     "        \n",
     "        \n",
     "        if step % display_step == 0:\n",
     "            #batch_x, batch_y = mnist.test.next_batch(batch_size)\n",
     "            #batch_x_norm = scaler.transform(batch_x)\n",
     "            # Calculate batch loss and accuracy (dropout disabled:\n",
     "            # keep_prob=1.0 for ReLU, rate=0 / is_training=False for SNN)\n",
     "            loss_ReLU, acc_ReLU = sess.run([cost_ReLU, accuracy_ReLU], feed_dict={x: batch_x,\n",
     "                                                              y: batch_y,\n",
     "                                                              keep_prob: 1.0})\n",
     "            training_loss_protocol_ReLU.append(loss_ReLU)\n",
     "            \n",
     "            loss_SNN, acc_SNN = sess.run([cost_SNN, accuracy_SNN], feed_dict={x: batch_x_norm,\n",
     "                                                              y: batch_y,\n",
     "                                                              dropout_prob: 0.0, is_training:False})\n",
     "            training_loss_protocol_SNN.append(loss_SNN)\n",
     "            \n",
     "            print( \"RELU: Nbr of updates: \" + str(step+1) + \", Minibatch Loss= \" + \\\n",
     "                  \"{:.6f}\".format(loss_ReLU) + \", Training Accuracy= \" + \\\n",
     "                  \"{:.5f}\".format(acc_ReLU))\n",
     "            \n",
     "            print( \"SNN: Nbr of updates: \" + str(step+1) + \", Minibatch Loss= \" + \\\n",
     "                  \"{:.6f}\".format(loss_SNN) + \", Training Accuracy= \" + \\\n",
     "                  \"{:.5f}\".format(acc_SNN))\n",
     "        step += 1\n",
     "    print(\"Optimization Finished!\\n\")\n",
     "\n",
     "    # Calculate accuracy for the first 512 mnist test images\n",
     "    print(\"ReLU: Testing Accuracy:\", sess.run(accuracy_ReLU, feed_dict={x: mnist.test.images[:512],\n",
     "                                      y: mnist.test.labels[:512],\n",
     "                                      keep_prob: 1.0}))\n",
     "    print(\"SNN: Testing Accuracy:\", sess.run(accuracy_SNN, feed_dict={x: scaler.transform(mnist.test.images[:512]),\n",
     "                                      y: mnist.test.labels[:512],\n",
     "                                      dropout_prob: 0.0, is_training:False}))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "collapsed": false,
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAagAAAEYCAYAAAAJeGK1AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xdc1dX/wPHXh8uSvZUpIA4EGYLg1hwNZ2ppWjkaNixt\n72Fl9atvS22YZVpqWo5Ss+neC8W9ARUXAoLIHp/fHwdcrMvlXi4XzvPx4HHz3s94Y8Wbcz7v8z6K\nqqpIkiRJUn1jZuwAJEmSJKkiMkFJkiRJ9ZJMUJIkSVK9JBOUJEmSVC/JBCVJkiTVSzJBSZIkSfWS\nTFCSJElSvSQTlCRJklQvyQQlSZIk1Uvmxg6gNtzc3FR/f39jhyFJkiTVQFxcXKqqqu7VHWfSCcrf\n359du3YZOwxJkiSpBhRFOaXNcXKKT5IkSaqXZIKSJEmS6iWZoCRJkqR6yaSfQUmS1PAVFhaSnJxM\nXl6esUORasja2hofHx8sLCx0Ol8mKEmS6rXk5GTs7e3x9/dHURRjhyNpSVVV0tLSSE5OJiAgQKdr\nyCk+SZLqtby8PFxdXWVyMjGKouDq6lqrka9MUJIk1XsyOZmm2v57a9wJ6tByyEk3dhSSJElSBRpv\ngso4DYsfgt+fBFU1djSSJNVjdnZ2Br+Hv78/7dq1IywsjB49enDqVPVrWf39/UlNTb3pvcmTJ/PJ\nJ59Ue1yZv/76i+joaNq2bUtkZCTPP//8tevY2NiQkpJy7dgb/x4URbl2LMAnn3zC5MmTq425Jhpv\ngnLygzveh2N/wZbpxo5GkiSJtWvXsm/fPnr27MmUKVMMfr8DBw7w1FNPMW/ePA4dOsSuXbsICgq6\n9rmbmxuffvpphedaWVmxdOnSShOfPjTuKr6Y8XBqM6yaDL4x4NfR2BFJklSFd1Yc5NC5K3q9Zlsv\nB94eGFLj85KSknjooYdITU3F3d2d2bNn4+fnx6JFi3jnnXfQaDQ4OjqyYcMGDh48yLhx4ygoKKCk\npIQlS5bQsmXLSq/dqVMnpk2bdu3P8+bNY9q0aRQUFBAbG8vXX3+NRqPR6fu90ccff8zrr79OmzZt\nANBoNDzxxBPXPn/ooYeYM2cOL7/8Mi4uLjeda25uzvjx4/n88895//33ax1LRRrvCApAUWDQdDGa\nWjQOsg33m4AkSQ3L008/zZgxY9i3bx/3338/EydOBODdd9/ln3/+Ye/evSxfvhyAGTNmMGnSJOLj\n49m1axc+Pj5VXvvvv//m7rvvBuDw4cP88ssvbN68mfj4eDQaDfPnz9fL93DgwAGioqIq/dzOzo6H\nHnqIqVOnVvj5hAkTmD9/PpmZmXqJ51aNewQFYO0Iw3+E7/vC0vFw/2Iwa9x5W5LqK11GOoaydetW\nli5dCsCDDz7ISy+9BECXLl0YO3Ysw4cPZ+jQoYAYEb3//vskJyczdOjQSkdPt912G+np6djZ2fHe\ne+8BsHr1auLi4ujQoQMAubm5eHh4VBpXZZVzulbUTZw4kYiICF544YVynzk4ODB69GimTZtGkyZN\ndLp+VeRPYgDPcLjrIzi5GjZVPN8qSZKkjRkzZjBlyhTOnDlDVFQUaWlpjBo1iuXLl9OkSRP69evH\nmjVrKjx37dq1nDp1ioiICN5++21ALHgdM2YM8fHxxMfHc/To0SqLEVxdXbl8+fJN72VlZeHk5MRX\nX31FREQEERERnDt3jpCQEOLi4qr8fpycnBg1ahRfffVVhZ8/88wzzJo1i+zs7CqvowuZoMpEjYV2\nw2HtB5C4wdjRSJJUz3Xu3JmFCxcCMH/+fLp16wbAyZMniY2N5d1338Xd3Z0zZ86QkJBAYGAgEydO\nZPDgwezbt6/S65qbm/PFF1/w008/kZ6eTu/evVm8ePG1arr09PQqK/y6d+/O8uXLycrKAmDp0qWE\nh4ej0WiYMGHCtUTn5eXFiy++yAcffM
CxY8cAKCkpYcaMGeWu+dxzz/Htt99SVFRU7jMXFxeGDx/O\nrFmztPyb055MUGUUBQZ8Dq5BsPhhyLpo7IgkSaoncnJy8PHxufb12WefMX36dGbPnk1YWBhz5869\n9pzmxRdfpF27doSGhtK5c2fCw8P59ddfCQ0NJSIiggMHDjB69Ogq7+fp6cnIkSP56quvaNu2LVOm\nTOH2228nLCyMvn37cv78+WvHhoWFXYvrueeeIywsjKeeeoquXbsSERHBjBkz+P777yu8T1hYGF98\n8QUjR44kODiY0NBQEhISyh3n5ubGkCFDyM/Pr/A6zz//vEGq+RTVhNcARUdHq3rfsPDiIfiuF/hE\nw+hlYFb7ShlJknR3+PBhgoODjR2GpKOK/v0pihKnqmp0defKEdStmraFfv+DpI1weIWxo5EkSWq0\nZIKqSMQosHGTCUqSJMmIZIKqiJkGWt8Fx/6BoornXCVJkiTDkgmqMsGDoCALEtYbOxJJkqRGqVEn\nqBMpV8krLK74w8AeYGkPR+Q0nyRJkjE02gR1NiOXflM3Mm318YoPMLeCVrfDkZVQUkkSkyRJkgzG\nJBOUoigDFUWZWZv+T95OTRgc4cW3GxI4cLaS6wQPhJw0OL1V5/tIkmT66mK7jR9++OHadhuhoaEs\nW7YMgLFjx+Lt7X1tDVJqair+/v6AaFirKArTp1/fkeGpp55izpw5Bo+3LphkglJVdYWqquMdHR1r\ndZ03+rfF2caSl5fso6i4pPwBQX1BYwWH/6jVfSRJkqqSnJzM+++/z6ZNm9i3bx/btm0jLCzs2uca\njYYffvihwnM9PDyYOnUqBQUFdRVunWnUzWIdbSx4b3AIT8zfzXcbE3miZ4ubD7Cygxa9RLn5nR+K\nbhOSJBnPX6/Ahf36vWazdnDX/9X4NH1ut5GSkoK9vf21kZqdnd1No7ZnnnmGzz//nEcffbRcHO7u\n7nTp0oUff/yxws9NmUmOoPTprnae3BnSjM9XHSPh0tXyBwQPhCvJcG5P3QcnSVK9pc/tNsLDw2na\ntCkBAQGMGzeOFStuLs7y8/Oja9euzJ07t8JYXn75ZT755BOKixvW8/JGPYIq8+7gELZ8lsorS/ez\n8NGOmJndMFJqfRcoGjGK8m5vvCAlSdJppGMo+txuQ6PR8Pfff7Nz505Wr17Ns88+S1xc3E1dy199\n9VUGDx5M//79y8USGBhIbGwsP//8s4G+W+No9CMoAA8Ha97o35Ydien8vOP0zR/auIB/Fzgin0NJ\nklQ9XbfbUBSFmJgYXn31VRYuXMiSJUtu+rxly5ZERETw66+/Vnjf1157jY8++ghT7q96K5mgSt0b\n7UOXIFf+768jnM/MvfnD4EGQegwuHTVOcJIk1Tv63G7j3Llz7N69+9qf4+Pjad68ebl7vv7663zy\nyScVxtOmTRvatm1bbnrQlMkEVUpRFD4cEkZRSQlv/Hbg5t9C2pQOqQ8vN05wkiQZlaG32ygsLOSF\nF16gTZs2RERE8Msvv1S4zXpISAjt21f+qOH1118nOTlZv9+8EcntNm7x/cYEpqw8zLSRkQwK9wLE\njpbFM3tTWFjAlt5L0Jgp9GjlrvMWypIkaU9ut2HaarPdhiySuMW4LgGs2Heet5YdYOnuZM5l5HIu\nI4/7i1rxqsUC3vrxL87izv2xfrw7OBSNmUxSkiRJhiCn+G6hMVP4eFgYzjaWpF7Nx9/VlnuifGjR\nfQQAv/ZI4/EeLZi//TRPL9hNflHDKuuUJEmqL+QIqgKtm9mz9oWet7wbAifb4n1hNa+Mex43O0um\nrDxMRs5OZo6Oxs5K/lVKkqGoqiqn1E1QbR8hyRFUTQQPhFNb4GoKj3QL5LPh4exITOe+mVtJvSr3\njZIkQ7C2tiYtLa1BlU83BqqqkpaWhrW1tc7XkL/210TwQFj/ERz9E6LGMrS9D842ljwxP457vtnC\n3I
dj8XWxMXaUktSg+Pj4kJyczKVLl4wdilRD1tbW5bpm1ISs4qsJVYWp4eDWCh5YfO3tuFOXeWjO\nTqzMzZgzLoa2Xg51F5MkSZKJ0baKT07x1YSiiFFUwjrIv963L6q5M4se74SZotB/+kaGz9jKnM2J\nXLySZ7xYJUmSTJxMUDUV0B1KCst1VG7V1J7lT3Xhmd6tyMgtYPKKQ3T8cLVMVpIkSTqSU3w1deU8\nfNYG7vwIOj5e6WEnUrJYue8Cf+4/z9GLWSgKjO8eyKt3yQWHkiQ1bnKhrqHYNwNbDzi/t8rDgjzs\nmdTHnkl9WnIiJYsvVh1n5oYEBoZ5Eepdu40WJUmSGgM5xVdTigKe4XBhX/XHlgrysOf9Ie1wtrFk\nyspDslxWkiRJCzJB6cIzDFIOQ6H2z5Ucm1jwbJ+WbEtI599DFw0YnCRJUsMgE5QuPMNBLYaUgzU6\nbWSMH0Eednz452EKikoMFJwkSVLDIBOULjzDxet57af5AMw1ZrzeP5iktBx+2pqk97AkSZIaEpmg\ndOHUHKwdqy2UqMhtrT3o3sqdaauPczm7wADBSZIkNQwyQelCUaBZmE4JCuCN/sFczS9i6urjeg5M\nkiSp4ZAJSlee4XDxIBQX1vjUVk3tGRnjx9xtpziRcrX6EyRJkhohmaB05RkOxfmQekyn05/t2wob\nCw0f/nlYz4FJkiQ1DDJB6epaoYRu03xudlZM6BXE6iMpbDqeqsfAJEmSGgaZoHTlGgQWNjWu5LvR\n2M7++Lo0YcrKQxSXyMW7kiRJN5IJSldmGmgaqvMICsDaQsMrdwZz5EIWM9af1GNwkiRJpk8mqNoo\na3lUovui237tmtE/zJP//XOUj/8+ItsgSZIklZIJqjY8w6HgKlxO1PkSiqIw7b5IRsb48fW6k7yy\nZD9FxbLLhCRJkuxmXhueYeL1fDy4ttD5MhozhQ+GhOJuZ8m0NSdIyy7gy1GRWFto9BSoJEmS6ZEj\nqNpwDwYzi1o9hyqjKArP3d6adwaFsPrIRR6ctZ3MnJqvsZIkSWooZIKqDXNLaNq2VpV8txrT2Z/p\nIyOJP5PB8G+3yp14JUlqtGSCqq2ylkd6LG4YEObF7LExJF/OYejXWziVlq23a0uSJJkKmaBqyzMc\nctMhM1mvl+3a0o0F4zuSXVDEqO+2czYjV6/XlyRJqu9kgqotzwjxWoMddrUV5uPEvIdjuZJXyP3f\nbSNFTvdJktSIyARVW01DQDHTS6FERUK9HZkzLoaUrHzu/347aVfzDXIfSZKk+kYmqNqytAG3VgZL\nUABRzZ2ZNaYDp9NzeHDWDlndJ0lSoyATlD54huu1kq8inVq4MnN0NCdSrjJ69g6y8mSSkiSpYZMJ\nSh+ahUHWObiaYtDb9GjlzpejIjlwNpOH5+wit6DYoPeTJEkyJpmg9OHa1huGHUUB3B7SjC9GRLDr\nVDqP/rSLK3IkJUlSAyUTlD40aydeLxjuOdSNBoZ78fE94WxNSKPf1I3EnUrX+tyLV/JIyZLVgJIk\n1X8yQelDEydw9jdoocSt7ony4dfHOqEocO+MrXz+37Eqm8xeyspn8vKDdPtoLX0/28D2hLQ6i1WS\nJEkXMkHpi2d4nSYoENV9f07sxt0R3kxdfZwRM7dxJj3npmMycwv55J+j9PjfWuZuO8WQSG9c7Sx5\ncNYOft9zttYxlJSo7Dl9udbXkSRJulW9SVCKotgqivKjoijfKYpyv7HjqTHPcLicBLkZdXpbe2sL\nPhsRwdT7Ijh2IYt+UzeyLP4suQXFfLPuJN0/XsuXa0/QO7gpq57rwUf3hLH0ic5E+jnxzC/xTFt9\nvFZ7UH2/KYEhX29h3VHDFohIktT4GDRBKYryg6IoKYqiHLjl/TsVRTmqKMoJRVFeKX17KLBYVdVH\ngUGGjMsgmpUWSlzYb5TbD47w5s9J3WjdzJ5JC+OJeX8VH/19hPZ+
Tqyc2JXpIyMJcLMFwMnGkrkP\nxzI00pvP/jvGi4v3UVBU8z2oLmcXMH3NCQB+2Jykz29HkiTJ4PtBzQG+BH4qe0NRFA3wFdAXSAZ2\nKoqyHPAByn66m1799LW9ofZCQDejhODrYsPC8R35Zt1Jdp++zJO3BdHB36XCYy3Nzfh0eDh+rjZ8\nseo45zJy+eaBKBybWGh9v+lrTpCdX8TgCC+WxZ/jREoWQR72+vp2JElq5Aw6glJVdQNwa4lZDHBC\nVdUEVVULgIXAYESy8qkuLkVRxiuKsktRlF2XLl0yRNi6sfMAey+D9OSrCXONGU/3bsnscTGVJqcy\niqLwTJ9WfD4inJ1J6Qz7Zku5Z1iVOZWWzdxtSQyP9uWtAW2xNDdjthxFSZKkR8Z4BuUNnLnhz8ml\n7y0FhimK8g2worKTVVWdqapqtKqq0e7u7oaNtKY8w+BcvLGjqLEhkT7MfTiWlCt5jJuzU6suFR//\nfRRzMzOe69sKVzsr7o7wYsnuZDJyCuogYkmSGoN6UyShqmq2qqrjVFV9QlXV+caORyfeUZB6rM4L\nJfShY6Ar3z4YTWJqNs/+speSksoLJ3afvszK/ed5tHsgHg7WAIzrEkBeYQkLd56p9DxJkqSaMEaC\nOgv43vBnn9L3TJ9vLKBC8k5jR6KTTi1ceWtAW1YdvsgXq49XeIyqqnyw8jBudlY81j3w2vvBng50\nbuHKj1uSKKxiPZYkSZK2jJGgdgItFUUJUBTFErgPWG6EOPTPOwoUDZzeZuxIdDa6U3PujfJh2urj\n/H3gfLnP/zl4kV2nLvNs35bYWt1cYzOuSwDnM/P45+CFugpXkqQGzNBl5guArUBrRVGSFUV5WFXV\nIuAp4B/gMPCrqqoHDRlHnbGyg2ahcGa7sSPRmaIoTBkSSoSvE8/9upejF7KufVZYXMJHfx8hyMOO\nEdG+5c7t1caD5q42slhCkiS9MHQV30hVVT1VVbVQVdVHVdVZpe//qapqK1VVW6iq+r4hY6hzvh3h\nbBwUm24TVytzDd8+GIWtlTmP/rTrWuHDgh2nSUzN5pU722CuKf+fjsZMYUwnf+JOXSb+jOk9h5Mk\nqX6pN0USDYZfLBTmGG3Brr40dbBmxgNRXMjM4+kFe8jIKeCLVceJDXChd7BHpefdG+2DnZU5szcn\n1mG0kiQ1RCaZoBRFGagoyszMzExjh1Keb0fxasLTfGWimjvz3t0hbDyeyqAvN5OeXcDr/YNRFKXS\nc+ytLRge7cvKfee5eEV2TZckSXcmmaBUVV2hqup4R0dHY4dSnqM3OPqadKHEjUZ08GN0p+acTs9h\nULgXYT5O1Z4ztrM/xarK3K2n6iBCSZIaKkO3OmqcfGPh1GZQVahitGEq3hzQlpZN7enfzlOr4/1c\nbegT3JSfd5zmqV5BWFtoDByhJEkNkUmOoOo9v46QdR4yThs7Er2w0JjxYMfmuNhaan3OuC7+pGcX\nsCy+YSxxkySp7skRlCH4xorXM9vBublxYzGSToGutGlmz9RVxzmVloO/my0Bbrb4u9riZmdZ5XMs\nSZIk0CJBKYryMTAFyAX+BsKAZ1VVnWfg2ExX0xCwtBMJKmy4saMxCkVReHNAW95cdoCZGxIouqF1\nkr2VOc3dbLgr1JMne7aQyUqSpAppM4K6XVXVlxRFGQIkIfZt2gDIBFUZMw34RMNp06/kq40uQW6s\neb4nRcUlnM3IJTE1m6TUbJLScjhwNpP//XMUC43C+O4tjB2qJEn1kDYJquyY/sAiVVUz5W+8WvDt\nCBs+hrwrYO1g7GiMylxjRnNXW5q72kJr8V5JicrTC/fwwZ9H8HJqwoAwL+MGKUlSvaNNkcQfiqIc\nAaKA1YqiuANGXeBSr9dBlfGLBbXEZBvHGpqZmcKn94bTwd+Z537Zy47EW7cNkySpsas2Qamq+grQ\nGYhWVbUQyEZsMGg09XodVBmf
DqCYNYgFu4ZibaHhu9HR+Lg04dGfdnHy0tVKj1VVlRV7z9Fv6kZW\n7ivfxFaSpIan2gSlKMq9QKGqqsWKoryBePYk52OqY2UviiUayIJdQ3GysWTO2BgsNApjZ+/gUlZ+\nuWMOnbvCiJnbeHrBHk5eusqLi/dWmcwkSWoYtJnie1NV1SxFUboCfYBZwDeGDauB8O0IybuguMjY\nkdRrfq42zBrTgUtZ+Tzy405yCsTf1+XsAt74fT8Dpm/kRMpVPhzajjUv9MTK3IwJ83eTV1hs5Mgl\nSTIkbRJU2U+B/sBMVVVXAtqv2GzM/DpCYTZcPGDsSOq9cF8npo9sz/6zmUxcEM/crUn0/GQdC3ac\nYXQnf9Y+35ORMX54OzXh0+HhHLmQxZSVh4wdtiRJBqRNgjqrKMq3wAjgT0VRrLQ8T7q2YHeHceMw\nEX3bNmXyoBBWHb7Im8sOEuLlwJ8TuzF5UAiONhbXjuvVpinjuwcyb9tp+TxKkhowbcrMhwN3Ap+o\nqpqhKIon8KJhw2ognHzBwRvObIPY8caOxiSM7uSPtYUGxyYW3N62aaWLeF+8ozU7EtN5Zck+2nk7\n4udqU8eRSpJkaNpU8eUAJ4E7FEV5CvBQVfVfg0fWUPjGNPoFuzU1PNqXO0KaVdlhwkJjxvSRkSgK\nPLVgNwVFJXUYoSRJdUGbVkeTgEeBpaVvzVMUZaaqqtMNGllD4dsRDv4Gmcng6GPsaBoUXxcbPr4n\nnMfnxfHR30d4c0BbY4ekd6qq8tpv+9mRmI6rrRWudpbiy9YKNztL3Oys6NrSDXtri+ovJkkmRpsp\nvoeBWFVVswEURfkI2ArIBKUNv9LnUKe3Qbt7jBtLA3RnaDPGdvZn1qZEOga60rdtU2OHpFd/HbjA\ngh1niPF3QVHgeMpVtiXkczmn8Noxj3YL4PX+DS85S5I2CUrheiUfpf9s1F5HiqIMBAYGBQUZMwzt\nNG0HFrZiwa5MUAbxar827DqVzguL9vLd6GhiAlyMHZJeZOYU8vZyUSzy86OxmGuuz8gXFZeQnlPA\nk/N2szUhzYhRSpLhaFONNxvYrijKZEVRJgPbEGuhjMYkOkmU0ZiDT5RcsGtAVuYavhzZHntrc4Z/\nu5VXl+4nM7ew+hOBcxm5nEipn4t+P/zrMOnZBXw0LOym5ASiv6GHvTWdW7hy6NwVsvK0+34lyZRo\nUyTxGTAOSC/9Gqeq6heGDqxB8e0o1kLlZxk7kgbL382Wf5/tzqPdAvhl52n6fLaelfvOo6pquWNV\nVWXziVTG/7SLrh+toc9n6xn9ww6216ORyLaENBbuPMPDXQMI9a78F7EOAS6UqLD7dEYdRidJdaPS\nKT5FUW6cJ0kq/br2maqqsruntsoax56Ng8Cexo6mwbKxNOf1/m0ZHOHNK0v3MeHn3fQJ9uDdwaF4\nOTXhan4RS3cn89PWU5xIuYqLrSVP9GyBjaU5P2xKZMTMbXTwd+bJ24Lo2crdaPtU5RUW8+rS/fi5\n2PBsn1ZVHtvezxmNmcLOxHR6tHKvowglqW5U9QwqDlC5/ryp7FdRpfSfAw0YV8Pi0wFQRLl5YE8j\nB9PwhXo78vuTXZi9OYnP/jtG38/W07dtU1YdTuFqfhFhPo58em84/cM8sbbQAPBQFzHymrkhgXGz\ndxLi5cCE24K4I6QZGrO6TVTT1xwnMTWbuQ/H0MRSU+WxtlbmhHg5sCNJ/r4oNTyVJihVVQPqMpAG\nzdpRNI6Vnc3rjLnGjEe7B3JnaDPe+P0Af+6/QP8wT0Z3ak6kn3O545tYahjbJYBRsc35fc9Zvll/\nkifn76Zna3e+Gx2NhaZumqccPn+Fb9cnMKy9D91aajci6uDvwtxtp8gvKsbKvOqEJkmmRJsqPkkf\nvCLg6F+gqiA3fKwzvi42/PhQDCUlKmZajIQszc0Y3sGXYVE+zNmSxHt/HOL13/bz0bAwg0/5FZ
eo\nvLJkH45NLHijf7DW53Xwd2HWpkT2J2cS7d8wKhglCWRPvbrjFQk5aZB5xtiRNEraJKcbacwUHu4a\nwMReQfy6K5mpq48bKLLrftySxN7kTN4a2BZnW+37MXfwFyNCOc0nNTQyQdUVz0jxei7euHFINfJs\n31YMa+/DF6uO8+suw/1ykZSazSf/HqVna3cGhddsuzVXOytauNuyU+5KLDUw2rQ6qmjOIKt0d11J\nW01DwMwczu2BtoOMHY2kJUVR+HBoO1Ky8nht6X6aOVjTXQ/Vclfzi9iRmMaWE2lsPpnG4fNXsLHU\nMOXuUJ2mEmMCXPhj33mKS9Q6L+qQJEPR5hnUbsAXuIyo4HMCLiiKchF4VFXVOAPG13BYWINHMJyX\nIyhTY2luxtf3t2f4t9t4Yl4cvz7eiRCvmi8ST88uYM7mRDafTGPvmQyKSlQszc2I8nPmhdtbcWeo\nJz7OunVl7+DvwoIdZzh6IYu2Xg46XUOS6httEtR/wGJVVf8BUBTldmAYosPE10Cs4cKrmEm1OrqR\nVyQcXiELJUyQvbUFs8d2YMjXmxk3eye/TeiCt1MTrc/PyClg1HfbOHYxizAfJ8Z3D6RLkBtRzZ2v\nlbrXRofS4oidSekyQUkNhjbPoDqWJSeA0q02Oqmqug2wMlhkVTCpVkc38oyA3MuQcdrYkUg6aOZo\nzZxxMeQWFjP2hx1k5BRodd6VvEJG/7CDhEvZzBkXw+8TuvDSnW3oEuSml+QE4OPcBE9Ha1koITUo\n2iSo84qivKwoSvPSr5eAi4qiaAC5CU9NeJUVSuwxbhySzlo3s+fbB6NISstm4Jeb2H36cpXHZ+cX\nMW72Tg6du8LX97fXy/OriiiKQgd/F3YmplfY3kmSTJE2CWoU4AP8XvrlV/qeBrHbrqStpiFgZiGf\nQ5m4zi3cWDi+IyUlcO+MrXy19gTFJeWTQl5hMY/8uIs9py8zbWQkfQy8FUiHABdSsvI5nZ6j92sX\nFZfw5Pw4Hp6zU24OKdUZbZrFpqqq+rSqqpGlX0+pqnpJVdUCVVVP1EWQDYa5FTRtK0dQDUBUcxf+\nnNSNu0Kb8b9/jvLA99u5kJl37fP8omIemxvHtsQ0Ph0eTr92ngaPKab0OdSOasrNi4pLiDt1uUYj\nrf/76wh/7r/A6iMpvL38gBylSXWi2gSlKEorRVFmKoryr6Ioa8q+6iK4BskzQqyFkv+DmzzHJhZM\nHxnJx/eEsTc5gzunbuDfgxcoLC7hqZ/3sP7YJT4c0o4hkXWzk3JLDzscm1iws5rnUJ/9d4xh32zh\n3T8OaZVOugtGAAAgAElEQVRoFu06w/ebEhnb2Z8Jt7VgwY4z/LglSU9RS1LltKniWwTMAL7n5o0L\nJV14RcLuH+FyErjIdoemTlEUhkf7Et3cmYkL9zB+bhwtPew4nnKVdwaFcF+MX53FYmam0MHfmZ1J\nlT8XS0rN5vuNiXg5WjN7cxKFxSW8Oyi00k4bcacu8/pvB+ga5MYb/YMxUxSOXbzKeysP08LDTut+\ngZKkC22eQRWpqvqNqqo7VFWNK/syeGQNlVeEeJXPoRqUQHc7ljzRmUe7BXDi0lVevasNYzr713kc\nHfxdSEzNJiUrr8LPp6w8hIVG4fcJXXisRyDztp3mtd/2U1LBM7Tzmbk8NjcOTydrvhwVibnGDDMz\nhc9HRNDSw44J83eTcKl+bvYoNQzaJKgViqI8qSiKp6IoLmVfBo+sofJoCxpL+RyqAbIy1/B6/7Yc\nfOcOHuvRwigxdCjd7n5XBaOotUdTWHU4hYm9W+LhYM0rd7bh6V5BLNx5hpeW7Lup0CO3oJhHf9pF\nXmEx34+Oxsnmem9AOytzvhsdjbnGjEd+2qX17sV1obhErbBgRTJN2kzxjSl9ffGG9+R+ULoytxJJ\nSvbka7BsLI23SUColyPWFmbsSEy/qTCjoKiE91YcItDNln
FdxNSyoig8f3trzM3M+HzVMQqLS/j0\n3nA0ZgovLt7LwXNX+H50NC2b2pe7j6+LDd/c354HZm3n6QV7+GFMdLlt6eva1fwiBn+5ifOZebT1\ndCDEy4EQb0dCvRxp2dSuzrZMkfSn2v+T5L5QBuAVCQeXyo4Skt5ZmpsR6etcrlBizpZEElKzmT2u\nA5bmN/+gntSnJeYahf/9c5SiYpVWTe35Y995Xr6zDb2DKy+Njw105b3BobyydD8f/nWENwe0Ncj3\npK23lx0kMTWbe6N8OXnpKovikvlx6ykALDVmtPG05+XSBdKSaahqy/deqqquURRlaEWfq6q61HBh\nNXBeERA3Gy4ngosciEr61SHAhS/XHCcrrxB7awtSruQxddVxerfx4LbWHhWeM+G2ICw1Zrz/52FW\n7j/P4AgvHu9R/X+b98X4cfRiFrM2JZJfVMx9HfwI8XIw+N5Zt1qx9xxLdiczsVcQz93eGoCSEpXE\ntGwOnrvCwbOZ/HPwAo/8uIt5j8QQ1Vw+pTAFVY2gegBrgIEVfKYCMkHp6saOEjJBSXoW4+9CiSoq\n8Hq29uCjv49SWKxWO8J5tHsgtlbmbEtIq9EGja/3CyYnv5hfdp5h3rbTtPSwY0h7bwZHeNeoX6Gu\nki/n8Npv+4n0c2Ji75bX3jczU2jhbkcLdzsGhXvxSLdAhn+7lbGzd/LL+E6yZ6EJUExxwd0NzWIf\nPX7c8BvJ6V1RAXzoDbGPw+3vGTsaqYHJzi8i7J1/ebxHIL2DmzL06y080bMFL9/ZxqD3zcgpYOX+\n8/y+5+y1UvfYABeGtvfmnihfg2wDUlyiMnLmNg6dv8KfE7vh51p1N/jkyzncO2MrhcUlLHq8MwFu\ntnqPSaqeoihxqqpGV3tcdQlKURQrRPdyf24Ycamq+m4tY6y16OhoddeuXcYOQzczbwNLWxj7h7Ej\nkRqgwV9uwkJjRkFxCRev5LHm+Z7YWtVd8cbptByWxZ/ltz1nSUjN5o3+wTzSTf+zBdNXH+fT/47x\n2fBwhrbXbkH0iZSrDP92K00sNCx+ohOejoYf5Uk30zZBaVPWsgwYDBQB2Td8SbXhFQHn90GJ7Gsm\n6V8Hfxd2nbrMvuRMXrmrTZ0mJwA/Vxue7t2S1c/3INLPiUW7kvXeHmn36ct8sfo4g8K9GBLprfV5\nQR52/DguhszcQh74fjtpV/P1GpekP9okKB9VVUeoqvqxqqqfln0ZPLKGzisS8jNFoYQk6VnZeqj2\nfk7cHaH9D299UxSFYe19OHoxi4Pnrmh93qWsfJbuTuZMJY1vs/IKeWZhPM0crHlPh12I2/k4MmtM\nNMmXcxk7eydZefVnLZd0nTYJaouiKO0MHklj41naUaI+LdjNugCFucaOQtKDLkFu3BnSjA+Hal/s\nYCgDw7ywNDdjcVyy1ue8v/IQz/26l24fr6XXp+t4Z8VB1h5NIbdAdFt7e/lBki/n8MV9ETg2sdAp\nrthAV755oD2Hz1/h4R/FomSpftFm3N8VGKsoSiKQj9j2XVVVNcygkTV0HsGgsRIJqt09lR9XXASa\nOpieST0OM3tC2AgY8Jnh7ycZlJ2VOTMejDJ2GAA42ljQN7gpy/ee47V+weXWYd3qQmYef+w7z9BI\nb0K8Hdlw7BI/bz/N7M1JWJqbEerlwO7TGUzsFXRtJ2Fd9WrTlE+HhzNpYTzTVh/npRoUkhQUlXDg\nXCb5hSXkFxWTX1RCQVHJtdcuQa40d5VFGLWhzU++uwweRWOksYBmoXB+b+XHJKyDn++D+xdBQDfD\nxVKYC4vGQsFVsYD4zv8Dc8tqT5MkbQ1t783K/edZdzSF20OaVXns3G1JFKsqz/RphZ+rDQ93DSCv\nsJgdiemsP3aJDccu0a2l200l5bUxOMKbDcdSmbkhgYHhXgR7Vl9+XlRcwgPfb69yB2M3Oyv+nNgV\nDwdrvcTZGFW1UNdBVd
UrQFYdxtO4eEXC3l9EoYTZLb9VZpyBxQ9BUS6cWGXYBPX3q3DxAHR4FHZ+\nBwlrodUdhruf1Oh0b+WOm50lS3YnV5mgcguK+Xn7afoGN72pZNzaQkP3Vu4G25H4jf7BrDuawitL\n9rH0yS7VlsR/ufYEO5LSeeWuNoT7OGFpboaVuRnWFmZYajSkZOVdawM1/5FYo7eBMlVV/a39XPoa\nB+wqfY274c9SbXlGQEEWpCfc/H5RPvw6WqyXcvaHMzsMF8OBJaKrRZdJcMcHYO0o3tNWcREkrJf7\nW0lVstCYMTjCmzVHUricXVDpcb/Hn+VyTiEPd63bDmvOtpa8NbAte5MzmVPNXlc7k9KZtvo4QyK9\nebxHCzq1cCWquTOh3o4Eedjj52pDtL8LHwxpx/bEdD7971jdfBMNUKUJSlXVAaWvAaqqBpa+ln3J\n9gf6cGNHiRv99RKc2w1DvoHW/cU/F1X+P7XO0k7C8kngEwO93hTTesGD4MhKKNBy2/Cd38NPg+CI\nXM8lVW1Yex8Ki1VW7DtX4eeqqvLDpkRCvByICaj7VkSDwr3o2dqdT/89SvLliv/7z8wR1YM+zja8\nOzikyusNbe/DyBg/vll3ktWHL1Z7/8LiEtYeSSG/SBZrlNFq3KkoirOiKDGKonQv+zJ0YI2Cexsw\nt755b6jdcyFuDnR9FoIHgm8MFOXBhf36vXdRPiweB2YauOcH8UwMRMFGwVU4/m/11ygpgR3fin/e\n+JkcRUlVauvlQLCnA0sqqebbeDyV4ylXeahLgFEqDxVFYcrdoQC88Xv5be1VVeW13/Zz8Uoe00ZG\nYm9dffXg2wPbEuLlwLO/xFdaMg9iI8l7Zmxl3JydTJi/h8JiuT4StNvy/RFgA/AP8E7p62TDhtVI\naMyhWbvrI6hze2Dl8xDYU4xoAHxjxeuZ7fq9979vigKNu78BJ9/r7/t3A1sP7ab5TqwS05OBPcUo\nL3GDfmOUGpxh7b3Zm5zJiZTyj7Z/2JyIm50VA8I9Kzizbvg42/DC7a1Zd/QSy/fePNL7ddcZVu4/\nz3O3tyLC10mr61lbaPjm/ihU4Mn5u8uNjlRVZXFcMv2nbSTx0lXuj/Vj1eGLvLBor9zXCu1GUJOA\nDsApVVVvAyKBDING1Zh4RohEkZ0Kv4wGW3cYNkuMbAAcPMHRT78J6tByMfLp+CS06XfzZ2YaCBkC\nx/6BvGoWVm6fAXbNYMQ8sGsKmz7XX4xSgzQ4whuNmcLiuLM3vX8i5Srrjl5idKfmWJlrjBSdMKaz\nP+G+Try74tC152UnUq4yefkhOrdw5fHuNduM0s/Vhk/vDWf/2Uze++PQtfczcwt5esEeXli0lxBv\nR/5+pjvvD2nHy3e2YVn8uQpHcY2NNgkqT1XVPBB9+VRVPQK0NmxYjYhXpJhS++luuHoBRvwEtrfs\nV+MbIxKUPv5jvZwEy54Cr/bQ552KjwkdBsX5cPTPyq9z6RicXA0dHgYre5HsEtZqv/D4wgFIlrU2\njY27vRU9Wrnz257km0YIc7YkYmluxqhYPyNGJ2jMFP5vaDsycwuZsvIw+UXFTFywB2sLMz4fEYGZ\nDk1vbw9pxmPdA5m37TTL4s+yMymdflM38teBC7x4R2sWPNoRr9LO70/0bMGE21qwYMdpPvjzcJVJ\nKiOngI/+PsILi/Y2yIXG2qyDSlYUxQn4HfhPUZTLwCnDhtWIeJV2lLi4HwZOBe8KFlf6xsKBxZCZ\nfPN0XE3lZsAvD4h/vnd25WudfDqAoy/sXwzh91V8zI6ZYuv6qLHiz9EPiedQm76A4T9WHUfWRVFY\nobGC5w7JTRsbmWHtfVhzJIXNJ1Lp3sqdjJwClsSd5e4IL9zsrIwdHgDBng481iOQr9ae5OKVPA6d\nF7sLN63FmqYX7mjNntMZvLR4H4XFJfg427D48U5E+jmXP/b21mTnF/PdxkTsrCyY1Ofm
NV85BUXM\n3pzEjPUnuZpfhKqKLvZfjmpvkK7xxlLtCEpV1SGqqmaoqjoZeBOYBdxt6MAaDbfW4plP+zHiqyK+\nMeK1NtN8BTmw4D5IOSKKIpz9Kz/WzAxCh4oRUXZa+c/zMiH+ZzHSsivdAM/aQYymDi2D1BOVX7uk\nBH5/AnLSIOscpBzW/XuSTFLvYA8crM1ZslsUSyzYcYbcwmIequPS8uo83aslAW62bDqRyuhOzenT\ntvLdhbVhoTFj+qhIvJ2aMLS9D39O6lZhcgJRsPHWgLbcE+XD56uO8f1GsRSloKiEn7Ym0f3jdfzv\nn6PEBrjy16RuvDmgLX8duMCbyxrWtGCVIyhFUTTAQVVV2wCoqrq+TqKqxg37QRk7lNrTmMMz+0Q1\nX2UjiaahYGEj1kNV1RapMkUFYl3V6W0iObXsU/05ocNg81Q4vByix9382Z75UJgNsY/d/H7HJ2Db\n17BlKgyaXvF1d8wUU4NdnxXPrE6uhqbG3SpcqlvWFhoGhnuxZHcyGTkF/LQ1iS5BrrRpVr82ELS2\n0DB9ZCSLdp3h1X7BerlmUwdr1rzQU6tjzUqnGnMKipiy8jDJl3NZfeQiZ9JziQlw4dsH21/bGbhN\nMwfSrubz9bqTuNlaXttVuCpliczYvRqrUuUISlXVYuCooijGnxi+gaqqK1RVHe/o6GjsUPTDoknV\n01waczH1p8sIqqQYfnsMTvwHA78QIyNtNAsD15blq/lKSkSS8Y29vo6rjJ0HRD4A8QvgSgVrXS4e\nhP/egpZ3QO+3RZn9idU1/54kkzcsyoe8whImLYznfGYeD3WpX6OnMqHejrwzOBRrC+MUbphrzPhi\nRCQ9W7szZ0sSDtYWzBnXgV/Gdyy3bf2Ld7RmRLQv09acYM7myndJyCss5rsNCbR/7z+mra5itqMe\n0KZIwhk4qCjKakVRlpd9GTow6Ra+sWItVEENtuJSVVj5nOiv1/fd68+LtKEoYhSVtAmunL/+/on/\nxBYht46eynR+GtQSMZK6UWEeLHlUTAUO/kpcv0VvOLVF+0XBUoMR6etEoJst649dIsDNlttaexg7\npHrL0tyMbx+MYvHjnVjxVFd6tvaocNSjKArvDwnl9rZNmbziEMvib66ULClR+W1PMr0/Xc/7fx5G\nURRmrD9Zr/fD0iZBvQkMAN4FPr3hS6pLvrGgFsPZ3dqfs2py6aLf50Qro5oKHQaocOj36+9tnwH2\nnqLjREWc/cV5u2ZD7uXr769+B1IOinVXdqX91IJ6iWrBU5trHptk0hRFYViU2AF3XBd/nSrjGhMr\ncw3R/i7V/j2Za8yYNjKS2AAXnv91L+uPXQJg4/FLDJi+iWd/2YuzrQXzH4nl18c6kV9UzMwNCVVe\n05i0SVD9VFVdf+MX0K/asyT98indHVnbab5Nn8PmL0R1Xe+3dLuneyuxkHj/YvHnS0fh5BpRDKGp\nYhV912dE6fyO78WfT6wSI6qY8dCy7/XjmncRz97kNF+j9EDH5jzbpxXDo2tRmSqVY22h4bsx0bRs\nas/jc+MYOXMbD87awZW8QqbeF8HyCV3pEuRGkIcdg8K9+GnrKVLr6ShKmwTVt4L35BYcdc3GRVT8\nadM4dv9iMXoKHQb9PqldGXfoMDi7S6yf2jFTlIZHjav6nKYh4jnT9m9EV/bfnxTPm/q+e/NxFk1E\nkjopE1Rj5NhElE8b6/lOQ+ZgbcGPD3XAw8GKwxeu8OaAtqx+vgeDI7xvGoVN7N2S/KJivl1/0ojR\nVq7SBKUoyhOKouwHWiuKsu+Gr0RgX92FKF3jGwPJO0ShQmWKi2D1u6JDxZBvr3ek0FVIaVHFrtmi\n+KHdPeUXElek67OilPy7XmKqb9j3IiHdKqg3pB6DjNO1i1OSpJt42Fvz16RubH2lNw93DaiwQ0eg\nux13R3gzd9spUrLytLpudn6RvkOtVHXbbQwElpe+
ln1Fqar6QB3EJt3KN1b8sE+rovLm8DLIOAXd\nX6h6Gk5bzs1Ft/PNU0Vpecx47c5r3gn8OkF2CvSZLKYKKxJUWvIup/kkSe9sLM1pYln1L6lP925J\nYbHKt+urfxb136GL9PjfWg6fr6YNmp5Utd1GpqqqSaqqjlRV9dQNX5VvISkZVnWNY1VVJBLXILFN\nh760uwdQRcIp63yhjQFfiHZKsU9UfoxbK3DwkdN8kmQkAW623B3hzbxtp0i5UvkoaltCGhN+3o23\nUxN8XWwqPU6f5DaPpsQ1CJo4V56gEteLxrOdJ5bfobc2QoaI1kfdnq/ZeR5tRMFEVbEoiqjmS1gP\nxYW1i1OSJJ083SuIohKVbyp5FnXgbCaP/LgLX+cmzB4Xg52VNl3yak8mKFNiZiam2yorlNg8VXQV\nDxuh3/vaecCzB26uwNOnFr0h/4psHitJRuLvZsuQSG9+3n663Cgq4dJVxvywAwdrc+Y+HIuLbSU9\nPA1AJihT4xsDqUch55aZ1vP7RAl47ONgoXtDS6MI7AmKRk7zSZIRlY2ivl53fRR1ITOPB2ftQAXm\nPhJ7reN6XZEJytSUPYe6dbSxZRpY2ol1T6amiZNY5yULJSTJaJq72jKsvTc/7zjNhcw8MnIKeHDW\ndjJyCvhxXAwt3O3qPCaZoEyNd3sx2rjxOdTlU3BgqWhl1ES7nT7rnRa9xV5SFXVPlySpTjzdqyUl\nJSqf/XeUcXN2cio9h+/GRNPOxzh9T2WCMjWWtqJk+8YEtfUrUMzEpoGmKqgPoIotPiRJMgpfFxvu\nifLh113J7D2TwfSRkXRuocW6RwORCcoU+cbC2TixKDc7DXb/BGHDwdHb2JHpzitCVChWN81X0vB2\nDZWk+uSpXkG0cLfl43vCuSOkmVFjkQnKFPnGQGEOXDwAO7+HolzRRdyUmWkg8DZRKFHZhmtH/4KP\n/GHvL3UamiQ1Jj7ONqx+vif3lDbzNSaZoExRWaFEwjrY8S20uhM89LOhmlEF9YGrF0XivVXcHFg4\nSpSjb59R56FJklT3ZIIyRU6+4OANGz4R/e502UqjPmrRS7zeOM2nqrD2Q1gxSRRS3PY6nNstOqsb\nSlEB7FsEs/vD5mmGu48kSVWSCcpU+cZAQRb4dBAtiBoCB0/wCLm+Hqq4CJY/Dev/DyIegJELRKWi\nooH4n/V//6spsP5j+KIdLH0EzmwTBShVNeeVJMlgTDJBKYoyUFGUmZmZmcYOxXjKpvm6TKrddhr1\nTVBvOLUVrl4SU3p75kL3F2Hwl6L5rZ2H6Gix7xf9FUyci4ffnoDPQ2Dt+9AsFO5fDIO/hqsXxHYj\n+nByDUxrD5nJ+rmerk6sggUjITfDuHFIUjVMMkGpqrpCVdXxjo7Gqc2vFyIfgCEz9dsUtj4I6g0l\nhTCjq9hevv9n0OuNm5Nw+EjIOq+fkvQNn8DMHnBoGbQfAxN2wgNLRBJsdQeYmcORP2p/H4D9SyD9\nJKx8ofJCEEM7sRoWjIKjf8KBJcaJwZCKC433dyvpnUkmKAmwsofwEfptClsf+HUCCxvIy4AR88Tu\nvbdqfRdYO4n9qWprz1yxaeJzh6D/J2IX4TJNnCCgOxz+o/Y/9FRVFLVYOcCxv+Dw8tpdTxcn14pR\nqVsr0Xh4/6K6j8GQVBW+7w2/PiinZRuIBvbTTTJ55lYwciE8sgraVDI6NLcSW4Ac+QPyajHNm54o\ndgpuO7jyDhxtBohRz6Ujut8HID0BriRDrzehWRj8+WLdTrElrBfTei4tYPQyMQo9vVV0IWkoTm8V\n3fwPr4Ct040djaQHMkFJ9U9gj8o3OCwTPgqK8uDgb7rfp2yKMPC2yo9p3U+81naar+xeQb1h0DTI\nvgSrJtfumtpK2gQ/jwBnfxizHGxdod294rOGNIqK/1n0o2zdD1a9U3nXf8lkyAQlmSbv9mKqqjbT\nfCfXiHJ9t5aV
H+PgKSolD9c2Qa0Te2q5BIJXpGhLFTdbFIQY0qktMP9ecPIrTU6lbWucm4vp1P2L\nGsYzm4IcOPi7GA0PmQGOPrBoXPmu/5JJkQlKMk2KAhGjRCl4WsWbrFWppBgSN0CL26qvgmwzAM7H\nQ8YZ3WItKYbEjWJkWHavnq+Co59Y31WUr9t1q3N6G8y7RyThMStEBeSN2t0rpi4v7DfM/evSkZVi\n2UX4SLB2hHvniEXfvz/ZMBJwIyUTlGS6wkaIJrl7F9b83HN7xPOrqqb3ygQPFK9HVtb8PiCei+Rl\nQEDP6+9Z2cGAz8TeXpu+0O26Vck8K5KTfTORnOyblj8mZAiYWYiSfVO392eR8Jt3EX/2bg+3vycK\nUrZ+ZdzYJJ3JBCWZLgcvsdnh3gU1r9o6Wfb8qWf1x7q2APc2uj+HSlhXeq8eN7/fsi+E3gMbP4FL\nx3S7dmV2fgeF2XD/IjFNWREbFxHDgSWm3YT3yjnxdxx+381VrbGPi9Hvqrflbs0mSiYoybSFj4LM\nM3BqU83OS1grqulstdxKoM0A8TxHl2caietFh4xbp9gA7vxQlNWvmKS/0ujCXNG7sE1/kVyrEjZc\nrClL2qifexvDvl9ALREJ6kaKIhZ423uJ51G5l40Tn6QzmaAk09amv1hbVJNiifyrosKrhRbTe2WC\nB4BaLDqq10RhriiEuHX0VMbOA26fAqe3wJ6fanbtyuz7Vfwwjn28+mNb3QmW9qL3oClSVfHv3je2\n4mTcxBnunQ1Z52DZU/J5lImRCUoybZY2EHK36ASRf1W7c05tFt0qtHn+VMYzAhx8aj7Nd2Y7FOdX\nPZUY+YB4drL2A/0sCN7+LTQNvf48pioWTaDtILFwuDC3dvc2hnO7xXO88JGVH+MTDX0mi393+36t\nq8gkPZAJSjJ94aPE8xZtuzOcXAvm1jVrsqsoYrR2cg0UZGt/XsJ60S6peeeqrx06TFSdZdRy4WzS\nJkg5KEZP2vZoDBsutjE59nft7m0M8QtAYyUKPqrS6SlR4h8/v27ikvRCJijJ9Pl1BOcA7TucJ6wt\nbalkXbP7BA8Qi4Or2/X3pnutA+9o0ZqqKt7txevZ3TWL6VbbZ0ATF9FpQ1v+3cCumelN8xXlw4HF\n4heHyjqBlCn7JSBpo+haL5kEmaAk01e2Jippo2gpVJUr58Tan5o8fyrj11k809B2mi/3slg/Fdiz\n+mM9QkBjKaasdHU5STSBjR4npu60ZaYRCe34v6a1sPXYP+LvOGKUdseHDBXFFIeWGTYuSW9kgpIa\nhsgHwLwJrHm/6uNOatHeqDIac2h1l5gKKy6s/vikTeIHYmUFEjcytxRVhbUZQe34DlAguoIGu9UJ\nGy6eyx36Xff717W9C8Cuqfb/Lpu2BffghtnFvYGSCUpqGBy8oPPTYsrnzM7Kj0tYC7buoohAF8ED\nxALfJC3K2hPWgYWtmOLThnd7sTeVLmuS8q/C7rmi4MHRu+bnNwsDt9amM82XnSpGfGHDxS8O2god\nJprKZp41XGyS3sgEJTUcXSaJZyn/vFpxNVxJiUgagT1136akRS+xbkmbab6E9eDfRYyOtOEdJYo9\ndNnOft8vkJ8JsU/U/FwQ06Rhw0W5e8Zp3a5Rl/YvgpIiUSBTE6FDxWttmgxLdUYmKKnhsLKD3m9C\n8s6Kp3FSDoou4rpM75WxaCI6kh9ZWfXC2sxkSDsOAVpM75XxKi2UqOlzqLLScs8I8I2p2bk3MqUO\n53sXgGe4mLarCdcW4ryDSw0TlynZPRdSDhs7iirJBCU1LOEjxVYdqyaXX9dT9vxJlwKJG7UZILov\nnNtT+TEJ68VrYE/tr+saJBYd1/Q5VMJasRao4xPal5ZXxLk5+HYUP7iyLtTs3KL8utsk8OIh0d+w\npqOnMiFD4Wyc2A+sscpOg+VPGaYPpB7JBCU1LGYauOND0f7o1iahCWvFcxYHr9
rdo9UdYh3VH5Mg\n62LFxySuF8+6PGrwG76ZGXhFiB+eNbFtBth6VL8WSBvdXxTJ6ZvOcOTP6o8vKYbtM+F/QfDHM7W/\nf5ncy7Bnnvh3uO7/4J/XYfnTsGgsLHlYrC2rSSn9jcr+nhrzNF9Za6vz8caNoxoyQUkNT0A3McrZ\n9Pn1BFKYJ3rp1Xb0BKLUfMR8SEuAWX0g9cTNn5dt7x7QvebPurzaw8WD2m/BkXYSjv8D0Q+JnYZr\nq2UfeGy9SOILR8Ifz4q9lipydjd81wv+elGM/Hb/CEmbax8DwPqPYdkE+Oc1WPch7JoNx/4VfzcW\nTaDHy9r3UbyVc3Oxx1djnuZL3CBeU49p34HFCGSCkhqmvu+KH/Jrp4g/n9kmFtnW5vnTjVr2gbEr\nxA/vWX1v7pZ96ajoChHYs+bX9Y4S5d4XDmh3/I7vxJYZ0eNqfq/KuLeGR1ZD54mw6weY2UNMqZXJ\nyxRb1n/XS0x13vMDPLVTbIq48jkoKqjd/VVVbNse1BdePgVvpcPr5+CFo+I+j66BHi/V7h6hw8Q+\nWH5rXPAAABRZSURBVKnHa3cdfSougt0/1axTia6SNordh9WSer0fmExQUsPk2gJixovnKRf2i+dP\nZuaiqk5fvKPg4X/B2gHmDICjpa2Crm2v0VOHa5Z1lNBimq8wV7TuCblb7PukT+ZWYj+l0csgPwu+\n6w2bp8L+xfBlB5EYY8aLhBE6TPRE7PeJWAS99cva3fvCPjFFG3K36BBhptHP93SjtncDChyoR6Oo\ng7+JacwNnxj2PlfOi5FT+zHiz1U9SzUymaCkhqvHi+IH3D+vi+dPPjHVtxyqKdcW8PB/4N4KFo4S\nvwEnrBOtl5z8an49B2/xPEmbSr4Tq0UPvaoapdZWYE94Yot47vbfW+L5j72nGMX0+1jsXlum1R1i\nanX9x3C5Fj0FD/8hNqJsdWdto6+cg6dopntgSf3pcB43R7xun2HYdkxla/jC7hX/LmWCkiQjaOIs\ntlZPXC+mqFr0Msx97Dxg7ErRMWL503DiP91GTyCq8LyjtBtBHVomvseA7rrdS1s2LjBiHgz9DgZO\nFcmpbKR3q7s+Esnlr5d0/8F/ZKXolajrMyZthQ4R1Y8phwx7H22kHhd7mkU+KKamN35quHslrhe/\nWDQLA6/Iel0oIROU1LBFPwRurcQ/66NAojJW9jDyFwi7TywgbdlX92t5txc/sPKuVH5MYZ7Ym6rN\nANBY6H4vbZUt5I0aW/WUm6MP3PaqaAd1ZGXN75OeINartRmgc6haCx4Mikb31ke5GWIdkT5GYHFz\nxBR077dEb8FdP0DGmdpftyKJG0SDYDONWDtX3X9rRiQTlNSwaSxg0HSxZsYr0rD3MreEITPg8c3Q\nup/u1/FqD6hV/2Z7cg0UZJU+S6lnYh8XraT+eqnmFWJlSa1Nf/3HdSs7dzH6PLBUtyTz96vwdUeY\nFgn/vS2mynS5TmGe6MTfpr8Yjfd4Wby//qOaX6s6l0+JLV38u4k/e0UCqnjuVw/JBCU1fH4dYcg3\nhnnYfitFgWahtVswq02hxKFlYO2kXSPauqaxgP6fwZWzsP7/anbukZViobVzc8PEdqvQYXA5Ubfn\nMGe2i0TsEgBbpsPMnjAtQjyrq0myOvIH5KaL0SmAk68Y+cf/LJYR6FPZ+qeyaWGvCPF6rn5O85lk\nglIUZaCiKDMzMzONHYok6Z+NCzj7V95RoihfbKtRV9N7uvCLFVViW78Wa5e0cTUFTm+rm+m9MsED\nRJl+TddE5WZA+klRafjgb/DiCTFSd2khFhfP7Am/PabdteLmgFNzCOh5/b1uz4tKyrUf1Cyu6iRu\nABs38AgWf7bzEDtF19NCCZNMUKqqrlBVdbyjo2P1B0uSKfKOqvyHxsm1onovpB5O792oz2RRRfnH\ns9q1QTr6F6DWzfRemSbOorfigd9q1qqpbF
1Y2bSxjQu0Hw0PLoUXjkPHCaKB7+EVVV8n9YQY1USN\nuXlRt52HmCo9sLj6NXGFeaJ1UXVUFRI3ioXsN47wvSJkgpIkqQa82ou1QBWVGx/6XVRh1aQRrTHY\nuMDtU8RUWPy86o8/8ocYSei6FYquQobCleSaNektez7oWcFzTRsX6PuOmKpc+YIYbVVm9xxRHBHx\nQPnPukwEK0dYW8UeZ2d2wNex8GV09YUOaSch61z5qk+vCDEazKt/M1IyQUlSfVTZFvBFBaJHXuv+\n2m/jYUzhI0XJ+KrJVe/Wm58l1o+1GVC753e6KKvu1GaPrzLn9oCjH9i6Vvx5WXFOdop4JlWRonzx\nnKn1XWDftPznTZyhy9NiOvfWPc6KC8X03w93iOvkpsPO76uOOam0vdGtv9iUjQJv7BZST8gEJUn1\nkWe4WE90a6FEwjqx71N9n94royiiw0RuBqyZUvlxJ1ZBcUHdTu+VsfMAl0Ax0tPWufjrBQaV8YqE\nThNEj8LEjeU/P7wCctKuF0dUJPYJ8cxozXvX30s7CT/cKar8wkbAhO3Qord49lVZ30QQz5/svcT3\neqOyUWA9nOaTCUqS6iNLW7E9+a3TTod+F41ZA3saIyrdNAsVbZF2/VD5D8HDf4CNq6i4NAbfjiJB\naVN5l3tZVP5Vl6AAer4mCl5WTCy//UvcHNFtJLCKBeRWdtDtObG4NmE9xP0IM7qJvcbumS2WNVg7\nQvcXICdVdDKpyLXnT93Lj1BtXcVoUCYoSZK05t1eTPGV/dAsKhDPaVr300/n8rp026ti+5GVL5Qv\nRigqENu3t76rbpYCVMQvVoxmtCnrvrVAoiqWNqL7RnrCzeuayooj/r+9ew+2qjzvOP79cVHwBiqo\nXMUqQjDKxQve6yUy3lqrzQSNRttkmtRGY401JfaPTB3TaklrmqTTNlPT2AZtaIyRxFw01gTqFUVE\nIjpJBFGPikaloNVyefrH+25Z57AP58DZ7LVg/T4zzF5r7XX2evc77P3s97Ked9plPWe8P+oTKQXW\n7RelQDf6SLj8oU2rAwMceDyMPR4e/ErzZL2rlqUA1l3WkYpOlHCAMquqUdPS2MKbK9L+8vlpIHtH\n6d4rGjQkJZ996bGU4LZoxfw0K3Hi75RTNkgtKEhZ73vS+CIf0YsWFKTW7tRL4IGvbApui25NWSym\nNpkc0dXAQSnDRGxIk04+dhcMGbX5eSdfk+49e/L2zZ9rLK9x0EnNrzFyavp/9r9v9uINtY8DlFlV\ndV0C/uk7YZc9W7dkSLsdMTNPmPhC5wkTz9wNA3cv96bjYYemG59X9jJADT0wzdbrrRk3pC7MeVem\ncaLFc/LkiF5moZ98IVzXAcdf2X2L6+DTU9D875vT0h1Fy+enrsbuEhg3WoMVu2HXAcqsqvY/DPrv\nmrr5NqxLX+QTzkq/qHdExQkTjanTGzemWYmHnJ4WIixLv34wZnrvJkp0LN76tFmD94azZ6cW1G0f\nyZMjtnINr566P6U0FvXm8s6rBW/ckBLRbimp8IjJ6bFi3XwOUGZV1X8gjDgiBajlP0/dLzti917R\nAR+EY/4IFt6SvgxfehzWvgIfKLF7r2Hs9LRO0pamw7/zRsplty15HSedl6bRr1iQJiVsj+TFE85J\nk2sW/O2msb5XlqSu4XFbCFCN7CUVy2zuAGVWZSOnpS+Npd9NK6BuryVD2umUwoSJZfPSjap9yf7e\nKu+PQ22hFdX4Au/NDL6upNSK2m1fmP6p7TMhpF+/NOvvtWXp/inYNMW9u/GnhpFT3YIys60w6khY\n9w4smZsW8CuzG6xVBg+FM65PEyYe+aeUWXvw3mWXKk1K6Tdwy+NQ70+QmLxt19hrJFzzbLo/ans5\n7ILUGlrwpTy9fD4Mm9DzeNfIqfDWyt6lTWoTByizKmtklNi4bsfv3iuafGGaMFHWzbnNDBycAs+W\nWlAdi9
NqyX0JqP0Hbt9sGf0HwIlXp2D6y3vh+Qd7bj1BIaNEdVpRDlBmVbbPwSkf28Dd4ZAPlV2a\n1pHg3C+n93TY+WWXZpOxx6Yxv/XvNX9+WyZIlGHyRSlrxPc/A+ve7t2qy+9PlOhhHOqtlX0vXy85\nQJlVWb9+MOUimP7JnaN7r2i/iXDJHdt/afetMWY6bHiveV66t38Dq1fuGAFqwK4p2eyal9P+uF60\noAYNST+ItjQO9fit8NUj4cXHWlPOHjhAmVXdWTelpSts+2ukWmo2DtXo+tqWCRJlmHZZyuO3/+G9\nv2dr5NTuW1DLF8Ddn4VxJ/b+JuU+GtCWq5iZ7Qj22C+NMTUbh+rrBIl222U3uHhumvjRWyOnpjWo\n1r4GewzfdPyN52Dux1Ki2Q//axrnagO3oMzMisYem1pQXRPHdixOXWCDdqCFUkcdme6l661G67B4\nP9S7q+G2mWn7o99OszDbxAHKzKxozPSUWPWN5zof31EmSPTFAUcA2tRa3LAe/vMPU13M/NbmS3Vs\nZw5QZmZFzcah1r6WVt3d2QPUoL1g2PhNAeqev4Bf3wfn3pzGntrMAcrMrGjYhNSNV8xs3pcMEjua\nxkSJhbekG6mPuwKmXVpKURygzMyKGoljVxYmSnQ8ASh3ge3kRkyBNR3ww2th/IyU9aMkDlBmZl2N\nmQ6vP7spcWzH4tT1NWivcsvVDo1uzGGHwu/fUt4ikjhAmZltrjEO9cKj6bHjibbd+1O60UfDb89K\nU9RLDsi+D8rMrKuR01KW9RceTi2KNR07/wSJhv4D4NTPl10KwC0oM7PN7bJbuiF35SP1miBRMQ5Q\nZmbNjDkWOhblbr6aTJCoGAcoM7Nmxk6H9e/C4jkwfALsukfZJaodBygzs2bGTE+Pa16uzwSJinGA\nMjNrZs8DYOiBabsuEyQqxgHKzKw7jenmDlClcIAyM+vOpPNSBvMDDi+7JLXk+6DMzLoz8Zz0z0rh\nFpSZmVWSA5SZmVWSA5SZmVWSA5SZmVWSA5SZmVWSA5SZmVWSA5SZmVWSA5SZmVWSA5SZmVWSIqLs\nMmwzSa8Bz/fxZYYBr7egODsL10dnro/OXB+duT466219HBgRw3s6aYcOUK0g6bGIOKrsclSF66Mz\n10dnro/OXB+dtbo+3MVnZmaV5ABlZmaV5AAFXy+7ABXj+ujM9dGZ66Mz10dnLa2P2o9BmZlZNbkF\nZWZmleQAZWZmlVTbACXpTEnPSvqVpFlll6cMkr4haZWkpYVj+0i6V9Iv8+PeZZaxXSSNkXS/pKcl\n/ULSVfl4LesDQNIgSY9KejLXyV/m4wdJeiR/dr4taZeyy9oukvpLekLSD/J+besCQNIKSU9JWizp\nsXysZZ+ZWgYoSf2BfwDOAiYBF0maVG6pSvFN4Mwux2YB90XEeOC+vF8H64FrImIScCzw6fx/oq71\nAfAecFpETAamAGdKOha4Cbg5Ig4B3gQ+UWIZ2+0qYFlhv8510XBqREwp3P/Uss9MLQMUcAzwq4h4\nLiL+D/gP4LySy9R2ETEfeKPL4fOAW/P2rcDvtbVQJYmIlyNiUd5eQ/oSGkVN6wMgkrV5d2D+F8Bp\nwHfy8drUiaTRwDnAv+R9UdO66EHLPjN1DVCjgBcK+y/mYwb7R8TLefsVYP8yC1MGSeOAqcAj1Lw+\ncpfWYmAVcC/wa+CtiFifT6nTZ+fLwOeAjXl/X+pbFw0B3CPpcUmfzMda9pkZ0NfS2c4rIkJSre5D\nkLQHcAfwpxHxP+lHclLH+oiIDcAUSUOBO4GJJRepFJLOBVZFxOOSTim7PBVyYkS8JGk/4F5JzxSf\n7Otnpq4tqJeAMYX90fmYwauSRgDkx1Ull6dtJA0kBac5EfHdfLi29VEUEW8B9wPHAUMlNX7c1uWz\ncwLwu5JWkIYETgP+nnrWxfsi4qX8uIr0A+YYWviZqWuAWgiMzzNwdgEu
BOaVXKaqmAdclrcvA+4q\nsSxtk8cTbgGWRcTfFZ6qZX0ASBqeW05IGgycQRqbux/4cD6tFnUSEZ+PiNERMY70ffFfEXExNayL\nBkm7S9qzsQ3MAJbSws9MbTNJSDqb1KfcH/hGRHyx5CK1naTbgVNIKfJfBb4AfA+YC4wlLWXykYjo\nOpFipyPpRGAB8BSbxhiuI41D1a4+ACQdQRrk7k/6MTs3Iq6X9FukVsQ+wBPAJRHxXnklba/cxfdn\nEXFunesiv/c78+4A4LaI+KKkfWnRZ6a2AcrMzKqtrl18ZmZWcQ5QZmZWSQ5QZmZWSQ5QZmZWSQ5Q\nZmZWSQ5QVguSHsyP4yR9tMWvfV2za7X4GiMk3dOi11rbw/NDJf1JK65l1hcOUFYLEXF83hwHbFWA\nKmQK6E6nAFW4ViudCfxkO7xuM0MBBygrnQOU1UKh1XAjcFJev+bqnAx1tqSFkpZI+lQ+/xRJCyTN\nA57Ox76Xk2L+opEYU9KNwOD8enOK11IyW9LSvGbOzMJr/0zSdyQ9I2lOzmSBpBuV1qRaIulLhbdw\nJvCj/Lc/KLyvr0n6g7y9QtLf5Gs9KumQfPwgSQ/l4zcU/nYPSfdJWpSfa2T0vxE4OL+n2fncawt1\n1FgXandJdyutF7W08f7MWsXJYq1uZpGzAADkQLM6Io6WtCvwQKErbRrwwYhYnvc/HhFv5LQ/CyXd\nERGzJF0REVOaXOsC0jpKk0nZOhZKmp+fmwocBnQADwAnSFoGnA9MzEk2G2mG+gMTIuLpnJRzS1ZH\nxOGSLiVlSjmXlDPuHyPi3yR9unDuu8D5OSnuMODhHJBn5fc9JV9/BjCelGdNwDxJJwPDgY6IOCef\nN6SHspltFbegrO5mAJcqLSnxCGkJhfH5uUcLwQngM5KeBB4mJRsez5adCNweERsi4lXg58DRhdd+\nMSI2AotJXY+rSUHjFkkXAO/kc6fnsvXG7YXH4/L2CYXj/144V8BfSVoC/JS0VESzpRFm5H9PAItI\nGc3Hk9JCnSHpJkknRcTqXpbRrFfcgrK6E3BlRHQa38n51t7usv8h4LiIeEfSz4BBfbhuMV/bBmBA\nRKyXdAxwOikB6RWkrNlnAT/O566n8w/LrmWIXmw3XExqBR0ZEeuUMnU3e08C/joi/nmzJ6RpwNnA\nDZLui4jrm/y92TZxC8rqZg2wZ2H/J8DlSkttIOnQnJm5qyHAmzk4TSQtC9+wrvH3XSwAZuZxruHA\nycCj3RVMaS2qIRHxQ+BqUtcgpID107z9PDBJ0q65C/D0Li8zs/D4UN5+gJSBG1JQKr6nVTk4nQoc\nmI83q6OP5/IhaZSk/SSNBN6JiG8Bs0ldomYt4xaU1c0SYEPuqvsmaXxmHLAoT1R4jeZLVP8Y+OM8\nTvQsqZuv4evAEkmL8hIMDXeSutmeJLVgPhcRr+QA18yewF2SBpFaLZ/Nge3dvAw9EfGCpLmkZQ2W\nk7rdivbOXXbvARflY1cBt0n6czovfTAH+L6kp4DHgGfyNX4j6QFJS4EfRcS1kj4APJTncqwFLgEO\nAWZL2gisAy7v5n2ZbRNnMzerMEmXAKMj4sZenLsCOCoiXt/uBTNrA7egzCosd5+Z1ZJbUGZmVkme\nJGFmZpXkAGVmZpXkAGVmZpXkAGVmZpXkAGVmZpX0//UXd1IiCybCAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x7f90c83deda0>"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "\n",
    "# Compare the training-loss trajectories of the two networks trained above.\n",
    "# NOTE(review): assumes training_loss_protocol_ReLU and\n",
    "# training_loss_protocol_SNN are sequences of per-update loss values\n",
    "# recorded in earlier training cells -- confirm against those cells.\n",
    "fig, ax = plt.subplots()\n",
    "ax.plot( training_loss_protocol_ReLU, label='Loss ReLU-CNN')\n",
    "ax.plot( training_loss_protocol_SNN, label='Loss SNN')\n",
    "ax.set_yscale('log')  # log scale\n",
    "ax.set_xlabel('iterations/updates')\n",
    "ax.set_ylabel('training loss')\n",
    "fig.tight_layout()\n",
    "ax.legend()\n",
    "fig  # last expression: display the figure via its rich repr\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "tf-alpha",
   "language": "python",
   "name": "tf-alpha"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.5.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
