{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "xFHXWn4HnIA2"
   },
   "source": [
    "# On Recurrent Neural Networks for Sequence-based Processing in Communications\n",
    "## In this notebook we show how to build a decoder for convolutional codes based on recurrent neural networks\n",
    "Accompanying code of paper [\"On Recurrent Neural Networks for Sequence-based Processing in Communications\" by Daniel Tandler, Sebastian Dörner, Sebastian Cammerer, Stephan ten Brink](https://arxiv.org/abs/1905.09983)\n",
    "\n",
    "If you find this code helpful please cite this work using the following bibtex entry:\n",
    "\n",
    "```tex\n",
    "@article{RNN-Conv-Decoding-Tandler2019,\n",
    "  author    = {Daniel Tandler and\n",
    "               Sebastian D{\\\"{o}}rner and\n",
    "               Sebastian Cammerer and\n",
    "               Stephan ten Brink},\n",
    "  title     = {On Recurrent Neural Networks for Sequence-based Processing in Communications},\n",
    "  journal   = {CoRR},\n",
    "  year      = {2019},\n",
    "  url       = {http://arxiv.org/abs/1905.09983},\n",
    "}\n",
    "```"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 83
    },
    "colab_type": "code",
    "executionInfo": {
     "elapsed": 2395,
     "status": "ok",
     "timestamp": 1574344699658,
     "user": {
      "displayName": "Sebastian D.",
      "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mDJIgrG5QSz693qk-L8X64e1IqSwUGtLObOtZfUMw=s64",
      "userId": "17763740861140004671"
     },
     "user_tz": -60
    },
    "id": "o0V_CfUbnIBB",
    "outputId": "33179d09-7686-440d-9417-6f5692b2be7f"
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<p style=\"color: red;\">\n",
       "The default version of TensorFlow in Colab will soon switch to TensorFlow 2.x.<br>\n",
       "We recommend you <a href=\"https://www.tensorflow.org/guide/migrate\" target=\"_blank\">upgrade</a> now \n",
       "or ensure your notebook will continue to use TensorFlow 1.x via the <code>%tensorflow_version 1.x</code> magic:\n",
       "<a href=\"https://colab.research.google.com/notebooks/tensorflow_version.ipynb\" target=\"_blank\">more info</a>.</p>\n"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {
      "tags": []
     },
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "1.15.0\n"
     ]
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "print(tf.__version__)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "Lwaxsc7YnIBY"
   },
   "source": [
    "# Code Setup\n",
    "\n",
    "We first set up a code class that holds all necessary parameters and provides functions to quickly generate large samples of encoded bits.\n",
    "\n",
    "For this notebook, <b>only code examples for memory 1,2,4 and 6 are provided</b>.\n",
    "\n",
     "To generate other convolutional codes check out the accompanying coding.py which uses [CommPy](https://github.com/veeresht/CommPy) to generate arbitrary codes."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "rz8aiownnIBc"
   },
   "outputs": [],
   "source": [
    "class code:\n",
    "    def __init__(self,m):\n",
    "        self.m = m # Number of delay elements in the convolutional encoder\n",
    "        self.tb_depth = 5*(self.m + 1) # Traceback depth of the decoder\n",
    "        self.code_rate = 0.5\n",
    "        if m == 1:\n",
    "            self.d1 = 0o1\n",
    "            self.d2 = 0o3\n",
    "            self.impulse_response = np.array([0, 1, 1, 1])\n",
    "            self.viterbi_reference = np.array([7.293792e-02,5.801720e-02,4.490250e-02,3.349593e-02,2.429049e-02,1.684274e-02,1.124068e-02,7.277303e-03,4.354604e-03,2.546695e-03,1.382015e-03,7.138968e-04])\n",
    "        elif m == 2:\n",
    "            self.d1 = 0o5\n",
    "            self.d2 = 0o7\n",
    "            self.impulse_response = np.array([1, 1, 0, 1, 1, 1])\n",
    "            self.viterbi_reference = np.array([9.278817e-02,6.424232e-02,4.195904e-02,2.531590e-02,1.424276e-02,7.385386e-03,3.617080e-03,1.526589e-03,6.319029e-04,2.502278e-04,7.633503e-05,2.566724e-05])\n",
    "        elif m == 4:\n",
    "            self.d1 = 0o23\n",
    "            self.d2 = 0o35\n",
    "            self.impulse_response = np.array([1, 1, 0, 1, 0, 1, 1, 0, 1, 1])\n",
    "            self.viterbi_reference = np.array([1.266374e-01,7.990744e-02,4.546113e-02,2.301058e-02,1.045569e-02,4.220632e-03,1.526512e-03,5.214676e-04,1.482288e-04,3.666830e-05,7.778123e-06,1.444509e-06])\n",
    "        elif m == 6:\n",
    "            self.d1 = 0o133\n",
    "            self.d2 = 0o171\n",
    "            self.impulse_response = np.array([1, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 0, 1, 1])\n",
    "            self.viterbi_reference = np.array([1.547330e-01,8.593706e-02,3.985466e-02,1.544436e-02,5.221681e-03,1.378203e-03,3.501900e-04,8.042758e-05,1.676778e-05,2.989088e-06,3.444674e-07,np.NaN])\n",
    "        else:\n",
    "            print(\"Code not available!\")\n",
    "    \n",
    "    def zero_pad(self,u):\n",
    "        return np.reshape(np.stack([u,np.zeros_like(u)],axis=1),(-1,))\n",
    "    \n",
    "    def encode_sequence(self,u,terminate=False):\n",
    "        if terminate:\n",
    "            return np.convolve(self.zero_pad(u),self.impulse_response,mode='full')[:-1] % 2\n",
    "        else:\n",
    "            return np.convolve(self.zero_pad(u),self.impulse_response,mode='full')[:len(u)*2] % 2\n",
    "    \n",
    "    def encode_batch(self,u,terminate=False):\n",
    "        x0 = self.encode_sequence(u[0],terminate)\n",
    "        x = np.empty((u.shape[0],x0.shape[0]),dtype=np.int8)\n",
    "        x[0] = x0\n",
    "        for i in range(len(u)-1):\n",
    "            x[i+1] = self.encode_sequence(u[i+1],terminate)\n",
    "        return x"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "h7CbqjGjnIBp"
   },
   "source": [
    "### Our SNR definition"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "H4AoHWSOnIBu"
   },
   "outputs": [],
   "source": [
     "def ebnodb2std(ebnodb, coderate=1):\n",
     "    \"\"\"Convert Eb/N0 given in dB into the AWGN noise standard deviation.\n",
     "\n",
     "    Assumes unit-energy BPSK symbols: sigma = 1 / sqrt(2 * coderate * ebno).\n",
     "    Returns float32 (scalar or array, matching the ebnodb input).\n",
     "    \"\"\"\n",
     "    ebno = 10**(ebnodb/10)\n",
     "    return (1/np.sqrt(2*coderate*ebno)).astype(np.float32)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "q8NToLOvnIB6"
   },
   "source": [
     "### Choose which convolutional code you want to use"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "MLNYe-pKnIB-"
   },
   "outputs": [],
   "source": [
     "# NOTE: this rebinds the name 'code' from the class to an instance; re-running\n",
     "# this cell therefore requires first re-running the cell defining the class.\n",
     "#code = code(m=1) # memory 1 rate 0.5 code with generator polynomials 0o1 and 0o3 (octal)\n",
     "#code = code(m=2) # memory 2 rate 0.5 code with generator polynomials 0o5 and 0o7 (octal)\n",
     "#code = code(m=4) # memory 4 rate 0.5 code with generator polynomials 0o23 and 0o35 (octal)\n",
     "code = code(m=6) # memory 6 rate 0.5 code with generator polynomials 0o133 and 0o171 (octal)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "vHxCyjidnICJ"
   },
   "source": [
    "## Parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 109
    },
    "colab_type": "code",
    "executionInfo": {
     "elapsed": 1069,
     "status": "ok",
     "timestamp": 1574344738698,
     "user": {
      "displayName": "Sebastian D.",
      "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mDJIgrG5QSz693qk-L8X64e1IqSwUGtLObOtZfUMw=s64",
      "userId": "17763740861140004671"
     },
     "user_tz": -60
    },
    "id": "sYiAZ51CnICN",
    "outputId": "f2c63cf7-f966-4715-fd7c-6903c915efb9"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Code Rate: 0.5\n",
      "RNN layers: 3\n",
      "Units per layer: 256\n",
      "Gradient depth: 35\n",
      "ConvCode traceback length thump rule: 35\n"
     ]
    }
   ],
   "source": [
    "model_name = \"%s%sm%s_Model\"  % (oct(code.d1),oct(code.d2),code.m)\n",
    "saver_path = \"trained_models/\"+model_name\n",
    "\n",
    "gradient_depth = code.tb_depth\n",
    "additional_input = 0\n",
    "decision_offset = int(len(code.impulse_response)/2)\n",
    "sequence_length = 15\n",
    "\n",
    "rnn_layers = 3\n",
    "rnn_units_per_layer = 256\n",
    "dense_layers = [16]\n",
    "\n",
    "print(\"Code Rate:\", code.code_rate)\n",
    "print(\"RNN layers:\", rnn_layers)\n",
    "print(\"Units per layer:\", rnn_units_per_layer)\n",
    "print(\"Gradient depth:\", gradient_depth)\n",
    "print(\"ConvCode traceback length thump rule:\",code.tb_depth)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "pgCc5a_DnICZ"
   },
   "source": [
    "## Tensorflow Graph"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 570
    },
    "colab_type": "code",
    "executionInfo": {
     "elapsed": 2145,
     "status": "ok",
     "timestamp": 1574344747713,
     "user": {
      "displayName": "Sebastian D.",
      "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mDJIgrG5QSz693qk-L8X64e1IqSwUGtLObOtZfUMw=s64",
      "userId": "17763740861140004671"
     },
     "user_tz": -60
    },
    "id": "Y8vm-YG3nICc",
    "outputId": "33ec3bd8-a807-4ac5-f6fa-9fe3b5bb57aa"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:\n",
      "The TensorFlow contrib module will not be included in TensorFlow 2.0.\n",
      "For more information, please see:\n",
      "  * https://github.com/tensorflow/community/blob/master/rfcs/20180907-contrib-sunset.md\n",
      "  * https://github.com/tensorflow/addons\n",
      "  * https://github.com/tensorflow/io (for I/O related ops)\n",
      "If you depend on functionality not listed there, please file an issue.\n",
      "\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/contrib/cudnn_rnn/python/layers/cudnn_rnn.py:342: calling GlorotUniform.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Call initializer instance with the dtype argument instead of passing it to the constructor\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/contrib/cudnn_rnn/python/layers/cudnn_rnn.py:345: calling Constant.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Call initializer instance with the dtype argument instead of passing it to the constructor\n",
      "WARNING:tensorflow:From <ipython-input-6-278356b99a8b>:17: dense (from tensorflow.python.layers.core) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use keras.layers.Dense instead.\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/layers/core.py:187: Layer.apply (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Please use `layer.__call__` method instead.\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/ops/losses/losses_impl.py:121: where (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use tf.where in 2.0, which has the same broadcast rule as np.where\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/ops/variables.py:2825: Variable.initialized_value (from tensorflow.python.ops.variables) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use Variable.read_value. Variables in 2.X are initialized automatically both in eager and graph (inside tf.defun) contexts.\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/training/rmsprop.py:119: calling Ones.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Call initializer instance with the dtype argument instead of passing it to the constructor\n"
     ]
    }
   ],
   "source": [
     "# TF1 graph-mode model: stacked bidirectional CudnnGRU + dense output head.\n",
     "# NOTE: tf.contrib.cudnn_rnn requires TensorFlow 1.x and an NVIDIA GPU.\n",
     "graph = tf.Graph()\n",
     "with graph.as_default():\n",
     "    \n",
     "    # Encoded Sequence Input\n",
     "    # Shape is [time, batch, features]: sequence_length decision steps plus\n",
     "    # gradient_depth context steps on each side.\n",
     "    x = tf.placeholder(tf.float32,shape=[2*gradient_depth+sequence_length,None,2*(1+2*additional_input)],name=\"coded_sequence\")\n",
     "    \n",
     "    # Decoding\n",
     "    multi_rnn_cell = tf.contrib.cudnn_rnn.CudnnGRU(rnn_layers,rnn_units_per_layer,direction='bidirectional')\n",
     "    multi_rnn_cell.build(input_shape=[2*gradient_depth+sequence_length,None,(1+2*additional_input)*2])\n",
     "    \n",
     "    out,(new_state,) = multi_rnn_cell(x)\n",
     "    \n",
     "    # Keep only the central sequence_length time steps (drop the context).\n",
     "    out_sequence = out[gradient_depth:gradient_depth+sequence_length,:,:]\n",
     "    \n",
     "    # final dense layers:\n",
     "    for size in dense_layers:\n",
     "        out_sequence = tf.layers.dense(out_sequence,size,activation=tf.nn.relu)\n",
     "    u_hat = tf.layers.dense(out_sequence,1,activation=tf.nn.sigmoid)\n",
     "    \n",
     "    # Drop the trailing singleton feature dimension -> [time, batch].\n",
     "    u_hat = tf.squeeze(u_hat)\n",
     "    \n",
     "    # Hard decisions at threshold 0.5.\n",
     "    u_hat_bits = tf.cast(tf.greater(u_hat,0.5),tf.int8)\n",
     "    \n",
     "    \n",
     "    # Loss function\n",
     "    u_label = tf.placeholder(tf.int8,shape=[sequence_length,None],name=\"uncoded_bits\")\n",
     "    loss = tf.losses.log_loss(labels=u_label,predictions=u_hat)\n",
     "    correct_predictions = tf.equal(u_hat_bits, u_label)\n",
     "    # Per-time-step BER, averaged over the batch dimension.\n",
     "    ber = 1.0 - tf.reduce_mean(tf.cast(correct_predictions, tf.float32),axis=1)\n",
     "\n",
     "\n",
     "    # Training\n",
     "    lr = tf.placeholder(tf.float32, shape=[])\n",
     "    optimizer = tf.train.RMSPropOptimizer(lr)\n",
     "    step = optimizer.minimize(loss)\n",
     "    \n",
     "    # Init\n",
     "    init = tf.global_variables_initializer()\n",
     "    \n",
     "    # Saver\n",
     "    saver = tf.train.Saver()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "wFhX5TfMnICo"
   },
   "source": [
    "### Let's print all trainable variables of the graph we just defined:\n",
     "Note that special CudnnGRU layers generate some kind of \"sub\"-graph and therefore their variables are not shown here but in a so-called opaque_kernel."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 201
    },
    "colab_type": "code",
    "executionInfo": {
     "elapsed": 888,
     "status": "ok",
     "timestamp": 1574344756115,
     "user": {
      "displayName": "Sebastian D.",
      "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mDJIgrG5QSz693qk-L8X64e1IqSwUGtLObOtZfUMw=s64",
      "userId": "17763740861140004671"
     },
     "user_tz": -60
    },
    "id": "TKjS78WEnICr",
    "outputId": "57e788ee-0448-467e-a332-1e3d1ec1df9f"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Name                                                        Shape                Variables                  Size\n",
      "----------------------------------------------------------------------------------------------------------------\n",
      "cudnn_gru/opaque_kernel:0                                   <unknown>            0                       0 bytes\n",
      "dense/kernel:0                                              (512, 16)            8192                32768 bytes\n",
      "dense/bias:0                                                (16,)                16                     64 bytes\n",
      "dense_1/kernel:0                                            (16, 1)              16                     64 bytes\n",
      "dense_1/bias:0                                              (1,)                 1                       4 bytes\n",
      "\n",
      "\u001b[1mTotal number of variables:\t8225\u001b[0m\n",
      "\u001b[1mTotal bytes of variables:\t32900\u001b[0m\n"
     ]
    }
   ],
   "source": [
     "def model_summary(for_graph): #from TensorFlow slim.model_analyzer.analyze_vars source\n",
     "    \"\"\"Print name, shape, element count and byte size of each trainable variable.\n",
     "\n",
     "    Variables without a fully known static shape (e.g. the CudnnGRU\n",
     "    opaque_kernel) are reported with size 0.\n",
     "    \"\"\"\n",
     "    print(\"{:60}{:21}{:14}{:>17}\".format('Name','Shape','Variables','Size'))\n",
     "    print('{:-<112}'.format(''))\n",
     "    total_size = 0\n",
     "    total_bytes = 0\n",
     "    for var in for_graph.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):\n",
     "        # if var.num_elements() is None or [] assume size 0.\n",
     "        var_shape = var.get_shape()\n",
     "        var_size = var.get_shape().num_elements() or 0\n",
     "        var_bytes = var_size * var.dtype.size\n",
     "        total_size += var_size\n",
     "        total_bytes += var_bytes\n",
     "        print(\"{:60}{:21}{:14}{:>11} bytes\".format(var.name, str(var_shape), str(var_size), var_bytes))\n",
     "    print()\n",
     "    print('\\033[1mTotal number of variables:\\t{}\\033[0m'.format(total_size))\n",
     "    print('\\033[1mTotal bytes of variables:\\t{}\\033[0m'.format(total_bytes))\n",
     "\n",
     "model_summary(graph)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "LP8nLUmKnIC2"
   },
   "source": [
    "## Generator function\n",
     "Let's define a generator function that first generates a large dataset pair of bit sequences and encoded bit sequences.\n",
    "\n",
    "In a second step, this functions slices those datasets in shorter snippets which are then fed to the NN decoder."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "Caj6X4x1nIC5"
   },
   "outputs": [],
   "source": [
     "def generator(batch_size,iterations,gradient_depth,sequence_length,additional_input,decision_offset,sigma,apriori):\n",
     "    \"\"\"Yield (input_x, input_labels) feed batches for training/testing.\n",
     "\n",
     "    Draws random bits with P(bit=1)=apriori, encodes them with the global\n",
     "    'code' object, BPSK-modulates and adds AWGN with standard deviation\n",
     "    'sigma', then slices sliding windows of 2*gradient_depth+sequence_length\n",
     "    time steps. input_x has shape [time, batch, features] (CudnnGRU layout),\n",
     "    input_labels has shape [sequence_length, batch].\n",
     "    \"\"\"\n",
     "    # offset skips the start of the stream (encoder transient); the exact\n",
     "    # choice of tb_depth + 3 looks heuristic -- TODO confirm.\n",
     "    offset = code.tb_depth + 3\n",
     "    full_uncoded_sequences = np.random.randint(0,100,[batch_size,iterations+2*gradient_depth+sequence_length+offset+2*additional_input],dtype=np.int8)\n",
     "    # Threshold uniform integers in [0,100) to get Bernoulli(apriori) bits.\n",
     "    full_uncoded_sequences = np.less(full_uncoded_sequences,np.array([apriori*100],dtype=np.int8)).astype(np.int8)\n",
     "    full_coded_sequences = code.encode_batch(full_uncoded_sequences)\n",
     "    full_coded_sequences = np.reshape(full_coded_sequences,[batch_size,-1,int(1/code.code_rate)])\n",
     "    \n",
     "    # Feeding\n",
     "    for i in range(iterations):\n",
     "        encoded_sequences = full_coded_sequences[:,offset+i:offset+i+2*gradient_depth+sequence_length+2*additional_input,:]\n",
     "        labels = full_uncoded_sequences[:,offset+i+gradient_depth+additional_input+decision_offset:offset+i+gradient_depth+additional_input+decision_offset+sequence_length]\n",
     "        \n",
     "        # BPSK Modulation\n",
     "        modulated_sequences = (encoded_sequences.astype(np.float32) - 0.5) * 2.0\n",
     "\n",
     "        # AWGN\n",
     "        noise = np.random.normal(size=modulated_sequences.shape).astype(np.float32)\n",
     "        noised_sequences = modulated_sequences + noise * sigma\n",
     "        \n",
     "        # Input Processing\n",
     "        stack_array = []\n",
     "        for k in range(2*gradient_depth+sequence_length):\n",
     "            stack_array.append(noised_sequences[:,k:k+2*additional_input+1,:])\n",
     "        input_x = np.stack(stack_array,axis=1)\n",
     "        input_x = np.reshape(input_x,newshape=[batch_size,sequence_length+2*gradient_depth,(1+2*additional_input)*2])\n",
     "\n",
     "        # Transpose dimensions 1 and 0 because CudnnGRU layers need [time,batch,input] feeding\n",
     "        input_x = np.transpose(input_x,axes=[1,0,2])\n",
     "        input_labels = np.transpose(labels,axes=[1,0])\n",
     "        \n",
     "        yield input_x,input_labels"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "uVX0fZaTnIDF"
   },
   "source": [
    "## Starting a tensorflow session\n",
    "We create a session for the previously defined graph and save the initial state of the graph in training_stage 0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 35
    },
    "colab_type": "code",
    "executionInfo": {
     "elapsed": 3192,
     "status": "ok",
     "timestamp": 1574344790489,
     "user": {
      "displayName": "Sebastian D.",
      "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mDJIgrG5QSz693qk-L8X64e1IqSwUGtLObOtZfUMw=s64",
      "userId": "17763740861140004671"
     },
     "user_tz": -60
    },
    "id": "eyffvGZ9nIDH",
    "outputId": "64398234-648d-40bf-8fd9-6b31a2be9bff"
   },
   "outputs": [
    {
     "data": {
      "text/plain": [
       "'trained_models/0o1330o171m6_Model-0'"
      ]
     },
     "execution_count": 9,
     "metadata": {
      "tags": []
     },
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Create the session; allow_growth avoids grabbing all GPU memory upfront.\n",
     "sess_config = tf.ConfigProto()\n",
     "#sess_config.gpu_options.per_process_gpu_memory_fraction = 0.3   # to limit the amount of GPU memory usage\n",
     "sess_config.gpu_options.allow_growth = True\n",
     "sess = tf.Session(graph=graph, config=sess_config)\n",
     "sess.run(init)\n",
     "\n",
     "# Save the untrained (randomly initialized) weights as training stage 0.\n",
     "trained_stages = 0\n",
     "saver.save(sess,saver_path,global_step=trained_stages)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "sy0m9X_nnIDR"
   },
   "source": [
    "## Auxiliary functions"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "3Ihe8ZHonIDU"
   },
   "outputs": [],
   "source": [
     "# generates a python dictionary linking numpy feeds to tensorflow tensors\n",
     "def gen_feed_dict(x_feed, u_feed, lr_feed=1e-3):\n",
     "    \"\"\"Map numpy arrays onto the graph placeholders x, u_label and lr.\"\"\"\n",
     "    feed_dict = {\n",
     "        x: x_feed,\n",
     "        u_label: u_feed,\n",
     "        lr: lr_feed,\n",
     "    }\n",
     "    return feed_dict\n",
     "\n",
     "# runs a single batch to predict u_hat and calculate the BER\n",
     "def test_step(x_feed, u_feed):\n",
     "    \"\"\"Evaluate the per-time-step BER tensor for one batch (no training).\"\"\"\n",
     "    return sess.run(ber,feed_dict=gen_feed_dict(x_feed,u_feed))\n",
     "\n",
     "# runs a monte carlo simulation of several test steps to get meaningful BERs\n",
     "def test(test_parameters, plot=False, plot_baseline=False, ber_at_time=int(sequence_length/2)):\n",
     "    \"\"\"Monte-Carlo BER simulation over the SNR points in test_parameters.\n",
     "\n",
     "    Note: the local 'ber' accumulator (numpy array) shadows the graph tensor\n",
     "    of the same name inside this function only; test_step still uses the tensor.\n",
     "    \"\"\"\n",
     "    test_sigma = ebnodb2std(test_parameters['ebnodb'],code.code_rate)\n",
     "    ber = np.zeros([len(test_parameters['ebnodb']),sequence_length])\n",
     "    for i in range(len(test_sigma)):\n",
     "        for x_feed,u_feed in generator(test_parameters['batch_size'],test_parameters['iterations'],gradient_depth,sequence_length,additional_input,decision_offset,test_sigma[i],0.5):\n",
     "            curr_ber  = test_step(x_feed, u_feed)\n",
     "            ber[i] += curr_ber\n",
     "        # logging\n",
     "        print(\"SNR:\",test_parameters['ebnodb'][i])\n",
     "        print(\"BER:\",ber[i]/test_parameters['iterations'])\n",
     "    ber = ber/test_parameters['iterations']\n",
     "    print(\"Final BER:\",ber)\n",
     "    if (plot):\n",
     "        plot_bler_vs_ebnodb(test_parameters['ebnodb'], ber[:,ber_at_time], plot_baseline)\n",
     "    return ber\n",
     "\n",
     "# runs a single training step\n",
     "def train_step(x_feed,u_feed,lr_feed):\n",
     "    \"\"\"Run one optimizer step; returns (step_op_result, loss, per-step BER).\"\"\"\n",
     "    return sess.run([step,loss,ber],feed_dict=gen_feed_dict(x_feed,u_feed,lr_feed))\n",
     "    \n",
     "# runs a training set according to training_params\n",
     "def train(training_params):\n",
     "    \"\"\"Run every epoch in training_params['learning'], saving a checkpoint after each.\"\"\"\n",
     "    global trained_stages\n",
     "    pl = training_params['learning']\n",
     "    #early_stopping = training_params['early_stopping']\n",
     "    for epoch in pl:\n",
     "        # learning params\n",
     "        batch_size = epoch[0]\n",
     "        iterations = epoch[1]\n",
     "        learning_rate = epoch[2]\n",
     "        ebnodb = epoch[3]\n",
     "        apriori = epoch[4]\n",
     "        train_sigma = ebnodb2std(ebnodb,code.code_rate)\n",
     "        # logging\n",
     "        logging_interval = int(iterations/10)\n",
     "        logging_it_counter = 0\n",
     "        logging_interval_loss = 0.0\n",
     "        logging_interval_ber = np.zeros([sequence_length])\n",
     "        \n",
     "        print(\"\\nTraining Epoch - Batch Size: %d, Iterations: %d, Learning Rate: %.4f, EbNodB %.1f (std: %.3f), P_apriori %.2f\" % (batch_size,iterations,learning_rate,ebnodb,train_sigma,apriori))\n",
     "        # training\n",
     "        for x_feed,u_feed in generator(batch_size,iterations,gradient_depth,sequence_length,additional_input,decision_offset,train_sigma,apriori):\n",
     "            _,curr_loss,curr_ber = train_step(x_feed,u_feed,learning_rate)\n",
     "            # logging\n",
     "            logging_interval_loss += curr_loss\n",
     "            logging_interval_ber += curr_ber\n",
     "            logging_it_counter += 1\n",
     "\n",
     "            if logging_it_counter%logging_interval == 0:\n",
     "                #if early_stopping and previous_logging_interval_loss < logging_interval_loss:\n",
     "                #    print(\"\")\n",
     "\n",
     "                print(\"      Iteration %d to %d - Avg. Loss: %.3E   Avg. BER: %.3E   Min. @ BER[%d]=%.3E\" % (logging_it_counter-logging_interval,\n",
     "                                                                                 logging_it_counter,\n",
     "                                                                                 logging_interval_loss/logging_interval,\n",
     "                                                                                 np.mean(logging_interval_ber/logging_interval),\n",
     "                                                                                 np.argmin(logging_interval_ber/logging_interval),\n",
     "                                                                                 np.min(logging_interval_ber/logging_interval)))\n",
     "                logging_interval_loss = 0.0\n",
     "                logging_interval_ber = 0.0\n",
     "        \n",
     "        # save weights\n",
     "        trained_stages += 1\n",
     "        saver.save(sess,saver_path,global_step=trained_stages)\n",
     "        print(\"      -> saved as training stage: %s-%d\" % (model_name,trained_stages))\n",
     "\n",
     "# plots a BER curve\n",
     "def plot_bler_vs_ebnodb(ebnodb, ber, baseline=False):\n",
     "    \"\"\"Plot BER vs Eb/N0 on a logarithmic y-axis.\n",
     "\n",
     "    NOTE(review): when baseline is truthy this reads a global 'baseline_ber'\n",
     "    array that is not defined in this notebook -- verify it exists before use.\n",
     "    \"\"\"\n",
     "    image = plt.figure(figsize=(12,6))\n",
     "    plt.plot(ebnodb, ber, '-o')\n",
     "    if baseline:\n",
     "        plt.plot(ebnodb, baseline_ber, '--')\n",
     "        plt.legend(['RNN Decoder', 'Viterbi Decoder']);\n",
     "    plt.yscale('log')\n",
     "    plt.xlabel('EbNo (dB)', fontsize=16)\n",
     "    plt.ylabel('Bit-error rate', fontsize=16)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "2nLozXmgnIDd"
   },
   "source": [
    "## Training\n",
    "Let's define training parameters and begin with the training.\n",
    "\n",
     "Notice that we use so-called a priori ramp-up training [1].\n",
    "That is, setting the a priori probability of either ones or zeros in bit vector u to a small value and later, in subsequent training epochs, raising it up till 0.5 where ones and zeros are uniformly distributed again."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "DYsTNnVvnIDg"
   },
   "outputs": [],
   "source": [
     "# Training schedule: early epochs use a biased a priori bit probability\n",
     "# (a priori ramp-up training); later epochs train on the uniform\n",
     "# distribution (apriori = 0.5).\n",
     "train_snr_db = 1.5\n",
     "training_params = {\n",
     "    'learning' : [ #batch_size, iterations, learning_rate, training_ebnodb, apriori\n",
     "                [100,   1000, 0.001,  train_snr_db, 0.01],\n",
     "                [100,   1000, 0.001,  train_snr_db, 0.1],\n",
     "                [100,   1000, 0.001,  train_snr_db, 0.2],\n",
     "                [100,   1000, 0.001,  train_snr_db, 0.3],\n",
     "                [100,   1000, 0.001,  train_snr_db, 0.4],\n",
     "                [100,   500000, 0.0001,  train_snr_db, 0.5],\n",
     "                [500,   100000,  0.0001,  train_snr_db, 0.5],\n",
     "                [1000,  50000,  0.0001,  train_snr_db, 0.5],\n",
     "                [2000,  50000,  0.0001,  train_snr_db, 0.5],\n",
     "               ]\n",
     "}"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 1000
    },
    "colab_type": "code",
    "executionInfo": {
     "elapsed": 378662,
     "status": "error",
     "timestamp": 1574345290639,
     "user": {
      "displayName": "Sebastian D.",
      "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mDJIgrG5QSz693qk-L8X64e1IqSwUGtLObOtZfUMw=s64",
      "userId": "17763740861140004671"
     },
     "user_tz": -60
    },
    "id": "60hOY8pGnIDp",
    "outputId": "8a6d2c9b-9b6e-4e69-9af7-aa806edbc055"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.01\n",
      "      Iteration 0 to 100 - Avg. Loss: 1.748E-01   Avg. BER: 1.155E-02   Min. @ BER[7]=1.070E-02\n",
      "      Iteration 100 to 200 - Avg. Loss: 4.191E-02   Avg. BER: 1.119E-02   Min. @ BER[14]=1.060E-02\n",
      "      Iteration 200 to 300 - Avg. Loss: 2.376E-02   Avg. BER: 1.023E-02   Min. @ BER[3]=9.100E-03\n",
      "      Iteration 300 to 400 - Avg. Loss: 1.851E-02   Avg. BER: 8.600E-03   Min. @ BER[0]=7.800E-03\n",
      "      Iteration 400 to 500 - Avg. Loss: 1.285E-02   Avg. BER: 5.907E-03   Min. @ BER[5]=5.300E-03\n",
      "      Iteration 500 to 600 - Avg. Loss: 1.517E-02   Avg. BER: 5.187E-03   Min. @ BER[9]=3.800E-03\n",
      "      Iteration 600 to 700 - Avg. Loss: 8.233E-03   Avg. BER: 2.793E-03   Min. @ BER[5]=1.800E-03\n",
      "      Iteration 700 to 800 - Avg. Loss: 3.792E-03   Avg. BER: 1.233E-03   Min. @ BER[8]=9.000E-04\n",
      "      Iteration 800 to 900 - Avg. Loss: 2.435E-03   Avg. BER: 8.200E-04   Min. @ BER[6]=5.000E-04\n",
      "      Iteration 900 to 1000 - Avg. Loss: 2.010E-03   Avg. BER: 6.400E-04   Min. @ BER[2]=4.000E-04\n",
      "      -> saved as training stage: 0o1330o171m6_Model-1\n",
      "\n",
      "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.10\n",
      "      Iteration 0 to 100 - Avg. Loss: 1.391E-01   Avg. BER: 5.476E-02   Min. @ BER[13]=5.310E-02\n",
      "      Iteration 100 to 200 - Avg. Loss: 9.828E-02   Avg. BER: 3.997E-02   Min. @ BER[0]=3.680E-02\n",
      "      Iteration 200 to 300 - Avg. Loss: 8.563E-02   Avg. BER: 3.442E-02   Min. @ BER[0]=2.950E-02\n",
      "      Iteration 300 to 400 - Avg. Loss: 6.327E-02   Avg. BER: 2.400E-02   Min. @ BER[0]=2.140E-02\n",
      "      Iteration 400 to 500 - Avg. Loss: 6.362E-02   Avg. BER: 2.476E-02   Min. @ BER[1]=2.030E-02\n",
      "      Iteration 500 to 600 - Avg. Loss: 5.664E-02   Avg. BER: 2.133E-02   Min. @ BER[2]=1.740E-02\n",
      "      Iteration 600 to 700 - Avg. Loss: 4.530E-02   Avg. BER: 1.760E-02   Min. @ BER[4]=1.470E-02\n",
      "      Iteration 700 to 800 - Avg. Loss: 4.503E-02   Avg. BER: 1.693E-02   Min. @ BER[6]=1.410E-02\n",
      "      Iteration 800 to 900 - Avg. Loss: 3.517E-02   Avg. BER: 1.311E-02   Min. @ BER[2]=1.000E-02\n",
      "      Iteration 900 to 1000 - Avg. Loss: 2.876E-02   Avg. BER: 1.029E-02   Min. @ BER[2]=8.400E-03\n",
      "      -> saved as training stage: 0o1330o171m6_Model-2\n",
      "\n",
      "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.20\n",
      "      Iteration 0 to 100 - Avg. Loss: 1.940E-01   Avg. BER: 8.290E-02   Min. @ BER[6]=7.720E-02\n",
      "      Iteration 100 to 200 - Avg. Loss: 1.825E-01   Avg. BER: 7.758E-02   Min. @ BER[2]=6.990E-02\n",
      "      Iteration 200 to 300 - Avg. Loss: 1.650E-01   Avg. BER: 7.148E-02   Min. @ BER[0]=6.120E-02\n",
      "      Iteration 300 to 400 - Avg. Loss: 1.644E-01   Avg. BER: 7.165E-02   Min. @ BER[1]=5.920E-02\n",
      "      Iteration 400 to 500 - Avg. Loss: 1.484E-01   Avg. BER: 6.348E-02   Min. @ BER[5]=5.750E-02\n",
      "      Iteration 500 to 600 - Avg. Loss: 1.398E-01   Avg. BER: 5.982E-02   Min. @ BER[0]=5.060E-02\n",
      "      Iteration 600 to 700 - Avg. Loss: 1.248E-01   Avg. BER: 5.235E-02   Min. @ BER[2]=4.350E-02\n",
      "      Iteration 700 to 800 - Avg. Loss: 1.196E-01   Avg. BER: 4.942E-02   Min. @ BER[0]=4.150E-02\n",
      "      Iteration 800 to 900 - Avg. Loss: 1.113E-01   Avg. BER: 4.556E-02   Min. @ BER[0]=4.090E-02\n",
      "      Iteration 900 to 1000 - Avg. Loss: 1.139E-01   Avg. BER: 4.671E-02   Min. @ BER[6]=4.110E-02\n",
      "      -> saved as training stage: 0o1330o171m6_Model-3\n",
      "\n",
      "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.30\n",
      "      Iteration 0 to 100 - Avg. Loss: 3.230E-01   Avg. BER: 1.559E-01   Min. @ BER[0]=1.468E-01\n",
      "      Iteration 100 to 200 - Avg. Loss: 3.170E-01   Avg. BER: 1.520E-01   Min. @ BER[0]=1.371E-01\n",
      "      Iteration 200 to 300 - Avg. Loss: 3.264E-01   Avg. BER: 1.575E-01   Min. @ BER[2]=1.469E-01\n",
      "      Iteration 300 to 400 - Avg. Loss: 2.968E-01   Avg. BER: 1.434E-01   Min. @ BER[2]=1.349E-01\n",
      "      Iteration 400 to 500 - Avg. Loss: 2.924E-01   Avg. BER: 1.385E-01   Min. @ BER[0]=1.274E-01\n",
      "      Iteration 500 to 600 - Avg. Loss: 2.808E-01   Avg. BER: 1.335E-01   Min. @ BER[0]=1.221E-01\n",
      "      Iteration 600 to 700 - Avg. Loss: 2.683E-01   Avg. BER: 1.252E-01   Min. @ BER[1]=1.120E-01\n",
      "      Iteration 700 to 800 - Avg. Loss: 2.561E-01   Avg. BER: 1.210E-01   Min. @ BER[1]=1.078E-01\n",
      "      Iteration 800 to 900 - Avg. Loss: 2.385E-01   Avg. BER: 1.103E-01   Min. @ BER[0]=1.033E-01\n",
      "      Iteration 900 to 1000 - Avg. Loss: 2.554E-01   Avg. BER: 1.210E-01   Min. @ BER[3]=1.113E-01\n",
      "      -> saved as training stage: 0o1330o171m6_Model-4\n",
      "\n",
      "Training Epoch - Batch Size: 100, Iterations: 1000, Learning Rate: 0.0010, EbNodB 1.5 (std: 0.841), P_apriori 0.40\n",
      "      Iteration 0 to 100 - Avg. Loss: 5.063E-01   Avg. BER: 2.859E-01   Min. @ BER[0]=2.667E-01\n",
      "      Iteration 100 to 200 - Avg. Loss: 4.854E-01   Avg. BER: 2.667E-01   Min. @ BER[1]=2.552E-01\n",
      "      Iteration 200 to 300 - Avg. Loss: 4.864E-01   Avg. BER: 2.714E-01   Min. @ BER[0]=2.496E-01\n",
      "      Iteration 300 to 400 - Avg. Loss: 4.609E-01   Avg. BER: 2.546E-01   Min. @ BER[0]=2.430E-01\n",
      "      Iteration 400 to 500 - Avg. Loss: 4.624E-01   Avg. BER: 2.522E-01   Min. @ BER[2]=2.380E-01\n",
      "      Iteration 500 to 600 - Avg. Loss: 4.536E-01   Avg. BER: 2.487E-01   Min. @ BER[1]=2.336E-01\n",
      "      Iteration 600 to 700 - Avg. Loss: 4.505E-01   Avg. BER: 2.474E-01   Min. @ BER[2]=2.344E-01\n",
      "      Iteration 700 to 800 - Avg. Loss: 4.386E-01   Avg. BER: 2.358E-01   Min. @ BER[0]=2.197E-01\n",
      "      Iteration 800 to 900 - Avg. Loss: 4.116E-01   Avg. BER: 2.187E-01   Min. @ BER[3]=2.075E-01\n",
      "      Iteration 900 to 1000 - Avg. Loss: 4.297E-01   Avg. BER: 2.300E-01   Min. @ BER[1]=2.130E-01\n",
      "WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/training/saver.py:963: remove_checkpoint (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n",
      "Instructions for updating:\n",
      "Use standard file APIs to delete files with this prefix.\n",
      "      -> saved as training stage: 0o1330o171m6_Model-5\n",
      "\n",
      "Training Epoch - Batch Size: 100, Iterations: 500000, Learning Rate: 0.0001, EbNodB 1.5 (std: 0.841), P_apriori 0.50\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "ignored",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-12-66d307680d61>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mtrain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtraining_params\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;32m<ipython-input-10-c6be1fe25dbf>\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(training_params)\u001b[0m\n\u001b[1;32m     54\u001b[0m         \u001b[0;31m# training\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     55\u001b[0m         \u001b[0;32mfor\u001b[0m \u001b[0mx_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mu_feed\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mgenerator\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbatch_size\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0miterations\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mgradient_depth\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0msequence_length\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0madditional_input\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mdecision_offset\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mtrain_sigma\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mapriori\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 56\u001b[0;31m             \u001b[0m_\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcurr_loss\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mcurr_ber\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtrain_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mu_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlearning_rate\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     57\u001b[0m             \u001b[0;31m# logging\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     58\u001b[0m             \u001b[0mlogging_interval_loss\u001b[0m \u001b[0;34m+=\u001b[0m \u001b[0mcurr_loss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m<ipython-input-10-c6be1fe25dbf>\u001b[0m in \u001b[0;36mtrain_step\u001b[0;34m(x_feed, u_feed, lr_feed)\u001b[0m\n\u001b[1;32m     30\u001b[0m \u001b[0;31m# runs a single training step\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     31\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mtrain_step\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mu_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlr_feed\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 32\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mber\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mfeed_dict\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mgen_feed_dict\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mu_feed\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlr_feed\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     33\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     34\u001b[0m \u001b[0;31m# runs a training set according to training_params\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    954\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    955\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 956\u001b[0;31m                          run_metadata_ptr)\n\u001b[0m\u001b[1;32m    957\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    958\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m   1178\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mfeed_dict_tensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1179\u001b[0m       results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m-> 1180\u001b[0;31m                              feed_dict_tensor, options, run_metadata)\n\u001b[0m\u001b[1;32m   1181\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1182\u001b[0m       \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m   1357\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1358\u001b[0m       return self._do_call(_run_fn, feeds, fetches, targets, options,\n\u001b[0;32m-> 1359\u001b[0;31m                            run_metadata)\n\u001b[0m\u001b[1;32m   1360\u001b[0m     \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1361\u001b[0m       \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_prun_fn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeeds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, *args)\u001b[0m\n\u001b[1;32m   1363\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1364\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1365\u001b[0;31m       \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1366\u001b[0m     \u001b[0;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1367\u001b[0m       \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m   1348\u001b[0m       \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_extend_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1349\u001b[0m       return self._call_tf_sessionrun(options, feed_dict, fetch_list,\n\u001b[0;32m-> 1350\u001b[0;31m                                       target_list, run_metadata)\n\u001b[0m\u001b[1;32m   1351\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1352\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/client/session.py\u001b[0m in \u001b[0;36m_call_tf_sessionrun\u001b[0;34m(self, options, feed_dict, fetch_list, target_list, run_metadata)\u001b[0m\n\u001b[1;32m   1441\u001b[0m     return tf_session.TF_SessionRun_wrapper(self._session, options, feed_dict,\n\u001b[1;32m   1442\u001b[0m                                             \u001b[0mfetch_list\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1443\u001b[0;31m                                             run_metadata)\n\u001b[0m\u001b[1;32m   1444\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1445\u001b[0m   \u001b[0;32mdef\u001b[0m \u001b[0m_call_tf_sessionprun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "train(training_params)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "S2jKzN85nIDx"
   },
   "source": [
    "## Restoring already trained models\n",
    "Since we used the tensorflow saver after each training epoch, we can load already trained models and then evaluate"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "R5_apwuCnID1"
   },
   "outputs": [],
   "source": [
    "#saver.restore(sess,\"%s-%d\" % (saver_path,9))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {
    "colab_type": "text",
    "id": "ODSuWtQWnID9"
   },
   "source": [
    "## Evaluation: Monte Carlo BER simulation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 1000
    },
    "colab_type": "code",
    "executionInfo": {
     "elapsed": 196915,
     "status": "ok",
     "timestamp": 1574345517683,
     "user": {
      "displayName": "Sebastian D.",
      "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mDJIgrG5QSz693qk-L8X64e1IqSwUGtLObOtZfUMw=s64",
      "userId": "17763740861140004671"
     },
     "user_tz": -60
    },
    "id": "0ulrSB8gnIEA",
    "outputId": "d7f495c7-4edc-45ec-db40-bfd09ae9242f"
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "SNR: 0.0\n",
      "BER: [0.432155 0.432995 0.43358  0.43104  0.43075  0.432795 0.434515 0.43324\n",
      " 0.43486  0.43431  0.434575 0.43521  0.43421  0.43513  0.43433 ]\n",
      "SNR: 0.5\n",
      "BER: [0.412145 0.41181  0.410865 0.411365 0.412465 0.410655 0.413165 0.41102\n",
      " 0.41198  0.412295 0.412285 0.412115 0.413075 0.413455 0.41378 ]\n",
      "SNR: 1.0\n",
      "BER: [0.386745 0.38539  0.385955 0.38803  0.387115 0.38661  0.38669  0.38723\n",
      " 0.387055 0.387875 0.3856   0.388115 0.388515 0.38835  0.38774 ]\n",
      "SNR: 1.5\n",
      "BER: [0.35756  0.35628  0.35481  0.354985 0.35695  0.35708  0.355935 0.35709\n",
      " 0.356555 0.357455 0.3585   0.356615 0.35737  0.35926  0.35964 ]\n",
      "SNR: 2.0\n",
      "BER: [0.318685 0.32132  0.32081  0.32098  0.321825 0.321125 0.321735 0.3215\n",
      " 0.323075 0.323915 0.322745 0.323345 0.32509  0.32549  0.32707 ]\n",
      "SNR: 2.5\n",
      "BER: [0.28964  0.288255 0.28867  0.28865  0.289935 0.289655 0.290085 0.29035\n",
      " 0.28943  0.29282  0.2913   0.29413  0.293825 0.29522  0.29725 ]\n",
      "SNR: 3.0\n",
      "BER: [0.25148  0.250685 0.252055 0.25203  0.253485 0.2544   0.255385 0.254305\n",
      " 0.256385 0.25675  0.257175 0.257905 0.258275 0.25981  0.26056 ]\n",
      "SNR: 3.5\n",
      "BER: [0.2284   0.22915  0.22722  0.229505 0.22965  0.229445 0.229305 0.232195\n",
      " 0.23057  0.233555 0.232355 0.23368  0.233275 0.236105 0.23576 ]\n",
      "SNR: 4.0\n",
      "BER: [0.20074  0.200485 0.201385 0.20127  0.202835 0.20232  0.20192  0.203395\n",
      " 0.20407  0.20425  0.205865 0.206815 0.20774  0.20881  0.21002 ]\n",
      "SNR: 4.5\n",
      "BER: [0.178255   0.178825   0.17936    0.1794     0.17948501 0.181165\n",
      " 0.18078    0.18182    0.181525   0.182905   0.182015   0.1832\n",
      " 0.18652    0.18694    0.18602   ]\n",
      "SNR: 5.0\n",
      "BER: [0.155185 0.15517  0.155645 0.15418  0.15636  0.155135 0.15706  0.157825\n",
      " 0.157755 0.15873  0.15932  0.160295 0.16135  0.16109  0.16365 ]\n",
      "SNR: 5.5\n",
      "BER: [0.13964  0.1392   0.13974  0.13975  0.14159  0.14142  0.142    0.1418\n",
      " 0.14596  0.144915 0.147055 0.14563  0.148615 0.14975  0.14952 ]\n",
      "Final BER: [[0.432155   0.432995   0.43358    0.43104    0.43075    0.432795\n",
      "  0.434515   0.43324    0.43486    0.43431    0.434575   0.43521\n",
      "  0.43421    0.43513    0.43433   ]\n",
      " [0.412145   0.41181    0.410865   0.411365   0.412465   0.410655\n",
      "  0.413165   0.41102    0.41198    0.412295   0.412285   0.412115\n",
      "  0.413075   0.413455   0.41378   ]\n",
      " [0.386745   0.38539    0.385955   0.38803    0.387115   0.38661\n",
      "  0.38669    0.38723    0.387055   0.387875   0.3856     0.388115\n",
      "  0.388515   0.38835    0.38774   ]\n",
      " [0.35756    0.35628    0.35481    0.354985   0.35695    0.35708\n",
      "  0.355935   0.35709    0.356555   0.357455   0.3585     0.356615\n",
      "  0.35737    0.35926    0.35964   ]\n",
      " [0.318685   0.32132    0.32081    0.32098    0.321825   0.321125\n",
      "  0.321735   0.3215     0.323075   0.323915   0.322745   0.323345\n",
      "  0.32509    0.32549    0.32707   ]\n",
      " [0.28964    0.288255   0.28867    0.28865    0.289935   0.289655\n",
      "  0.290085   0.29035    0.28943    0.29282    0.2913     0.29413\n",
      "  0.293825   0.29522    0.29725   ]\n",
      " [0.25148    0.250685   0.252055   0.25203    0.253485   0.2544\n",
      "  0.255385   0.254305   0.256385   0.25675    0.257175   0.257905\n",
      "  0.258275   0.25981    0.26056   ]\n",
      " [0.2284     0.22915    0.22722    0.229505   0.22965    0.229445\n",
      "  0.229305   0.232195   0.23057    0.233555   0.232355   0.23368\n",
      "  0.233275   0.236105   0.23576   ]\n",
      " [0.20074    0.200485   0.201385   0.20127    0.202835   0.20232\n",
      "  0.20192    0.203395   0.20407    0.20425    0.205865   0.206815\n",
      "  0.20774    0.20881    0.21002   ]\n",
      " [0.178255   0.178825   0.17936    0.1794     0.17948501 0.181165\n",
      "  0.18078    0.18182    0.181525   0.182905   0.182015   0.1832\n",
      "  0.18652    0.18694    0.18602   ]\n",
      " [0.155185   0.15517    0.155645   0.15418    0.15636    0.155135\n",
      "  0.15706    0.157825   0.157755   0.15873    0.15932    0.160295\n",
      "  0.16135    0.16109    0.16365   ]\n",
      " [0.13964    0.1392     0.13974    0.13975    0.14159    0.14142\n",
      "  0.142      0.1418     0.14596    0.144915   0.147055   0.14563\n",
      "  0.148615   0.14975    0.14952   ]]\n"
     ]
    }
   ],
   "source": [
    "test_parameters = {\n",
    "    'batch_size' : 2000,\n",
    "    'iterations' : 100,\n",
    "    'ebnodb' : np.arange(0,5.6,0.5)\n",
    "}\n",
    "sim_ber = test(test_parameters,plot=False,plot_baseline=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "colab": {
     "base_uri": "https://localhost:8080/",
     "height": 393
    },
    "colab_type": "code",
    "executionInfo": {
     "elapsed": 1733,
     "status": "ok",
     "timestamp": 1574345755532,
     "user": {
      "displayName": "Sebastian D.",
      "photoUrl": "https://lh3.googleusercontent.com/a-/AAuE7mDJIgrG5QSz693qk-L8X64e1IqSwUGtLObOtZfUMw=s64",
      "userId": "17763740861140004671"
     },
     "user_tz": -60
    },
    "id": "qDuKGcjUnIEI",
    "outputId": "3a04fba1-580c-45fe-d737-1932615ac2c4"
   },
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAt4AAAF4CAYAAABjOf4xAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOzdd3hUZfrG8e8zM+kJQRBQQQUEEUQX\nNSqKSlBAUBFF14ZtVRAV6291bbuWXfsWe0HFrtgREGVtoCAWUFxAxIItWFFaQurk/f1xJiGEBAYy\nkzMzuT/Xda7MnHPmzMOea3fvvHnP85pzDhERERERia+A3wWIiIiIiLQECt4iIiIiIs1AwVtERERE\npBkoeIuIiIiINAMFbxERERGRZqDgLSIiIiLSDEJ+FxBvZjYMGJaXlzdqxx13bLbvXbGmkp9WlVEZ\nriYtGGCrVpm0zk5rtu+XpispKSEnJ8fvMmQz6N4lJ9235KV7l7x07+Jj7ty5y5xz7ervt5bSx7ug\noMDNmTOnWb5r4sdLueyF+ZRWhmv3ZaUFuWHELhyxW8dmqSHeJn68lFumLeaHFaVs0zqLiw/ukTL/\nthrTp0+nsLDQ7zJkM+jeJSfdt+Sle5e8dO/iw8zmOucK6u9P+RFvP9wybfE6oRugtDLMVZMWsqYi\nTChopAcDhIJGWjBAWtAIBQK1r9Mix7xzAoQCRnrI+5kWCpAW8M4LBgwza/Z/X/1fLJauKOWyF+YD\npFz4FhEREYkVBe84+GFFaYP7V5ZWcvmL82P6XTUBfm04D5AWskg4jxwLBkivCfehAGmB+uG+5py6\n4b7mnHV/GUgLBrhh6qIGf7G4buoidto6j4xQkPSQd72MtMjPUMCXXxI2R81o/tIVpXR8782UHM0X\nERGR5qfgHQfbtM5iaQPhe6v8TCae3Y/KcDVV1Y7KcHVkc1SFq6kIV1MVdlRVV1NR5f2sOV4ZOVb3\n/MpwNZXVjsoq73re573zKuqeX3tONWtKw7XneMeqqayq+U7vOjWf31S/ri5nyK3vNHo8PRjwAnm9\nUJ4e8oK5dyxY+zpjvXOCtZ/PqHOdmmMZ9Y/V/UyUvwRoNF9ERETiJeWDd83Dld26dWu277z44B4N\nzvG+dMhObJWf2Wx1NIVzjnC180J/dfXacF9VzdH3vsvPq8rX+0ybnHT+cURvyqvCVFR5Qb48slVU\neb9YlFdWUxEO1x6re15FVTUrSysprwxTEa5u4Jww1TF6JKHuLwF1Q/w3v5VQGV73S0orw1z+4nw+\n/m45mWlBMtKCZKYFyAwFyYy8zkoLRo4FvH2hyDlpa8/JDAUJBPwd9W8Jc/NFREQSVcoHb+fcZGBy\nQUHBqOb6zpogUztdIQkDjplFpqBAFsF1jl02tGeDv1j87bBeHLLL1nGtq+YvA16Arxvuw+uF+Irw\n+vvL1zsnvM61vviluMHvXVMR5sWPl1IW+dzmqhl5z2wgvGemBclYJ7A3cDwtSGZobaDPSlv3/Ix6\n56cF13YM1Wi+iIiIv1I+ePvliN06csRuHVPyaeG6v1g098hpKDLvPDs9Ptfvd+ObDU4T6tg6i1mX\nHghAdbWjvKqassowZVVhyiojrysjr6vClFfW219zfmRfeQOfKy6vYllxReSzdT+z+SP9wYDVBvUV\nayoJu4ZH8+d9v4K8zBA5Gd6WmxEkNyONnIwgubX7vC07PZg08/VFREQSiYK3bJaaXyxSTWPThC4+\nuEft+0DAyEoPkpUebOgSMeecN+XHC/neCH3dkF/WQMgvr6y3vyrM4+991+D111SEef6jIkrKq6IK\n+GaQkx6qDeU1wTwnI0RexrrhvW5grz0nEvBzI9cIBWO3jpcejBURkUSm4C1SRyJOEzIz0kNe15pW\nmZu/CNNbn/26wdF85xyllWGKy6soKQ
9TUl7F6rIqSsqrKKmoori8iuLI++LI8eLIVlJexe8la2pf\nl5SHo35ANyMUWDvanh4J6pl1wnv6uoG9sRH5tz77hb+9tIDSSu97NZVGREQSjYK3SD2pOk1oY6P5\nZkZ2eojs9BDkNf37yqvCtQG+JpCvrg3mDYf3mrD/y+oySpaFa8N+/faV0SqtDHPJc//jxY+Xkh35\nK0V2epDs9BBZaTWvg2Slh9YeT4scT697PEh60N+WmHowVkQk+Sl4i7QQzT0332vxGKRNTtMn5Ier\nHSUVkcBeVlU7Kl83tF81aWGDn60IV7N8TQVLV4QprQizpqKKNRVhyjfxIdlgwMhOWxvea8J6dnqw\nNsSvs6+BEJ8VCf11P5OdHiIzbcOhXg/GioikhpQP3n60ExRJVMk6Nz8YMFplpnlTbfIbPmfc20sa\nnUozaex+6+0PV3tTa9ZUVEUCubfVhHPvWM2+qnWPV67dt7qsil9WlbOmcu11SivDuE14INaMOuE9\nSHbauiPus778rcFFq66dvJB2eRm0ykwjPyuNVlkh8jLTCPrctlJERBqW8sHbj3aCItL8onkwtq5g\nwGof/Iw15xxlldW1o+trQ3ydcF4z+l4Zrhf81/3M8jWVjU61+X1NJSMfeH+9/bkZIfKz0sjLDNEq\nK22dYN4qM41WWZH3dY9ne+9zM0LqWiMiEicpH7xFpGVIpAdjzdZ2vmkbg+s11uayfV4Gtx+/G6tK\nK1lVVsWq0kpWllayqqySVaVVkZ+VFC1fw6IfveOry6s2+F0Bg7z6Qb2x4L5ekE/b6LSZhqgbjYi0\nFAreIpIyWtqDsZcf0pO+XTct2oerHavrBfOGwvrKOmF+ybJi733pxh90TQtabVDPqzOqXhPM64f1\nj79bzj3Tv6qdc6/56yKSyhS8RUQSXCwfjA0GjNbZ6bTezFWoKqqqveBeVhUJ42tD+9oAv+7xpStK\nvVBfWhlVm8nSyjAXPTOPW1//fJ0HVms70tQ+uFqvI03tw651PpMRqn0oNiPkT2cadaQRkRoK3iIi\nSSBRHoxNDwVom5tB29yMzfp8WWW4NqyvLK3iqHvebfC8age7dmodme/uda/5dXV57Vz5zXmINWCs\n22UmrV6Yr9N1ZlM60+RkhBoN9epIIyJ1KXiLiEizyUwLkpkWpH2rTMDrOtNYN5rbj99tg9falIdY\nSyrCdbrOVK3XxWZZcXnt52v2RbOSaw0zIiPr6wb3T39YtV7rytLKMFdNWki1c+Rleg/B5mV6U3Dy\nIg+4xnJFVxFJHAreIiLim03tRlNXrB9ircs5R3lV9XpBvuHgvrYbTUm9zjSN9YtfWVrJRc980uj3\nZ6UFawN53XCel5G2/r5GXmeEgjH+T2V9ejBWZNMoeIuIiG8SqRtNXWZWOzrflEWgGutIs1V+JhNG\n9WV1WVXtnPnVZZWsjiwQVfN6dZn3wOvqsip+XFlWu39NxcZXc00PBcjLaDic52aEaLXO/shoe+a6\n+7PSgo3Oi9c0GpFNp+AtIiK+StVuNND4iP6lQ3ai85Y5m33dqnB1JKB7wbw4EtJXl68b2Gv3RwL7\nN8vWeK8jq75ubI58MGC1019qwnhNMP/vwp8aXNjpHy9/Srf2uV6f/Mhn/XqwVSTRpHzw1sqVIiLi\nl1h2pKkrFAw0qTsNQHW1o6Siqjao1wTyukG9dhS+rKp2VP6HFWWsLl9NSSOj7suKKzjsjpnr1hsw\ncjND5KSHaoN8TiSY59W8jozO17yuCe3rbJkh0ppp/ru60Ug8pHzw1sqVIiLip0TpSFNfIGCRUey0\nzfp8Y9NotsxN5/ojd6G4vIqS8ipvdL2s3uuKKlasqeD75Wsoqd238ekzABmhwDrBPCfDC++5mXVe\nNxTs673OSQ8RDGgajTSvlA/eIiIiEnuNTaO58tBeDN55q02+Xs0IfHEkiBeXN/K6Yu2+ksgI/U+r\nyi
j5de15ZZUb7xcPkJ0eXG90PScjxMwvljU4jebvUz5l2zbZ5NdZCErTaGRTKHiLiIjIJov1g7Hr\njMDnN622ynC1N5LeWHiv87pmuk1NkP/+9zWNrtD6W0nFer3n04OB2hVZ667Wus5KrQ3ti7zPTFNw\nb0kUvEVERGSzJOqDsWlNnAPf2DSadrkZ3PLHXVlV5q3EWvMQa81qrTWLQ/2worT2fWMtJdfWanWC\n+PoBPW8Dob1V1oY7z2yIWkH6Q8E7nqoqwEU3Z01EREQSQ2PTaK44tCeFPdpv0rXKKsONBvTVdV6v\nKl17zo8ry2r3b2zaTChgjYb2Vllp5GVE9tfZN+eb37nt9S8oi/xSoDnszUfBO54+nUi/WRfAzwOg\na6G3te3mLXEmIiIiCSmW3Whq+sG3y8vYrFrKq8J1Avq6Qd3rAb9uaF9VVsXPq8pq9zc2baa+0sow\n//fsJ9z/zhJy0kNkZwS9n+lBcjLW/sxJD5KdEWrwnJzI+6y0IIFGHlxtDonckUbBO5626MKyLfdh\n6x//B59N8fblbbM2hHftD3mb/gCKiIiIxFeidKPJCAXJyA2yZe7mBfeKquraRZpqQvtJD37Q4Lnh\nasfW+ZmUlIdZXlJB0fJS1kTmw5dUhAlXb6Txex3Z6UGy070wnp3uBfaacF7zPrsmyEfOy6kJ9PXC\nfs21GutCU1eid6RR8I6nbfdk8U7nsnX//rD8a1gy3ds+fwU+edI7p91OXgjv0h8694PMJj5RIiIi\nIhKRHgrQNjeDtnWCe8fWWQ3OYe/YOosHTtmzwes456gIV7OmPExJhbd6akl5FSW1773XdX8W13u/\notSb/76mIlz7MGvVJoT5rLRgbXBfJ5xHRt9zM0K88NHSBjvS3DJtsYJ3i2EGbbp6W8FpUF0NP/3P\nC+Ffz4C5j8D794IFoePua0fEO+0Joc37DVdERESkIY3NYb/44B6NfsbMvNH3UJAtcjZ/4ab6Kqqq\nvXBeEWZNZGS9JBLK11REQn2dsF9cXlV73ppIR5qfV5XV/gJQXF7V4Pf80MAvGn5Q8PZDIADb9PG2\n/S6AqnL4/oO1I+Lv/AvevgVCWbD9vmunpXTYxfusiIiIyGaKdSvIpkgPBUgPpdM6OzbXa6wjzTat\ns2LzBU2k4J0IQhnQZX9vO+ivULoCvp0FS2Z4Qfy1v3rnZbWBLgesHRFv08W3kkVERCR5JWoryKba\nnNH85qTgnYiyWsNOh3obwKof4Ou3146IfzrR2996u7Xzw7v0h9x2/tQrIiIikgBi2ZEmHhS8k0Gr\nbeAPx3mbc7Dsi7Xzwxe+BB896p3XYRdvSkrXQthuH8jI9bFoERERkeaXKB1pGpKUwdvMugJXAPnO\nuaP9rqdZmUG7Hb1t79EQroIf560dDf9gHMy+EwJp3sOZXQu9MN5xDwim+Vu7iIiISAvW7MHbzMYD\nhwG/OOd619k/BLgNCAIPOOdubOwazrklwOlm9ly86014wRB0KvC2A/4MFWvg+/ciQXwGTL8Bpl8P\n6bnQeT9vSkrXQmjfUwv5iIiIiDQjP0a8HwbuBB6t2WFmQeAuYBBQBHxoZpPwQvgN9T5/mnPul+Yp\nNQmlZ8MOB3obwJrf4Zt36vQQf9Xbn9N+7bSULv2h9bb+1CsiIiLSQjR78HbOvW1mnevt3gv4MjKS\njZlNAIY7527AGx2XzZXdBnoN9zaAFd+t7ZayZDrMf9bb32aHtdNSOu/vfU5EREREYsaci37FoJh9\nqRe8p9RMNTGzo4EhzrkzIu9PAvZ2zo1t5PNtgevwRsgfiAT0hs4bDYwG6NChwx4TJkyI8b9k44qL\ni8nNTdCHHJ0jp+Rbtlj+P7ZY/gn5KxcQCpfhMFbn7cCK1ruyfIs/sDK/J9XBlreQT0LfO9kg3bvk\npPuWvHTvkpfuXXwMGDBgrnOuoP7+pHy40jn3GzAmivPGAeMACgoK
nB99KpOqP2a4EpbOxZZMp9WS\nGbQqmsR2378AwQzYbu/ItJRCb+GfQNDnYuMvqe6drEP3LjnpviUv3bvkpXvXvBIleC8F6k4y7hTZ\n12RmNgwY1q1bt1hcLrUF02C7vt5WeCmUF8O373ptC5dMhzeuBa6FzHxvOkrXQugxFPI7+Vu3iIiI\nSBJIlOD9IdDdzLrgBe7jgBNicWHn3GRgckFBwahYXK9FyciFHQd7G0Dxr2tD+JIZ8NkUmPpn2G5f\n6D0Ceh2hRXxEREREGuFHO8GngEJgSzMrAq5yzj1oZmOBaXidTMY75xY2d22yEbntYJejvc05+O0r\nWPgiLHjOC+Cv/MV7OLP3UbDTYd4KnCIiIiIC+NPV5PhG9k8Fpsb6+zTVJE7MYMtu0P9ib/t5ISx4\n3tteOgemXAjdBnkj4T2GQnqO3xWLiIiI+CpRpprEjaaaNJMOO3vbgX+FpR95AXzhC7D4ZUjL9sJ3\n76Og20AItbwOKSIiIiIpH7ylmZlBpz28bfA/4Lt3IyF8ovczIx96DvNGwrv091beFBEREWkBlHok\nfgIBb5n6zvvB0Ju9BzIXPA+LJsG8xyF7S9j5CG8kfNu+3vkiIiIiKSrlg7fmeCeIYBp0H+htlf+B\nL1/zQvjHT8CHD0CrjrDzkV4I32Y3b+RcREREJIWkfPDWHO8ElJbpTTfpOczrFb74FS+Ev38fzL4T\n2nT1Anjvo6B9T7+rFREREYmJlA/ekuAycmHXP3pb6XJYNNkL4e/8C96+Bdrv7M0H730UtOnid7Ui\nIiIimy3lg7emmiSRrC1g95O9rfiXtQ9kvvl3b+u4hxfAdz4SWm3jd7UiIiIimyTln2Zzzk12zo3O\nz8/3uxTZFLntYe/RcPo0uGA+DLoWqqtg2uXw717w0KHw4YNQ8pvflYqIiIhEJeWDt6SA1ttBv/Ph\nzLdh7BwovAxKfoGXL4J/dofHj4J5T0LZSr8rFREREWlUyk81kRSzZXco/Av0vwR+XrB2tcyJZ0Ew\nA7oP8qaj7DgE0rP9rlZERESkloK3JCcz2GoXbzvoKiiaE1mo50X4bAqk5cBOh3ghfIeDIJTud8Ui\nIiLSwqV88NbDlS2AGWy7p7cdfB18O8sL4Z++BPOfhcx86Hm4F8K7HACBoN8Vi4iISAuU8sFbfbxb\nmEDQC9ddDoBD/glLpsP857wOKR8/BjntI6tlHg2d9tRqmSIiItJsUj54SwsWTPPmfHcfBJWl8MV/\nvZHwjx6FD8ZB/rZea8JdjoatdtVqmSIiIhJXCt7SMqRlQa/h3la+Gj6b6oXw9+6Gd2+Htt0iq2Ue\nDe129LtaERERSUEK3tLyZOTBH471tjW/w6JJXgifcTPMuAk67AK9R5BRtq3flYqIiEgKSfngrYcr\nZYOy28Aep3rb6p/Wrpb5xjX0JQArJsLeZ0Ln/TUVRURERJok5Z8s08qVErW8raDvGDjjNTj/E77b\nbgR8+y48Mgzu2RfmjIeKEr+rFBERkSSV8sFbZLNs0Zmvu54EF30Kw++CQAimXAj/7gnTroDfv/a7\nQhEREUkyCt4iG5KWBbud6C1Xf9o06DYQ3r8Xbt8NnjwWvnwDqqv9rlJERESSQMrP8RaJCTPYrq+3\nrfoR5j7kTT15fAS07Q57jYY+x3sPboqIiIg0QCPeIpuq1dYw4HK4cCEcOQ4yW8ErF8O/esLUS2DZ\nF35XKCIiIglIwVtkc4UyvJaEo96EM96EnQ7xRsHvLIDHjoTFr2oaioiIiNRS8BaJhU57wIhx3sOY\nA66AXxbBU8fCHbvD7LugdIXfFYqIiIjPUj54m9kwMxu3cuVKv0uRliC3PfS/BC6YD0ePh9wOMO1y\nrxvKlAu9QC4iIiItUsoHb/XxFl8E07wl6E+f5nVE2XkEfPwE3N3X6wu+aApUh/2uUkRERJpRygdv\nEd9t/Qc44i64aBEcdBX8tgSe
Hgm3/QFm/sdbtl5ERERSnoK3SHPJaQv7XwTnfwLHPAZbdIbXr/am\nobw0Fn6a73eFIiIiEkfq4y3S3IIh6HW4t/28ED4YB588DR8/BtvtC3uNgp7DvOkqIiIikjI04i3i\npw47w7Db4P8WweB/wKql8Nyf4NZdYcYtUPyr3xWKiIhIjCh4iySCrC1g33PhvI/h+AnQrge89Q/4\nTy944UxYOtfvCkVERKSJNNVEJJEEgtBjqLf9+nlkGspT8L8J0LEA9j4Teh0BoXS/KxUREZFNpBFv\nkUTVbkc49J9eN5QhN0HpcnhhFPxnZ3jrelj9k98VioiIyCZQ8BZJdJmtoO8YGDsHRj4P2/SBGTd5\nAfy50+C798E5v6sUERGRjUj5qSZmNgwY1q1bN79LEWmaQAC6D/S2376CDx+Ajx+HBc97vcL3OtNb\ntCct0+9KRUREpAEpP+KtlSslJbXdAYbc4E1DOfRfUFUOL53tPYz5+jWwssjvCkVERKSelA/eIikt\nIxf2PAPOfg9Ofgm27QuzbvXaET59EnwzU9NQREREEkTKTzURaRHMoGuhty3/1puG8tGjsGgSdOjt\nLcqzyzGQnu1vnSIiIi2YRrxFUs0W28Pgv3vTUIbd7u2bfL63NP1/r4Tl3/hanoiISEul4C2SqtKz\nYY9TYMxMOHUqdO0Ps++G2/rAUydA0Ry/KxQREWlRNNVEJNWZQed+3rayCOaM97YHXoauA6D/JbD9\nvn5XKSIikvI04i3SkuR3goP+BhfMh4HXwM8L4KGh8NAh8NVbehBTREQkjhS8RVqijDzY7wI4/38w\n5Eb4fQk8dgQ8OAg+n6YALiIiEgcK3iItWXo29D0LzpsHh/4bVv8MTx4D4/rDoslQXe13hSIiIilD\nwVtEvNUu9zwdzvsIDr8TylbB0yfCvf1g/nNQHfa7QhERkaSn4C0iawXTYPeTYOwcGHG/F7ifPx3u\n2gvmPQnhKr8rFBERSVoK3iKyvmAIdj3GWxHzj49AKAsmngV37A5zH4aqCr8rFBERSTpJG7zN7Agz\nu9/MnjazwX7XI5KSAgHY+QgY8w4c9xRkt/EW47m9D7w/DirL/K5QREQkafgSvM1svJn9YmYL6u0f\nYmaLzexLM7t0Q9dwzk10zo0CxgDHxrNekRbPDHY6BEa9BSc+D/nbwisXw227wrt3QkWJ3xWKiIgk\nPL9GvB8GhtTdYWZB4C5gKNALON7MepnZLmY2pd7Wvs5Hr4x8TkTizQy6DYTTXoVTJkO7HvDfK+DW\nXeCdf3kPZYqIiEiDfFm50jn3tpl1rrd7L+BL59wSADObAAx3zt0AHFb/GmZmwI3AK865j+JbsYis\nwwy6HOBt370Pb98Mb1wLs2732hPufSZkbeF3lSIiIgnFnE8LZUSC9xTnXO/I+6OBIc65MyLvTwL2\nds6NbeTz5wGnAB8C85xz9zZwzmhgNECHDh32mDBhQhz+JRtWXFxMbm5us3+vNJ3u3abJW/UF23/7\nLFv+9j5VwSyWdjyUok6HU5me3+y16N4lJ9235KV7l7x07+JjwIABc51zBfX3+zLiHQvOuduB2zdy\nzjhgHEBBQYErLCxshsrWNX36dPz4Xmk63btNVQiMgp8WEHr7Frb/9Hm2/3EqFJwG+54LeVs1WyW6\nd8lJ9y156d4lL9275pVIXU2WAtvWed8psq9JzGyYmY1buXJlUy8lItHYqjcc8wic8z70HAbv3Q23\n7gpTL4aVRX5XJyIi4ptECt4fAt3NrIuZpQPHAZOaelHn3GTn3Oj8/Ob/c7dIi9auB4wY5y3Gs+sx\nMGc83NYHJp0Hv3/td3UiIiLNzq92gk8Bs4EeZlZkZqc756qAscA0YBHwjHNuoR/1iUgMtd0Bht8J\n530Mu58MnzwFd+wBL54Fy77wuzoREZFm41dXk+Mb2T8VmBrL7zKzYcCwbt26xfKyIrKpWm8Hh/
0b\nDrgY3r0d5jzkhfDeI2D/P0OHXn5XKCIiEleJNNUkLjTVRCTBtNoahtwAF8yHfufD59Pgnn1gwkj4\nYZ7f1YmIiMRNygdvEUlQue1g0DVeAD/gEvj6HRjXH544Br7/0O/qREREYk7BW0T8ld0GDrwCLpwP\nB14JRR/CgwPh0eHwzSy/qxMREYmZlA/eaicokiQy87353xfMh0F/h58/hYcPgfFD4as3wafFvkRE\nRGIl5YO35niLJJmMXOh3HlzwPxhyEyz/Bh47Eh4YCItfVQAXEZGklfLBW0SSVFoW9B0D58+Dw/4D\nJb/AU8fCfQfAp5OgutrvCkVERDaJgreIJLZQhrfs/LkfwfC7oaIEnjkJ7tkX5j8H1WG/KxQREYlK\nygdvzfEWSRHBNNhtJJzzAYx4AHDw/Olw114w70kIV/pdoYiIyAalfPDWHG+RFBMMwa5/hLNmwzGP\nelNSJp7lrYY55yGoKve7QhERkQZtUvA2sy3N7DAzO8XM2kT2ZZpZygd4EUkwgQD0Gg5nvgPHPw05\nW8KUC+D23djqxzc0B1xERBJOVIHZPLcARcAkYDzQOXL4JeCKuFQnIrIxZtBjCJzxBpz4AuRtzU6L\nb4fxB8OPn/hdnYiISK1oR6ovA8YC1wJ7A1bn2GTgsBjXFTOa4y3SQphBt4Pg9Nf4rMd58PsSGFcI\nL/8flC73uzoREZGog/cZwLXOueuBj+od+xLYIaZVxZDmeIu0MIEAP219EJw7F/YcBXPGe/O/P3pM\n009ERMRX0QbvjsB7jRyrAHJiU46ISIxktYZDbobRM6Btd5g0Fh4cBD/M87syERFpoaIN3kuB3o0c\n+wPwdWzKERGJsa13hdNehSPuhRXfetNPplwEa373uzIREWlhog3ezwJ/M7N+dfY5M9sR+D9gQswr\nExGJFTPoczyMnQN7nwlzH/Kmn8x9RNNPRESk2UQbvK8GPgPeBr6I7HsWmB95f2PMKxMRibWs1jD0\nJq8FYbseMPk8eHAgLK3/6IqIiEjsRRW8nXOlQCFwKvAu8DrwITAaGOScq4hTfU2mriYisp6tesOf\nXoEjx8HKIrj/QJh8vqafiIhIXEW98I1zLuyce8w5d6JzbrBz7njn3CPOuap4FthU6moiIg0ygz8c\n600/6Xu21/Xkjt29LijVYb+rExGRFBTtAjphM9urkWN7mJn+X0pEklNmKxhyPYyZCe17wZQL4YGD\noGiu35WJiEiKiXbE2zZwLAi4GNQiIuKfDr3g1JdhxAOw6kcvfE86F0p+87syERFJERsM3mYWMLNg\nzbmR93W3HGAosCzulYqIxJsZ7PpHGPsh7HMOzHvSm37y4YOafiIiIk3WaPA2s6uASrwFchwwK/K+\n7rYK+BtehxMRkdSQ2QoOvhQ4GJgAACAASURBVA7GzIKtdoGXL4L7B8D3H/pdmYiIJLHQBo5Nj/w0\nvHD9IFBU75xy4FNgSswrExHxW/ud4JTJsPAFmHaF13pwtxNh4DWQs6Xf1YmISJJpNHg752YAMwDM\nzAH3O+d+aK7CYsXMhgHDunXr5ncpIpKMzKD3UdB9MMy4Gd67GxZNhgP/CgWnQSC48WuIiIgQfR/v\na5IxdIPaCYpIjGTkweC/w1nvwtZ9YOqfYVx/+O59vysTEZEksaGpJusws/bA8UAPILPeYeecOz2W\nhYmIJKR2PeDkl+DTifDq5TB+MPQZ6U0/yW3nd3UiIpLAogreZtYDmB05Pwevi0kbvFaCywEtCyki\nLYcZ7HwkdBsEb98Cs++CRVPgwCug4HQIRj2mISIiLUi0fbxvwVsivgPew5ZDgSzgDGANcGRcqhMR\nSWQZuTDoGjh7NnTcHV65xJt+8u1svysTEZEEFG3w3hO4G6+LCUDAOVflnBsP3AncGo/iRESSwpbd\n4aQX4ZhHoXQFPDQEXhwDxb/4XZmIiCSQaIN3LvC7c64ab1
pJ3T5aH+IFcxGRlssMeg2HsR/A/v8H\nC56HO/aA9+6BcJXf1YmISAKINnh/A2wVeb0Y+GOdY4cBK2JYk4hI8krPgYP+BmfNhk57wquXwn0H\nwDez/K5MRER8Fm3wfg0YFHn9b+BPZrbYzBYC5wPj41GciEjS2rIbnPg8HPs4lK+Chw+B50fB6p/8\nrkxERHwS7aP3lwEZAM65Z8ysFDgWyAZuA+6PT3kiIknMDHoOgx0Ogpn/hlm3weJXYMBlsNdoCKb5\nXaGIiDSjjY54m1kQ2IlI8IbaRWlOdM6NcM6Nc865eBbZFGY2zMzGrVypjoci4pP0bDjwSjj7Pdiu\nL0y7HO7dH76Z6XdlIiLSjKKZauKAOcBuca4lLrRypYgkjLY7wMhn4bgnobIEHj4UnjsdVv3od2Ui\nItIMNhq8I51MvsdbOEdERJrCDHY6FM75APr/BRZNhjsLYNbtEK70uzoREYmjaB+uvA+4wMzS41mM\niEiLkZYFAy6Hc96DzvvBa3+Fe/eDr9/2uzIREYmTaB+uzAN2AJaY2avAj3hTUGo459xVsS5ORCTl\ntekKJzztPXT5yl/gkWGw8wg4+DpotY3f1YmISAxFG7wvr/P6tAaOO0DBW0Rkc/UYCl0Lvc4nM/8D\nn0+D/pdA37MhpD82ioikgqimmjjnAhvZgvEuVEQk5aVlQeGlcM770LU/vH4V3NsPlkz3uzIREYmB\naOd4i4hIc9miMxz/FJzwrPfA5aPD4ZlTYGWR35WJiEgTKHiLiCSqHQd7vb8HXOlNPblzT28aSlWF\n35WJiMhmUPAWEUlkaZnQ/2Jv+skOB8LrV8N9+8M3s/yuTERENpGCt4hIMthiezjuCTj+aahYAw8f\nAhPPhpJlflcmIiJRUvAWEUkmPYZ4o9/7XQj/e9pbfGfuI1Bd7XdlIiKyERsN3maWZmbDzaxLcxQk\nIiIbkZ4NA6+GMTOhfS+YfB48NAR+Xuh3ZSIisgHRLBlfCTwDdI57NSIiEr32PeHUl+GIe+C3L+He\n/eG/V0J5sd+ViYhIA6KdarIEaB/PQjaFmfU0s3vN7DkzO8vvekREfGMGfU6AsXNgtxPh3Tvgrr1h\n0RRwbuOfFxGRZhNt8L4ZuMLM2jX1C81svJn9YmYL6u0fYmaLzexLM7t0Q9dwzi1yzo0BjgH6NbUm\nEZGkl90GDr8dTvsvZObD0yPhqeNg+bd+VyYiIhHRLhl/INAG+NrM3gN+xFsmvoZzzp0S5bUeBu4E\nHq3ZYWZB4C5gEFAEfGhmk4AgcEO9z5/mnPvFzA4HzgIei/J7RURS33Z7w5kz4P174a0bvNHvwr9A\n33O09LyIiM/MRfGnSDP7eiOnOOdc16i/1KwzMMU51zvyfh/gaufcwZH3l0UuWj90N3Stl51zhzZy\nbDQwGqBDhw57TJgwIdoSY6a4uJjc3Nxm/15pOt275KV758ko+5VuXz5Au2XvUZK9LZ/veBYrW+/s\nd1mN0n1LXrp3yUv3Lj4GDBgw1zlXUH9/VCPezrl4dzTpCHxf530RsHdjJ5tZITACyACmNnaec24c\nMA6goKDAFRYWxqDUTTN9+nT8+F5pOt275KV7V9cfYfGr5Ey9mN3mXQ59RsKgayFnS78LW4/uW/LS\nvUteunfNK9qpJgnFOTcdmO5zGSIiyaHHEOhyALx9s/fw5eKpMPAa2O0kCGg5BxGR5hL1/+KaWbaZ\njTWzZ83sjcjPs80sKwZ1LAW2rfO+U2Rfk5nZMDMbt3LlylhcTkQkOan3t4iI76IK3ma2FfARcDtQ\nAGRHft4JfGRmHZpYx4dAdzPrYmbpwHHApCZeEwDn3GTn3Oj8/PxYXE5EJLmp97eIiG82pZ3gFsD+\nzrkuzrl9IvO+9wNaAzdF+4Vm9hQwG+hhZkVmdrpzrgoYC0wDFgHPOOc0DCMiEg/q/S0i4otog/dQ\n4DLn3Ky6O51z7wJXAg
12FWmIc+5459zWzrk051wn59yDkf1TnXM7Oud2cM5dF+31NkZTTUREGqHe\n3yIizSra4J0L/NDIsaLI8YSkqSYiIhtR0/t78D/g63e80e+Z/4GqCr8rExFJKdEG78XASY0cOxH4\nLDbliIiIL4JpsO+5MPYD6HYQvH413Lc/fDNrox8VEZHoRBu8/wkcb2avm9lpZjbUzP5kZtOAE4Bb\n4ldi02iqiYjIJsjvBMc9Acc/DRVr4OFDYOLZULLM78pERJJeVMHbOfc4MAboDTwAvAw8COwKjHHO\nPRm3CptIU01ERDZDjyFwznuw34Xwv6fhzgKY+whUV/tdmYhI0oq6j3dkFchtgJ2B/SM/Ozrn7o9T\nbSIi4qf0nLW9v9v1VO9vEZEm2mjwNrN0M/vIzAY756qdc4ucc7MiPzX0ISKS6tr3hD9NheF3q/e3\niEgTbDR4O+cqgC5AVfzLiT3N8RYRiQEz2G1kpPf3SPX+FhHZDNFONXkNGBzPQuJFc7xFRGIouw0c\nfgecNk29v0VENlG0wfsOvK4m/zSz/cxsBzPrWneLZ5EiIpJgtuur3t8iIpso2uA9A9gWuCjy+nPg\ni3qbiIi0JDW9v895X72/RUSiEIryvNOApJzEZ2bDgGHdunXzuxQRkdTUeluv9/fiV2DqJV7v7z4j\nYdC1kLOl39WJiCSMaIP3i0CZc648nsXEg3NuMjC5oKBglN+1iIiktB5DocsB8PYt3sOXn73she/d\nToJA1N1rRURSVjTtBEPAbyTpw5UiItKM6vb+bt/L6/09/mD4aYHflYmI+C6adoJVwM9AOP7liIhI\nSqjb+/v3r+C+A2DaFer9LSItWrR/+3scOCOehYiISIqp3/t79p1w116waLJ6f4tIixTtHO9vgBPM\n7EPgJeBH6j1s6ZwbH9vSREQkJdT0/u4zEqZcBE+fCDsOgaE3wxbb+12diEiziTZ43xX52RHYo4Hj\nDkjI4K2uJiIiCaKm9/f798JbN3i9v/tfAvuMhVC639WJiMRdtFNNumxkS9gFdLRypYhIAqnf+/uN\na9T7W0RajKhGvJ1zWgtYRERip4He3z22OhD22tWbmiIikoI2qbGqme1qZmPN7Coz2yqyr5uZ5cWn\nPBERSWk9hsI578F+F9Lh5xne9JNFU/yuSkQkLqIK3maWYWbPAh8DtwN/A7aJHL4ZuCI+5YmISMqL\n9P7+aPd/Ql4HeHokPHcalPzmd2UiIjEV7Yj3dcBA4CSgA2B1jr0CHBzjukREpIUpzusKo96CAVfA\np5O81oMLX/S7LBGRmIk2eB8PXOmcexL4vd6xr4HOsSxKRERaqGCa1+nkzBmQ3wmePRWePgmKf/G7\nMhGRJos2eLcFFm3gGhmxKSf2zGyYmY1buXKl36WIiEi0OuwMZ7wBB10Fn7/qzf2e/5wW3hGRpBZt\n8P4a2KeRY3sBi2NTTuypnaCISJIKhmD/i+DMd6BNV3j+dJgwElb/5HdlIiKbJdrg/ShwqZmNBNIi\n+5yZDQAuJEEXzxERkRTQfic4/b8w6O/w1Rve3O95T2n0W0SSTrTB+2bgZeAxYHlk30zgdeBV59wd\ncahNRETEEwhCv/NgzCxo1xMmjoEnj4FVP/hdmYhI1KIK3s65sHPuOKA/8C/gAby2ggc650bGsT4R\nEZG1tuwGf5oKQ26Er9/x5n5/9KhGv0UkKUS1cmUN59w7wDtxqkVERGTjAkHoexZ0HwyTzvW2hS/C\nsNu9FTFFRBLUJq1cCWBmATN708y6x6MgERGRqLTdAU6ZAof8E757H+7eB+aM1+i3iCSsTQ7eeIvn\nFAJaJl5ERPwVCMBeo+Dsd6HjbjDlQnj0cFj+jd+ViYisZ3OCt4iISGLZojOcPAkOuxWWfgx37wsf\n3A/V1X5XJiJSS8FbRERSgxkU/AnOng3b9YWpf4ZHhsFvX/ldmYgIsHnBuxq4BkiKHk5a
uVJEpIVp\nvS2c+Dwcfif8NB/u6Qez74bqsN+ViUgLF1XwNrOTzawtgPNc45z7KXKsjZmdHM8im0IrV4qItEBm\nsPtJcM570OUAmHYZPDQUln3hd2Ui0oJFO+L9ELBDI8e6RI6LiIgkllbbwAlPw5H3wa+L4d79YNZt\nGv0WEV9EG7xtA8dygKoY1CIiIhJ7ZvCH4+Cc92GHg+C1v8GDg+CXz/yuTERamEYX0DGzPsDudXYN\nM7Pe9U7LAo4D9Lc7ERFJbHlbwXFPwILnYerFcN/+UHgp7Hs+BDdpPTkRkc2yof+lGQ5cFXntgCsa\nOe834PRYFiUiIhIXZrDL0d6876l/hjeuhU8nwRF3Q4ed/a5ORFLchqaa3Io3f7sr3lSTEZH3dbdt\ngPbOuUlxrlNERCR2ctvDMY/CHx+BlUVwX3+YfhOEK/2uTERSWKMj3s65lcBKADPrAvzonKtorsJE\nRETibucjoPP+8MolMP16WDTZG/3eele/KxORFBTVw5XOuW8VukVEJCXltIWjH4Rjn4Din+H+AfDm\ndVCl/9sTkdhqNHibWdjM9oq8ro68b2xTVxMREUluPQ/zOp/0PhrevhnG9YelH/ldlYikkA09XHkt\nUFTntYt/OSIiIj7KbgMj7oPeI2Dy+fDAQOh3PvT/C6Rl+l2diCS5Dc3xvqbO66ubpRoREZFEsOPB\ncPZ78N8rYOa/4bOXvbnfnQr8rkxEkli0C+iIiIi0LFmtYfhdcOLzUFHiLbrz3yuhstTvykQkSW0w\neJtZnpkdbGaHmVluZF8PM3vKzBaa2XQzG9E8pYqIiPig20A4ezbsfgq8e4e37Px37/ldlYgkoQ09\nXLkjsBCYCkwCPjezPYB3gIFAMdAbeNbMBjZDrfXryzGzOWZ2WHN/t4iItDCZrWDYrXDySxCugPFD\n4JVLvZFwEZEobWjE++9AGTAY6At8CkwEPga2dc7tDWwHzAAujfYLzWy8mf1iZgvq7R9iZovN7Esz\ni+Z6fwGeifZ7RUREmqxrIZw1G/Y8A96/B+7pB9/M9LsqEUkSGwre/YBrnXNvOOc+AM4FOgJ3OufK\nAJxza4A78Ea+o/UwMKTuDjMLAncBQ4FewPFm1svMdjGzKfW29mY2CO8XgV824XtFRESaLiMXDv0n\nnPqy9/7hQ+HlP0N5sb91iUjCM+ca7hIY6c29v3NuduR9Ot4I+J7Oubl1zusLzHLOBaP+UrPOwBTn\nXO/I+32Aq51zB0feXwbgnLuhkc9fB+TghfRS4EjnXHUD540GRgN06NBhjwkTJkRbYswUFxeTm5vb\n7N8rTad7l7x075JTMt63QLiMLl8/TqeiKZRltmNxj7Gs2OIPfpfV7JLx3olH9y4+BgwYMNc5t14b\npA318Q4A4Trva17XT+qx6O/dEfi+zvsiYO/GTnbOXQFgZqcCyxoK3ZHzxgHjAAoKClxhYWEMSt00\n06dPx4/vlabTvUteunfJKXnv2xD47j2yXjqHPp/8DfY4FQb93ZsX3kIk770T3bvmtaHgDdDRzLpG\nXgfr7FtR55xOsS8rOs65h/36bhERkVrb9YUxM+Gt62D2XfDF63D4bV5HFBGRiI0F7+ca2Dex3nuj\n6aPeS4Ft67zvFNnXZGY2DBjWrVu3WFxORESkYWlZMPgf0OsImHg2PH4U7HYiDL7O6wkuIi3ehoL3\nn5qtCvgQ6G5mXfAC93HACbG4sHNuMjC5oKBgVCyuJyIiskGdCuDMt2HGTTDrNvjyDRh2m7capoi0\naBtaMv6ReHyhmT0FFAJbmlkRcJVz7kEzGwtMw5vSMt45tzAe3y8iIhJ3aZkw8CroOQxeOgeePAb6\nnAhDb4SMPL+rExGfbGyqScw5545vZP9UvMV6YkpTTURExDcdd4fRM2DGjTDzP/DtTBhxP2y7l9+V\niYgPNrhkfCpwzk12zo3Oz8/3uxQREWmJQulw0N/g
1KngqmH8wfDW9RCu9LsyEWlmKR+8RUREEsL2\n+8CYWbDrsd787/FD4Lev/K5KRJpRygdvMxtmZuNWrlzpdykiItLSZbaCI++Fox+C376Ee/eHuY9A\nI4vZiUhqSfngrakmIiKScHqPgLPe9TqgTD4PJoyEkmV+VyUicZbywVtERCQh5XeEkybCwdfDl6/B\nPft6C++ISMpS8BYREfFLIAD7nAOj3oLstvDEUTD1Yqgs9bsyEYmDlA/emuMtIiIJb6veXvjuew58\nMA7u6w8/fuJ3VSISYykfvDXHW0REkkJaJgy5Hk56EcpXwf0HwcxboTrsd2UiEiMpH7xFRESSyg4H\neg9e9hgKr18FjxwOK773uyoRiQEFbxERkUST3QaOeRSOuAd+nAf39IP5z/ldlYg0kYK3iIhIIjKD\nPifAmJnQfid4/nR4/gwoXeF3ZSKymVI+eOvhShERSWptunjLzQ+4Eha8APfuB9/M9LsqEdkMKR+8\n9XCliIgkvWAI+l8Mp78GwXR4+DB47SqoqvC7MhHZBCkfvEVERFJGpz3gzLdhj1Ng1q3wwIHw62K/\nqxKRKCl4i4iIJJOMXBh2Gxz3FKz6Ae47AD64H5zzuzIR2QgFbxERkWS00yFw1mzovD9M/TM8cTSs\n/tnvqkRkA1I+eOvhShERSVl5HWDks3DIP70HLu/ZBz572e+qRKQRKR+89XCliIikNDPYa5Q39zu/\nE0w4ASadC+XFflcmIvWkfPAWERFpEdr1gNNfh/0uhI8eg/v2h6I5flclInUoeIuIiKSKUDoMvBpO\nfRnClfDgYJh+E4Sr/K5MRFDwFhERST2d+3krXvY+CqZfDw8Nhd+X+F2VSIun4C0iIpKKslrDUffD\nUQ96vb7v3R8+flxtB0V8pOAtIiKSynY5Gs6aBdvsBi+dA8+cBGt+97sqkRYp5YO32gmKiEiL13pb\nOHkSDLoWFr8Kd+8DX77hd1UiLU7KB2+1ExQREQECAeh3Pox605uG8vgIeOVSqCzzuzKRFiPlg7eI\niIjUsfWuMHo67HUmvH8PjCuEn+b7XJRIy6DgLSIi0tKkZcEhN8PI56H0d7j/QHj3Dqiu9rsykZSm\n4C0iItJSdR8IZ82G7oPhv1fCY8Nh5VK/qxJJWQreIiIiLVlOWzj2cTj8DiiaC/fsAwte8LsqkZSk\n4C0iItLSmcHuJ8OYd6Btd3juT/DCmVCmjmAisaTgLSIiIp62O8Bp06D/pTD/WbhnP/j2Xb+rEkkZ\nCt4iIiKyVjAEAy6D0171WhA+fCi8cS1UVfhdmUjSU/AWERGR9W27F4yZCX1Gwjv/ggcHwa+f+12V\nSFJL+eCtlStFREQ2U0YeDL8TjnkMVnwL9x0AHz4IzvldmUhSSvngrZUrRUREmqjX4V7bwe33gZcv\ngiePheJf/K5KJOmkfPAWERGRGGi1tbfgztCbYcl0uHsfWPyq31WJJBUFbxEREYlOIAB7nwlnzoC8\nreGpY2HKhQTCZX5XJpIUFLxFRERk07TvCaPegH3PgzkPscfc/4NfF/tdlUjCU/AWERGRTRfKgMF/\nh5NfIq1yNYwbAAue97sqkYSm4C0iIiKbr2t/5hT8B7bqDc+dBq9cqp7fIo1Q8BYREZEmqchoC6e+\nDH3PhvfvgUcOg1U/+F2WSMJR8BYREZGmC6bBkBvg6PHw0wKv5/fXb/tdlUhCUfAWERGR2Ol9FIx+\nC7K2gEeHw8z/aMEdkQgFbxEREYmtdj1g1JvQazi8fjVMGAllWkFaRMFbREREYi8jD45+CIbcCF9M\ng3GF3hQUkRZMwVtERETiwwz6nuU9eFlZCg8MhHlP+V2ViG8UvEVERCS+tusLZ74NnQpg4hiYciFU\nlftdlUizS8rgbWaFZvaOmd1rZoV+1yMiIiIbkdseTpoI/S6AOeNh/MGw4ju/qxJpVs0evM1svJn9\nYmYL6u0fYmaL
zexLM7t0I5dxQDGQCRTFq1YRERGJoWAIBl0Dxz4Bv33ltRz88nW/qxJpNn6MeD8M\nDKm7w8yCwF3AUKAXcLyZ9TKzXcxsSr2tPfCOc24o8BfgmmauX0RERJqi52EwejrkbQOPHw3Tb4Lq\nar+rEom7UHN/oXPubTPrXG/3XsCXzrklAGY2ARjunLsBOGwDl1sOZMSjThEREYmjtjvAGa97872n\nXw9FH8KIcZDdxu/KROLGnA9N7SPBe4pzrnfk/dHAEOfcGZH3JwF7O+fGNvL5EcDBQGvgHufc9EbO\nGw2MBujQocMeEyZMiO0/JArFxcXk5uY2+/dK0+neJS/du+Sk+5a8mnTvnGObH16l25cPUJHehgW9\n/0JxXrfYFiiN0n/v4mPAgAFznXMF9fc3+4h3LDjnXgBeiOK8ccA4gIKCAldYWBjnytY3ffp0/Phe\naTrdu+Sle5ecdN+SV9Pv3QAoOobMZ0+hYN5lcMgtsPspXjtCiSv99655JUpXk6XAtnXed4rsazIz\nG2Zm41au1IpZIiIiCavTHjB6BnTeDyafDy+d4/X+FkkhiRK8PwS6m1kXM0sHjgMmxeLCzrnJzrnR\n+fn5sbiciIiIxEtOWxj5HPT/C8x7Ah4YBL8v8bsqkZjxo53gU8BsoIeZFZnZ6c65KmAsMA1YBDzj\nnFvY3LWJiIiIzwJBGHA5nPAsrPwe7iuExa/4XZVITDR78HbOHe+c29o5l+ac6+ScezCyf6pzbkfn\n3A7Oueti9X2aaiIiIpKEdhzsrXbZpgs8dRy8fg2Eq/yuSqRJEmWqSdxoqomIiEiS2mJ7OG2a96Dl\nzH/D40dC8a9+VyWy2VI+eIuIiEgSS8uEw2+H4XfB9x94q11+/4HfVYlslpQP3ppqIiIikgJ2OxFO\nfw1C6fDQUHj/PvBhLRKRpkj54K2pJiIiIili6129pea7DYRXLoHnz4DyYr+rEolaygdvERERSSFZ\nW8BxT8GBf4WFL8ADB8Gvn/tdlUhUFLxFREQkuQQCcMCf4cQXoORXuH8ALJzod1UiG5XywVtzvEVE\nRFLUDgO8loPte8Kzp8C0KyBc6XdVIo1K+eCtOd4iIiIpLL8TnDoV9hoNs++ERw6H1T/5XZVIg1I+\neIuIiEiKC6XDIbfAiAfgx3lw7/7wzSy/qxJZj4K3iIiIpIZd/whnvAGZreCRYTDrdrUclISi4C0i\nIiKpo0MvGPUW7HQovPZXeOYkKFvld1UiQAsI3nq4UkREpIXJbAXHPAqD/wGfTYVxhfDzp35XJZL6\nwVsPV4qIiLRAZrDvuXDKZKgo9vp9/+8Zv6uSFi7kdwF+qqyspKioiLKysrh9R35+PosWLYrb9aVh\nmZmZdOrUibS0NL9LERERP3Xu57UcfPZP8MIo+P4DOPg6CGX4XZm0QC06eBcVFZGXl0fnzp0xs7h8\nx+rVq8nLy4vLtaVhzjl+++03ioqK6NKli9/liIiI3/K2glMmwRvXwLt3wA8fwzGPeK0IRZpRyk81\n2ZCysjLatm0bt9At/jAz2rZtG9e/ZIiISJIJpnlzvo95FH5dDPcdAF+95XdV0sKkfPDe2MOVCt2p\nSfdVREQa1Gs4jH4LctrDY0fC27dAdbXfVUkLkfLBO9EfrgwGg/Tp04fevXszbNgwVqxYAcA333yD\nmXHHHXfUnjt27FgefvhhAE499VQ6duxIeXk5AMuWLaNz584NfsfVV19Nx44d6dOnD927d2fEiBF8\n+ml8n+4+9dRTee655+L6HSIiIptly+4w6g3Y5Wh48x/w1HFQutzvqqQFSPngHUsTP15KvxvfpMul\nL9PvxjeZ+PHSJl8zKyuLefPmsWDBAtq0acNdd91Ve6x9+/bcdtttVFRUNPjZYDDI+PHjo/qeCy/8\n//buPDyq6v7j+PtrIIKAD02NyiKCW0rAJEAIRdGwxbApBqk0VVFQC1oXVCxqal
utWy0oitZqK3VB\nJBYQsGClLhRRChIIylqXomVRApQfyCIhnN8fd0LDMJNMIDN3Ej6v55kn3HPvPec7zxH4cv3ec26j\nuLiYTz/9lCFDhtCzZ09KSkqOOv6asn//fr9DEBGRY0liIxj0R+g3Fj5/F57Nhk3L/Y5K6jgl3hGa\nsWwDd0//hA3b9+CADdv3cPf0T2ok+S7XtWtXNmz4X3/Jycn06tWLF198MeT1o0aN4vHHH6920jpk\nyBAuuugiJk+eDEBRURHZ2dl06tSJ3NxcNm3aBMBnn31G7969SU9Pp2PHjnz++ec457jzzjtp3749\n5557LoWFhYD3QuNNN91ESkoKvXv3ZvPmzQfHC9d/9+7dGTVqFJmZmTzxxBPV+g4iIiJHzQyyrodh\nb8KB/fCnHFj6st9RSR12TK9qUtF9b6xk1cbwO1st+2o7+8oOrQHbU1rGz6d+zKuLvwp5T2rzE7m9\ne6uIxi8rK+Odd97h2muvPaR9zJgx9O3bl+HDhx92T6tWrejWrRsvv/wyF198cUTjlOvYsSNr1qyh\ntLSUm2++mZkzZ5KcnExhYSEFBQVMnDiRK664grvuuou8vDz27t3LgQMHmD59OsXFxSxfvpwtW7bQ\nuXNnLrzwQhYuXMja9fDOhAAAEzdJREFUtWtZtWoV33zzDampqQwfPrzS/gH27dvHkiVLqhW7iIhI\njTqts7fk4NThMOsm+M8i70l4/QZ+RyZ1jBLvCAUn3VW1R2rPnj1kZGSwYcMG2rZtS05OziHnzzjj\nDLp06XLw6XSwu+++m4EDB9K/f/9qjeucA2Dt2rWsWLHi4LhlZWU0a9aMnTt3smHDBvLy8gBvXWyA\nBQsWkJ+fT0JCAqeccgrZ2dl89NFHzJ8//2B78+bN6dmzZ6X9lxsyZEi14hYREYmKRifBVa/Dew/B\n+2Ph64+9FVC+19rvyKQOUeId8KuL21V6/vxH3mXD9j2Htbdo2pDCEV3D3rdz585K+y2v8d69eze5\nubk8/fTT3HLLLYdcc8899zB48GCys7MPu//ss88mIyOD1177325cBQUFzJ49G4Di4uKQ4y5btozM\nzEycc7Rr146FCxdWK+5Iheu/XKNGjWpkHBERkaN2XAL0uhdaZsL0Ed6Sg4P+COfk+h2Z1BF1vsa7\nquUEI3VnbgoN6ycc0tawfgJ35qYcVb/lTjjhBJ588knGjRt3WM32D37wA1JTU3njjTdC3ltQUMDY\nsWMPHj/44IMUFxeHTbqnTZvG3Llzyc/PJyUlhZKSkoOJcWlpKStXrqRJkya0bNmSGTNmAPDdd9+x\ne/duLrjgAgoLCykrK6OkpIT58+eTlZXFhRdeeLB906ZNvPeetzZquP5FRETiVkpfGPEPaNoKJg+B\nohf8jkjqiDqfeNfUcoKXdmjBw4POpUXThhjek+6HB53LpR1a1EygQIcOHUhLS+PVV1897FxBQQHr\n168PeV+7du3o2LFjpX0//vjjB5cTnDRpEu+++y7JyckkJiYydepUxowZQ3p6OhkZGXz44YcAvPzy\nyzz55JOkpaVx3nnn8fXXX5OXl0daWhrp6en07NmTRx99lFNPPZW8vDzOPvtsUlNTGTp0KF27ev8X\noLL+RURE4lZSG7j273BWb3jjVlj0nN8RSR1g5bW+dV1mZqYLfolv9erVtG3bNqrjast4/xzt/M6b\nN4/u3bvXXEASM5q72knzVnvV6bnb/x38ZRisne3tfHnezX5HVKPq9Nz5yMyKnHOZwe11/om3iIiI\nyBGrdzxc/iKkXgpzf+HtdClyhPRypYiIiEhlEurDZc97Sfi7D8D+fdDjHm8dcJFqUOItIiIiUpWE\nenDpM14SPv9RKPsOet+n5FuqRYm3iIiISCSOS4CLJ0DC8fDBE179d59HlHxLxJR4i4iIiETquOOg\n/ziv7OSfv/eS7/6Pee0iVVDiLSIiIlIdZp
D7kJd8L3gcyvbBJRO8J+IildA/z3zUo0cP3nrrrUPa\nxo8fzw033MDGjRsZPHgw4O0+OWfOnGr3P2/ePAYMGBDyXL9+/di+fXv1gw5Ys2YNGRkZdOjQgc8/\n//yI+xEREamVzKDXr6D73VD8Crw+Asr2V32fHNOUePsoPz+fKVOmHNI2ZcoU8vPzad68OVOnTgWO\nLPEO3v0y2Jw5c2jatGml15SVlYU9N2PGDAYPHsyyZcs488wzI4qpsv5ERERqHTPofpeXgH/yF5g6\nzFvxRCSMOp9419SW8dEwePBgZs+ezb593m/SdevWsXHjRi644ALWrVtH+/bt2bdvH7/85S8pLCwk\nIyODwsJCdu3axfDhw8nKyqJDhw7MnDkTgBdeeIFLLrmEnj170qtXLwB27NhB//79SUlJYeTIkRw4\ncACA1q1bs2XLlsNiaty4MXfccQfp6eksXLiQoqIisrOz6dSpE7m5uWzatIk5c+Ywfvx4nnnmGXr0\n6AHApEmTyMrKIiMjgxEjRhxMsiPpD6B79+6MGTOGrKwszjnnHN5//33AS9ZHjx5N+/btSUtLY8KE\nCQBh+xEREYm5C273Sk9Wz4LXhnp13yIh1Pkab+fcG8AbmZmZ11d64Zt3wdef1Ozgp54L3QrCnk5K\nSiIrK4s333yTgQMHMmXKFC6//HKswtvRiYmJ3H///SxZsoSnnnoKgHvuuYeePXsyceJEtm/fTlZW\nFr179wZg6dKlfPzxxyQlJTFv3jwWL17MqlWrOP300+nTpw/Tp08/WMISyq5du+jSpQvjxo2jtLSU\n7OxsZs6cSXJyMoWFhRQUFDBx4kRGjhxJ48aNGT16NKtXr6awsJAPPviA+vXrc+ONN/LKK68wdOjQ\niPsD7yn94sWLmTNnDvfddx9vv/02zz33HOvWraO4uJh69eqxbds2SktLufnmm8P2IyIiEnNdfwYJ\niTBnNLyaDz9+Beo39DsqiTN1PvGOd+XlJuWJ9/PPP1/lPXPnzmXWrFmMHTsWgL179/LVV18BkJOT\nQ1JS0sFrs7KyOOOMMw6OtWDBgkoT74SEBC677DIA1q5dy4oVK8jJyQG8p8/NmjU77J533nmHoqIi\nOnfuDMCePXs4+eSTq93foEGDAOjUqRPr1q0D4O2332bkyJHUq+f9p5qUlMSKFSsiiktERCSmsq73\nXricdQu88iP4SSEkNvI7KokjSrzL9X0kOv3u3Fnp6YEDB3LbbbexdOlSdu/eTadOnars0jnHtGnT\nSElJOaR90aJFNGp06G9wC1pbNPg4WIMGDUhISDg4Trt27Vi4cGGV8Vx99dU8/PDDR9Xf8ccfD3jJ\nemU16pHGJSIiEnMdh3rrfM8YCZMug5+8Bg1O9DsqiRN1vsY73jVu3JgePXowfPhw8vPzQ17TpEkT\ndlZI4HNzc5kwYQLOOQCWLVsWtv/Fixfz73//mwMHDlBYWEi3bt0iji0lJYWSkpKDCW5paSkrV648\n7LpevXoxdepUNm/eDMC2bdv48ssvj7i/inJycnj22WcPJuLbtm07on5ERERiJn0IDJ4I6z+Cl/Ng\nz5GvIiZ1ixLvOJCfn8/y5cvDJt49evRg1apVB1+uvPfeeyktLSUtLY127dpx7733hu27c+fO3HTT\nTbRt25Y2bdqQl5cXcVyJiYlMnTqVMWPGkJ6eTkZGBh9++OFh16WmpvLAAw9w0UUXkZaWRk5OTsiX\nHSPtr6LrrruOVq1akZaWRnp6OpMnTz6ifkRERGKqXR5c/hJsWg4vXQK7t/kdkcQBK39qWtdlZma6\nJUuWHNK2evVq2rZtG9Vxd+7cSZMmTaI6hoR2tPM7b948unfvXnMBScxo7monzVvtpbmrxL/mQuGV\n8P2zYOhMaJzsd0SH0NxFh5kVOecyg9v1xFtEREQkWs65yHvJctsX8EJ/2Pm13xGJj5R4i4iIiETT\nmT3gym
mwYwP8uS/833q/IxKfKPEWERERibbW58NVr8OuLV7y/d91fkckPjjmE+9jpcb9WKN5FRGR\nuHNallfnvXcH/Lk/bP3c74gkxo7pxLtBgwZs3bpVSVod45xj69atNGjQwO9QREREDtWiI1zzV9i/\nB/7cD0rW+h2RxNAxvYFOy5YtWb9+PSUlJVEbY+/evUoAfdCgQQNatmzpdxgiIiKHO/VcuGY2vHiJ\nl3xfPQtOaed3VBIDtTLxNrPjgN8AJwJLnHMvHkk/9evXp02bNjUaW7B58+bRoUOHqI4hIiIitczJ\nbWHYm/Dixd5qJ1fNgOYZfkclURbzUhMzm2hmm81sRVB7HzNba2afmdldVXQzEGgJlAJ6NVhERERq\nn5POgmFzILGJt8nO+iVV3yO1mh813i8AfSo2mFkC8DTQF0gF8s0s1czONbO/Bn1OBlKAD51ztwM3\nxDh+ERERkZqR1AaGzYaGSfDSpfDlQr8jkiiKeeLtnJsPBO+bmgV85pz7wjm3D5gCDHTOfeKcGxD0\n2Yz3lPu/gXvLYhe9iIiISA1r2sp78t3kVJg0CL74h98RSZTES413C+A/FY7XA10quX46MMHMLgDm\nh7vIzH4K/DRw+K2Z+fHq8EnAFh/GlaOnuau9NHe1k+at9tLc1aRfdI/laJq76Dg9VGO8JN7V4pzb\nDVwbwXXPAc9FP6LwzGyJcy7TzxjkyGjuai/NXe2keau9NHe1l+YutuJlHe8NwGkVjlsG2kRERERE\n6oR4Sbw/As42szZmlgj8GJjlc0wiIiIiIjXGj+UEXwUWAilmtt7MrnXO7QduAt4CVgOvOedWxjq2\nKPG11EWOiuau9tLc1U6at9pLc1d7ae5iyLRduoiIiIhI9MVLqYmIiIiISJ2mxDuKqrkbp8SJcLur\nSnwzs9PM7D0zW2VmK83sVr9jksiYWQMzW2xmywNzd5/fMUnkzCzBzJaZ2V/9jkUiZ2brzOwTMys2\nM22ZGSMqNYmSwG6c/wJy8NYl/wjId86t8jUwqZKZXQh8C7zknGvvdzwSGTNrBjRzzi01syZAEXCp\nfs/FPzMzoJFz7lszqw8sAG51zv3T59AkAmZ2O5AJnOicG+B3PBIZM1sHZDrntIZ3DOmJd/SE3I3T\n55gkAmF2V5U455zb5JxbGvj1TrwXtVv4G5VEwnm+DRzWD3z0VKgWMLOWQH/gT37HIlIbKPGOnlC7\ncSoJEIkBM2sNdAAW+RuJRCpQrlAMbAb+7pzT3NUO44GfAwf8DkSqzQFzzawosNO3xIASbxGpU8ys\nMTANGOWc2+F3PBIZ51yZcy4DbwO1LDNTmVecM7MBwGbnXJHfscgR6eac6wj0BX4WKLOUKFPiHT3a\njVMkxgL1wdOAV5xz0/2OR6rPObcdeA/o43csUqXzgUsCtcJTgJ5mNsnfkCRSzrkNgZ+bgdfxSmQl\nypR4R4924xSJocALes8Dq51zj/kdj0TOzJLNrGng1w3xXkpf429UUhXn3N3OuZbOudZ4f8e965y7\n0uewJAJm1ijwEjpm1gi4CNBKXjGgxDtK6vhunHVaqN1V/Y5JInI+cBXeU7fiwKef30FJRJoB75nZ\nx3gPLf7unNPSdCLRcwqwwMyWA4uB2c65v/kc0zFBywmKiIiIiMSAnniLiIiIiMSAEm8RERERkRhQ\n4i0iIiIiEgNKvEVEREREYkCJt4iIiIhIDCjxFhERERGJASXeIiI+MrNrzMyF+WwPcd1ZEfRZfv/w\nEOcmBXYarMnv0MnMdptZiyquax2I65oKbb8O+s77zexLM3s+uD8zG29mc2oydhGRWKrndwAiIgLA\nj4D1QW37j7LPX5nZJOfcvqPspyq/AyaWb0F9hLoBZUB9IBW4D+hkZh2dcwcC1/wW+MLMejjn3juq\niEVEfKDEW0QkPhQ75z6rwf7m4m0DPQKYUIP9HsLMOgE9gJuPsqtFgR1/
Ad43szLgj0AK3u6/OOc2\nmdkbwJ2AEm8RqXVUaiIiUrs0N7MZZvatmW01s6fNrGGI6z4CZgAFZnZCZR2a2Ylm9pSZbTSz78xs\nrZndZmYWQTzXAR8751YG9XmCmf0+EOO3ZjYLaBnplwR2BH7WD2qfAuSa2WnV6EtEJC4o8RYRiQ8J\nZlYv6BPqz+hJwGfAIOBx4HrgmTB9/gJIBm4JN2hgjNnAMGAccDHwN+Ax4MEI4u4DvB+i/Vm8pPyx\nQKxrgcmV9FP+/RsGnqLfA6wEVgRd9z7e3105EcQmIhJXVGoiIhIf1oRomw0MCGqb45wbHfj1XDNz\nwP1m9pBz7l8VL3TOrTSzycDPzewZ59z/hRijH1599TDn3AsV+m0E3GFmjznntoQK2MxOAVoDy4Pa\nU4CfAAXOuUcq9NkYGBmqL2Bv0PEaYECF+u7y71RiZuuBHwITw/QlIhKX9MRbRCQ+5AGdgz6jQlz3\nWtDxFLw/y7PC9PsroDFeXXQoFwIHOPxp9CQgEehaSczNAz9Lgtq7BGIKFWs4P8T7zl2Ay4FdeMn6\nKSGuLakwtohIraEn3iIi8WFFhC9XfhPmOORSfs65L8zseeBWM3sixCVJwLYQK598XeF8OA0CP78L\nam9WRayhFFV4uXKxmc0HNgG3A2OCrt0DhKprFxGJa3riLSJSuwQ/AS4/rmwpv98ACXh108G2AUlm\nlhjUfmqF8+FsDfz8XlD7pqDYCHMclnPuG2ALkBbidFLgnIhIraLEW0Skdrk86PjHeKUii8Ld4Jzb\nCDwN3MDhK4v8A+/vgh8FtV8B7AMWVhLLOrza7DOC2hcFYgoVa0TMrBlwEkFlLGaWALTCe1lTRKRW\nUamJiEh8yDCzk0K0L6lQggHQz8x+h7dOdxZeDfdLzrlPq+j/EeCnQDbwZYX2N4EFwB/MLBlvJZF+\neCuSPBzuxUoA59w+M1tEUH25c25t4KXO+wOrpnyEt6Z4v0ri6xJYu/s44HS8mvQy4A9B17UHTgDm\nV/51RUTijxJvEZH48Jcw7ckcWlZxJXAH3tPrfXibzIwOcd8hnHNbzewx4NdB7QfMrD/wEF4t9ffx\nnmTfDoyPIO5C4Hdm1sg5t6tC+wjg20BsicC7eCudLAjTT3m7w6svLwJGOucWB103IHB+XgSxiYjE\nFXPO+R2DiIjUUmZ2It5W9zc65ybFYLxVwDTn3L3RHktEpKapxltERI6Yc24H8Fu8tcIj2enyiJnZ\nQLwXNMdFcxwRkWhRqYmIiBytx/BWTWkGbIziOA2BK51z26M4hohI1KjUREREREQkBlRqIiIiIiIS\nA0q8RURERERiQIm3iIiIiEgMKPEWEREREYkBJd4iIiIiIjHw/4vZngld8IpcAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 864x432 with 1 Axes>"
      ]
     },
     "metadata": {
      "tags": []
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "plot_bler_vs_ebnodb(test_parameters['ebnodb'], sim_ber[:,0])\n",
    "plt.plot(np.arange(0,5.6,0.5),code.viterbi_reference)\n",
    "plt.ylim(1e-6,0.5)\n",
    "plt.grid()\n",
    "plt.legend(['RNN-Decoder','Viterbi reference'])\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "colab": {},
    "colab_type": "code",
    "id": "TWgUOTgynIEQ"
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "accelerator": "GPU",
  "colab": {
   "collapsed_sections": [],
   "name": "RNN-based Decoder for Convolutional Codes.ipynb",
   "provenance": []
  },
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.8"
  },
  "varInspector": {
   "cols": {
    "lenName": 16,
    "lenType": 16,
    "lenVar": 40
   },
   "kernels_config": {
    "python": {
     "delete_cmd_postfix": "",
     "delete_cmd_prefix": "del ",
     "library": "var_list.py",
     "varRefreshCmd": "print(var_dic_list())"
    },
    "r": {
     "delete_cmd_postfix": ") ",
     "delete_cmd_prefix": "rm(",
     "library": "var_list.r",
     "varRefreshCmd": "cat(var_dic_list()) "
    }
   },
   "oldHeight": 654.4,
   "position": {
    "height": "676px",
    "left": "714px",
    "right": "63px",
    "top": "-5px",
    "width": "800px"
   },
   "types_to_exclude": [
    "module",
    "function",
    "builtin_function_or_method",
    "instance",
    "_Feature"
   ],
   "varInspector_section_display": "block",
   "window_display": false
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
