{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "BioBot_FDS_YB_04_Simple_NN_TF_Model_181114.ipynb",
      "version": "0.3.2",
      "provenance": [],
      "collapsed_sections": [],
      "toc_visible": true
    },
    "kernelspec": {
      "display_name": "Python 3",
      "language": "python",
      "name": "python3"
    }
  },
  "cells": [
    {
      "metadata": {
        "id": "IjyaQqpewlW4",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "# BioBot_FDS_04_Simple_NN_TF Model\n",
        "## Deliverable_04: Implementing a simple Neural Network Model with TensorFlow\n",
        "Author/code developer: Yan Bello. 14/11/2018. As part of the Master in Artificial Intelligence (UNIR). \n",
        "This file/code is part of the development and exploration/experimentation on a Fall Detection System (FDS). \n",
        "\n",
        "---\n",
        "\n",
        "\n",
        "In the following sections, we used this dataset: \n",
        "SisFall: A Fall and Movement Dataset. \n",
        "Created by: A. Sucerquia, J.D. López, J.F. Vargas-Bonilla\n",
        "SISTEMIC, Faculty of Engineering, Universidad de Antioquia (UDEA).\n",
        "Detailed information about this dataset can be found in this website: http://sistemic.udea.edu.co/en/investigacion/proyectos/english-falls/.\n",
        "Reference paper: Sucerquia A, López JD, Vargas-Bonilla JF. SisFall: A Fall and Movement Dataset. Sensors (Basel). 2017;17(1):198. Published 2017 Jan 20. doi:10.3390/s17010198\n",
        "\n",
        "---\n",
        "\n"
      ]
    },
    {
      "metadata": {
        "id": "3zwtaJ0RwiwC",
        "colab_type": "code",
        "outputId": "b3f8827c-2054-4ae2-f776-68add8dbed65",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 54
        }
      },
      "cell_type": "code",
      "source": [
        "# Preliminary step 0. Select the working folders and make sure the previously prepared dataset files are available.\n",
        "# The code below supports two options: a local drive, or mounting a Google Drive for Colab.\n",
        "# Select the appropriate configuration for your environment by commenting/un-commenting the following lines:\n",
        "\n",
        "# os is needed by the local-drive option below (the original code used os.getcwd() without importing os)\n",
        "import os\n",
        "\n",
        "# Option A - Google Colab with Google Drive:\n",
        "from google.colab import drive\n",
        "drive.mount('/content/gdrive')\n",
        "FILE_DIRECTORY = \"gdrive/My Drive/Colab Notebooks/\"\n",
        "SisFall_ALL_DIRECTORY = FILE_DIRECTORY + \"SisFall_dataset_ALL/\"\n",
        "\n",
        "# Option B - local drive (uncomment these lines and comment out Option A):\n",
        "# FILE_DIRECTORY = os.getcwd() + \"\\\\\"\n",
        "# SisFall_ALL_DIRECTORY = FILE_DIRECTORY + \"SisFall_dataset_ALL\\\\\""
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Drive already mounted at /content/gdrive; to attempt to forcibly remount, call drive.mount(\"/content/gdrive\", force_remount=True).\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "_uBcnFPnxamQ",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 4.1 Load a dataframe with prepared info from ADL/Falls dataset"
      ]
    },
    {
      "metadata": {
        "id": "swM4SErNeXmu",
        "colab_type": "code",
        "outputId": "ff9ad7cd-8488-43e0-9908-04473c6b1da5",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1018
        }
      },
      "cell_type": "code",
      "source": [
        "# We work with the prepared file Unified_ADL_Falls, which is based on the previous dataset\n",
        "my_data_file_name = FILE_DIRECTORY + \"Unified_ADL_Falls.txt\"\n",
        "\n",
        "import pandas as pd\n",
        "\n",
        "# Load the file into a data frame (read_csv already returns a DataFrame,\n",
        "# so the previous pd.DataFrame(...) wrapper was redundant)\n",
        "df_ADL_Falls = pd.read_csv(my_data_file_name, sep=',')\n",
        "\n",
        "# Drop the leftover index column named '0' that was saved along with the data\n",
        "df_ADL_Falls = df_ADL_Falls.drop('0', axis=1)\n",
        "\n",
        "# Split the examples by label: \"D\" = Activity of Daily Living, \"F\" = Fall\n",
        "df_only_ADLs = df_ADL_Falls[df_ADL_Falls.Fall_ADL == \"D\"]\n",
        "df_only_Falls = df_ADL_Falls[df_ADL_Falls.Fall_ADL == \"F\"]\n",
        "\n",
        "# Show the tail of each subset as a sanity check\n",
        "print(df_only_ADLs.tail())\n",
        "print(df_only_Falls.tail())"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "     Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  max_S1_X  \\\n",
            "2697      D19      SE        D  D19_SE06_R01.txt       8.727956       190   \n",
            "2698      D19      SE        D  D19_SE06_R02.txt      10.096698        86   \n",
            "2699      D19      SE        D  D19_SE06_R03.txt       9.540330       259   \n",
            "2700      D19      SE        D  D19_SE06_R04.txt      20.191198       393   \n",
            "2701      D19      SE        D  D19_SE06_R05.txt       9.022231       230   \n",
            "\n",
            "      mean_S1_X  min_S1_X  range_S1_X  skewness_S1_X    ...     \\\n",
            "2697  20.204659      -195         385      -1.745292    ...      \n",
            "2698 -33.031614      -324         410      -1.976282    ...      \n",
            "2699   8.276206      -154         413       0.398760    ...      \n",
            "2700   9.514143      -255         648       0.993127    ...      \n",
            "2701   9.554077      -164         394      -0.149056    ...      \n",
            "\n",
            "      range_S1_N_VER  skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV  \\\n",
            "2697        1.531165           2.009740      0.189131      0.035770  0.711721   \n",
            "2698        1.721676           2.839037      0.226830      0.051452  0.865803   \n",
            "2699        1.651294           2.986164      0.175513      0.030805  0.747053   \n",
            "2700        2.525731           4.938333      0.230644      0.053197  0.787008   \n",
            "2701        2.110528           4.118197      0.203275      0.041321  0.796733   \n",
            "\n",
            "       corr_NH   corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "2697  0.996825  0.739432 -0.309286 -0.145757  0.702846  \n",
            "2698  0.995884  0.886033  0.752237  0.536177  0.744701  \n",
            "2699  0.998071  0.765374 -0.409197 -0.213251  0.695071  \n",
            "2700  0.998731  0.800299 -0.157456 -0.203438  0.688019  \n",
            "2701  0.998789  0.806128 -0.476529 -0.166258  0.717775  \n",
            "\n",
            "[5 rows x 58 columns]\n",
            "     Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  max_S1_X  \\\n",
            "4495      F15      SE        F  F15_SE06_R01.txt      12.015061       175   \n",
            "4496      F15      SE        F  F15_SE06_R02.txt      14.164169       128   \n",
            "4497      F15      SE        F  F15_SE06_R03.txt      10.210935        13   \n",
            "4498      F15      SE        F  F15_SE06_R04.txt       9.538224       137   \n",
            "4499      F15      SE        F  F15_SE06_R05.txt      11.740748       154   \n",
            "\n",
            "       mean_S1_X  min_S1_X  range_S1_X  skewness_S1_X    ...     \\\n",
            "4495 -150.477537      -977        1152      -2.804444    ...      \n",
            "4496 -155.198003      -911        1039      -3.036554    ...      \n",
            "4497 -160.153078      -753         766      -2.682646    ...      \n",
            "4498 -171.712146      -834         971      -2.509883    ...      \n",
            "4499 -147.943428      -944        1098      -2.826632    ...      \n",
            "\n",
            "      range_S1_N_VER  skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV  \\\n",
            "4495        3.650464           3.294931      0.514555      0.264767 -0.065147   \n",
            "4496        3.290032           4.064083      0.373755      0.139693 -0.055799   \n",
            "4497        2.751549           3.714880      0.397491      0.157999 -0.028757   \n",
            "4498        3.240474           3.509039      0.452012      0.204315 -0.054726   \n",
            "4499        3.904351           3.534182      0.499249      0.249249  0.155856   \n",
            "\n",
            "       corr_NH   corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "4495  0.328154  0.881449 -0.082799 -0.530395  0.460717  \n",
            "4496  0.210786  0.946213 -0.056746 -0.469133  0.677548  \n",
            "4497  0.265980  0.930266 -0.240292 -0.541105  0.675271  \n",
            "4498  0.275312  0.925184 -0.341458 -0.624081  0.762479  \n",
            "4499  0.521532  0.902481 -0.197174 -0.673646  0.731832  \n",
            "\n",
            "[5 rows x 58 columns]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "GBiW3tap1263",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Shuffle and set up training and test samples for ADL/Falls"
      ]
    },
    {
      "metadata": {
        "id": "kk3EZrikeXm4",
        "colab_type": "code",
        "outputId": "7b17c011-2cbb-4e28-e821-94c4d58c0f8f",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "import random\n",
        "import math\n",
        "from numpy.random import permutation\n",
        "\n",
        "def split_train_test(df, test_fraction=0.3):\n",
        "    \"\"\"Randomly shuffle df's index and return (test, train) partitions.\n",
        "\n",
        "    test_fraction is the proportion of rows assigned to the test set\n",
        "    (rounded down); the remaining rows form the train set.\n",
        "    NOTE: no random seed is set, so the split differs between runs.\n",
        "    \"\"\"\n",
        "    shuffled_index = permutation(df.index)\n",
        "    n_test = math.floor(len(df) * test_fraction)\n",
        "    return df.loc[shuffled_index[:n_test]], df.loc[shuffled_index[n_test:]]\n",
        "\n",
        "# Split each class (ADLs and Falls) independently into 30% test / 70% train,\n",
        "# so both partitions keep the same class proportions (a stratified split).\n",
        "df_only_ADLs_test, df_only_ADLs_train = split_train_test(df_only_ADLs)\n",
        "df_only_Falls_test, df_only_Falls_train = split_train_test(df_only_Falls)\n",
        "\n",
        "print(\"Total ADL: \" + str(len(df_only_ADLs)))\n",
        "print(\"Total Falls: \" + str(len(df_only_Falls)))\n",
        "print(\"GRAND Total: \" + str(len(df_only_Falls)+len(df_only_ADLs)))\n",
        "print(\"---------------------------------------\")\n",
        "print(\"Train Falls: \"+ str(len(df_only_Falls_train)))\n",
        "print(\"Train ADL: \"+ str(len(df_only_ADLs_train)))\n",
        "print(\"Train TOTAL: \"+ str(len(df_only_ADLs_train)+len(df_only_Falls_train)))\n",
        "print(\"---------------------------------------\")\n",
        "print(\"Test Falls: \"+ str(len(df_only_Falls_test)))\n",
        "print(\"Test ADL: \"+ str(len(df_only_ADLs_test)))\n",
        "print(\"Test TOTAL: \"+ str(len(df_only_ADLs_test)+len(df_only_Falls_test)))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Total ADL: 2702\n",
            "Total Falls: 1798\n",
            "GRAND Total: 4500\n",
            "---------------------------------------\n",
            "Train Falls: 1259\n",
            "Train ADL: 1892\n",
            "Train TOTAL: 3151\n",
            "---------------------------------------\n",
            "Test Falls: 539\n",
            "Test ADL: 810\n",
            "Test TOTAL: 1349\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "XHEHeHINeXm_",
        "colab_type": "code",
        "outputId": "dff7afec-a51f-48dc-b470-d642b6729ace",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Build the combined test dataset by stacking the Falls and ADL test partitions\n",
        "df_ADL_Falls_test = pd.concat([df_only_Falls_test, df_only_ADLs_test])\n",
        "\n",
        "# Report the partition sizes\n",
        "print(\"Test ADLs: \" + str(len(df_only_ADLs_test)))\n",
        "print(\"Test Falls: \" + str(len(df_only_Falls_test)))\n",
        "print(\"Test ALL: \" + str(len(df_ADL_Falls_test)))\n",
        "\n",
        "# Peek at both ends of the combined frame\n",
        "print(df_ADL_Falls_test.head())\n",
        "print(df_ADL_Falls_test.tail())\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Test ADLs: 810\n",
            "Test Falls: 539\n",
            "Test ALL: 1349\n",
            "     Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  max_S1_X  \\\n",
            "3477      F07      SA        F  F07_SA12_R02.txt      28.158868      1730   \n",
            "4496      F15      SE        F  F15_SE06_R02.txt      14.164169       128   \n",
            "3031      F03      SA        F  F03_SA19_R01.txt      88.093106       191   \n",
            "4384      F15      SA        F  F15_SA01_R05.txt      64.593266      2619   \n",
            "3719      F09      SA        F  F09_SA12_R04.txt       2.035594        19   \n",
            "\n",
            "       mean_S1_X  min_S1_X  range_S1_X  skewness_S1_X    ...     \\\n",
            "3477   63.324459      -510        2240       3.887497    ...      \n",
            "4496 -155.198003      -911        1039      -3.036554    ...      \n",
            "3031 -146.509151     -4096        4287      -8.141281    ...      \n",
            "4384 -159.004992     -4053        6672      -5.225071    ...      \n",
            "3719 -147.504160      -835         854      -1.118020    ...      \n",
            "\n",
            "      range_S1_N_VER  skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV  \\\n",
            "3477        6.699628           4.023581      0.718885      0.516795  0.846591   \n",
            "4496        3.290032           4.064083      0.373755      0.139693 -0.055799   \n",
            "3031       16.388624           9.088421      1.221342      1.491676  0.690857   \n",
            "4384       21.553022           8.303136      1.562807      2.442366  0.820121   \n",
            "3719        3.256540           1.456533      0.498986      0.248987  0.088200   \n",
            "\n",
            "       corr_NH   corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "3477  0.950524  0.919384 -0.668190 -0.089681  0.438163  \n",
            "4496  0.210786  0.946213 -0.056746 -0.469133  0.677548  \n",
            "3031  0.866353  0.953690  0.424994 -0.329616 -0.443368  \n",
            "4384  0.892859  0.978143  0.305849 -0.657534 -0.152340  \n",
            "3719  0.695683  0.748694 -0.216643 -0.377517  0.326644  \n",
            "\n",
            "[5 rows x 58 columns]\n",
            "     Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  max_S1_X  \\\n",
            "757       D08      SA        D  D08_SA22_R05.txt       8.159998       100   \n",
            "652       D08      SA        D  D08_SA01_R05.txt       1.755796        45   \n",
            "1326      D11      SE        D  D11_SE01_R04.txt       3.757317        63   \n",
            "2627      D19      SA        D  D19_SA10_R01.txt       7.738473       170   \n",
            "2576      D18      SA        D  D18_SA23_R05.txt       4.126170       427   \n",
            "\n",
            "      mean_S1_X  min_S1_X  range_S1_X  skewness_S1_X    ...     \\\n",
            "757   15.016639       -38         138       1.151278    ...      \n",
            "652    4.602329       -39          84      -0.146848    ...      \n",
            "1326  -3.467554       -23          86       1.358530    ...      \n",
            "2627   7.294509      -142         312      -0.059787    ...      \n",
            "2576   3.487521      -472         899       0.165259    ...      \n",
            "\n",
            "      range_S1_N_VER  skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV  \\\n",
            "757         1.391013           1.726104      0.229784      0.052801  0.442195   \n",
            "652         1.234381           2.736062      0.138825      0.019272  0.294266   \n",
            "1326        1.787610           1.850547      0.251917      0.063462  0.411575   \n",
            "2627        1.310244           2.305588      0.182099      0.033160  0.653727   \n",
            "2576        2.997666           3.221152      0.381603      0.145621  0.606482   \n",
            "\n",
            "       corr_NH   corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "757   0.998951  0.450273  0.012475  0.136234  0.205409  \n",
            "652   0.999974  0.296621  0.139385  0.288089  0.025525  \n",
            "1326  0.999976  0.412518  0.376528 -0.715585 -0.269799  \n",
            "2627  0.999824  0.658250 -0.083154 -0.110563  0.321832  \n",
            "2576  0.992261  0.669341  0.127315 -0.201458 -0.042817  \n",
            "\n",
            "[5 rows x 58 columns]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "GjXFNdmfeXnE",
        "colab_type": "code",
        "outputId": "cef0ee0a-afb9-4883-f733-3e674a885081",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Build the combined train dataset by stacking the Falls and ADL train partitions\n",
        "df_ADL_Falls_train = pd.concat([df_only_Falls_train, df_only_ADLs_train])\n",
        "\n",
        "# Report the partition sizes\n",
        "print(\"train ADLs: \" + str(len(df_only_ADLs_train)))\n",
        "print(\"train Falls: \" + str(len(df_only_Falls_train)))\n",
        "print(\"train ALL: \" + str(len(df_ADL_Falls_train)))\n",
        "\n",
        "# Peek at both ends of the combined frame\n",
        "print(df_ADL_Falls_train.head())\n",
        "print(df_ADL_Falls_train.tail())\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "train ADLs: 1892\n",
            "train Falls: 1259\n",
            "train ALL: 3151\n",
            "     Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  max_S1_X  \\\n",
            "4149      F13      SA        F  F13_SA02_R05.txt       4.165379        53   \n",
            "3575      F08      SA        F  F08_SA07_R05.txt       4.422467       205   \n",
            "4172      F13      SA        F  F13_SA07_R03.txt       2.065487       221   \n",
            "3303      F06      SA        F  F06_SA01_R03.txt      12.473522       893   \n",
            "3400      F06      SA        F  F06_SA20_R05.txt      10.054013      1396   \n",
            "\n",
            "       mean_S1_X  min_S1_X  range_S1_X  skewness_S1_X    ...     \\\n",
            "4149 -147.003328      -918         971      -1.356286    ...      \n",
            "3575 -143.532446      -989        1194      -1.547840    ...      \n",
            "4172  -81.241265      -463         684      -0.420039    ...      \n",
            "3303  129.995008       -88         981       2.952118    ...      \n",
            "3400  123.337770      -565        1961       2.090652    ...      \n",
            "\n",
            "      range_S1_N_VER  skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV  \\\n",
            "4149        3.616831           1.759811      0.505234      0.255262 -0.053625   \n",
            "3575        4.529287           3.027514      0.653364      0.426884  0.325869   \n",
            "4172        3.010339           2.310889      0.410026      0.168122  0.889051   \n",
            "3303        3.467718           2.645097      0.484077      0.234330  0.475571   \n",
            "3400        5.562395           2.550464      0.684755      0.468889  0.503541   \n",
            "\n",
            "       corr_NH   corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "4149  0.340929  0.825944 -0.536538 -0.287334  0.329780  \n",
            "3575  0.693146  0.887493 -0.712236  0.105727  0.335286  \n",
            "4172  0.979584  0.917976 -0.355061  0.163495 -0.478949  \n",
            "3303  0.894396  0.784597 -0.051477  0.412850  0.346640  \n",
            "3400  0.894028  0.806098 -0.058769  0.037300  0.524654  \n",
            "\n",
            "[5 rows x 58 columns]\n",
            "     Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  max_S1_X  \\\n",
            "560       D07      SA        D  D07_SA21_R03.txt       9.899222        62   \n",
            "2468      D18      SA        D  D18_SA02_R02.txt       3.125303       276   \n",
            "1550      D12      SE        D  D12_SE08_R03.txt      -0.017980       312   \n",
            "855       D09      SA        D  D09_SA04_R03.txt       3.529780        56   \n",
            "1835      D14      SE        D  D14_SE03_R03.txt      -1.453733       278   \n",
            "\n",
            "       mean_S1_X  min_S1_X  range_S1_X  skewness_S1_X    ...     \\\n",
            "560     2.242928       -25          87       1.670363    ...      \n",
            "2468   15.718802      -193         469       0.742017    ...      \n",
            "1550  211.229617        49         263      -1.128621    ...      \n",
            "855    16.936772       -38          94       0.352948    ...      \n",
            "1835  113.946755       -48         326       0.460365    ...      \n",
            "\n",
            "      range_S1_N_VER  skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV  \\\n",
            "560         0.573939          -0.044662      0.203851      0.041555  0.255089   \n",
            "2468        2.169816           2.911109      0.275573      0.075941  0.579090   \n",
            "1550        0.425406          -0.336909      0.079033      0.006246 -0.890491   \n",
            "855         0.342374           0.518233      0.084930      0.007213  0.535318   \n",
            "1835        1.793753           0.070799      0.207965      0.043250  0.079973   \n",
            "\n",
            "       corr_NH   corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "560   0.999342  0.264373  0.236076  0.002666 -0.427624  \n",
            "2468  0.997391  0.607893 -0.258679 -0.011191  0.089100  \n",
            "1550 -0.827260  0.949986  0.928715 -0.942368 -0.929078  \n",
            "855   0.999046  0.523658 -0.339200  0.047038  0.444544  \n",
            "1835  0.139818  0.950952 -0.220681 -0.766857 -0.049420  \n",
            "\n",
            "[5 rows x 58 columns]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "_HX8lpYOAenz",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 4.2 Utility function to display training/validation performance data\n",
        "Note: the code in the following cell was adapted/extended from a code sample shared in class."
      ]
    },
    {
      "metadata": {
        "id": "-gcu5y6C5Kar",
        "colab_type": "code",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "import matplotlib\n",
        "import matplotlib.pyplot as plt\n",
        "\n",
        "def _plot_train_val(history, metric, color):\n",
        "    \"\"\"Plot train (solid) and validation (dashed) curves of one metric in one color.\"\"\"\n",
        "    plt.plot(history.history[metric], color=color)\n",
        "    # An explicit linestyle replaces the old 'r--' format string, whose red\n",
        "    # was silently overridden by the color= keyword anyway.\n",
        "    plt.plot(history.history['val_' + metric], linestyle='--', color=color)\n",
        "\n",
        "def plot_acc(history, title=\"Model Accuracy\"):\n",
        "    \"\"\"Display a graph with the accuracy per epoch obtained in a training session.\"\"\"\n",
        "    plt.plot(history.history['acc'])\n",
        "    plt.plot(history.history['val_acc'])\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Accuracy')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train', 'Val'], loc='upper left')\n",
        "    plt.show()\n",
        "\n",
        "def plot_loss(history, title=\"Model Loss\"):\n",
        "    \"\"\"Display a graph with the loss per epoch obtained in a training session.\"\"\"\n",
        "    plt.plot(history.history['loss'])\n",
        "    plt.plot(history.history['val_loss'])\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Loss')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train', 'Val'], loc='upper right')\n",
        "    plt.show()\n",
        "\n",
        "def plot_compare_losses(history1, history2, name1=\"Red 1\",\n",
        "                        name2=\"Red 2\", title=\"Graph title\"):\n",
        "    \"\"\"Compare the losses of two trainings, labelled name1 and name2.\"\"\"\n",
        "    _plot_train_val(history1, 'loss', 'green')\n",
        "    _plot_train_val(history2, 'loss', 'blue')\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Loss')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train ' + name1, 'Val ' + name1,\n",
        "                'Train ' + name2, 'Val ' + name2],\n",
        "               loc='upper right')\n",
        "    plt.show()\n",
        "\n",
        "def plot_compare_accs(history1, history2, name1=\"Red 1\",\n",
        "                      name2=\"Red 2\", title=\"Graph title\"):\n",
        "    \"\"\"Compare the accuracies of two trainings, labelled name1 and name2.\"\"\"\n",
        "    _plot_train_val(history1, 'acc', 'green')\n",
        "    _plot_train_val(history2, 'acc', 'blue')\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Accuracy')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train ' + name1, 'Val ' + name1,\n",
        "                'Train ' + name2, 'Val ' + name2],\n",
        "               loc='lower right')\n",
        "    plt.show()\n",
        "\n",
        "def plot_compare_losses3(history1, history2, history3, name1=\"Red 1\",\n",
        "                        name2=\"Red 2\", name3=\"Red 3\", title=\"Graph title\"):\n",
        "    \"\"\"Compare the losses of three trainings, labelled name1, name2 and name3.\"\"\"\n",
        "    _plot_train_val(history1, 'loss', 'green')\n",
        "    _plot_train_val(history2, 'loss', 'blue')\n",
        "    _plot_train_val(history3, 'loss', 'purple')\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Loss')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train ' + name1, 'Val ' + name1,\n",
        "                'Train ' + name2, 'Val ' + name2,\n",
        "                'Train ' + name3, 'Val ' + name3],\n",
        "               loc='upper right')\n",
        "    plt.show()\n",
        "\n",
        "def plot_compare_accs3(history1, history2, history3, name1=\"Red 1\",\n",
        "                        name2=\"Red 2\", name3=\"Red 3\", title=\"Graph title\"):\n",
        "    \"\"\"Compare the accuracies of three trainings, labelled name1, name2 and name3.\"\"\"\n",
        "    _plot_train_val(history1, 'acc', 'green')\n",
        "    _plot_train_val(history2, 'acc', 'blue')\n",
        "    _plot_train_val(history3, 'acc', 'purple')\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Accuracy')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train ' + name1, 'Val ' + name1,\n",
        "                'Train ' + name2, 'Val ' + name2,\n",
        "                'Train ' + name3, 'Val ' + name3],\n",
        "               loc='lower right')\n",
        "    plt.show()"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "metadata": {
        "colab_type": "text",
        "id": "gvdG8GzIyQN_"
      },
      "cell_type": "markdown",
      "source": [
        "## 4.3 Define and train a Neural Network - based classifier using TensorFlow\n",
        "Below we use TensorFlow Sequential for experimenting with various parameter settings. For clarity and simplicity here only one model configuration is included."
      ]
    },
    {
      "metadata": {
        "id": "rOWAmqmiy1a_",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Define, compile and train the Neural Network model"
      ]
    },
    {
      "metadata": {
        "id": "7VAfyxsZ5Kav",
        "colab_type": "code",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Import TensorFlow & tf.keras\n",
        "import tensorflow as tf\n",
        "from tensorflow import keras\n",
        "\n",
        "# The columns that we will be making predictions with...\n",
        "x_columns = ['kurtosis_S1_X','max_S1_X','mean_S1_X','min_S1_X','range_S1_X','skewness_S1_X','std_S1_X','var_S1_X',\n",
        "             'kurtosis_S1_Y','max_S1_Y','mean_S1_Y','min_S1_Y','range_S1_Y','skewness_S1_Y','std_S1_Y','var_S1_Y',\n",
        "             'kurtosis_S1_Z','max_S1_Z','mean_S1_Z','min_S1_Z','range_S1_Z','skewness_S1_Z','std_S1_Z','var_S1_Z',\n",
        "             'kurtosis_S1_N_XYZ','max_S1_N_XYZ','mean_S1_N_XYZ','min_S1_N_XYZ','range_S1_N_XYZ','skewness_S1_N_XYZ','std_S1_N_XYZ','var_S1_N_XYZ',\n",
        "             'kurtosis_S1_N_HOR','max_S1_N_HOR','mean_S1_N_HOR','min_S1_N_HOR','range_S1_N_HOR','skewness_S1_N_HOR','std_S1_N_HOR','var_S1_N_HOR',\n",
        "             'kurtosis_S1_N_VER','max_S1_N_VER','mean_S1_N_VER','min_S1_N_VER','range_S1_N_VER','skewness_S1_N_VER','std_S1_N_VER','var_S1_N_VER',\n",
        "             'corr_HV','corr_NH','corr_NV','corr_XY','corr_XZ','corr_YZ']\n",
        "\n",
        "# The column that we want to predict.\n",
        "y_column = [\"Fall_ADL_BIN\"]\n",
        "\n",
        "# Encode the label as a binary column: 1 = Fall (\"F\"), 0 = ADL (\"D\").\n",
        "# A vectorized comparison replaces the previous row-wise apply (same values, much faster).\n",
        "df_ADL_Falls_train = df_ADL_Falls_train.assign(\n",
        "    Fall_ADL_BIN=(df_ADL_Falls_train.Fall_ADL == \"F\").astype(int))\n",
        "df_ADL_Falls_test = df_ADL_Falls_test.assign(\n",
        "    Fall_ADL_BIN=(df_ADL_Falls_test.Fall_ADL == \"F\").astype(int))\n",
        "\n",
        "# Define the neural network\n",
        "my_model_NN = keras.Sequential([\n",
        "    # First layer; input_shape matches the number of feature columns\n",
        "    keras.layers.Dense(32, input_shape=(len(x_columns),)),\n",
        "    keras.layers.Dense(64, activation=tf.nn.sigmoid),\n",
        "    keras.layers.Dense(32, activation=tf.nn.sigmoid),\n",
        "    # output layer has 2 units (neurons) considering two categories: FALL/ADL\n",
        "    keras.layers.Dense(2, activation=tf.nn.softmax)\n",
        "])"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "metadata": {
        "id": "HRz-3ckG5Kax",
        "colab_type": "code",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# compile the model\n",
        "# - optimizer 'sgd': plain stochastic gradient descent\n",
        "# - 'sparse_categorical_crossentropy': loss for integer class labels (here 0/1)\n",
        "#   paired with the 2-unit softmax output layer defined above\n",
        "my_model_NN.compile(optimizer='sgd', \n",
        "              loss='sparse_categorical_crossentropy',\n",
        "              metrics=['accuracy'])"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "metadata": {
        "id": "UtLaoZBn5Ka1",
        "colab_type": "code",
        "outputId": "12654e0f-d6e9-4500-9cb0-e6164f673714",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "import numpy as np\n",
        "\n",
        "# Training split: feature matrix (DataFrame) and 0/1 label array.\n",
        "x_train = df_ADL_Falls_train[x_columns]\n",
        "y = df_ADL_Falls_train.loc[:,['Fall_ADL_BIN']]\n",
        "y_train = np.array(y)\n",
        "\n",
        "# Validation split: same feature columns taken from the held-out test DataFrame.\n",
        "x_val = df_ADL_Falls_test[x_columns]\n",
        "y = df_ADL_Falls_test.loc[:,['Fall_ADL_BIN']]\n",
        "y_val = np.array(y)\n",
        "\n",
        "# Train the model for 250 epochs in mini-batches of 250 samples.\n",
        "history = my_model_NN.fit(x_train, y_train, epochs=250, batch_size=250, \n",
        "                          validation_data=(x_val, y_val),) # evaluate on x_val/y_val after every epoch"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Train on 3151 samples, validate on 1349 samples\n",
            "Epoch 1/250\n",
            "3151/3151 [==============================] - 0s 92us/step - loss: 0.7075 - acc: 0.6004 - val_loss: 0.6779 - val_acc: 0.6004\n",
            "Epoch 2/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.6586 - acc: 0.6004 - val_loss: 0.6427 - val_acc: 0.6004\n",
            "Epoch 3/250\n",
            "3151/3151 [==============================] - 0s 31us/step - loss: 0.6421 - acc: 0.6004 - val_loss: 0.6275 - val_acc: 0.6004\n",
            "Epoch 4/250\n",
            "3151/3151 [==============================] - 0s 31us/step - loss: 0.6227 - acc: 0.6023 - val_loss: 0.6142 - val_acc: 0.6086\n",
            "Epoch 5/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.6127 - acc: 0.6223 - val_loss: 0.6121 - val_acc: 0.6227\n",
            "Epoch 6/250\n",
            "3151/3151 [==============================] - 0s 27us/step - loss: 0.6061 - acc: 0.6442 - val_loss: 0.6117 - val_acc: 0.6219\n",
            "Epoch 7/250\n",
            "3151/3151 [==============================] - 0s 22us/step - loss: 0.6067 - acc: 0.6420 - val_loss: 0.5999 - val_acc: 0.6264\n",
            "Epoch 8/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5967 - acc: 0.6484 - val_loss: 0.5918 - val_acc: 0.6701\n",
            "Epoch 9/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.5900 - acc: 0.6925 - val_loss: 0.5917 - val_acc: 0.7035\n",
            "Epoch 10/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5907 - acc: 0.7109 - val_loss: 0.5835 - val_acc: 0.7035\n",
            "Epoch 11/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.5901 - acc: 0.6807 - val_loss: 0.5839 - val_acc: 0.6812\n",
            "Epoch 12/250\n",
            "3151/3151 [==============================] - 0s 23us/step - loss: 0.5857 - acc: 0.6804 - val_loss: 0.5750 - val_acc: 0.6850\n",
            "Epoch 13/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.5903 - acc: 0.6823 - val_loss: 0.5838 - val_acc: 0.7191\n",
            "Epoch 14/250\n",
            "3151/3151 [==============================] - 0s 36us/step - loss: 0.5824 - acc: 0.7252 - val_loss: 0.5817 - val_acc: 0.7116\n",
            "Epoch 15/250\n",
            "3151/3151 [==============================] - 0s 31us/step - loss: 0.5807 - acc: 0.7093 - val_loss: 0.5709 - val_acc: 0.7057\n",
            "Epoch 16/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.5745 - acc: 0.7071 - val_loss: 0.5628 - val_acc: 0.7324\n",
            "Epoch 17/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.5675 - acc: 0.7290 - val_loss: 0.5617 - val_acc: 0.7272\n",
            "Epoch 18/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.5625 - acc: 0.7404 - val_loss: 0.5568 - val_acc: 0.7450\n",
            "Epoch 19/250\n",
            "3151/3151 [==============================] - 0s 21us/step - loss: 0.5603 - acc: 0.7623 - val_loss: 0.5533 - val_acc: 0.7606\n",
            "Epoch 20/250\n",
            "3151/3151 [==============================] - 0s 24us/step - loss: 0.5575 - acc: 0.7560 - val_loss: 0.5614 - val_acc: 0.7168\n",
            "Epoch 21/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.5603 - acc: 0.7480 - val_loss: 0.5463 - val_acc: 0.7643\n",
            "Epoch 22/250\n",
            "3151/3151 [==============================] - ETA: 0s - loss: 0.5448 - acc: 0.768 - 0s 33us/step - loss: 0.5465 - acc: 0.7658 - val_loss: 0.5396 - val_acc: 0.7576\n",
            "Epoch 23/250\n",
            "3151/3151 [==============================] - 0s 20us/step - loss: 0.5500 - acc: 0.7626 - val_loss: 0.5440 - val_acc: 0.7413\n",
            "Epoch 24/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.5444 - acc: 0.7461 - val_loss: 0.5458 - val_acc: 0.7302\n",
            "Epoch 25/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.5519 - acc: 0.7379 - val_loss: 0.5549 - val_acc: 0.7383\n",
            "Epoch 26/250\n",
            "3151/3151 [==============================] - 0s 27us/step - loss: 0.5581 - acc: 0.7490 - val_loss: 0.5595 - val_acc: 0.7480\n",
            "Epoch 27/250\n",
            "3151/3151 [==============================] - 0s 34us/step - loss: 0.5536 - acc: 0.7569 - val_loss: 0.5498 - val_acc: 0.7480\n",
            "Epoch 28/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.5461 - acc: 0.7598 - val_loss: 0.5367 - val_acc: 0.7665\n",
            "Epoch 29/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5371 - acc: 0.7645 - val_loss: 0.5303 - val_acc: 0.7658\n",
            "Epoch 30/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.5341 - acc: 0.7642 - val_loss: 0.5260 - val_acc: 0.7680\n",
            "Epoch 31/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.5266 - acc: 0.7645 - val_loss: 0.5164 - val_acc: 0.7806\n",
            "Epoch 32/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.5264 - acc: 0.7648 - val_loss: 0.5226 - val_acc: 0.7569\n",
            "Epoch 33/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.5294 - acc: 0.7636 - val_loss: 0.5241 - val_acc: 0.7517\n",
            "Epoch 34/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5284 - acc: 0.7632 - val_loss: 0.5234 - val_acc: 0.7591\n",
            "Epoch 35/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.5201 - acc: 0.7617 - val_loss: 0.5196 - val_acc: 0.7613\n",
            "Epoch 36/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.5181 - acc: 0.7617 - val_loss: 0.5184 - val_acc: 0.7487\n",
            "Epoch 37/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5171 - acc: 0.7588 - val_loss: 0.5165 - val_acc: 0.7472\n",
            "Epoch 38/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5254 - acc: 0.7591 - val_loss: 0.5170 - val_acc: 0.7583\n",
            "Epoch 39/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.5154 - acc: 0.7601 - val_loss: 0.5155 - val_acc: 0.7576\n",
            "Epoch 40/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.5209 - acc: 0.7550 - val_loss: 0.5188 - val_acc: 0.7376\n",
            "Epoch 41/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.5352 - acc: 0.7490 - val_loss: 0.5426 - val_acc: 0.7250\n",
            "Epoch 42/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.5308 - acc: 0.7531 - val_loss: 0.5226 - val_acc: 0.7502\n",
            "Epoch 43/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.5238 - acc: 0.7540 - val_loss: 0.5353 - val_acc: 0.7331\n",
            "Epoch 44/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5273 - acc: 0.7483 - val_loss: 0.5281 - val_acc: 0.7391\n",
            "Epoch 45/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.5271 - acc: 0.7553 - val_loss: 0.5308 - val_acc: 0.7517\n",
            "Epoch 46/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.5268 - acc: 0.7515 - val_loss: 0.5224 - val_acc: 0.7487\n",
            "Epoch 47/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.5258 - acc: 0.7502 - val_loss: 0.5330 - val_acc: 0.7487\n",
            "Epoch 48/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.5250 - acc: 0.7652 - val_loss: 0.5118 - val_acc: 0.7524\n",
            "Epoch 49/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5191 - acc: 0.7636 - val_loss: 0.5302 - val_acc: 0.7613\n",
            "Epoch 50/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5249 - acc: 0.7693 - val_loss: 0.5128 - val_acc: 0.7761\n",
            "Epoch 51/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5160 - acc: 0.7715 - val_loss: 0.5129 - val_acc: 0.7658\n",
            "Epoch 52/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.5092 - acc: 0.7709 - val_loss: 0.4973 - val_acc: 0.7591\n",
            "Epoch 53/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.5010 - acc: 0.7699 - val_loss: 0.5032 - val_acc: 0.7517\n",
            "Epoch 54/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4972 - acc: 0.7721 - val_loss: 0.4836 - val_acc: 0.7791\n",
            "Epoch 55/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4914 - acc: 0.7842 - val_loss: 0.4866 - val_acc: 0.7850\n",
            "Epoch 56/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4877 - acc: 0.7699 - val_loss: 0.4888 - val_acc: 0.7606\n",
            "Epoch 57/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4908 - acc: 0.7667 - val_loss: 0.4907 - val_acc: 0.7591\n",
            "Epoch 58/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.5011 - acc: 0.7639 - val_loss: 0.5024 - val_acc: 0.7687\n",
            "Epoch 59/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4940 - acc: 0.7613 - val_loss: 0.4891 - val_acc: 0.7428\n",
            "Epoch 60/250\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4985 - acc: 0.7560 - val_loss: 0.5117 - val_acc: 0.7302\n",
            "Epoch 61/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.5011 - acc: 0.7560 - val_loss: 0.4955 - val_acc: 0.7472\n",
            "Epoch 62/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4964 - acc: 0.7629 - val_loss: 0.4894 - val_acc: 0.7598\n",
            "Epoch 63/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5000 - acc: 0.7671 - val_loss: 0.5245 - val_acc: 0.7769\n",
            "Epoch 64/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.5190 - acc: 0.7715 - val_loss: 0.4992 - val_acc: 0.7761\n",
            "Epoch 65/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.5000 - acc: 0.7671 - val_loss: 0.4980 - val_acc: 0.7732\n",
            "Epoch 66/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.4913 - acc: 0.7725 - val_loss: 0.4801 - val_acc: 0.7798\n",
            "Epoch 67/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4911 - acc: 0.7705 - val_loss: 0.4885 - val_acc: 0.7687\n",
            "Epoch 68/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4857 - acc: 0.7772 - val_loss: 0.4724 - val_acc: 0.7924\n",
            "Epoch 69/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4740 - acc: 0.7871 - val_loss: 0.4832 - val_acc: 0.7813\n",
            "Epoch 70/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4820 - acc: 0.7848 - val_loss: 0.4694 - val_acc: 0.7650\n",
            "Epoch 71/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.4754 - acc: 0.7782 - val_loss: 0.4633 - val_acc: 0.7776\n",
            "Epoch 72/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4707 - acc: 0.7817 - val_loss: 0.4796 - val_acc: 0.7895\n",
            "Epoch 73/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4990 - acc: 0.7848 - val_loss: 0.4690 - val_acc: 0.7924\n",
            "Epoch 74/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4697 - acc: 0.7963 - val_loss: 0.4685 - val_acc: 0.7769\n",
            "Epoch 75/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4719 - acc: 0.7813 - val_loss: 0.4622 - val_acc: 0.7984\n",
            "Epoch 76/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4640 - acc: 0.7953 - val_loss: 0.4469 - val_acc: 0.8021\n",
            "Epoch 77/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4603 - acc: 0.7883 - val_loss: 0.4469 - val_acc: 0.8021\n",
            "Epoch 78/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4534 - acc: 0.7915 - val_loss: 0.4392 - val_acc: 0.8080\n",
            "Epoch 79/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4477 - acc: 0.8001 - val_loss: 0.4550 - val_acc: 0.7828\n",
            "Epoch 80/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4654 - acc: 0.7969 - val_loss: 0.4361 - val_acc: 0.7984\n",
            "Epoch 81/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4363 - acc: 0.8013 - val_loss: 0.4281 - val_acc: 0.8147\n",
            "Epoch 82/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4549 - acc: 0.7880 - val_loss: 0.4890 - val_acc: 0.7532\n",
            "Epoch 83/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4929 - acc: 0.7677 - val_loss: 0.5138 - val_acc: 0.7213\n",
            "Epoch 84/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4931 - acc: 0.7417 - val_loss: 0.5010 - val_acc: 0.7309\n",
            "Epoch 85/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4966 - acc: 0.7499 - val_loss: 0.4897 - val_acc: 0.7398\n",
            "Epoch 86/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4937 - acc: 0.7547 - val_loss: 0.4938 - val_acc: 0.7902\n",
            "Epoch 87/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4864 - acc: 0.7905 - val_loss: 0.4614 - val_acc: 0.7895\n",
            "Epoch 88/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4604 - acc: 0.7912 - val_loss: 0.4456 - val_acc: 0.7976\n",
            "Epoch 89/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4712 - acc: 0.7832 - val_loss: 0.4582 - val_acc: 0.7954\n",
            "Epoch 90/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4620 - acc: 0.7890 - val_loss: 0.4597 - val_acc: 0.8065\n",
            "Epoch 91/250\n",
            "3151/3151 [==============================] - 0s 21us/step - loss: 0.4631 - acc: 0.8007 - val_loss: 0.4803 - val_acc: 0.7865\n",
            "Epoch 92/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4686 - acc: 0.7813 - val_loss: 0.4511 - val_acc: 0.7784\n",
            "Epoch 93/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4471 - acc: 0.7924 - val_loss: 0.4254 - val_acc: 0.8258\n",
            "Epoch 94/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4375 - acc: 0.8089 - val_loss: 0.4387 - val_acc: 0.8036\n",
            "Epoch 95/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4576 - acc: 0.7893 - val_loss: 0.4558 - val_acc: 0.7880\n",
            "Epoch 96/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4547 - acc: 0.7829 - val_loss: 0.4353 - val_acc: 0.7999\n",
            "Epoch 97/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4549 - acc: 0.7975 - val_loss: 0.4444 - val_acc: 0.8102\n",
            "Epoch 98/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4455 - acc: 0.7982 - val_loss: 0.4430 - val_acc: 0.8095\n",
            "Epoch 99/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4550 - acc: 0.7950 - val_loss: 0.4524 - val_acc: 0.8073\n",
            "Epoch 100/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4540 - acc: 0.7959 - val_loss: 0.4482 - val_acc: 0.7828\n",
            "Epoch 101/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4514 - acc: 0.7855 - val_loss: 0.4469 - val_acc: 0.7754\n",
            "Epoch 102/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4423 - acc: 0.7915 - val_loss: 0.4347 - val_acc: 0.8058\n",
            "Epoch 103/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4410 - acc: 0.7950 - val_loss: 0.4326 - val_acc: 0.8021\n",
            "Epoch 104/250\n",
            "3151/3151 [==============================] - 0s 9us/step - loss: 0.4479 - acc: 0.7969 - val_loss: 0.4334 - val_acc: 0.8191\n",
            "Epoch 105/250\n",
            "3151/3151 [==============================] - 0s 20us/step - loss: 0.4432 - acc: 0.7978 - val_loss: 0.4485 - val_acc: 0.8058\n",
            "Epoch 106/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4532 - acc: 0.7810 - val_loss: 0.4522 - val_acc: 0.7546\n",
            "Epoch 107/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4379 - acc: 0.7950 - val_loss: 0.4148 - val_acc: 0.8414\n",
            "Epoch 108/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4404 - acc: 0.8270 - val_loss: 0.4265 - val_acc: 0.8332\n",
            "Epoch 109/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.4494 - acc: 0.8213 - val_loss: 0.4369 - val_acc: 0.8362\n",
            "Epoch 110/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4505 - acc: 0.8124 - val_loss: 0.4559 - val_acc: 0.8021\n",
            "Epoch 111/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4517 - acc: 0.7985 - val_loss: 0.4519 - val_acc: 0.7895\n",
            "Epoch 112/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4558 - acc: 0.8134 - val_loss: 0.4246 - val_acc: 0.8340\n",
            "Epoch 113/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4414 - acc: 0.8258 - val_loss: 0.4265 - val_acc: 0.8280\n",
            "Epoch 114/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4446 - acc: 0.8147 - val_loss: 0.4412 - val_acc: 0.8117\n",
            "Epoch 115/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4455 - acc: 0.8055 - val_loss: 0.4430 - val_acc: 0.8176\n",
            "Epoch 116/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4484 - acc: 0.7994 - val_loss: 0.4327 - val_acc: 0.8132\n",
            "Epoch 117/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4486 - acc: 0.8013 - val_loss: 0.4544 - val_acc: 0.8006\n",
            "Epoch 118/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4496 - acc: 0.8055 - val_loss: 0.4376 - val_acc: 0.8236\n",
            "Epoch 119/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4405 - acc: 0.8131 - val_loss: 0.4192 - val_acc: 0.8228\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "Epoch 120/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4296 - acc: 0.8004 - val_loss: 0.4210 - val_acc: 0.8147\n",
            "Epoch 121/250\n",
            "3151/3151 [==============================] - ETA: 0s - loss: 0.4644 - acc: 0.788 - 0s 10us/step - loss: 0.4267 - acc: 0.8013 - val_loss: 0.4183 - val_acc: 0.8095\n",
            "Epoch 122/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4251 - acc: 0.8150 - val_loss: 0.4145 - val_acc: 0.8073\n",
            "Epoch 123/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4188 - acc: 0.8105 - val_loss: 0.4171 - val_acc: 0.8036\n",
            "Epoch 124/250\n",
            "3151/3151 [==============================] - 0s 9us/step - loss: 0.4230 - acc: 0.8051 - val_loss: 0.4042 - val_acc: 0.8221\n",
            "Epoch 125/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4359 - acc: 0.7832 - val_loss: 0.4341 - val_acc: 0.7932\n",
            "Epoch 126/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4426 - acc: 0.7858 - val_loss: 0.4333 - val_acc: 0.7850\n",
            "Epoch 127/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.4387 - acc: 0.8051 - val_loss: 0.4342 - val_acc: 0.8087\n",
            "Epoch 128/250\n",
            "3151/3151 [==============================] - 0s 9us/step - loss: 0.4320 - acc: 0.8166 - val_loss: 0.4076 - val_acc: 0.8087\n",
            "Epoch 129/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4131 - acc: 0.8099 - val_loss: 0.3973 - val_acc: 0.8280\n",
            "Epoch 130/250\n",
            "3151/3151 [==============================] - 0s 10us/step - loss: 0.4142 - acc: 0.8162 - val_loss: 0.4126 - val_acc: 0.8310\n",
            "Epoch 131/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4167 - acc: 0.8178 - val_loss: 0.4194 - val_acc: 0.8028\n",
            "Epoch 132/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4418 - acc: 0.7972 - val_loss: 0.4156 - val_acc: 0.8236\n",
            "Epoch 133/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4292 - acc: 0.7994 - val_loss: 0.4620 - val_acc: 0.7732\n",
            "Epoch 134/250\n",
            "3151/3151 [==============================] - 0s 19us/step - loss: 0.4355 - acc: 0.7953 - val_loss: 0.4185 - val_acc: 0.8080\n",
            "Epoch 135/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4486 - acc: 0.7769 - val_loss: 0.4426 - val_acc: 0.7665\n",
            "Epoch 136/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4309 - acc: 0.8121 - val_loss: 0.4273 - val_acc: 0.7895\n",
            "Epoch 137/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4444 - acc: 0.8128 - val_loss: 0.4245 - val_acc: 0.8080\n",
            "Epoch 138/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4266 - acc: 0.8045 - val_loss: 0.4089 - val_acc: 0.8243\n",
            "Epoch 139/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4131 - acc: 0.8188 - val_loss: 0.3901 - val_acc: 0.8391\n",
            "Epoch 140/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4090 - acc: 0.8280 - val_loss: 0.3987 - val_acc: 0.8280\n",
            "Epoch 141/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4296 - acc: 0.7871 - val_loss: 0.4165 - val_acc: 0.7858\n",
            "Epoch 142/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4201 - acc: 0.7918 - val_loss: 0.4007 - val_acc: 0.8102\n",
            "Epoch 143/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4045 - acc: 0.8353 - val_loss: 0.4049 - val_acc: 0.8554\n",
            "Epoch 144/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4004 - acc: 0.8413 - val_loss: 0.3920 - val_acc: 0.8406\n",
            "Epoch 145/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4016 - acc: 0.8248 - val_loss: 0.3960 - val_acc: 0.8169\n",
            "Epoch 146/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4079 - acc: 0.8223 - val_loss: 0.4024 - val_acc: 0.8399\n",
            "Epoch 147/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4205 - acc: 0.8239 - val_loss: 0.4228 - val_acc: 0.8428\n",
            "Epoch 148/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4188 - acc: 0.8245 - val_loss: 0.4090 - val_acc: 0.8206\n",
            "Epoch 149/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4133 - acc: 0.8096 - val_loss: 0.4111 - val_acc: 0.7984\n",
            "Epoch 150/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4141 - acc: 0.8067 - val_loss: 0.3942 - val_acc: 0.8236\n",
            "Epoch 151/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4144 - acc: 0.8004 - val_loss: 0.4091 - val_acc: 0.8058\n",
            "Epoch 152/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4195 - acc: 0.7997 - val_loss: 0.4307 - val_acc: 0.7880\n",
            "Epoch 153/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4303 - acc: 0.8128 - val_loss: 0.4202 - val_acc: 0.8325\n",
            "Epoch 154/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4273 - acc: 0.8093 - val_loss: 0.4073 - val_acc: 0.7976\n",
            "Epoch 155/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4201 - acc: 0.7988 - val_loss: 0.4146 - val_acc: 0.8199\n",
            "Epoch 156/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4239 - acc: 0.8039 - val_loss: 0.3990 - val_acc: 0.8369\n",
            "Epoch 157/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4007 - acc: 0.8324 - val_loss: 0.3768 - val_acc: 0.8525\n",
            "Epoch 158/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4062 - acc: 0.8277 - val_loss: 0.3779 - val_acc: 0.8369\n",
            "Epoch 159/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.3925 - acc: 0.8258 - val_loss: 0.3795 - val_acc: 0.8228\n",
            "Epoch 160/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.4115 - acc: 0.8061 - val_loss: 0.4017 - val_acc: 0.8006\n",
            "Epoch 161/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4159 - acc: 0.8156 - val_loss: 0.3833 - val_acc: 0.8436\n",
            "Epoch 162/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.4010 - acc: 0.8242 - val_loss: 0.3891 - val_acc: 0.8369\n",
            "Epoch 163/250\n",
            "3151/3151 [==============================] - 0s 20us/step - loss: 0.4099 - acc: 0.8274 - val_loss: 0.3905 - val_acc: 0.8414\n",
            "Epoch 164/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4165 - acc: 0.8204 - val_loss: 0.4154 - val_acc: 0.8399\n",
            "Epoch 165/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4307 - acc: 0.8093 - val_loss: 0.4049 - val_acc: 0.8288\n",
            "Epoch 166/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4312 - acc: 0.8048 - val_loss: 0.4155 - val_acc: 0.8340\n",
            "Epoch 167/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4501 - acc: 0.8121 - val_loss: 0.4362 - val_acc: 0.8265\n",
            "Epoch 168/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4398 - acc: 0.8178 - val_loss: 0.4217 - val_acc: 0.8414\n",
            "Epoch 169/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4321 - acc: 0.8267 - val_loss: 0.4205 - val_acc: 0.8154\n",
            "Epoch 170/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4249 - acc: 0.8064 - val_loss: 0.4177 - val_acc: 0.7939\n",
            "Epoch 171/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4285 - acc: 0.8001 - val_loss: 0.4366 - val_acc: 0.7702\n",
            "Epoch 172/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4281 - acc: 0.8064 - val_loss: 0.4301 - val_acc: 0.8125\n",
            "Epoch 173/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4496 - acc: 0.8080 - val_loss: 0.4470 - val_acc: 0.8006\n",
            "Epoch 174/250\n",
            "3151/3151 [==============================] - 0s 19us/step - loss: 0.4565 - acc: 0.7972 - val_loss: 0.4376 - val_acc: 0.7969\n",
            "Epoch 175/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4440 - acc: 0.7877 - val_loss: 0.4319 - val_acc: 0.8043\n",
            "Epoch 176/250\n",
            "3151/3151 [==============================] - 0s 21us/step - loss: 0.4428 - acc: 0.7823 - val_loss: 0.4366 - val_acc: 0.7739\n",
            "Epoch 177/250\n",
            "3151/3151 [==============================] - 0s 21us/step - loss: 0.4318 - acc: 0.7982 - val_loss: 0.4121 - val_acc: 0.8295\n",
            "Epoch 178/250\n",
            "3151/3151 [==============================] - 0s 20us/step - loss: 0.4158 - acc: 0.8197 - val_loss: 0.4132 - val_acc: 0.8377\n",
            "Epoch 179/250\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "3151/3151 [==============================] - 0s 21us/step - loss: 0.4397 - acc: 0.7858 - val_loss: 0.4437 - val_acc: 0.7635\n",
            "Epoch 180/250\n",
            "3151/3151 [==============================] - 0s 19us/step - loss: 0.4618 - acc: 0.7610 - val_loss: 0.4541 - val_acc: 0.8028\n",
            "Epoch 181/250\n",
            "3151/3151 [==============================] - 0s 25us/step - loss: 0.4419 - acc: 0.8013 - val_loss: 0.4162 - val_acc: 0.8147\n",
            "Epoch 182/250\n",
            "3151/3151 [==============================] - 0s 71us/step - loss: 0.4174 - acc: 0.7902 - val_loss: 0.4068 - val_acc: 0.7939\n",
            "Epoch 183/250\n",
            "3151/3151 [==============================] - 0s 45us/step - loss: 0.3966 - acc: 0.8067 - val_loss: 0.3955 - val_acc: 0.8036\n",
            "Epoch 184/250\n",
            "3151/3151 [==============================] - 0s 29us/step - loss: 0.3998 - acc: 0.8023 - val_loss: 0.3959 - val_acc: 0.8399\n",
            "Epoch 185/250\n",
            "3151/3151 [==============================] - 0s 34us/step - loss: 0.4076 - acc: 0.8204 - val_loss: 0.4050 - val_acc: 0.8154\n",
            "Epoch 186/250\n",
            "3151/3151 [==============================] - 0s 31us/step - loss: 0.4205 - acc: 0.7924 - val_loss: 0.4141 - val_acc: 0.8013\n",
            "Epoch 187/250\n",
            "3151/3151 [==============================] - 0s 33us/step - loss: 0.4200 - acc: 0.7978 - val_loss: 0.3992 - val_acc: 0.8147\n",
            "Epoch 188/250\n",
            "3151/3151 [==============================] - 0s 29us/step - loss: 0.4065 - acc: 0.8115 - val_loss: 0.3939 - val_acc: 0.8206\n",
            "Epoch 189/250\n",
            "3151/3151 [==============================] - 0s 37us/step - loss: 0.4046 - acc: 0.8067 - val_loss: 0.3787 - val_acc: 0.8354\n",
            "Epoch 190/250\n",
            "3151/3151 [==============================] - 0s 21us/step - loss: 0.4092 - acc: 0.8093 - val_loss: 0.3926 - val_acc: 0.8347\n",
            "Epoch 191/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.3909 - acc: 0.8251 - val_loss: 0.3788 - val_acc: 0.8414\n",
            "Epoch 192/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.3998 - acc: 0.8185 - val_loss: 0.3951 - val_acc: 0.8154\n",
            "Epoch 193/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.4050 - acc: 0.8277 - val_loss: 0.3809 - val_acc: 0.8362\n",
            "Epoch 194/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4081 - acc: 0.8137 - val_loss: 0.4219 - val_acc: 0.8050\n",
            "Epoch 195/250\n",
            "3151/3151 [==============================] - 0s 26us/step - loss: 0.4085 - acc: 0.8216 - val_loss: 0.3897 - val_acc: 0.8288\n",
            "Epoch 196/250\n",
            "3151/3151 [==============================] - 0s 34us/step - loss: 0.4047 - acc: 0.8343 - val_loss: 0.3837 - val_acc: 0.8399\n",
            "Epoch 197/250\n",
            "3151/3151 [==============================] - 0s 32us/step - loss: 0.4069 - acc: 0.8194 - val_loss: 0.4148 - val_acc: 0.8243\n",
            "Epoch 198/250\n",
            "3151/3151 [==============================] - ETA: 0s - loss: 0.4015 - acc: 0.840 - 0s 16us/step - loss: 0.4321 - acc: 0.8194 - val_loss: 0.4392 - val_acc: 0.8236\n",
            "Epoch 199/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4484 - acc: 0.8162 - val_loss: 0.4311 - val_acc: 0.8095\n",
            "Epoch 200/250\n",
            "3151/3151 [==============================] - 0s 27us/step - loss: 0.4436 - acc: 0.8064 - val_loss: 0.4310 - val_acc: 0.8332\n",
            "Epoch 201/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4474 - acc: 0.7982 - val_loss: 0.4405 - val_acc: 0.8043\n",
            "Epoch 202/250\n",
            "3151/3151 [==============================] - 0s 28us/step - loss: 0.4471 - acc: 0.7940 - val_loss: 0.4145 - val_acc: 0.8436\n",
            "Epoch 203/250\n",
            "3151/3151 [==============================] - 0s 32us/step - loss: 0.4203 - acc: 0.8185 - val_loss: 0.3977 - val_acc: 0.8495\n",
            "Epoch 204/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.4207 - acc: 0.8261 - val_loss: 0.4065 - val_acc: 0.8317\n",
            "Epoch 205/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.4307 - acc: 0.8121 - val_loss: 0.4103 - val_acc: 0.8280\n",
            "Epoch 206/250\n",
            "3151/3151 [==============================] - 0s 34us/step - loss: 0.4174 - acc: 0.8210 - val_loss: 0.3945 - val_acc: 0.8221\n",
            "Epoch 207/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4044 - acc: 0.8109 - val_loss: 0.3862 - val_acc: 0.8317\n",
            "Epoch 208/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.3994 - acc: 0.8223 - val_loss: 0.3719 - val_acc: 0.8406\n",
            "Epoch 209/250\n",
            "3151/3151 [==============================] - 0s 21us/step - loss: 0.3999 - acc: 0.8112 - val_loss: 0.3824 - val_acc: 0.8191\n",
            "Epoch 210/250\n",
            "3151/3151 [==============================] - 0s 11us/step - loss: 0.4041 - acc: 0.8080 - val_loss: 0.3763 - val_acc: 0.8399\n",
            "Epoch 211/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.3946 - acc: 0.8051 - val_loss: 0.3786 - val_acc: 0.8065\n",
            "Epoch 212/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.3897 - acc: 0.8153 - val_loss: 0.3625 - val_acc: 0.8428\n",
            "Epoch 213/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.3968 - acc: 0.8223 - val_loss: 0.3732 - val_acc: 0.8347\n",
            "Epoch 214/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4076 - acc: 0.8182 - val_loss: 0.3941 - val_acc: 0.8236\n",
            "Epoch 215/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.3981 - acc: 0.8204 - val_loss: 0.3853 - val_acc: 0.8377\n",
            "Epoch 216/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.3997 - acc: 0.8258 - val_loss: 0.3796 - val_acc: 0.8451\n",
            "Epoch 217/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.3995 - acc: 0.8197 - val_loss: 0.4070 - val_acc: 0.7947\n",
            "Epoch 218/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4088 - acc: 0.8093 - val_loss: 0.3854 - val_acc: 0.8443\n",
            "Epoch 219/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.3938 - acc: 0.8270 - val_loss: 0.3828 - val_acc: 0.8169\n",
            "Epoch 220/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.3990 - acc: 0.8089 - val_loss: 0.3715 - val_acc: 0.8406\n",
            "Epoch 221/250\n",
            "3151/3151 [==============================] - 0s 12us/step - loss: 0.3944 - acc: 0.8140 - val_loss: 0.3807 - val_acc: 0.8377\n",
            "Epoch 222/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.3951 - acc: 0.8245 - val_loss: 0.4001 - val_acc: 0.8436\n",
            "Epoch 223/250\n",
            "3151/3151 [==============================] - 0s 22us/step - loss: 0.3958 - acc: 0.8359 - val_loss: 0.3811 - val_acc: 0.8503\n",
            "Epoch 224/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.4206 - acc: 0.8166 - val_loss: 0.4421 - val_acc: 0.8006\n",
            "Epoch 225/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4322 - acc: 0.7972 - val_loss: 0.4000 - val_acc: 0.8340\n",
            "Epoch 226/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4079 - acc: 0.8289 - val_loss: 0.3905 - val_acc: 0.8369\n",
            "Epoch 227/250\n",
            "3151/3151 [==============================] - 0s 26us/step - loss: 0.3999 - acc: 0.8340 - val_loss: 0.3887 - val_acc: 0.8480\n",
            "Epoch 228/250\n",
            "3151/3151 [==============================] - 0s 19us/step - loss: 0.4071 - acc: 0.8299 - val_loss: 0.3826 - val_acc: 0.8340\n",
            "Epoch 229/250\n",
            "3151/3151 [==============================] - 0s 26us/step - loss: 0.4033 - acc: 0.8226 - val_loss: 0.3924 - val_acc: 0.8162\n",
            "Epoch 230/250\n",
            "3151/3151 [==============================] - 0s 26us/step - loss: 0.3940 - acc: 0.8347 - val_loss: 0.3769 - val_acc: 0.8391\n",
            "Epoch 231/250\n",
            "3151/3151 [==============================] - 0s 27us/step - loss: 0.3884 - acc: 0.8245 - val_loss: 0.3770 - val_acc: 0.8132\n",
            "Epoch 232/250\n",
            "3151/3151 [==============================] - 0s 24us/step - loss: 0.3860 - acc: 0.8201 - val_loss: 0.3674 - val_acc: 0.8384\n",
            "Epoch 233/250\n",
            "3151/3151 [==============================] - 0s 24us/step - loss: 0.3867 - acc: 0.8264 - val_loss: 0.3784 - val_acc: 0.8414\n",
            "Epoch 234/250\n",
            "3151/3151 [==============================] - 0s 18us/step - loss: 0.3925 - acc: 0.8356 - val_loss: 0.3826 - val_acc: 0.8340\n",
            "Epoch 235/250\n",
            "3151/3151 [==============================] - 0s 20us/step - loss: 0.3952 - acc: 0.8223 - val_loss: 0.3884 - val_acc: 0.8236\n",
            "Epoch 236/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.4079 - acc: 0.8058 - val_loss: 0.3861 - val_acc: 0.8206\n",
            "Epoch 237/250\n",
            "3151/3151 [==============================] - 0s 17us/step - loss: 0.3981 - acc: 0.8159 - val_loss: 0.3977 - val_acc: 0.8147\n",
            "Epoch 238/250\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "3151/3151 [==============================] - 0s 23us/step - loss: 0.3921 - acc: 0.8331 - val_loss: 0.3789 - val_acc: 0.8391\n",
            "Epoch 239/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.3906 - acc: 0.8308 - val_loss: 0.3724 - val_acc: 0.8510\n",
            "Epoch 240/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.3921 - acc: 0.8356 - val_loss: 0.3880 - val_acc: 0.8436\n",
            "Epoch 241/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4055 - acc: 0.8182 - val_loss: 0.3802 - val_acc: 0.8414\n",
            "Epoch 242/250\n",
            "3151/3151 [==============================] - 0s 16us/step - loss: 0.3899 - acc: 0.8331 - val_loss: 0.3710 - val_acc: 0.8547\n",
            "Epoch 243/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.3920 - acc: 0.8366 - val_loss: 0.3753 - val_acc: 0.8443\n",
            "Epoch 244/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.3921 - acc: 0.8334 - val_loss: 0.3772 - val_acc: 0.8340\n",
            "Epoch 245/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4115 - acc: 0.8172 - val_loss: 0.4021 - val_acc: 0.8354\n",
            "Epoch 246/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4113 - acc: 0.8178 - val_loss: 0.4052 - val_acc: 0.8199\n",
            "Epoch 247/250\n",
            "3151/3151 [==============================] - 0s 13us/step - loss: 0.4121 - acc: 0.8109 - val_loss: 0.3933 - val_acc: 0.8191\n",
            "Epoch 248/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.3998 - acc: 0.8175 - val_loss: 0.3881 - val_acc: 0.8384\n",
            "Epoch 249/250\n",
            "3151/3151 [==============================] - 0s 14us/step - loss: 0.4066 - acc: 0.8178 - val_loss: 0.3913 - val_acc: 0.8243\n",
            "Epoch 250/250\n",
            "3151/3151 [==============================] - 0s 15us/step - loss: 0.4021 - acc: 0.8210 - val_loss: 0.3884 - val_acc: 0.8273\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "Bw0vAnagzBR4",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "#### Accuracy and Loss Performance of the Neural Network Training"
      ]
    },
    {
      "metadata": {
        "id": "6Yg2ajEN5Ka7",
        "colab_type": "code",
        "outputId": "b30ec478-6b3a-4a64-cdd0-f282740fe79b",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Visualize results (using history for accuracy and loss)\n",
        "\n",
        "plot_acc(history, \"Model Accuracy\")\n",
        "plot_loss(history, \"Model Loss\")"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsnXe8XWWZ77/v7n2f3pOc9A4hRDoCggg4gAVHUGzD2EZHHXVm1OsdGcvoVbmj1zK2EQuOsSKKKCoCitISkgBppCen13127+/941lrt5xzkhNy0li/z+d89t5rvWutd6199vN7n6601liwYMGCBQvTwXayJ2DBggULFk59WGRhwYIFCxaOCIssLFiwYMHCEWGRhQULFixYOCIssrBgwYIFC0eERRYWLFiwYOGIsMjCwgsaSqlupZRWSjmOYuyblVKPnIh5WbBwqsEiCwunDZRS+5VSWaVUU832zYbA7z45M6uai18pFVdK3Xey52LBwvGERRYWTjfsA24xPyilVgPekzedw3ATkAGuVkq1n8gLH412ZMHCscIiCwunG74PvLHi85uA71UOUEqFlVLfU0oNK6UOKKU+qpSyGfvsSqnPK6VGlFJ7gZdPcux/K6X6lVK9SqlPKqXsM5jfm4CvAU8Dr6859xyl1M+NeY0qpb5cse+tSqntSqmYUmqbUmqtsV0rpRZVjPuOUuqTxvvLlVI9Sql/VUoNAHcqpeqVUvca1xg33ndVHN+glLpTKdVn7P+Fsf1ZpdT1FeOcxjNaM4N7t3AGwyILC6cbHgNCSqnlhhB/LXBXzZgvAWFgAXAZQi5vMfa9Ffgb4BxgHaIJVOK7QB5YZIy5Gvj7o5mYUmoucDnwA+PvjRX77MC9wAGgG+gE1hv7XgPcbowPATcAo0dzTaANaADmAW9DftN3Gp/nAingyxXjvw/4gJVAC/CfxvbvAbdWjLsO6Ndabz7KeVg406G1tv6sv9PiD9gPXAV8FPg0cA3we8ABaEQI2xEz0IqK494OPGS8/yPwjop9VxvHOoBW41hvxf5bgAeN928GHplmfh8FNhvvO4ACcI7x+UJgGHBMctz9wHunOKcGFlV8/g7wSeP95UAW8EwzpzXAuPG+HSgC9ZOM6wBiQMj4/FPgX072d279nTp/lo3TwumI7wN/AuZTY4ICmgAXsoI3cQBZyYMIxUM1+0zMA5xAv1LK3GarGT8d3gh8E0Br3aeUehgxS20C5gAHtNb5SY6bA+w5ymvUYlhrnTY/KKV8iLZwDVBvbA4ams0cYExrPV57EmO+fwFerZS6G7gWeO8xzsnCGQjLDGXhtIPW+gDi6L4O+HnN7hEghwh+E3OBXuN9PyI0K/eZOIRoFk1a6zrjL6S1XnmkOSmlLgIWAx9WSg0YPoTzgVsMx/MhYO4UTuhDwMIpTp1EzEYm2mr215aN/gCwFDhfax0CXmxO0bhOg1KqboprfRcxRb0GeFRr3TvFOAsvQFhkYeF0xW3AS7TWicqNWusC8GPgU0qpoFJqHvB+yn6NHwPvUUp1KaXqgQ9VHNsP/A64QykVUkrZlFILlVKXHcV83oSYxFYgpp81wCpE0F8LPIEQ1WeM8FqPUupi49hvAR9USp2rBIuMeQNsBl5nOOavQXww0yGI+CkiSqkG4GM19/cb4KuGI9yplHpxxbG/ANYiGkWtxmbhBQ6LLCycltBa79Fab5hi9z8CCWAv8AjwP8C3jX3fRHwEW4CnOFwzeSNixtoGjCO2+2lDYJVSHuBvgS9prQcq/vYhJrM3GSR2PeI4Pwj0IM55tNY/AT5lzDOGCO0G4/TvNY6LINFVv5huLsAXkFDiESQY4Lc1+9+AaF47gCHgfeYOrXUK+Bli3qt9LhZe4FBaW82PLFiwIFBK/RuwRGt96xEHW3hBwXJwW7BgAZAcDMS894aTPRcLpx4sM5QFCxZQSr0VcYD/Rm
v9p5M9HwunHiwzlAULFixYOCIszcKCBQsWLBwRZ4zPoqmpSXd3d5/saViwYMHCaYWNGzeOaK2bjzTujCGL7u5uNmyYKpLSggULFixMBqXUgSOPssxQFixYsGDhKGCRhQULFixYOCIssrBgwYIFC0fEGeOzmAy5XI6enh7S6fSRB58h8Hg8dHV14XQ6T/ZULFiwcAbhjCaLnp4egsEg3d3dVJScPmOhtWZ0dJSenh7mz59/sqdjwYKFMwhntBkqnU7T2Nj4giAKAKUUjY2NLyhNyoIFCycGZzRZAC8YojDxQrtfCxYsnBic8WRhwcIZiWgf7LjvZM/CwgsIFlnMIkZHR1mzZg1r1qyhra2Nzs7O0udsNntU53jLW97Czp07Z3mmFk47bLgTfvR6yGdO9kwszBZ2/Bq+ewMUiyd7JsAsO7iNzl5fBOzAt7TWn6nZPxdp5VhnjPmQ1vo+pVQ3sB0wpeRjWut3zOZcZwONjY1s3rwZgNtvv51AIMAHP/jBqjFmM3SbbXLevvPOO2d9nhZOQ2QToIsQ64f67mM7R7EIyREItBzXqVk4Ttj9AOx7GKK9UDfnyONnGbOmWRgN4r+CtJRcgfQiXlEz7KPAj7XW5wA3A1+t2LdHa73G+DvtiGI67N69m1WrVvGOd7yDtWvX0t/fz9ve9jbWrVvHypUr+fjHP14ae8kll7B582by+Tx1dXV86EMf4uyzz+bCCy9kaGjoJN6FhZOKfEpeo/3Hfo5tv4D/XAWJ0eMzJwvHFxM98jq66+TOw8BsahbnAbu11nsBlFLrgRuRdpUmNBAy3oeBvtmazL//aivb+qLH9ZwrOkJ87PqVx3Tstm3buPPOO/na174GwGc+8xkaGhrI5/NcccUV3HTTTaxYUc2tExMTXHbZZXzmM5/h/e9/P9/+9rf50Ic+NNnpLZzpyBkRb9HeYz/HyHNQyEDkAPgbj8+8XmjofxqKOeg89/mfKzYIX34RvG49zLuoTBYju2HhS57/+Z8nZtNn0Yk0UzHRY2yrxO3ArUqpHuA+pHeyiflKqU1KqYeVUpdOdgGl1NuUUhuUUhuGh4eP49RnHwsXLuRFL3pR6fMPf/hD1q5dy9q1a9m+fTvbtm077Biv18u1114LwLnnnsv+/ftP1HQtnGowNYvY89AsYgPVr2caEiPwzZfA0I7Zu8bXL5VrHA8MPgOZCdj1e/k8YYjPF4BmMVkMZ22npVuA72it71BKXQh8Xym1CugH5mqtR5VS5wK/UEqt1FpXqQZa628A3wBYt27dtF2cjlUDmC34/f7S+127dvHFL36RJ554grq6Om699dZJcyVcLlfpvd1uJ5/Pn5C5WjgFUdIsngdZxAfl9fkQzqmM5+6H3o1w6HFoWXb8z5+ewlLx1y9BqBNWvWpm5xvfL6+9GyE9ARnj/KO7j3mKxxOzqVn0AJVemS4ONzPdBvwYQGv9KOABmrTWGa31qLF9I7AHWDKLcz2piEajBINBQqEQ/f393H///Sd7ShZOdZQ0i+dhuT3TNYu9D8lrYpZ8ez1PTr79kS/AU9+d+fnG9slr3yaIHJT3Tp+YoU4BzCZZPAksVkrNV0q5EAf2L2vGHASuBFBKLUfIYlgp1Ww4yFFKLQAWA3tnca4nFWvXrmXFihWsWrWKt771rVx88cUne0oWTnUcF83CEKJnomahdZks4sdooi7k4ZmfTh26evBRefXUlbelJyTCLHoMJG5qFpko7HlQ3s+7WMxRudTMz3ecMWtmKK11Xin1buB+JCz221rrrUqpjwMbtNa/BD4AfFMp9U+IierNWmutlHox8HGlVB4oAO/QWo/N1lxPBG6//fbS+0WLFpVCakGyrr///e9PetwjjzxSeh+JRErvb775Zm6++ebjP1ELpwdK0VDHqFloXWGGqtAsMnEhj6bFz29+JxtD28saReIYyeKh/4A/3wHuIOz/s4Qov+jvy/sPPiav9oqinWPGmnaiV57xTCoqjB+Qa4zvh2
33yLaFV8Du38PoHmhbdWz3cZwwq0l5Wuv7tNZLtNYLtdafMrb9m0EUaK23aa0v1lqfbYTI/s7Y/jOt9Upj+1qt9a9mc54WLJx2MDWLWH955bvtlyLcjgbJMYniAYhXkMWv3gNfXiekcTqj1+iaGWg7OrLIpeGed4mQN7H7AXm12SVB7rnfVY83zVCViZEmWeQSkC4v7o4IrYUkFr0UvA0yf5tTNAs4JZzcVga3BQsgK7enJtfuTkmYmkUxRz4+LOaPH78BHvj49MeZMAnCE67WLAaNKLyeJ+Dxb0weSbRlPQyfwKoC2++FQ1P4B6aCSXbNS8rmtukwvAM23SW5JyYGn5XXYlEIIVPh0O55AvJpaF5ebSIaq7CWT6b1DT8HT3zz8O3JMcjGoGEBXGPkLoc7yxreKeC3sMjCggWA9a+HX75bfrSnA3JpMk6xlW/fuVMicEzoaQMDBaYJqn2NrLwLhpbRtlpeH/kC/Oaf4dtXi8PVRLEgK/AN3z4ON4Fkoleu2GtRLMI9/wB/+cLh+4Z3wuDWyY/LJeW1bt7hmkUuBV9aBzt/A5FD0LOxPL7/aXkt5KBoRBvm0/KXjsLeh+ETLVJuRdlh0ZWioRULMtZ0UkO1lgLyvfzinXDfByWnohKmv6JhPpz1t3De22DZ34DLL5FVlmZhwcIpArvhvht45uTOYzL0bYK7Xl02PQHk0wzZmgGIR4bg2Z+X92WOIvnUFFYda+TVJA+b8Rz2PQwOL9hd8PDnysfFh0SIpieO8WZq8OzP4X9eM7XvZXi7XGuy6937fvjleyY/LpcSYR7ugtRYmQxB/Bmju6BvMzz8GfjJmyFrksUWea0koXzG0CxisPsPksi49efQ9SLwN5XHgGio9UYvmWiPHPO9G2X7jl+XzWN9T1XPd9wgmbp54ue47nPwsk/JtsZFMDIJWQxtP7qFwXGCRRYWLAC0GNnyprA4lbD3IRFSw2WTkM6l2J8JApCMjopAdBq5O6nxI5/TNEO1G2RhmqLyFSaVpdfCnPOr4/zNUN2ZkMUzPxXT1WQw7frJKUqOmBFHk9n/R56bOoM9l5SwU78QKomR8j7zOabGhPzSEfExAIzsFOLo3Vgen08L+WQmwOEub19wOTg85TEgZqh5FwlRRfvE7LT3Icn1eOq7EJ4j+3o2VM/XnF+w7fB7aVos30ElMYztg69eINrRCYJFFhYsgNjuAfo3i63+WKOMZgOmIDft4cUCqpjjUF7mnIuNQioi9m44OrKIDYIrIKtWKN9vpf39rNfKOcf3lc0sZqjuVAlpk+Fnt8Hdb598n7miT03hDD5gkkVUTIRPfFOEZiYm0U7xwfLcKpFLgtNbLpJYaYoaMvwyyTHDVxAvz0MXZX8lQWYToAtyTfPZLr8ezvpbxrNGtFM+I9FMiSFoWiJCf6K3PD49IYTQvBRaV1CoJQuTDN0h0rkCxaIQw57hON/e4RBtsdL3chKyuy2ymGVcfvnlhyXZfeELX+Af/uEfpjwmEAjM9rQs1MI0Uzz7M7HVV/oATjbMPAiTLIxV7BD1ADhjPYAWezccpWYxKII01CGfK8mi60XwpnthycuELArZ8v7oDDWLbGL6/eaKvvZ8w8/BHctg533l/c/+TOz9E4fKvgFdnDzaKZcSsihpFhWCdqhCs0iOyjkqNZv+zVKXKdhePTddlPuv74bX3gWNC/nRJkMjyKfhoU+LprH6NeJniPaUvotCyjClecKMhlcT3/ske4YqCDc9Aa4gOWxcecfD/NfDewD46oN7eGjUWMj810XwwCeM+zGuW+sXmUVYZDHLuOWWW1i/vloFX79+PbfccstJmpGFSVHMVX92G/Ut+7fAD15zZKE3m6jVLAzfRUyFyOEgkDRWmTMmizbwNYqAM805+bSYb+ZfKrZzU1t59mdw103lORwtWUzlgDZhruhrzUy9G4Uk82mZQyZaFpCZGIztKY+tTCqMD8nccklxDptkUZmYN7RdXpNjQhhQ9tnY3R
IRFu0t33vlvUYOViXhHYwaWs3QNjG1nfc2PvLHMXZnwjDRy8CQzG1fT1+JLLbaFhNWCQ7tKvvHdGqcoifMo3tG6Y2k2DUYY2AizS+39DLoMgphJEfg6R8b7w1yez6FJGcIiyxmGTfddBP33nsvmYw4wPbv309fXx9r1qzhyiuvZO3ataxevZp77rnnJM/0BY5aU4Zpu//zHbDrdxK+ebJwmGYhcyvY3aRsARqyhsCYzgyVisBv/rVsZooNkHQ38dn7d6JDHWWhY9r6TTQulNeHPyvJYTuM55A5SrIwfUAO7+T7zSikWvIxfSMf7oFz3yyretP0kolXh6hWhv7+8Ga4/yNCQpOZodJRWfGDkI95XXN/yzI5d7TPcFSrw8nCK2SRzOYZShlmqL7NgCay4HrWP3GQx4ad6Pgg0TE5byo6WiKLzVkR/rn+crHQoaEhdkZsfPo3ovWMxLPcvamXXEHzyTddx5fzNxLztJcTAE3iPIFkMavNj04p/OZDxz/SpW01XPuZaYc0NjZy3nnn8dvf/pYbb7yR9evX89rXvhav18vdd99NKBRiZGSECy64gBtuuMHqoX2yUMiJsH3D3VJF1NQkmpcD98Chx+Ds1x6/68WHYPsvYd1t02f5aj2lZlG0e0g7QrRlDDKZjiwO/BUe/xqsuFEcsPFBNtnW8tVn9vCWriaaTXNGLg1OT/m4YIestk1zkSmw01EJa52iaVcJJlkEmiffbz7nWp9FtE9W8C5/2Z9kmp6yMfToXnLagUvl0dH+ctXSyCE5LpcS0nMFxPFvaiKmc7tuXrn+Esj34fBA42JxqscGJM/B4anWejLRkmaxfyRJBhHe+fgIDuCvPRmKGvpyQRRxchEhPVtiSLRXT5i/Rup4D6DGyn6RbHyMCfxs7xfT1Eg8w8GxBE0BFy+a38hb7K/n3JCHC8fvlf+JpGWGOiNRaYoyTVBaaz7ykY9w1llncdVVV9Hb28vg4OARzmRh1lDMScZsfbcIqFqz08HHj+/1vnIe/PoDR14ZpsbFZxBoFVNJJlbSLIoOD3lXmCDG6tzfIsIxOQlZmJpSNikr82ycok9W3ftzdRWaRapaC7DZyuatKmhxDB8JA2bewhQVkqfSLKL9ZX+KaRI0w0szcXLDu3lWd1PQitSYoSloLYI9EytrSErBsuskRDcTLzu3511MVRHs+JCMb1xoPAst13e4D5+boVnsH02Q0UIW2ZhoEA/sSdAR9lDwSUitNyokFUgLoRfdYZ4ZLjCg6/FFy9qRMx8jqn34XHYWNPkZiWcYmEjTGhLibg66GSjWCWlnYmXNIjEE+aNr0fx88cLRLI6gAcwmXvGKV/D+97+fp556ilQqxdq1a/nOd77D8PAwGzduxOl00t3dPWlZcgsnCMVCWcV3BcqC0BSyQ1vF7j3VCnkm6NtUESUTlbZfU8E0Qc27CLbejR7bizKEg7Z70O4wxIyx3jrw1k+uWZg5GrlEyT4/4ZCGR09P+Fln60cVC3K/zhqTUcNCifNfcDnseQBcQck2Tk+AJ8S0GDVW9LU+IRNT+SxifWUHs6lZmM8iG0eN7WVPcRld9hEcY734QIiukBVhqovl+zjv7fDMT2DLD2U+Tp/kl2z5n/L1EkOySDC1M4BQ1+GaBZQ0i30jiZJmUYyLD+Gh/Qn+Zt1SFsXnwy6oT+4HoKUwCArGCl4S2QJ7ne3Upw6UTunKRSm65/LMR17GF//wHF96cDd9kTRd9XIPzUE3h5LGc4gPVjvkY33H3lp3BrA0ixOAQCDA5Zdfzt/93d+VHNsTExO0tLTgdDp58MEHOXDgwBHOYmFWUchRVHZ+trEH7azQLCoT4Q4dJ+2iMvs5E5t6HJQE5Ej9OQDs3P5smcAcHmy+ioqnnjohjMnIoqRZlMliBEPo5epQxbysrnNpcHrJ5ov8r7ufke6S578drvk0LLpKztG8VKY2cRTZ7qaPpDDF6tck5ek0C081m2YSEzhSIwxSz6Cuo2iG85
r3XalZAHStk3ySTXeJZtG8VBz7lUiMHE4W4c5pNYsDowkyGD1mDOE9nnOxqCVA55x5ANQXZU5eJfd/KCnkMu6dR0ehp5Q74SnEKbjD2G2KpqAbrSVstjUsmkVL0M1eI6+GWL9cz2V8/vUHpczLLCfoWWRxgnDLLbewZcuWUqXY17/+9WzYsIF169bxgx/8gGXLZqE5ywsFQztgzx+f3zmKOcZSmg/8ZAt9KVuZLPIptJKfSS5ynOzDlWXFj5RtbfgrdhZklR2PR8sE5vTi8DcAULB7xNdwJM0imyidc6BYR2edF2/TXAD27X2ulJ/w86d6+MHjB/nJxkOw4DIhjDnnyTmMkiBf/PWGwy5ThUJe8hPM95MhN0meRSEnhDYFWUSHDqIoMqH9DOp6bGYkk6kBlMhCVuWf+91ODrZcIf6Tvs2SgOmtr56HLgi5NCwsbcr42pjI29G1ZFHhs/D5JBHSnh6j6PBSxIbf7WDxgslMd7AnZgfA3baUMAmSkUEo5PHpJLjlPpsCkviXL2raQiZZeHguaYTUxwaE3MzSLLt/L/6cWfZ3WmRxgvDKV74SrXWJFJqamnj00UfZsGED3/rWt9i+fTvd3d0AxOOnecXPE42/fBHueffzO0cxjzZKXQxUkkUuTdbfQV7b2LPvOBVzS45IJi8cBVkIsTybMkghkyxpCcrpwR2UFXLaEUJrzVjRz8T4JIXzcpWahezvL4Sp8zl55w0vBuCpzU8BmoLdzZcflHt96kAF8XSeC6//Gfps0Y6T0SNoFgWjBIayHYUZqkIgxwcBXTJDfejX1Vp3YVyc7BlniCHq8aQGxIxoEk4mKud1+Ygks3zlwT3cE10i58xMQPMy8DUcPheXX7a7w+AKcs/2OIeixcMSEAumGWo0wZIu8U24suMUjQx6v9tBc+scJsOGwSILm/34O0UOjB3cWvofMLXE5mA5S7xEFiE3B7KGyS/aK5pF+1nyWdnhJR+d9HrHExZZWDj9kY1PXS7iaFHIU5B+W/Ql7VWaRUZ5GCFMPlLTUa5YgJ+/HR76PzPLaE6Mlm3M6Sj89iPlZje1iA2At55tY7Jq1LlkSUtQTg+BsJDFQNbLO+96it/uyZCNjXJoLGmc94+l+wAMshgAm4OBnJegx0FDu5hevDFxIO+NFOgZT7GiPcTWviiprKEdKAWLr2IMw/xxpPBZs16SKyhmqMnMJKWkvArNwtS8DM3ioQOZqkNsMdHw3IFGDjoX4ctHJGEtYpKKNnwvPp7ukTk+me0uO8pbVkgZcBAiM2E6xBvmQ7iTP+4YIoMTVdMNOq78xDN5hmMZFneID8uu8+TtYvYKuB3g9JCyCXnoims82pvnggWN+DuWA5Ds2UomJv+7Dr+QhalZAFVmqAReig6fFFDUBYnoqpsnPTYayxrRbMEiCwunP/KZcv2eY0UxR14LWUQLbgpmietcmhQuhnQdqrY9Z2wAnl4vTXJ++6HDz/nc/VI8rhbJkXKEUToCj31VcjkmQ2wAgu08Oywrc51LlQS/zeXD5hNzykjBy2+3DtDQ3EaYOP2RJDz+X+UmOiUHd1JKfQRaiWWKBNxO8NaTwUU4KaGkh2IiM99x+ULyRc3TPdUO3uciIjac2Sj5whRd5KDsp3Ab5pPJynJMplmYORbBdrL5IgNJSlFHAO6kkIk31Mij9dfz/fp3SUjsrt9Xn9vpLc29N5qF7ktlWs3L+PBvDZNioA3MwFuXj3+751me7n4L+QvfyyO7R6qua2K86Gf/iJBcV3PZZ5Qzosh8Lvk/yrqFyAvBztKYgYyL8xc00ti1mD3Fdlq2f5eJMfm/8hhaYlPAVRpfaYYCyHhbYMAone5vgnc9TvFlnz5sjrOBM54s9Amsyngq4IV2v0B51Xw0mctToZAji/zIE3gommSRT5MsOhnWdXjSNWUlStdT5YJ3lfjzHfCnz1dvyyZFYNfNk+Mih5CV8BSRcLF+8v5W9oznKWoF2VRJ8Ntd3pL9vLWlnT
teczbnLluASxUYH+qRiCAzc7mkWcRFswi0Es/kCXocoBQJe5hQTsYejGrmNfi4ZJGYWJ46WE0WppYTIMVEagrzkvHsAIkug8Od3MWCmKrsLsglKeYMDaJCsxiOy7Yo5QitYFbmGaxvoiXs4/78WtlRmy3u9LHF0Cz6J9Js63w1W0JXcDBXxw+3jIkm6WsszS+Fm+89eoB/2b6Ax0NXE8/k8Xj9pdNFtLwfyXs5MCokN6elbM7K2So0CyDQKGY0R0M3AGntJIOLC+Y30BLy8n8Lr6EuvhvHBulv4Qs1lI53O0Q0V5qhABLuZhg08sV8jeD08rFfbee96zfN+m//jCYLj8fD6OjoC0aAaq0ZHR3F4/EcefCZBNPcMVOyOPh4uctcsUBO2/C57OQdXpyFlOzLpYjlHQzpOkKFGlOXaTqZf6kkzNWaohLDh0c7mclU/mYxi5h9DPLVppYSYgNMOJoARRqXCH1D8NtdvlJkTndXJ68+t4tAnZhF0kO7y3OACgd3UnwWBlmYgi3tCFGfl7F7J4osbw/R4HfR3ehjy6Fqstg6lCKlXYRUgvHkNDH+RojvSNaI0K/1WximvpxfhOpfthpzHt4uz8bXyFBU5h01BHUCLzbkO5vb0UFbyMOmiIciNvRojU/J6ePpnghKQTJb4IsH5nHj0FvZeDACKOK2kOGjELIYSss8dwzE+MS92/A67XS3lclgRIsDeijnYf+ozL27OUTREKMZmxCa33im9qCRPV4vkVEJW5AFzX5aQh6cdhuPuy9hwD2f+j3ScMn87pRSNAXceJw2Ql45V4vhx4jYm8r352+iL5Ji/ZMHCbgds57Qe0bnWXR1ddHT08Pw8DH24D0N4fF46OrqOtnTOLHIHYNmMbRDGvvc/D+w7OVQzJHVEskS8Bi5C7kk5NNM5H0MUUeDniAyOkyd3y35Beb1FlwO+/4kFQK6Ly5fIzFSXToDyr4VfxPaHSQ9tEfWzJNpFsUixAcZahEhlcKFypc1C5vbC+bCwCANT0hMGXpsvzEHw3RW6bNIjqI7ziGWzhHwiAjIucK0pQ+CgkMxzTntYt9f2BwoCUYTOwdixPARJMlYYhrNwnBw96UcNEF1TwlzLkDS3UKYA0THDCLd+xB0XwJKMRiVc5ipzygvAAAgAElEQVT2/4i7A39GcjcWzeuiGILfPNvPYK6Odqod7hN5B4PRDOvm1bPhwDiP7JLzP7hDnsl+1cXZTUtKCYm9SYXLbkMpgzBuXEldb7B0vhHCLND9DGTc7BuJ0xJ043M7ySgXbp0mreS7MMkCv0kW3QAE6xr5yt+uLZ2vKeTlKX0B12XEVxSuLxNBU9CN065KBBD2OnHZbfzM8yre17APd/QAw/ZmPne/dCz8hysWTf09HCec0WThdDqZP3/yEDYLZxCORbMolXg2ksYKObJFGwG3g7pwHcRAZ+OQSzGRC0KgFXta4/rB9eAPw233l6Nv5l8OfFyylU2yyKUlykVX2/T7+nroALSvkf60g8ZMj5jMJ9MskqNQzDNQrEcpyCk3tnyaYi5FTjvwupxgmkkMslCG49YVNZy98RrNIpeA1DgFdx25gi5pFgVPHe6YCPM0LlYYZNHd5Oeve0Q7V0qRzObZNRgn5w0SKiYZSxxZs4gWPaVnXAUjbDbmaiYM5JPjEgI6vh8ueBcAwzGZd2tLCwztJunrgMwe8tpGd3srC7tsXLu6nT3/0QLZarIYNTSa8xc0sOHAOAnDUf+n5+SZvDH7r2x+2ctQ/y35IweisLorzKWLmxiOZbj1gnmoe8pa+ogOEVU+RpM59o8k6G6SZ1+wuaCQJqUMzcLwWZSKGNZ1y3fir2d5ezmJsTno5tGJVVxnfK5vLCd8Xr2ilUSmHG6slKI56Oa/dhb5ke8T3PPWNVzxhQ3ki5o3X9RNZ90UtbeOI85osrDwAoG5ap5JS1QzNn/CKBVRzJMt2vG57TTUN0APDI6O0ZxNkiy6aOuYB3vBN7YdxpCeyC
Y5NS+RchxGHaRkNo8vZaySs3GxzdtEgHz39xv4MPD7AwUa0i46bBWtO2thhM0eyoVpCrjJFtzYC2kK2RQZXHicdrnukmugW8JfzfwBf9K4r1xCVvDmM0qMQj5NxinaStDQLErRQUBau1jeYZBFo49UrsBQLENryMPj+8bIFop4AvWEMkl6pzVDyT1NFAyHba3PwtAsxuxNdAGFZEQ69IFoa8BgNIPdpmhsaoFIEOWth3FI2gKE7GUrekPHAti/g4wzhDsn5sBYUa67rK06yzxmCOGJrI3hVJEWw2exLwprV9TxvquWlAcbzY5y2s4DhbWM6hAjsSz7RxNcuaxVbsvugUKUuPbgdthwmPMKGRnoZqJfTb5IS9DDnwcXklMu0AXc3rIW865JNIV/uWYp9z3Tz/1bB/neUyPki5qvv+FcXrq89bCxs4Ez2mdh4TTC2D741lXH1gP7WDSLScgiXRQzVFuTmHIO9g9RzKZI42L+/JrQxGd+LNdTdnGQtp8NfZvpGU9y1u2/48mtz5XHGlnK2XyRouFD+MOBAmlbhYlqMs3CSJ7blw3SEfaQV25sBlmkceFx2KQd7Ot+BHPPl2OM/IH6TEXzpsRwWbMwKq6m7CJATbIwo6pAzFsdRsjmvEZZPZvRP396bhi3w0aovomQSkyvWRhmqIhJFsW8aBvfvQH2/6WkWQwoMdeo5KiY84Id0h0OGIqlaQq4sHWuhbkX4PIZDZ9c1YK3rq0bgFF7WXDG8nJvi1oC2G1izvE4ReSZGtX+EaOUOUIua+bUJOsZnfDSuLi7eClf97+T/aMJRuLZkmah7UIoYzln6byA9LW4ZT20SJhsLVk0B930JjQ73KtIqMARk+puXNPJP79M8jP+5/GDNPpdXL2iFZvtxBQftcjCwqmBQ49Dz5PVHcqOFqYgnBFZGOYZ0xxVyJEpKgJuB+0tYjvuGRpB5dOkMTQLIKec0mr0mZ+Ig9tbz4839rDPvwaGt3Nw/y7yRc2jT28vX8twcvdGUtQTI6vt/GJHHG+golRHTdhvvlBk524hnOeSAdrCHrI2D45CWghMO0WzqIURHdVWrMgJiQ9LyG3FM0raZRUbcEtoqDNQLn/R1lBfspXPNwSi6bf403PDnDe/AWegkQaVIGJqFqnxcrBA6SZkX0yLiUQXsmJi2vcw7H+kpFkcQFqJupMD0iq1/ayS4ByMZiRs9OL3wq0/JRCS+6sqcwKokISn9uqy3T+Sl3tr8LtoDbqxKXjxYjH1XLSwsXxfBlkktZslrTWNxwzNIoMTh02xoDnAJsPhv6hFxiqjSu9g2lH2V4Ccd+m1kkluc06iWbjJFTSfSt/EL9qm6CVegwVNfup8ThLZAuu6609olWqLLCycGjCLxE0VFTQd8sdCFqZmYZCFoVn4XHb8ASPqZXgERzFNGictHZKR+6xtGSy9TqKfIgfR3jo+8attfGNI6iU5dklXxJ6envK1DLLYP5qggSjjBMnmNaG6ivpEFfedzRe56v8+zK//sgmAbVEv7WEvBZsbRzFDMZckjQuvaxKycHrI2Ty0qIoIpsQQiWR1VYCYMslChJvpGAforAgHbQ97cNoV+0eT9EVS7BlOcNmSZvA2UK9i4uDOpeA/V0vOSSUMzSKhRZhmM5ly9FdqrKRZ9GQDRLQff2YQIodI+9r56x4x44n5q5yk1lAvc6traKm+VljI4lDWX9IGJgzNIuRx0lHnpbvRz8oO+W4vWtiIw6Y4UEEWGeVhbmNNQIJxrgxOGgMumoNusvkiLoetRDjKIJT+lL2UY1EFpeDSD8CqV1VtNjO1H0t3M7HwxsOPmwQ2m2LtXNF+XtQ9SRb6LMIiCwunBmKG8J6q4NxUKBbLZSUqyWJwK+z6w+GrXRNmP+PUuJSuLuZJFWxG9q0IjExMBJZyevF4/exseAnfyVxOLmSUcuh/mowjRCyTZ0OsCRoW0tQnGdNhXdkDQcji4GiSRhUjYpSZbWqsCIOs8FmMJ7PsH0
3SqsYZI8R4BtrCHvJ2D85iBp0VbcftmEQwAXl39aqbxDA6W625RG1CFqYZyhcuO1crcwccdhtz6n0cGE2w0Sj9cf78RvA1ECTJRCIlCXXZWLl3RWki8r0kMUw5lWSRHCsl5A2kbfTrRprS+yEd4fExH6//1uMMRtMMRdM0BytCwd0yb+WtuceQRAAO5bwUjQJ7YzkHLrsNj9PGB1+2lNtvWMnCFiGGhS0B5jb62LB/vJRn4Q+GDn+mpmahnTQF3DT6xaR2yaKmkhZhc4nmNJ53VZuhKnHFhyXCqwKVZT3MeR0Nzp0nZLHOIgsLL0jEDbPJTMmiUF6Rj41UmF7u/Sf4wavhW1dO3hI1MQRGLSiivWKGKih8LkdppUlCwlydbvm848Vf5p78hfRS7usc0SJoeiJp9NJrmTuxgdXhNAt8yfK1jNo/2YMbmW8bRPmbaAt5qKuv+LFXaBZJI2pnoTfGQFEEQ3vYQ8HuwaXTkEuQwl2yvx8Gw8mdcRnnjw+jahzoZoLZZJpFd1t1GfbuJj97hxNsPhTB7bCxrD1Ycohn42NlE5pJBDX3FDcS6tLpVAVZjJZKfQwk7fTrBhZmpDHRvlwDWsM9m3sZTWSrNItSgp+nliykNMiE9pO1y72NZR2EvJJ/cMGCRl68pJmrlrfy0Zcv54IFjbzuvLk8vm+MnoSYchrravwVUNIs8jYXjQE3jUYpjqtXlH0jDpeMSWp3tRnqCGipJIvmwDQjq3HLeXP52PUrOLtrutr2xx8WWVg4eXjgE3DAyHyOHSNZVNj6BwcHGJhISw2i4Z1SlbNvE/z2w4cfFx+E1pXyfuIQupgnWbARcNtLAsmZEWe72whPXdomK9btqbJQGcqJoEjlCkSWv46iVnxcf5XlwQpzWkZ6P9y24+9ZpHroXrScn7zjQlSlDTuflr7Tf/xkKWRyrjPKoBah2B72UnR4cOksKhMlqv14J/NZQKm4YF8+iHaHIDGEvVht3hsvGmRhaBaqomT3wvbq8t1ndYXZORjjjzuGWN0Zxmm3lQhJJ0dL38HIoZ3c90xFRd0aM1Sm1gxlkHhPQtGvG/Eg43el5Z4/f7/4bKrMLWbpkMM0iw7GL/kYvyxcSNIIYR3N2Al5q8t1eJx2/v7SBTjtNm69YB5tIQ8P7hNib2maZKVuaBbhYJBXndPJ6s4wLUE3V1WShVuul8CD3z35dzIZTM1CqbJv6GjQ4Hfxlovnn/CumhZZWDg5iByEP38ett4tn02yOJquX9kEPPIFKXtdsSIPqzgj8YysWtMROPt1cNE/wlPfFfIwkUuL6aTDSJCKHEIVc+Sw43OXNYs6LRqBzycCakFTAIdN8ex42VR1MO0pRdocVJ18xflmzsluYHX0YUa0EbJpdDazUeSehrfguv7zzGnwlUwqct8ZqeP0p8+RysgzqC+OM1wiCw/a4cVNBntmgii+yR3cgM2IiBrOe4g7GiA+hLOYlogbxEk/nhMhWjKbGMI/rZ2018Ts33B2B1pLs581cwwhbUZPpcqaRSDZw12P7q+4J7mPuGGGymTTVZrF3j4xBUbyTvp0maC2JkMoBdlCketWt3Hxogpz3VSahVKEX/JPjDpbiWovKDtjKU3Ic3htJxMep533XLmY3YbFsH1SspC5t9aHecU5nVyyuIkn/tdVVcX+bMaYJB78rqPXLAJuB16nnc4675Tf5akEiywsnBzsNeLp0xOiCcRn4LPYcR/84WPQu6GUPxC311GHEco5skvGNS2WaBSQns4gtZo+Z4TBtp8loa9GtdKCtosZwelFo2hQ4mvwBURAuRw25jf52TkYhzrpAbE/4eTCBUao7ViS/0pexphnDvZ8kh6M1WcmRtFodZppXFHu4FZBFjqfLtnwU6kEoHFnx8l6RIC1hNxohwe3zuLIRpnQ/qkFjCH4c44g/YUgxAZwUCDlFAEbI0A8W8BpV6UaRKVjbJ7DVqwLmgOcZZg81sw1hLRhhnJnJ9jRI9+dR+WIjV
Q49g3TV9J0cKcrfRbj7O4dIqOdFLATd4nDWis7W2M+Xr22i5ef1c7t16+svjfzmdVqFojzd2FzgLGcC1x+opk8Ye/UZAHwmnVd9AdX82RxCa1dCw4fYGgWpdfJYHyfCe2ZkRlKKUVryF2KqjrVYZGFhZODvQ/Ja3pCbPpmE5zCUURDjRm9ixMjJc1i1NaIT2WIRGMwapBF46JyuKJZx+mPnyh3Zwt1ykrVyMTOYxczlFIUnX4alGgWwUBZqK/uCvPUwQg6LGQxWvDxmnXiXN1yKEK2oNjTLf0e4n6zZ0WMaERCdUMVJR3MktlR7UUVMqV55VIJgqSwFbO4w200B93ieHX48KkMznzc0Cym91l4Qw3sT/spRqSabM4jpDZa9BNN5Qh6nGVicHrRDg9+/+SC66Zzu3DYVMm5auZz1Kk4924s95J2xQ6ROfgU3HVT6X4ShmZBrE/8FP5myMZQ6QhJRAjrsPgcCoF2ckXFOXPr+Mrr1tISqqlzVjdP+k201JCIgRXtIQYyTrTTy0Qqd0SycNpt3HT99Xwg8FmWdk3SMtfQGkqvk8EgkiTuqR3cU+CzN53Nh69dPqNjThZmlSyUUtcopXYqpXYrpQ6r4ayUmquUelAptUkp9bRS6rqKfR82jtuplHrZbM7TwgmE1lJptZIszEgoODozlEkWFfbyAUR4JSJDolnYXbL6N80VZgnseRW1m/zN8kM3hTR2cXADyuWjESGLcLAsQC9a2MRYIsu4S3IDtKeOl69uJ+Rx8MR+8XEklr8WPGGi3nkSCZSJkZiQyCpv6HCyGNCG+cOI5sqkEjQqme9la1fwpVukpSrOssCamMZnYZJFuL6JgXwQZYYlGwJ+TAf4655RWmsEsfLWY3PVhI4auPX8eTz4wctpD3urrtFsT7Crt1x7ba4aIr5xvdG9bS95HOSVCGz3hFFapfNcABoyvcSNHAxvo5BvzCNZzx1Tla8INMOHD0LXuZPuXtkR4lvpK5m46H8RTeVKhfimw9Ur2/jTv1xBcDKTlalROKcjC9kX1158M/BZAJw3v6HkCzvVMWtkoZSyA18BrgVWALcopVbUDPso8GOt9TnAzcBXjWNXGJ9XAtcAXzXOZ+F0x8Y74QurpPqqzSlCPF6OYiocTZ5FJVkY4/flRQjnJ/olsa9hgZTYMDULs45TZX2iUKf80I1y5AXspZWhcgdpMjSLcKhcLuLiRbI63xSVH/iqRd047Da66suNdlqaW+DdG9nQ9UaJBMpESRtd5fzhCrJoWcbIvOt4uHh2+X6AXDpRIqqW1g4uMMxcqkKQR7Uf9xHIoqW5lREdLjXvcQRl5Tyh/ewbSfCSZc2HH+ecXEjbbEr8LCbcIbA5WB7O46VM8HPVEKp3o3xIjJBTTsJ+Oc6ZkEXB9oJoEcsKz9Fj78Blt9HSKTXchmwyp47wFGRxBKzsDPOUXsKGumuIpo9shjoiZqRZeGasWZxOmE3N4jxgt9Z6r9Y6C6wHajNPNGD+EsOAWaPgRmC91jqjtd4H7DbOZ+F0RrEIj34FWlfDTXfCyleQjo+x7bmy87l35Ajd1wDGZIV614Ob2LBH/mWey4kQ1tEB0Swajdo6Lr+EyJqaRTYOi6+Gt/wGgq2TaBYigFWog7CSaJ2GcDlqqT3sZUGTn5/uFaFw8VlSR6irXoRbR9gjkS2BZgKBALGih2I6JiGm1JihXH6Gr/k6+7WhpRhkkc8kaDSIqlSMDqpW/RP4j2iGCtY1UvSVj/fWiQ/FDPe9ekVb9XG+xnLY8JGgFHjrWRjI4lFCFtruYqntEMExozlPcowcTprCcj17Vu7pBzuEvPwqg79zJY/86xV0NDfyh8I5PGkXLaqj7tjK7C9vF+f4k/vHKBSnd3AfFUpkMZ3Pwk8RG0ncM3Jwn26YzTvrBA5VfO4Bzq8ZczvwO6XUPwJ+4KqKYx+rObaTGiil3ga8DWDu3LnHZdIWZhF7/yir/ld9U7JZe56kmIxw3183s8IwnS
eSyenPkYqUVuCeXITe4XHWAQeK4iB1JXphfB8seznFoiZXLOL21JV9FpmYCMV5F8lnh6dEFvkKzUKaE/0ZgHCo2kxw8aImfvTYWjad/e+cs1wSrd58cTfL2oLcdsmCUmZ1nddJDC+F1ASF4jgZ7aQ+XB0bX+9zkUEEWjIyjB/IZZKTkoW9gixi+HDZpycLPGFevDYEj8tHX508o4Q9SKvPzerOmjj9l3588m52U8HbwDxfhnkhBSlQy17ONVt/ga1o9I9JjpDBQUM4ACNgz0nAQGXkU75hMS0hD0GPk1tz/8yKVIigOzm5SegoEHA76G708+he+R95/pqF6eCehrzOfRN/HG+h8IR9Rg7u0w2zeWeTBQHXdiG6BfiO1voOpdSFwPeVUquO8li01t8AvgGwbt26F0aHo9MZW+8WQbbiFQDkXSF8pGgsDJO0u7FTIJmahiye+SkcLK8h6okxkpLVf69upqAV7fFtUMyzM9fE2+54iESmwOPBEHZTs8jESuGXWw5FCI7naHXH8SNkUfqx15UXH6rGNPOeKxdz9cpWzln8itK2ixY2cdHCpqpx9X4nce2lkIqiixEm8NPgqxZedT5nqXWnvyjCtJhJ0GY3miZV5D/Y3eV5pB2hqePs6+YACsJzOK/JVyILm0E8ne0d3LZi/uEF6DrXMiP4GggUorzzonXwAHDxe7GZodAAyVHSxUbCfpm3yyCL/gqycLSKZdrM99gxEH3e0UErO0Lc+7T4aWrzLGaMo9EsQh3E518DT2yeUZ7F6YbZNEP1AHMqPndRNjOZuA34MYDW+lHAAzQd5bEWTiP8aksfkeFeWbE7pGTCiJHQttQxwDB15HCSSk3TR/vhz8KT0oIy4WqmUUXJGGSRwM2YqmNxRlpr/mCnjWS2wEg8w1DOy/joEOlsXrQId4A/bBvklV/9C8NpyKcNzULby2aECrLAUU0WzUE3ly6eJHKmBnVeF3F86EwMWyZCTPnL5asNeJx2bDXO00I2Ras9JlE/FULK7i5rFlnHNE7R+m5439NS5jtQMc9wF5z1Wq6+4fW87cULpzh4BvDWi1M+lwIUtK9ht38tWXMNWsyT1g7qQ2LacuWNgooVZOHtFLIwy44UNayq1XhmiKtXls1rJ0SzQIoKOmyKOfWTBwicCZhNsngSWKyUmq+UciEO61/WjDkIXAmglFqOkMWwMe5mpZRbKTUfWAw8MYtztTDL+PdfbWV0eICoCvL6bz1GKlvgUEp+yOf6h2lonUPR5pSSEJMhm5CKpAAo9nuXSwVXY3xGu4i7mujQ4ix/JtnAZUuaeemKVnZN2DjQ28f6R3dDMc9ozs17129iVWcYm9ODsyDaTB57OZqlkiymi4SZBnU+J3G8qEwMRzZKyja5gP8/N1e743Q2SbMtBv5qTcXhLvsTpiULkPkrVe7WBuDywau+AR1rZnYjU8HbIDWecklxjCvFE2s+zRsy5Yz5LA6ajGgyT0FIOYqfhHYzrMM0NYtgD1aYb16yrKZI4AxRWYrjhPgsEIJ75vaXlcqWn4mYNbLQWueBdwP3A9uRqKetSqmPK6VuMIZ9AHirUmoL8EPgzVqwFdE4tgG/Bd6ltZ6BMdXCqYR4Js9IPIsnN0F/zstfdo+y6dA4+2IiINyJPoLNQhbZzBTRUINbAQ1XfxJe+TUOFRqpVzHyGRH0aZzkfSIktM3Bs3E/7WEP//yypdh99YRVkoP9Eo3zx31JbErxjTesw+7y4i4K4TgcTillAdNqFkeLOp+LmPZiy8Vx5aJknKFJx3k81avRbCpOA9EqfwWA0xiX03b00TqiXf7y/I/xPqaEr14yuPPpUhRV57yFPKGXloZkcdJUJ3N16iwFuxdQjBNkt+4srfwrfRRHo7VNB4/TzjyjeuzRhM5Of7KQkGL9kTtuTloF+AzCrHpjtNb3AffVbPu3ivfbgItrjzP2fQr41GzOz8JxRD4rCW+X/FMpnh+AQp7Ub/6NeWoh/mKUg0VZZT
51YJzxaMVaJdCGtrvIpVOlFp5VMCuarnwVhDvpufdhQiqFyoppw+PxUfC3QQRygU5ySTttYQ9LWoMsWbWQiU1bGB6RXIcn+rLcctFc2sIeBpwe7CmpTBv0VwjTYLtRaFBJg6FjQL3PSRQ/jlyckHIw6JlC4NSYOBKJOA1MgL86aMPlkWcnCXlHOSelxBQVOXjMGtKU8NYLUaTGS0S0uCWAxkbO7sVZSJHBSXOo7IMo2GUO/y//SnLeVi6saUoEx8F0BPzk7Rfys6d6mdvwPM1CDjf8825QVv6y9QQsHB/0b4G//j/YeV/19s130bz5K/yN7TFCOkF/Vn68j+8b45mRinHBVpTDjU3niCRrejUD8f1PkXTUcc9eTb5QZH9KhE44JwlhDeEQOigmjXGXBM61mUln3jr8OsHouETIxLWPN1/UDYDdVSaIkL9CsNgdkocxRd7B0SDocbJLd2GjSKMeo1hbOtxEjYkjk4pTpycOM0OZBQ0ntH9m8fymKep4axZmwmO0v/Sc2sOSa5BW8iyz2kFr2EPeWJfmDbL4ceEK9tZdWDqVUorXnT+Xr906ebLdTNES8vDOyxcen2J7NvsRu9i9EHDmxnlZOLEwQ1NH95S35VLw0GcAWGzrwaY0uwzT0593jdCJC7MSBMF27E4XLvLSUc7oG0ByDL7zN9iHdvNkYQl3/H4X58xtYLQoNvtWRsngpLspgD0s2b+HEOHYZrQGxRPGofM4UyPggnkdLaUMYWdFhFGdv0aY1s09rIPdTGC3KQ64FpXj+CapZwQcplm4dZpg8XAzlMsgiyg+LlxYXRl2WpjnOe6ahXE/sf5ynwmlWNQSIDbuJQjklIuw10lGOXDoPDm7PGOHTVX3qQD+45Wrj+/8LBxXWJqFheMDM0N6rFwniN0PQKyfAnaWKkm56c/6CBmRL3M62stjA63YnR6cBlmUML4PhrbiJcOTrOTgWJItPRHGEeHUrsZIayfzmnx4GqRG09aU2QPCEP7GCrhDiSqzZE75ulVkEawxWXScA/XzjuFhlBH3dpEyVtm1rUBLqNEs2hjHRrEqbBbA4zXMUNrPy89q56hhRkTNlmYR66/SwBa3BIjk5Z5sDhdKKYpKvvOcEoL4/GvO5n1XLT6+87Ewq7DIwsLxgaFZRPue49CYkSsxvg+AA875LFIS+RwhwCvP6aTB7+Ld15xDKaUm2IbT5cFFjv5KsjDKedyW/QBb5r0JgP9+ZB8TSpzFHWqUDC66G/20d0tBtofGm3A5bNSbOQ1GyY92JWaoVfPL+Z0eb5kgGoI1TuMrPyaZ3s8DYb+HHXQD4PRP0dmsRrNoU5LtbVZ2NWE6uKP4WTyTXIT6+VKe4wgRPTOGqVlUOLgBlrQGmSgYvRoMbaZok+8ia5Pt16xqe94hshZOLCyysHB8YGgWtvF9vPnbj5PKFmD8ALjD7Cx04VQSzDauA6ydV89T//ulXLKkRaJNAIJtOFxu3CrPWKXPwihzHdEB1s2XFfLmQxE6O0SLCKkkae2ku9GPs3Up/9r2TR4srqEtVFFq2xBqnQZZLKzQLLyVZBGqWXnbHWB/fs7WOp+TTTlxVLuDU5FFjWZRIotqTUTZXRSw0dnePjNb/AXvhLf/6fjb3St7SlRoLeu660vd8UzNzdQsskrutVQa3cJpA+sbsyDIJkl//aXk9/752I43NIuASjEx0s/nf7cTIgco1s1lf7acExAhUF1R1BMGuxs8dSiHG4+twHiiovKsoVlkcNJZ56XTOPbspYtLDuMMLrqbROjPXboWUGV/hXkNYJFb5mj3lOfj85a1icbg8e8rUO9z8WyxG4DmlrbJB9VoFu0lsqhp86kU9nkXcs4FV85sEk4vNBw59HPGqJxfhWZxdlcdOYdRPNBoOapt4oNK4sHrtJ/wLm8Wnj8ssrAAQOy5h/H0P8HWx39/bCcwfRbAxQ0T/GpLH3r8AL20MKjLQmVcB8pRSiCCPNgqq167C6+twFiykixEs8
jgwueyl+oZXbq0Bd1+luxTLloNZ6lZobX6GkIqq/xGvSVXmRTc3rKQM/MBjifGEll+UzyP33e8k9bll04+qHTL9CkAACAASURBVEKzyOAipAwzXi1ZALzlPlj7huM+z2NCZVvYCrKw2RRho5+1y/QJGeHH8YLzjM9HOFNhkYUFAJI7HgAgYZTSnjHSEbI2EQyv6s4wFEujIwf466gfb4P4CIrYiCtf9ao/2FFOeLK7cKv8lJqF12XnmlVtnDuvntWdYewdUtrb4XCW6hyd1RWmwe+qtukbQtce6wOnX0IhDaiKVb3XdZxt+sCrz+2ivamRC974ianzNZQS7Qrp+FdCbevQUw02e6knR22IcWuThP2WyNgw503kHVP34bBwSsMKnT0VkY7Cvodh+fUn7JKeg38CoJiMHGHkFEhF6HEtYG56O6u8YzQzgS2fZmuunusuOQsegqQ9SFvIV86SBrjhS5RiSw2yGEtMolloJ363g8uXtvCKcwwHdZuQRYceKg132m384f2XVech+BqElGJ91QmDUG0Cep7+iclww9kd3HB2x5EHOjxQyJBy1UPKuJ+pQm1PJXjqpNNhDVnMa2+DXTCnWYha2cUMNZZz4vFaa9TTEda3dipi013wo1urO8jNJuLDhKPSU8KWOYp+EpV48lvwq/dCOsJgPkDU3kBDYYRlHtFQCqG5nHeWFItzB5v4am3SVbAVjGQ6HJJnMZ6cQrOoXZG2C1mEitUE1+B34ap1oJqd1Vw1folKsrCdxBWvYYrKuoXM8g7frJDXcYfXMEXVhOU6faJxBHxG2LBD7mUsa7fMUKcpLLI4FWGEnJp9G2YdRmezjHbgyM6QLDZ+F7asR6fG6c+6ybibUPFBLm2WarAvPn8dtpBEHzkDjayZM81q2e7CSZ7xZA6tDW2j5LNwlhoTldA4g8qpRitP3LVkUWF6sp1E4ezwSCtYI7kt7zoNtAoom8pqM91NUnaU8y0AYoVJSN/CaQHLDHUqInJQXtPHaBKaKYzr9Oom3Pk4xaI+vNeBiUwcHv0yLL1OSmEPPgu6iIr2Ml48i2KgFWKDXNaSgmF4yQXrRGB4Gw43AdXC7sZBjmy+SDJbkN4ShmaRxVnqj12CzS4NexoWHPkeS2RRU8xvls1QRw2HG1x+HEYZ8oL7NMlB8E5BFu5qsrA7hSzSuPFYZHFawtIsTkWYZJGaBbKIDcDnl5QL84H4SIA+mggRZyQxTR/s3b+Hhz4NX78U7v0n0MXSrgntx9vQAfFBlrpGwd+C0yh+x6pXwaKrpjipAYcLe1FyLEp+i3yaonJQqCwfXomL33t0vp0OIwHwMDNUpWZxEtdODg84/TTUifB1BWdQzuNkYirNwtCQTMe9vdSn2m1pFqcpLLI41aD17GoWg89CfBD2/6W0qWCQUsLTTkgl6Y+kpz4+bjhfm5fDsz+t2pV2BKlr7oLEMIztqy6V8fI74Ly3Tj83uwu7FrIo+S3yGfJGjL7v+QgZd1CaAbWurN5e5bM4mWQhmkXAL2Q2Zbb3qQZTs6gtJeIyyKKkWYjWltIuy2dxmsIii1MNqfFST+hZ0Syi0m6S0V2lTYlohIx24qlvJ0yiutxGLeKDoOwi/IFi21loI9KloakFW6gN0NC3yehjPQPY3ShdxE6hSrPIKxcuu+2wLnMzxht/AVf+7+ptlZrFSTVDeaT3hNPIKD8dIqFgas0i1CH/J0HxV5nRUGlLszhtYZHFqYbIgfL72dAsYkIW44e2seVQBK01kfERonhpbm7BoYqMjE2TaxEfhEALD6QWkbnwffzI8Qr2F4zezm3tEDAim3KJmRfhM4S1i1yFZpEmp1yTm6COB04VM9SaW+CcW8tCd7KEvFMRJZ9FTRHGujnwgZ3QfYl8NoIHUrgsn8VpCossTjWYJiiYJc2iF4DMwE5u/MpfeO3XH2PngV7StgCL5kq9pcj48NTHx4dJuRq57bsb+Hz+tXy2bzW7C9Khbv6cznIYLMxcsz
AEt5M8YwmjPlQ+Q1Y5n58J6iiuCZxcsjjnVnjRbWWyONUT8kyUNItJyp8Hmsv1qIyFQFK7/397dx4d91keevz7zK59s7zEsmwpcZw4CUkcE4eEm4QA2VgCpIWkBUJKmy5st71AwwUKBU5Le2h7S+tyCZAWKCVQCsT0hISQsOSSBTvEiWMnjpfEtizbkmytMxrN9tw/3t9II0XL2NZPI2mezzk6mnnnNzPvT2P/nnm357VuqAXKgsV8kw8WsTpfWhbpPhcslksvn71hNXu7hwhnhmhqaiJS5b7NHjx8BHLZ8XtT5A0doyPt+qO//tgBehNpKpa5VNNnr1kF1QX7J59iyyImGfoKWhYpfOznzo9ZBELzY4ObhdayWLrerYqf6YtBMN+ysG6ohcqCxXxzYj9E69x/Ph9aFomeDtLq/rO+a22an334ajadEaaypmG0S+GljsPse/DLsHmT23yogA51sWswRkNlmFQmRzAgbNh0FQSjROpWQPWysYNPYcwCoLlCCsYsRrw1Fj596y8MFvPB6JjFAgkWy9bDxztdt9N0vG6oJBELFguUBYv54sl/g6fvge3/Ae1Xugu3Dy2L4NARnpWz3J2evdRVhKnIxd36Ay8xXGtVmr3bH4FcevzCwFwOjXdzOF3DX950PpWRIJesbqDyklvhQ9tdnUNRd6GTANS1nFzlvC6hpgrGjVmM6CQL8mZLvhuqlAvyCuWD10IZ4C6WN8A9rFFi1g21IM2Tr1OL3y9e6OZ93/oNP/2zq8Yn0gM3Q+lHHwJAK5cgN/4dqR/9Gbne5wnnlOBUC+ROwvNHBzhwrI/rsn0ca3wD9O6BHpfig+SAFyzcBept51YRePpF91UiWbCiO9lHIJcmHm7ihvOXU18RprkmCoGAm/2SV73MTZ082dlF3vH1UehNZlxZZoTkZKu3Z4vXmpkyyd9cW36+69pZur7UNZld+TEL64ZasKxlMRee+U/kgY8zNJLhX36+9+WPx93ahYcrr+PdyQ/z6n/ZyX/tijPQ28Pv/dtW9nYNksvpy593Ej75w2f57D0us2xTyzq3TqJjm3twZMBtQuS1LC5dEWJdwO1sV5hYcLDHjXesWd1GOBjgyrObOXfFhBXR4HI2tVzy8vKZeBfuuogyMBoskiRzPnZDBUOuC2q+dEM1tsOfPDZ+7GcxsDGLBc+ChY9eODZIJpuD7/8+Vx7/DhUk+e62Q/QMTVghHXezj/6591Ukmi/kwlX1nNu2isZAgkf39fC6v/8lN//fR+kfTk/yLsVJZXIsx40/nHnm2bD6VdCxFbJpGBkc1w0V6t1PA65F8eiusT21f73jOQAuWb9u+jd765fh5rtPvpJeV0VdWBkcHpsNNawhf2fQhGLzpxtqsapayki41ks1b5edhcg+NZ/0J9Lc+I+P8I3HDoxeBC8K7COZzvHYvgkJAuM9ABynls2/s4HNv7OBi85eQ0hTPPzBTXzqTesZPPw8X/jC53j33b+ma2CaFdZT6BlK8aZ2153VuGINtL7KLf47+DigboVzfn+CQ0+MPu+h7XtGg9vBQy8B0NY2w65rIq5r6mR5yeZqw7lxLYtELkSVr8EiOn+6oRarV76Xn1+zBRBbZ7FAWbDwSU9fH98LfZyuZx+C5W5Ht+trXDbZ7sHJWxarW1rHxjO88YNVFSluf+VStjR+kb/M/iNP7jvKF36yG1Xl/mePct+OIzPWJZdTjg0kWRPwUnXUtbhgAfDC/d77ed1JFfUuJYgnnB7k3u2uS2qg23VDSeGMp9nkBdWaSI6B5FjLIpELU+FXNxR4LQsLFr4KRVne0oYIrKirmPl4M+/Y/xCfDHe9xEWB/Tx45Gl0dQQBLg3sJhJ8M10TgsWRzg4aNcQ1Fxak3M7Phkn2wa/+kcrBlwD4k4ujfOHJDg4eOsSuYwlS4Rpes27ptN00x+MpMjmlNf0i1LW6VkS0BupbYc9P3EH5bKyv+gD8+CMu4V56mNVVae7f3cUN5y+nYq
SbTCRKKOZTRtR8sAjlSGVyJNNZol7LwrcB7vz7WjeU7y5cVc9vPvF6Gqoipa6KOQXWsvBJstd9CyeTZHjY7e3QPrKL5dWhcS0LVWXn3n30SR3vuLRgXUJ+ZexwHzz/36M5dm5bH+C8M2r5fPwTbKv8AG/P/Zhf7plmxTVwtN91Wy1J7BufSG/VJuh5wd3OB4tNd8DNX4NrPwuxWtbVK0/sP8Eje7pZKd1kalr8W7zmTWOtDrlMtoPJTME6C5/HLBbCRkOLgAWKhcuChU+y/a7rJiYphhMuWERzw6yrGqBrcGzM4Vd7jyPxHsK1y8a3DvJ7PxzdAf2HYP1bAKge7uS//3gTa7IHiOSSfCB8Lz/ZOf2OekcHkkRIUz34oltElbdy49jtWMGspgt+Czb+HsTqWF2VIZXNsfln+2gN9BBZsubk/xjF8loWVUEvWAyn3DoLPxflgQtSpdwlz5gFwIKFXwaPAtAYzjCSTHBCXerp9ujQuJbFbw72skT6qV2yYvzzl653XUGPftHdP/dNLotn30EXPDQH1ctpYIiHnjvqZl0V2Nc9xFv/5Vcc6R/m6ECSM6UT0cz4lsXKgumtEzcF8sqaQsMsqY5y8ESCNcHjBOpbT/1vMpPRYJEFYGA4iWiOEY3437KwbihjpmVjFj4JJty3/dbaANKX5JAupVGGaI0M0tM9Fix2HO7n7cEhQjUT5tWHonDWa2HXvW7wdeUGqFvpgkU+Z1PLRkLP/zeZ4QGeOdxPfUWYv/7x86xdWk1NLMxTB/v45mMHCIhwbvCQe87SgmCx/AJ3kcylxzarKRSrIzAyyM8/cjX9fb3UfKnfjXP4xQsWlV6wSMRdqvYRwv52XzSsHt2RzxgzuRlbFiLyfhE5pUQ1InK9iOwWkb0icuckj/+DiGz3fl4Qkb6Cx7IFj205lfcvpUjCzTxaWqFESXFIXRrvFcE+N+DstQSePdxPAwNQOcnOaOtudL+XnecSzNWvdsHihLf2YdWlADTKIA89d4y3felRHtx1jG88doAnXjyOkOM7Ww9x8ESCDZFOdzEu3Lc6HHMrhmF8N1RerA6S/VRHQ6wUN73X12DhjVlUBNy02UQi4cqDUTa1+bgZ0E2b4eav+vf6xiwCxXRDLQe2ish3vYt/UaObIhIENgM3AOuBW0VkXA4DVf1TVb1IVS8C/gn4fsHDw/nHVPXNRZ3NPFI54gadm6JZYqQ5ok2oBFkqfai6GUo9QyP09fcR1SRUNb/8RdZe67qe8mMLhcEiUgPN5wBwcVOWrz7yIn2JNLdfsYahkQzH9zzBjtgdrEg8zwM7O7lGfu1eZ+JAbssrXRCZuN0oeMHCbblK74GxOvglUgVAhbpv+QODrmXRvqLJ37n5wbANcBszgxmDhap+AlgLfA14D7BHRP5KRM6c9olwKbBXVferagq4B7hpmuNvBb5dVK0XgJq0+yZeF8oQJcUwEbSqmfqsW0XdNTDCjsP9NIl3MZ4sWFQ2wrt+AFd91N2vb3WbF3U/B41tULkEgMuWw0gmx9ervsiHV+0mGBBeIfupJsHfNf6AV+We4oxsp9svYaIrPwrv/K/JZzh5LQtgLHW63y2LQJiYuhbF4y+4GWXnrV5kqS+MWYCKGuBWVQWOej8ZoAH4noj87TRPWwkcKrjf4ZW9jIisBtqAhwuKYyKyTUQeF5G3TPG8O7xjtnV3Tz99dE6pUp91q7TDmThBUbKBKFKznJqMCxbdQ0l2dRznYvFyRU0WLADarxrbUKi+FVA4+ITrTvJmTF3YlCFIlquyj1N16BdcvKqeFnF/j3XxJ/lyzd2kK5fCuZM00Kqboe3Kyd87WgupQbe3Rd8BNxDsd86iSBXhTJyAwLMHXVfeOS1L/H1PY8yMZhzgFpEPArcBPcBXgY+oalpEAsAe4KNTPXWSsqmy4d0CfE9VswVlraraKSLtwMMiskNVx+3Go6p3AXcBbNy48fQy7c2mZB9RvBTbw70AVFZWIdXLqOhzU2
q7BkZoe/4rvC/yNXdcYdbWqbRd6VoTiR6XcM4b51hXk+Zz162CXwBD3bzl4pW0nOhDK1uQM68h2vsSbLhtNJ1G0fKL70YGXMuibpX/GwRFa5BUnJpYmFDS/Q2jsSp/39MYM6NiZkMtAd6mqgcKC1U1JyJvnOZ5HUDhjigtQOcUx94CvG/C63d6v/eLyM+Bi4FJtm6bh7xps8BosLj1inXQmyF8ZDsAXYMjtPXvpCu4jKW/+5XxU1qnUrcSbv8x3PsncOY1Xj6nMMHh49x6SZ0XLI7xzstWw45hiJ4Fb/6nUz+P/KB3sh8GOt37+y1SDakhamIhovmUH4VbnxpjSqKYbqj7gNHt0kSkRkQ2Aajqc9M8byuwVkTaRCSCCwgvm9UkIutw3VqPFZQ1iEjUu70EuALYVURd54W013qIR5aMBova6mqoXo7Eu1lZG2Z/9xDLUwc4Vn2u62oq9ht789nw+z+FNa92z6lschsU5ccWhrz8T30HT398Id+ySA64161ePv3xsyFSBakhamNhopIPFpPs72yMmVPFBIsvAUMF9+Ne2bRUNQO8H3gAeA74rqruFJHPiEhh5/mtwD3euEjeucA2EXka+BnweVVdMMFiuNcLFpWrIOOt1g7FoGYZaI7LluV4av9RWvQoqYa1p/dmVUvc1qf5YBHvglTC/T7dYBEtaFnEu9z4ht+i1TDitSzyXXnWsjCm5IrphpLCC7nX/VTUYj5VvQ/XMiks+4sJ9z89yfMeBS4o5j3mo5E+1w2Vql4JfU+5wlBsdH/qixrT7NzbQTCqhJaf5o5olY3jWxaZJHR5cfV0p7nmkxn2HXSv61e22UKRahjqprY6TBhrWRgzXxTTstgvIh8UkbD38yFg/4zPKmOZgS5GNIwUzhwKx0a7cdZXx1krHQDUrjr/9N5sYjcUuE2N4PSDRa23h3bnb9zvqjmYwhqphtQgrY2VrKnz1lZYy8KYkismWPwRcDlwGDdovQm4w89KLXTZwWN0U0ekomAWTyjm0koA7aEezgocJqvCinY/goW3XerpdkNVNkK4aiz4zMVWn1431EevX8cHrvKCVdCChTGlNmN3kqp24QanTZFyQ12c0FrOqSnItxSqcGspItXUDx9kfaiTw7Kc1orK03uzyiY3iJ44MVZ24Ffu/U6320jEBZyj3mZIc9UNlRoiGgrC0GGXF2uqNSjGmDlTzDqLGPBe4DxgtPNYVX/Px3otaKFEDwOBBmKF6wNCUXfxbWxDel/kFeHDdEbO5LTXQ1cuAdSlAJEgaNat8m5/zaltbTpRfatbMQ5z07KIVEM2BZkUdD8PTWed/PoQY8ysK+Zq8k1cfqjrcDP5W4BBPyu10MVSJ0jFmlzyv7z87cYz4cgzLEt3cMHGq07/zfJrH7p2Qe3Kse1B268+/deGsa4sCUKFj8n88qJejqrUkDunpef6/57GmBkVEyzOUtVPAnFV/TrwBhbwTCXf5XLU5vrQqubxwSI/SNvYDkNutlRo5YWn/375i3nPHjd7KT8I3X716b924etXL52dlspM8gkN490ueWGzBQtj5oNipsB68xfpE5Hzcfmh1vhWo4UslyU10EWELKHaZeOnfIbyLYv2sbIVsxAs6vKL5NUtopMAZIZh+StO/7VhLFjM1biBl3mWw08Cai0LY+aJYoLFXd5+Fp/ArcCuBj7pa60Wqq++jog3zbSyYfnULQtwLYCaWVgRXVE/lh02VueSBWZTs9cKGG1ZzMHgNoxtwpSfgWXBwph5Ydpg4SULHFDVXuCXQPt0x5e9/HoEoL555YRg4bUy8psPzUarYvTNWt1e3bF62DTLs5rzazXmYnAbxrqhDm11U2Yb2ubmfY0x05r266eq5nApO8xJal6+aqzrKRCCoBeXq5dBXevsjSnA2AU9n8tpNlU2utdfPkfDVPluqK6dLg9W0Hb+NWY+KOZ/4oMi8mHgO7i8UACo6ompn1KmYvWQdDvDNjSfAYNekt1QQQtDBD
74lBtbmC35riI/goUf9Z1OfjaU5mxw25h5pJhgkV9PUZhCXLEuqfFU3XRPIEmEWH6xHLw8XcVsf1v2M1gABHzc0nSiSMFCRhuvMGbeKGYFt3UaTyOXU7677RBvvaCBaC7D/XVv54HsRv4hEHD5oGD82IUf8sEin/hvIYsULGS0YGHMvFHMCu53T1auqt+Y/eosPLuODHDn93ewRFt4HXBImzlS601bDXupPPxOhLfsPAiE3WrnhS5c4bq8NGfBwph5pJj+kFcW3I4BrwV+A1iwAOIjGQD6+tye293pGA2VXnqK/AyokM8ti4Y18LGOsZbMQibiuqJyaTcRwBgzLxTTDfWBwvsiUodLAWKAZCYHQHzABYtjI1HqK8PuwXz301yk2F4MgSIvUuXWoMzFinFjTFFOZaQ1AZzm9m6Lx3Aq634PuMHso6kIG/Iti0DQdQ/5PWax2LReBs3nlLoWxpgCxYxZ/Ag3+wncuoz1wHf9rNSC0fUc5/76bwjxVlJxFyx6sxXUV4THjglX2uY9J+u3/7XUNTDGTFBMy+ILBbczwAFV7fCpPgvLTz7B6oM/5fLA2WQSbtrsoFaOjVmA6x6ybUGNMQtcMcHiIHBEVZMAIlIhImtU9SVfa7YQ1Lmd3NZKB5oUAAapoK6yoGURrR3Ld2SMMQtUMcHiP3HbquZlvbJXTn54GYnWAnB+4CUOZZrRkBAnNr4b6m1fnpt9IIwxxkfFBIuQqqbyd1Q1JSK2dRlAOgHAhbKPPq0mFaxCCdBQVfDnWXlJiSpnjDGzp5i5id0i8ub8HRG5Cejxr0oLSMoFi/bAUVZJF3Fxq4/HtSyMMWYRKCZY/BHwv0XkoIgcBP4c+EN/qzV//Wx3F0PeQrx8LiiAywLP0a9uiuy4MQtjjFkEZgwWqrpPVS/DTZk9T1UvV9W9/ldt/ukeHOH2f93KD5467ApScY5G20hpiGpJ0p2OURkJEg3NYeI9Y4yZAzMGCxH5KxGpV9UhVR0UkQYR+dxcVG6+6RpMAtCf8IZwUnEGgg3sDLoFZANaYV1QxphFqZhuqBtUtS9/x9s170b/qjR/HR9yQWJwtBsqToIYT0cucuVU0tmfLFX1jDHGN8UEi6CIjC5BFpEKoCyXJB+PjwBjyQNJDZEgxs7YBgAqqhv4nU2W/M4Ys/gUM3X234GHRCSfg+F24Ov+VWn+yrcs4iMuHxTpBHGNcrhiHUgb11/+Gq5/5RxtP2qMMXOomKyzfysizwCvAwS4H1jtd8XmncGjvPqp/8Vd4X4eGXgfcBGk4sQDUSKRCNzxlEuvbYwxi1CxOaCPAjngZtx+Fs8V8yQRuV5EdovIXhG5c5LH/0FEtns/L4hIX8Fjt4nIHu/ntiLr6Z+Dj3POiYe5Nvgk6wYfhVwOUnGGNEpFOGiBwhizqE3ZshCRs4FbgFuB48B3AFHV1xTzwiISBDYDrwc6gK0iskVVd+WPUdU/LTj+A8DF3u1G4FPARlzG2ye95/ae3OnNolR89GYkPQCZYUAZzEWJhW2qrDFmcZuuZfE8rhXxJlV9tar+Ey4vVLEuBfaq6n4vXcg9wE3THH8r8G3v9nXAg6p6wgsQDwLXn8R7z76CYBFL94/eH8hasDDGLH7TBYubcd1PPxORr4jIa3FjFsVaCRwquN/hlb2MiKwG2oCHT+a5InKHiGwTkW3d3d0nUbVT4K3W7tAlVGYHRu/3ZyPEwrajmzFmcZvyKqeqP1DVdwDnAD8H/hRYJiJfEpFri3jtyQKLTlIGrrvre6qab7kU9VxVvUtVN6rqxubm5iKqdBpScbIqHNMGqnKDo3mhXLCwloUxZnErJt1HXFW/papvBFqA7cDLBqsn0QGsKrjfAnROcewtjHVBnexz50Q6OUicCgakhlodJDviWhYDOW+A2xhjFrGT6j/xxhC+rKrXFHH4VmCtiLR5Kc1vAbZMPEhE1gENwGMFxQ8A13qpRRqAa72ykhmJDxInRiZST53ESSYGAEho1L
qhjDGLXjGL8k6JqmZE5P24i3wQuFtVd4rIZ4BtqpoPHLcC96iqFjz3hIh8FhdwAD6jqif8qmsx0sMDJDSKVDRQnxoiFR+gCkgQs5aFMWbR8y1YAKjqfcB9E8r+YsL9T0/x3LuBu32r3EnKJoeIEyNU3UT1QJKuoeMAxIkRtWBhjFnkrP+kWGmXNDBU7bZIzfZ1AJBQa1kYYxY/CxZFCqbjxDVGpKYJgMCAG2+PY+ssjDGLnwWLIgXScRJEidUuASA02IEiJLF1FsaYxc+uckUKZRPENUZVvQsWkUQnuVAFSsC6oYwxi54FiyKFMgniVFDXuAyAysQRMsFKAOuGMsYsehYsiqFKODvMsMSoaVgKQFDTjEQbAAsWxpjFz4JFMTJJAuRISQXRqvrR4pfOeCOAjVkYYxY9u8oVw8swmwpWjNu34muJ/0EwINRVhEtVM2OMmRO+LspbNLwMs5mQG6PYHjiPw+ka7n0hySffuJ6amAULY8ziZsGiGF7LIj+gPfLOH7Ht2SN8qqmK91y+poQVM8aYuWHBohhesMh6LYtN7U1sam8qZY2MMWZO2ZhFMUYGAciGq0tcEWOMKQ0LFsXwWha5cGWJK2KMMaVhwaIYXrDQcFWJK2KMMaVhwaIY3mwoItYNZYwpTxYsiuG1LCRqwcIYU54sWBQjFSeHEIxUlLomxhhTEhYsipFOkNQIsYjNNDbGlCcLFsVIDzNM1HJAGWPKll39ipBLDzNMhFjIsssaY8qTBYsiZFMJRjRsqciNMWXLgkURciPDtn2qMaas2dWvCDo6ZmEtC2NMebJgUQRND5O0bihjTBmzYFGMdL4byoKFMaY8WbAoRsbGLIwx5c2ufkWQTJIRa1kYY8qYBYsiSCbJsNo6C2NM+bJgUYRANkmSCBUR+3MZY8qTr1c/EbleRHaLyF4RuXOKY94uIrtEZKeI/EdBeVZEtns/W/ys50yC2RGSRIhay8IYU6Z8y4wnIkFgM/B6oAPYKiJbVHVXwTFrgY8BV6hqr4gs+5j+FQAACvZJREFULXiJYVW9yK/6FS2bJqAZl0jQxiyMMWXKz5bFpcBeVd2vqingHuCmCcf8AbBZVXsBVLXLx/qcmvQwgMsNZbOhjDFlys+r30rgUMH9Dq+s0NnA2SLyKxF5XESuL3gsJiLbvPK3+FjP6WWSADYbyhhT1vzcoEEmKdNJ3n8tcDXQAjwiIuerah/QqqqdItIOPCwiO1R137g3ELkDuAOgtbV1tuvveC2LlEQJB61lYYwpT35e/TqAVQX3W4DOSY65V1XTqvoisBsXPFDVTu/3fuDnwMUT30BV71LVjaq6sbm5efbPAEaDRX1trT+vb4wxC4CfwWIrsFZE2kQkAtwCTJzV9EPgNQAisgTXLbVfRBpEJFpQfgWwixJIjyQAWLN8SSne3hhj5gXfuqFUNSMi7wceAILA3aq6U0Q+A2xT1S3eY9eKyC4gC3xEVY+LyOXAl0Ukhwtony+cRTVXPvHDHaxL7eZdwNqVPrVcjDFmAfB1U2lVvQ+4b0LZXxTcVuDPvJ/CYx4FLvCzbjNJZ3P8++MHeXXgAO+KwLoWCxbGmPJlI7ZT6B9OA1DBCAC1NTWlrI4xxpSUBYsp9CVcsHjDuQ2uIFxRwtoYY0xpWbCYQl8iBcA5S8KuwIKFMaaMWbCYQr5lURN0vwlZsDDGlC8LFlPo9VoWleIFi3CshLUxxpjSsmAxhdEB7oALGtayMMaUMwsWU+hNpAgGhKimIBCGoK+zjI0xZl6zYDGFvkSa+oowkkna4LYxpuxZsJhC33Causqwyw0VsvEKY0x5s2Axhb5EivoKL1jY4LYxpsxZsJhCXyJNQ2UEMsMQrix1dYwxpqQsWEyhL5HvhkpaN5QxpuxZsJhCXyLlWhYjgzbAbYwpexYsJpHK5IinsiwLJaBjK5yxodRVMsaYkrJgMYm+YbcQ74LBX0AuDa/47RLXyBhjSstWmk1iqOsAu6K3E94dgqa1sO
KiUlfJGGNKyloWk0gffoZKGYFACDb9IYiUukrGGFNS1rKYRKavA4AXf+tBzj57XYlrY4wxpWcti0lo/2EyGqC2uaXUVTHGmHnBgsUkQoOdHKOB+mpbX2GMMWDBYlLR4aN00UQsHCx1VYwxZl6wYDGJqpFjnAg1l7oaxhgzb1iwmEiV+nQXA5Glpa6JMcbMGxYsJhruJaIphmPLS10TY4yZNyxYTNTvps2mKleUuCLGGDN/WLCYaOAwANmaM0pcEWOMmT8sWEyQPn4AgEC9rbEwxpg8W8E9QbrzGQa1mliDtSyMMSbPWhYTyNGn2ZlbQ0NVtNRVMcaYecOCRaFMiuiJ3ezUNhqrIqWujTHGzBu+BgsRuV5EdovIXhG5c4pj3i4iu0Rkp4j8R0H5bSKyx/u5zc96jup+nkAuzc7cahqrwnPylsYYsxD4NmYhIkFgM/B6oAPYKiJbVHVXwTFrgY8BV6hqr4gs9cobgU8BGwEFnvSe2+tXfQFyndsJALtop7nG8kIZY0yenwPclwJ7VXU/gIjcA9wE7Co45g+AzfkgoKpdXvl1wIOqesJ77oPA9cC3Z7uS/ceP0bf5tQDU5foJaYx33ng1dRXWsjDGmDw/g8VK4FDB/Q5g04RjzgYQkV8BQeDTqnr/FM9dOfENROQO4A6A1tbWU6qkhML0VLYD0AOMrNjEe65oP6XXMsaYxcrPYDHZ9nI6yfuvBa4GWoBHROT8Ip+Lqt4F3AWwcePGlz1ejNq6Ri758JZTeaoxxpQNPwe4O4BVBfdbgM5JjrlXVdOq+iKwGxc8inmuMcaYOeJnsNgKrBWRNhGJALcAE7/C/xB4DYCILMF1S+0HHgCuFZEGEWkArvXKjDHGlIBv3VCqmhGR9+Mu8kHgblXdKSKfAbap6hbGgsIuIAt8RFWPA4jIZ3EBB+Az+cFuY4wxc09UT6mrf97ZuHGjbtu2rdTVMMaYBUVEnlTVjTMdZyu4jTHGzMiChTHGmBlZsDDGGDMjCxbGGGNmtGgGuEWkGzhwGi+xBLeIu5zYOZcHO+fycKrnvFpVm2c6aNEEi9MlItuKmRGwmNg5lwc75/Lg9zlbN5QxxpgZWbAwxhgzIwsWY+4qdQVKwM65PNg5lwdfz9nGLIwxxszIWhbGGGNmZMHCGGPMjMo+WIjI9SKyW0T2isidpa6PX0TkJRHZISLbRWSbV9YoIg+KyB7vd0Op63m6RORuEekSkWcLyiY9T3G+6H32z4jIhtLV/NRNcc6fFpHD3ue9XURuLHjsY9457xaR60pT61MnIqtE5Gci8pyI7BSRD3nli/1znuq85+azVtWy/cGlTt8HtAMR4Glgfanr5dO5vgQsmVD2t8Cd3u07gb8pdT1n4TyvBDYAz850nsCNwI9xOzNeBjxR6vrP4jl/GvjwJMeu9/6dR4E2799/sNTncJLnuwLY4N2uAV7wzmuxf85TnfecfNbl3rK4FNirqvtVNQXcA9xU4jrNpZuAr3u3vw68pYR1mRWq+ktg4t4nU53nTcA31HkcqBeRFXNT09kzxTlP5SbgHlUdUbc75V7c/4MFQ1WPqOpvvNuDwHPAShb/5zzVeU9lVj/rcg8WK4FDBfc7mP6Pv5Ap8BMReVJE7vDKlqnqEXD/EIGlJaudv6Y6z8X++b/f63a5u6CLcVGds4isAS4GnqCMPucJ5w1z8FmXe7CQScoW61ziK1R1A3AD8D4RubLUFZoHFvPn/yXgTOAi4Ajwd175ojlnEakG/gv4n6o6MN2hk5QtyHOGSc97Tj7rcg8WHcCqgvstQGeJ6uIrVe30fncBP8A1R4/lm+Pe767S1dBXU53nov38VfWYqmZVNQd8hbHuh0VxziISxl0wv6Wq3/eKF/3nPNl5z9VnXe7BYiuwVkTaRCQC3AJsKXGdZp2IVIlITf42cC3wLO5cb/MOuw24tzQ19N1U57kFeLc3W+YyoD/fjbHQTeiTfyvu8wZ3zreISFRE2oC1wK/nun6nQ0
QE+BrwnKr+fcFDi/pznuq85+yzLvUIf6l/cDMlXsDNFPh4qevj0zm242ZFPA3szJ8n0AQ8BOzxfjeWuq6zcK7fxjXF07hvVu+d6jxxzfTN3me/A9hY6vrP4jl/0zunZ7yLxoqC4z/unfNu4IZS1/8UzvfVuO6UZ4Dt3s+NZfA5T3Xec/JZW7oPY4wxMyr3bihjjDFFsGBhjDFmRhYsjDHGzMiChTHGmBlZsDDGGDMjCxbGnAQRyRZk99w+m5mKRWRNYeZYY+aTUKkrYMwCM6yqF5W6EsbMNWtZGDMLvP1C/kZEfu39nOWVrxaRh7wkbw+JSKtXvkxEfiAiT3s/l3svFRSRr3j7FfxERCpKdlLGFLBgYczJqZjQDfWOgscGVPVS4J+B/+OV/TMuPfYrgG8BX/TKvwj8QlUvxO1FsdMrXwtsVtXzgD7gZp/Px5ii2ApuY06CiAypavUk5S8B16jqfi/Z21FVbRKRHlz6hbRXfkRVl4hIN9CiqiMFr7EGeFBV13r3/xwIq+rn/D8zY6ZnLQtjZo9OcXuqYyYzUnA7i40rmnnCgoUxs+cdBb8f824/istmDPC7wP/zbj8E/DGAiARFpHauKmnMqbBvLcacnAoR2V5w/35VzU+fjYrIE7gvYbd6ZR8E7haRjwDdwO1e+YeAu0TkvbgWxB/jMscaMy/ZmIUxs8Abs9ioqj2lrosxfrBuKGOMMTOyloUxxpgZWcvCGGPMjCxYGGOMmZEFC2OMMTOyYGGMMWZGFiyMMcbM6P8Das7/lUHh03EAAAAASUVORK5CYII=\n",
            "text/plain": [
              "<matplotlib.figure.Figure at 0x14cada20240>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEWCAYAAACXGLsWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzs3Xd8m+W5+P/PJcmyvHfixE5iZ5NBQhZhQ9i0hUKBktIeRltO29NSSnv6o+O0lHG+nJ4O2tPJYbQ9LaSUQqGsMAMhCZCEDEicaSfetryHZMuS7t8fz2NbXonjWLYTX+/XSy9Lz9ItBZ5L97puMcaglFJKHYljtAuglFJq7NNgoZRS6qg0WCillDoqDRZKKaWOSoOFUkqpo9JgoZRS6qg0WCg1RCKSJyJGRFyDOPZmEXlnJMqlVDRosFDjgogcEpGAiGT22r7dvuHnjU7Jji3oKDVaNFio8aQIWN35QkQWAnGjVxylThwaLNR48n/Av0S8vgn4U+QBIpIiIn8SEa+IHBaR74uIw97nFJGfiEiNiBQCH+vn3EdEpEJEykTkPhFxHk+BRSRWRB4UkXL78aCIxNr7MkXkeRFpEJE6EVkfUdb/zy5Ds4jsFZELj6ccSmmwUOPJu0CyiJxi38Q/Dfy51zH/A6QA04HzsILLLfa+LwIfB04DlgHX9jr3j0AQmGkfcwnwheMs8/eAlcBiYBGwAvi+ve+bQCmQBUwEvgsYEZkDfBVYboxJAi4FDh1nOdQ4p8FCjTedtYuLgT1AWeeOiADyHWNMszHmEPBT4HP2IdcDDxpjSowxdcD/izh3InA5cIcxptUYUw38HLjhOMt7I3CPMabaGOMFfhRRng5gEjDNGNNhjFlvrGRvISAWmCciMcaYQ8aYg8dZDjXOabBQ483/AZ8BbqZXExSQCbiBwxHbDgM59vPJQEmvfZ2mATFAhd0s1AD8HphwnOWd3E95JtvP/xs4ALwiIoUicheAMeYAcAdwN1AtImtEZDJKHQcNFmpcMcYcxurovgJ4utfuGqxf69Mitk2lu/ZRAUzpta9TCdAOZBpjUu1HsjFm/nEWubyf8pTbn6XZGPNNY8x04BPAnZ19E8aYx40xZ9vnGuC/jrMcapzTYKHGo88Dq4wxrZEbjTEh4EngfhFJEpFpwJ1092s8CdwuIrkikgbcFXFuBfAK8FMRSRYRh4jMEJHzjqFcsSLiiXg4gCeA74tIlj3s9wed5RGRj4vITBERoAmr+SkkInNEZJXdEd4G+O19Sg2ZBgs17hhjDhpjtgyw+2tAK1AIvAM8Djxq7/tfYC2wA/iAvjWTf8FqxtoN1ANPYfUpDFYL1o2987EKuA/YAuwEPrTf9z77+FnAa/Z5m4DfGGPWYfVXPIBVU6rEagr77jGUQ6k+RBc/UkopdTRas1BKKXVUGiyUUkodlQYLpZRSR6XBQiml1FGdNFkuMzMzTV5e3mgXQymlTihbt26tMcZkHe24kyZY5OXlsWXLQKMhlVJK9UdEDh/9KG2GUkopNQgaLJRSSh2VBgullFJHddL0WSil1LHo6OigtLSUtra20S7KiPB4POTm5hITEzOk8zVYKKXGpdLSUpKSksjLy8PKxXjyMsZQW1tLaWkp+fn5Q7qGNkMppcaltrY2MjIyTvpAASAiZGRkHFctSoOFUmrcGg+BotPxftZxHyxa2oP87NV9bC9pGO2iKKXUmDXug0VHMMwvX9/PtuL60S6KUmocqa2tZfHixSxevJjs7GxycnK6XgcCgUFd45ZbbmHv3r1RLqklqh3cInIZ8AvACTxsjHmg1/6fAxfYL+OBCcaYVHvfTcD37X33GWP+GI0yxrmdAPgCupCYUmrkZGRksH37dgDuvvtuEhMT+da3vtXjGGMMxhgcjv5/1z/22GNRL2enqNUsRMQJ/Bq4HJgHrBaReZHHGGO+YYxZbIxZDPwP9s
pjIpIO/BA4HVgB/NBexnLYxbocOAT8GiyUUmPAgQMHWLBgAV/60pdYsmQJFRUV3HbbbSxbtoz58+dzzz33dB179tlns337doLBIKmpqdx1110sWrSIM844g+rq6mEtVzRrFiuAA8aYQgARWQNchbXkZH9WYwUIgEuBV40xdfa5rwKXYa1HPKxEhAS3S2sWSo1jP/rnLnaXNw3rNedNTuaHn5g/pHN3797NY489xu9+9zsAHnjgAdLT0wkGg1xwwQVce+21zJvX47c3jY2NnHfeeTzwwAPceeedPProo9x11139XX5IotlnkQOURLwutbf1ISLTgHzgjWM9dzjEuZ34O4LRurxSSh2TGTNmsHz58q7XTzzxBEuWLGHJkiUUFBSwe3ff39xxcXFcfvnlACxdupRDhw4Na5miWbPob5zWQAt+3wA8ZYzp/Hk/qHNF5DbgNoCpU6cOpYwAxLudWrNQahwbag0gWhISErqe79+/n1/84he8//77pKam8tnPfrbf+RJut7vrudPpJBgc3h/A0axZlAJTIl7nAuUDHHsDPZuYBnWuMeYhY8wyY8yyrKyjpmMfUJzbRWu7Bgul1NjT1NREUlISycnJVFRUsHbt2lEpRzRrFpuBWSKSD5RhBYTP9D5IROYAacCmiM1rgf+M6NS+BPhOtAoar81QSqkxasmSJcybN48FCxYwffp0zjrrrFEphxgzUMvQMFxc5ArgQayhs48aY+4XkXuALcaY5+xj7gY8xpi7ep17K/Bd++X9xpgjjhFbtmyZGeriR5975D1a2oM885XR+UdQSo28goICTjnllNEuxojq7zOLyFZjzLKjnRvVeRbGmBeBF3tt+0Gv13cPcO6jwKNRK1yEeLcTb3P7SLyVUkqdkMb9DG6AeB06q5RSR6TBAmvorC+gfRZKKTUQDRZAfIwOnVVKqSPRYEHnaKgQ0ezsV0qpE5kGCyA+1oUx0NYRHu2iKKXUmKTBAqtmAWi/hVJqxJx//vl9Jtg9+OCDfOUrXxnwnMTExGgXa0AaLNoaOX3/z1gqe7XfQik1YlavXs2aNWt6bFuzZg2rV68epRIdmQYLE2ZO4R9Z5CjUYKGUGjHXXnstzz//PO3t1hyvQ4cOUV5ezuLFi7nwwgtZsmQJCxcu5Nlnnx3lklqiOinvhBCbDEASPm2GUmq8eukuqPxweK+ZvRAuf2DA3RkZGaxYsYKXX36Zq666ijVr1vDpT3+auLg4nnnmGZKTk6mpqWHlypVceeWVo75euNYsHE5CMYkki08XQFJKjajIpqjOJihjDN/97nc59dRTueiiiygrK6OqqmqUS6o1CwBC7iSS/D5thlJqvDpCDSCaPvnJT3LnnXfywQcf4Pf7WbJkCX/4wx/wer1s3bqVmJgY8vLy+k1JPtK0ZgGY2BSSxYevQ4OFUmrkJCYmcv7553Prrbd2dWw3NjYyYcIEYmJiePPNNzl8+PAol9KiwQLAk0wSPvzaZ6GUGmGrV69mx44d3HDDDQDceOONbNmyhWXLlvGXv/yFuXPnjnIJLdoMBTg8KSRLtS6ApJQacVdffXWP7BGZmZls2rSp32NbWlpGqlh9aM0CcMSlkIQfvzZDKaVUvzRYAM74VJKlVYfOKqXUADRYAMQmkyR+fO0aLJQaT8ZT8tDj/awaLAA8KcQQItjeOtolUUqNEI/HQ21t7bgIGMYYamtr8Xg8Q76GdnADeKxZ3G3N9aNcEKXUSMnNzaW0tBSv1zvaRRkRHo+H3NzcIZ+vwQLAkwJAU0PtKBdEKTVSYmJiyM/PH+1inDCi2gwlIpeJyF4ROSAidw1wzPUisltEdonI4xHbQyKy3X48F81ydgaL1qa6qL6NUkqdqKJWsxARJ/Br4GKgFNgsIs8ZY3ZHHDML+A5wljGmXkQmRFzCb4xZHK3y9RBrBQtXoIlGXwcp8TEj8rZKKXWiiGbNYgVwwBhTaIwJAGuAq3od80Xg18aYegBjTHUUyzMwu2aRhI
+Set+oFEEppcayaAaLHKAk4nWpvS3SbGC2iGwQkXdF5LKIfR4R2WJv/2R/byAit9nHbDmuTiq7gztZfJTUabBQSqneotnB3V/y9d5j1FzALOB8IBdYLyILjDENwFRjTLmITAfeEJEPjTEHe1zMmIeAhwCWLVs29PFvETWLYg0WSinVRzRrFqXAlIjXuUB5P8c8a4zpMMYUAXuxggfGmHL7byGwDjgtaiV1ecARQ2ZMmzZDKaVUP6IZLDYDs0QkX0TcwA1A71FN/wAuABCRTKxmqUIRSROR2IjtZwG7iRYR8KQwKTZASZ0/am+jlFInqqg1QxljgiLyVWAt4AQeNcbsEpF7gC3GmOfsfZeIyG4gBPy7MaZWRM4Efi8iYayA9kDkKKqoiEsju61FaxZKKdWPqE7KM8a8CLzYa9sPIp4b4E77EXnMRmBhNMvWR1oek8sOU9nchjFm1Ne7VUqpsURzQ3VKn05GoAxfIEhTmyYUVEqpSBosOqVPxx1qJYMmKhq130IppSJpsOiUPh2AaVJFRePoL46ulFJjiQaLThHBolKDhVJK9aDBolPqVIw4yHNozUIppXrTYNHJ5UZScpkT46VS+yyUUqoHDRaR0qcz3ak1C6WU6k2DRaT0GeSGy6ho0JqFUkpF0mARKWsO8eFWgk2Vo10SpZQaUzRYRMqcDcCkjmIa/R2jXBillBo7NFhEypoDwEwpo6CiaZQLo5RSY4cGi0hJkwi7E5kpZewoaRjt0iil1JihwSKSCI6sOSxwV7KjVIOFUkp10mDRW+YcZko524s1WCilVCcNFr1NmEtKqJbcpm1UN+l8C6WUAg0WfS2+EX/KDP7o/i/4y/VQunW0S6SUUqNOg0VvCZnE3Poir8oZpFa/C1sfHe0SKaXUqNNg0Q9XSjYbF97HlvBswpW7Rrs4Sik16jRYDODyhZPYFZpCsGIXj60/MNrFUUqpUaXBYgBnzsigKm4GbgKsfXvjaBdHKaVGVVSDhYhcJiJ7ReSAiNw1wDHXi8huEdklIo9HbL9JRPbbj5uiWc7+xDgdfPfmawFIb92vI6OUUuNa1IKFiDiBXwOXA/OA1SIyr9cxs4DvAGcZY+YDd9jb04EfAqcDK4AfikhatMo6EMeEuRhxMtdRzHad0a2UGseiWbNYARwwxhQaYwLAGuCqXsd8Efi1MaYewBhTbW+/FHjVGFNn73sVuCyKZe1fjAeTMZMFjmKd0a2UGteiGSxygJKI16X2tkizgdkiskFE3hWRy47hXETkNhHZIiJbvF7vMBa9myN3Ocud+9hRXBuV6yul1IkgmsFC+tlmer12AbOA84HVwMMikjrIczHGPGSMWWaMWZaVlXWcxR3A9PNJMi0ESnfSEQpH5z2UUmqMi2awKAWmRLzOBcr7OeZZY0yHMaYI2IsVPAZz7sjIPxeA04I7eK+wblSKoJRSoy2awWIzMEtE8kXEDdwAPNfrmH8AFwCISCZWs1QhsBa4RETS7I7tS+xtIy9pIuHMuVzk2sb7H2jqD6XU+BS1YGGMCQJfxbrJFwBPGmN2icg9InKlfdhaoFZEdgNvAv9ujKk1xtQB92IFnM3APfa2UeGYeSHLZQ93FlzPn555nie3lNDSHsQfCLHpoPZlKKVOfmJMn66AE9KyZcvMli1bonPx9mY2v/YkyzffyQMdN/C70JWcPyeLmVmJPPxOEW9+63zyMxOi895KKRVFIrLVGLPsaMfpDO7BiE1i6eW34k+dw7dmV/GNi2azbq+XP717GID1+6MzEksppcYKDRaD5HAIcXMuwFXyLreunESyx0UgGCbJ42L9/prRLp5SSkWVBotjkX8eBP0kPfcFfr28hjtXJvKziWvZcbCMoA6rVUqdxFyjXYATSt7ZkDQJ9r/CObn1nDN1JVQ9QrLZwEsfzOcTy2eOdgmVUioqtGZxLDzJcGcBnPlVKNsKB1/HxKVxumMPrz3zCM/tGJ2pIEopFW0aLI6VCEw7C8IdUPkhct
rnMK44zkss5bENRaNdOqWUigoNFkMx5XS6MpLkn4dkL2BlXCnbihsoqfONatGUUioaNFgMRVwqZC8AccCUFTBpMdn+fQhhbYpSSp2UNFgM1dJbYOnNVj/G5MU4Ai18bLKPFz+sGO2SKaXUsNPRUEO1/PPdzyctAuCT2TV84YNEyhv8TE6NG6WCKaXU8NOaxXDImguuOJa5DgLwWkHVKBdIKaWGlwaL4eCMgamnk1r5LtMzE3h1twYLpdTJRYPFcMk/D6p3cdUsF5sO1uJtbh/tEiml1LDRYDFcpp8HwA2ZhwiGDWveLx7lAiml1PDRYDFcJi0GTwoTi19g1cxk/vzeYV2GVSl10tBgMVwcTlhyE+x9kf9puoO6plbu+Ot2wq/eTXDbE/gCwdEuoVJKDZkGi+F0yb1w9UMkNB3g90sOs25nIWbDLyl4+fdc97tNfY8vehvWfm/ky6mUUsdIg8VwW3gdZM5mVf1TfG9BHU5CpPpLKahooj0Y6j7OGBqfvhM2/YpAXenolVcppQZBg8Vwczhg5VegYgc3tPwfAJMdtbhMB4drI/JGHXiNlOb9AGze8MpolFQppQYtqsFCRC4Tkb0ickBE7upn/80i4hWR7fbjCxH7QhHbn4tmOYfd4hshYxaOyp0gDpyEyRUvB6tbug7xvfM7Kk0a7cZF6c63CIVPjrXQlVInp6gFCxFxAr8GLgfmAatFZF4/h/7VGLPYfjwcsd0fsf3KaJUzKlxuuOLH1vPZlwMwTaoorGm1toWCuEo38kpoGQ0pp5DfXsB7RbWjVFillDq6aNYsVgAHjDGFxpgAsAa4KorvN7bMWAW3roXLHwDg1LhaDla3sLeyGVP1Ie6Qj9r0JaTNOYtTpZD1ezQBoVJq7IpmsMgBSiJel9rbevuUiOwUkadEZErEdo+IbBGRd0Xkk/29gYjcZh+zxev1DmPRh8nUlZAyBWKTme+p4eVdlVz64NtsfPMFADLmnYd76nI80kFxwZZRLqxSSg1sUMFCRGaISKz9/HwRuV1EUo92Wj/bejfM/xPIM8acCrwG/DFi31RjzDLgM8CDIjKjz8WMecgYs8wYsywrK2swH2XkiUD6dPIdVfgC1mioxr3rKTWZLF+0ECYuACCmbh+VjW2jWVKllBrQYGsWfwdCIjITeATIBx4/yjmlQGRNIRfosTKQMabWGNOZROl/gaUR+8rtv4XAOuC0QZZ17MmYQU7HIRxiuHEOnC672OU8hbnZSZA+HSNOZjrKeHvfGKwdKaUUgw8WYWNMELgaeNAY8w1g0lHO2QzMEpF8EXEDNwA9RjWJSOQ1rgQK7O1pETWZTOAsYPcgyzr2TL+A+LYqNl8X4h7v7cQQ4kD+jYiI1RmeMYN5rgreK6ob7ZIqpVS/BhssOkRkNXAT8Ly9LeZIJ9jB5avAWqwg8KQxZpeI3CMinaObbheRXSKyA7gduNnefgqwxd7+JvCAMebEDRZzLgdxkLH2azjbGjh05d/59NXXdO2WzNnMi6lg86EhBoumCvivPCjfNjzlVUqpXga7Ut4twJeA+40xRSKSD/z5aCcZY14EXuy17QcRz78DfKef8zYCCwdZtrEvIROmnQWH1sNpn+XUpWf23J81lwl7X6KiuYmqpjYmJnuO7frePeCvt4LF5BO3tU4pNXYNqmZhjNltjLndGPOEiKQBScaYB6JctpPLwmvBGQtn39l3X9YcHCZEnlTy/lCaolrtvo4mHX6rlIqOwY6GWiciySKSDuwAHhORn0W3aCeZJTfBnQWQ0WdQF2TOBmBBTDlv7q0+9mu32Oc0lx/5OKWUGqLB9lmkGGOagGuAx4wxS4GLolesk5AIJGT0vy9rLsTEc+OEwzz9QRnv7K85pku3N1YC4KspgY2/gvUax5VSw2uwwcJlj1y6nu4ObjVcYjww+zKW+tYzOyuO/3j2I4wZfK6o+mora21D1WHY8gi883MIBroP2LcWKnYMd6mVUuPIYIPFPVijmg4aYz
aLyHRgf/SKNQ7Nvxrx1fDvc7wU1bRS1JlHahCCTVYzVEp7Bab+ELQ3weENUL0HjIGnb4O3/ztKBVdKjQeD7eD+mzHmVGPMl+3XhcaYT0W3aOPMrIvBnciZLa8CsG6v1Wn90ocVbCuuP+KpDp/VbJUg7Yixl3J95kvwm9Nh19PQ1gDNlVD8Hjx6GXT4o/c5lFInpcF2cOeKyDMiUi0iVSLydxHJjXbhxpWYOFj8GRL2PcPKDB/r9nlZu6uSL//lA67+zUYeeGnPgKd6ArW04+56HU7NgxarHyO8/ufWxuZKKHwTijdB7cFofhKl1ElosM1Qj2HNvp6MlQzwn/Y2NZzO+jogfDP+RTYeqOHOv25n/uRkVs2dwBPvF/dY86K1PWitvGcMSaEGKj3WKKuAcfKj8K08HryAfeEcHFUfAhBursQ0FFsnN5b0fmellDqiwQaLLGPMY8aYoP34AzBGM/edwFJyYcGnWNL0OlcsmMh5szN59Awvt04+TIu/jYKKpq5DP/Xbjfzon7vxN9USQ5DGVGupkCIziT9Wz6Rg2X0w44Ku4x3hDkp2vwvA7559k72VzUMrY1sjvPoDCLYf/Vil1EljsMGiRkQ+KyJO+/FZQFfriYb8c3C2N/LLVW5+4/wpE1+4hbM3fp7HYn7MxgNWP0ZpvY89lc1sOFBDedlhAEz2qQDUxk9nemYC3/vYKcxeYS28ZOKtIbuT24sAcDSV8vJHlUMrX+E62PALKNWU6kqNJ4MNFrdiDZutBCqAa7FSgKjhlrvC+vvKf8DeF+HCH8IF3+Nc54e0f/QsAJsOWnH6cK2PwiIrACRPng0zVrHowtX8/ctn4olxQt7ZEJ+BLFoNgEuszu9c8Q49D1Vbo/XXd2xzQZRSJ7ZB5YYyxhRjZYXtIiJ3AA9Go1DjWsZM8KRandFJk+CsO8CEqdr0ONdV/4pdG09hU8nkrsPXb9/NxcDk3Kmw4hkSgITOnXGp8O1CqD8Mm37VdU6eq5YPiusJhsK4nMe4/pUdLLbu3s+0qe1kJsYe18dVSp0YjmelvH6SHKnj5nBA7nLr+fxrrNdOF65rfodxuJj/ymqm7Pot/za1mCniZVnbRoLiIjZ9ysDXTMruetogKUyPqccXCLGrvGngcwbSZp3z1rYCntpaeuznK6VOSMcTLPpbCU8NhymnW38XdKcxz5i9Es/XN7M95UK+4VjDv1ffxeux3+RK5yaqT7sdPCkDX88VC3FpAKTOPRdPoI442oaWtNCuWWRIIxUNOl9DqfHieILF4PNRqGOz/PNwzcOQs7TH5rTUVBZ//SlC1/0J85knKU4/mw/di5l0RZ8s730l2etMTbXSo5+T0cwbe4aQtLDdqllkSDPl0VoG9uCbUPlRdK6tlBqSI/ZZiEgz/QcFAeKiUiIF8elw6nX973M4cM6/CoCZsy8d/DUTJ1qT8XKWAPBQ69e5u/Fm6luXkJbgPvK5VbshebLVB9JZs6ApemuG//N2K7nijX+LzvWVUsfsiDULY0ySMSa5n0eSMWawCyepsWDSIpi82OoPufgegp50TpN9vH602oUx8MglVnJC6OqzSJcmKhqj0AxljJVy3TvwjHWl1Mg7nmYodSK56G645SVwOOGsr+OcfCozXV5e2XWU+Rb+egg0Q42dN7Krz6KJmpaANYt8OAVaINgGDcXQ3jK811ZKDZkGi/FCxAoUnS/T8slzVLPpYC3BUHjg81qqrL/1h6y/7VawSKcZB2GqGod5Jnfnqn8ANXuH99pKqSHTYDFepeeTEGqE9iZ2ljUOfFxnsGg4bDURtTUSxIlDDKm0UD7cTVGtEZP9vBoslBorohosROQyEdkrIgdE5K5+9t8sIl4R2W4/vhCx7yYR2W8/bopmOceltDwAljj2U7zlpYGP61yyNdACrV5MezMlYSstWIZEoZM7smZRXTC811ZKDVnUgoWIOIFfA5cD84DVIjKvn0P/aoxZbD8ets9NB34InA6sAH4oImnRKu
u4lJYPwC9jf8PHd34NQh09dof2v07HzxdDXWHXtqbD2xETptBYw3AzpTEKNQs7WMSla81CqTEkmjWLFcABe6GkALAGuGqQ514KvGqMqTPG1AOvApdFqZzjU7oVLFJMMy6CVJYf7rH7wManiWkson3va13b1jz3AgCHjDUjPNfti17NIu8srVkoNYZEM1jkAJELJ5Ta23r7lIjsFJGnRKQzZ8WgzhWR20Rki4hs8Xq9vXerI4lNgvjMrpdPvv5ej92Oql0AuCq30SCpAGT7rF/6hyQH43DxNeff8RcfeW3vto4QZ/y/13m9oGpw5WqtgdgUa0JiY7E1GkspNeqiGSz6SwfSe4LfP4E8Y8ypwGvAH4/hXIwxDxljlhljlmVl6fIaxyxjJjitRIB79+/hQLU9VNUYJvoOAOA0QQpDmfjdGSxyWYsnLVq4CLnuj2RRz8Xex2jrGHj4bGVjGxWNbYPPQ9XqhYRMyF4IwJPPv3jE6yulRkY0g0UpEJndLhcojzzAGFNrjOkce/m/wNLBnquGwcd/Bp99CoBsqedvW63KXLO3mGS6F0dqdmXgmTCdaabMOm3FKXDKx2nNOo1JeNlW3DDgW9T7AgA0+DoGPKaHVi8kZBGaaK3PsXf7Ru7/585j/mhKqeEVzWCxGZglIvki4gZuwFqatYuITIp4eSXQ2Ui9FrhERNLsju1L7G1qOE2cD3nngCuOFelt/GNbGaGwoaTgfQCa3ROswyZPQ2as6jrNk2iNNUjKzidHanivaOB1sDqDRIMdNI6qtYaqcBL/+nQxlSaNWz1vcu/O89m56ZWhfEKl1DCJWrAwxgSBr2Ld5AuAJ40xu0TkHhHpXBvjdhHZJSI7gNuBm+1z64B7sQLOZuAee5sabiKQPJmFya1UNbXz9n4vTYe2AxCz0Mp6O3vGDFjwqe5z7Ay3sRnTSJcWXvrgIA+vL8SYvmnEOmsW9YMOFl7erXLwbmEtjSlzyQlZtZmSjzYM9RMqpYZBVOdZGGNeNMbMNsbMMMbcb2/7gTHmOfv5d4wx840xi4wxFxhj9kSc+6gxZqb9eCya5Rz3kieTTS2TUjw88dIbTD+0hsOOXDz51qp9jqSJkDWn+3hPsvU3xWopTGyr4r4XCthU2LeGUddqN0Mcrg+RAAAgAElEQVT5B9EMFQ6Br5bKYCLXLMlhzqKzunYdrG7sNxgppUaGzuBWkJyDo7mC+850cn/9t3GEA3gv/a01IsnptpqrAC57wNrmslfHs4PFX0/7kC2eL/PMu33nRXQ3Qw0iWLQ1gglTHUwgyeOyajOn3gCA8Tdy0Ku5opQaLRosFCRPguZyVr17CzGuGJ5f8jDLTj/XmuX9nVKYYq8LvvLL8MU3us9LyQXAtf3/yKSRgwXbqG/t2dx0TM1QHT4AWkwsibExMHEeXPN7wrEpJNPKm3t0eLRSo0WDhYLkHAgHkWA7qV9ey81XRayT4TrCGttJk0CcELIGtE0KV7FuX8+U5501ikZ/B+HwUZqROqwJfm0mhkRPdwZ8R1wqE2L8FNW2HsOHUkoNJw0Wqms2N5f9J2TOHPx5Tpe1KJJturOa01+7loZ3HqbcXnK1s8/CGGhqO0pTVNA6pw03yRHBAk8KGU4fdS2D7CRXSg07DRYKpq+CL70DS4aQr9FuisLh4sLYAia3FrBr/T/4/B+3AD2bn47ab2HXLNpxkxgbESziUkkVX1fg6VK2FTp0HXClRoIGCwUOhzVjWvqbOH8UmbOsju5Ji1kYtNbNTvcfpqCiiZL9O/lF4+3MSbD6Io7abxFRs+gRLDypJEsrta0Ra2f46uDhi2D748deZqXUMdNgoY7PxffCrS9DWh5OrLQc+VKBEObA5peZY4r4WNI+YPA1izbjJskT0709LpXEcEvPmkVjCZgwNFcM68dRSvVPg4U6PnGpVlOUvT4GgEc6uCC7nYpD1lDa05xFADT4B1+zSPL0rFnEh1to8HcQ6uwkb7KDhCYaVGpEaLBQw8
MOFgfDVgaX6/P8JPlLAZgRtJIS1rcOsmZBTK9gkYIr3I7bBLrThnTWKHw6sV+pkaDBQg0PO1iUT/kYAOdlNDJFrHkRE1r34pQwmwpr2XigZqArdNcsjJuEXh3cAMm0UtsaIBCMaH7ya7BQaiRosFDDY8oKOOOrnPOZ74InlbjGg+Q7vTSZOFxBH3lU8OruKr74py10hML9X8OuWRDjIcYZ8Z+mxwoWKdLKT1/Zyzk/foOwNkMpNaI0WKjh4YqFS++HhAzInA3F75FimijKOBeA/477IwukkNZAaOCU5nbNwhWb0HN7V83Cx+sF1VQ1tdNSY6VTb2nw8s7+I9RWlFLDQoOFGn4zL4Rqa6W9RRdcDyv+lcUxpTw99W84BN7ZP0DaDrtmERsb13O7x0qJniKtBO0O7o56Kxstvjoe21A0/J9BKdWDBgs1/BZc2/08PR+u+DGOc7+Ju2oHl07y83avmsA7+2u46+87IdhGBzEkxLl7Xs+uWaTQne7D7bfSiiRKG3VNmmBQqWjTYKGGX+ZMmLTYep5mpxKZfzUAn0nays7SBioau2dev/hRBWs2lxAM+AhIr2Gz0LV+RrbbqnnMTI8hKdRArVg1jrZmbYZSKto0WKjoOOt2mHUJxFk3dFKnQO4KTve/g8vh4L9f7k5nXlZvBY5Am6/v7G3oChYT3X4cAp9fFA/ArqCVasT46rrnX5yoNvwC3vrv0S5FVNW1Bvj2UztoaQ+OdlHUEGiwUNGx4FNw4996phCZuhJ37V4+f/Y0nt5WxkdljQCU2UkHO9p8tBm3lZ48kjMGPClMdjUzIyuRT0yz5lrMXbQSgKTes7tPRHtfgj3Pj3YpomrTwVqe3FLKB4d1BNuJSIOFGjnp0yHUzpeXxBPjFP65oxxjTFfNItjuw997Ql6ntDxWpjXzy7M7SPzHzRCTwIRTL7Z2STNVTVYT1ftFdQQHGpo7lgVarMdJzNts/RtVNraNcknUUGiwUCMnfToAyb5izpiRydpdldT7OvB3WDmlQgE/vvDAwSKlrYxTKp8FA3xpPUw4BYBUacHb3M6eyibufugJvvR/W46/rJUfQWMpHaFwVw0oqgKt1uME9/Y+74Cj02rsFPOVTRosTkQaLNTIsYMFdYVcMm8ih2p9lLz0Mz7vfAGwahZ98kJ1SsuH+sNQtdvKkJsxA+LSrV20UN3cRl3Rh7wY+11C+17hut9t5LfrDlJS5+P632/i0XeKaA+GBl/Wv90Ej3+an67dwyd+9U5XzaVLMADb/gzhYarFBFqh/cSvWfzklb3818t7+l3oytvUxhLZR4XWLE5IUQ0WInKZiOwVkQMictcRjrtWRIyILLNf54mIX0S224/fRbOcaoQk54AzFuoKuXjeRABSCh7n0851QHefRVq8u++56fkQ7oDyD2DCXGubOwHjiCFVWqhqaidQtQeA8zKbKarx8cg7Raz9sAzX4be55/ld3PnXHRjT9yb217d28Pib27o3hMN2YPqI6nefwBg4UN3rRr7/FXj236B403F/LQAEfFYzVD/lO1FUNPrZWdpIW0eYknpfn/2TvW/zdOzduL0fjULp1PGKWrAQESfwa+ByYB6wWkTm9XNcEnA78F6vXQeNMYvtx5eiVU41ghwOSJsGdYVMTPawIi+NCcFKJomV3ykU8NGOm5zUuL7ndma1NWHIsoOFCBKfzkSXj+rmNkz9YQD+Zb6bOy6aRU1LO9Wbn+Jx93/y8OJCXviwgkfe6dVEEurg02+eyzlvf6Z7m6/GCkzA53kWgMKaXk1Enbmp6g8N9dvoZozdX2G61iE/Eb22u6rr+d7K5j77JzXvBCDYXNVnnxr7olmzWAEcMMYUGmMCwBrgqn6Ouxf4MaB10/EgfTrUWTfsz86PI17aSRI/05OCEGyjjRgm9xss8rufT4j4zRGXzkRXK9VN7biarSy3jpZKlk6zhuxOarBqDBdWP8ayKUk8u728x2VbNvwvAFNMOaGwIRw2/M8z6w
AoYwKz3bXExTgp8vYMFp3pRl7e8D57KpuG9l106vBjdcQA7S1HX352lFQ3tbHoR6+w9XD/yRtf2V3VFej3VfUNFtParOHSHb7o9AGd8MOnx7hoBoscoCTidam9rYuInAZMMcb0N2YwX0S2ichbInJOf28gIreJyBYR2eL1DpBCQo0t6dOhrhAqdnJRdnfTzixPE7EEaDNuslM8fc9LyQWH3Zdhd2wDkJBJpqOZquZ2Enx2CpDmCmZPTCIx1sVSxz7aXUlIXSE/4rcUl5fT0h6kua2Drz/xAc53rLkNpSaT8gY/eyqb+ahgt/U2s5YRE2xhVoaboprust7/wm5e3LgdgNaqg6x5P/I/8yGI6Nj+6FAZp93zKge9Y6//Yk9lM43+Dt7a2///azUVxVyU72ZKehx7q3qWPxwKMydspaqX9mbaOo6h/2gQ/IEQy+57lSfeL2Z/VTN/2FDEe4W1w/oe410/PYnDpr81OrtCv4g4gJ8DN/dzXAUw1RhTKyJLgX+IyHxjTI+fcMaYh4CHAJYtW6Y/K04EWXOtppbfn0P8KZ/o2pznqsdDAGLi8MQ4+57ncELqVKsTOD69e3tCJmkcoqzeR2rYbhpqKsfpEFbmxjKv9DB1C77ChMQY5m34Jd9ztrK9eBUVjX427NhDnMf6lZyEjw9rfRz0tpBtN4vF5CyG/S8yPz3EpqpWwmHDUx+U8r/ri3gpww+tcIqngUcPHWea9Ighs/uKKwmFDXsrrTklY0nnkNcdpX1rBqGw4SeBe3FVz6J0wl3s69UM1Vyxn1SxgmIiPqqb2pmaET9sZTtc10q9r4PfrDtArMvZ1cf0/vcuZEJSPz8+1DGLZs2iFJgS8ToXiGwDSAIWAOtE5BCwEnhORJYZY9qNMbUAxpitwEFgdhTLqkbK4s/ATc9DbAoUdFcop9jBwhV7hBtI3tkwY1XPbfGZpIQbqWlpJytk5YuiuRK2PMp/BH6OS8JkzD0HLrqb4IyLWew4wOZDdby6u4ppUgnAbplBIn4O1bSw+VAdszyNGGestb44MDcpQEm9n6t/u5FvP7WTxVNSmZNo9S3kSjUFFU00H0/TUUQ/RWWNlbqkvMHPL17bzzPbSod+3WFWbqdo2Vna0GegQG2zn5lSxrTmrcyemMhBb8+Jkq2Hu4czJ4u/R7qX4VBca32HJXV+DlS3cM0SqxGjuqn9SKepYxDNYLEZmCUi+SLiBm4AnuvcaYxpNMZkGmPyjDF5wLvAlcaYLSKSZXeQIyLTgVlAYRTLqkaKMwbyz4Hp5wIGkiaBOJgstXgIEOs5QrC48n/gmt/33JaQSWywiQyaSBI/re5M6GiF1+9lmned9ZZTVwAQM2kBMxwVvLOnjPX7a7gix7phtabPxymGcm8Nmw/VMTe+GUmeDAmZAOTHtxEKGw6XlLBhym/5+7SncTRbgSapw4vTBAdOu471i/zra7ZR09L/javS291cEqg5xAOuh6iuqeXh9YU8/UHZkb7NEVXRYNUs6n0dlNb3vNnXVZUQK0E8gXounezHAKt+uo5CuzktVLGLoHEQdHpIxD/scy1K7PJkJ3s4c0YGq1dMtco1EjP7W2ug8K3ov88oi1qwMMYEga8Ca4EC4EljzC4RuUdErjzK6ecCO0VkB/AU8CVjjC6JdjKZcaH1N2MmJGYzMVSJUwyxcQlHPq83+4a+3G2NhGrIXGJt99fB8i/ADU90N1tNOAUnYXzlBfg7Qlyc3YoRJ6csPhOAHQdKqGpqZ4qzzuojibeuPSXOurE9n/YzcrzrcX74N/DVQspUxITJddSx+QhNUU9uKeHZ7eX8/NV9ffYZY7j7790DAec1vcMNrnUEDm2kuT3YNbt9LChv9BPvtpoId5T2DI4t1d2/5Razj2f/7SwafB28ao+QCrbW0kAiYU8aifj7zls5TiV1PhJjXbx8xzk8ckUiud71wAgFiy2Pwp+vgdDJnfMqqvMsjDEvGmNmG2NmGGPut7f9wBjzXD/Hnm
+M2WI//7sxZr4xZpExZokx5p/RLKcaBZ3NSWl5kJJDVsDqJE5IOMZ2evuGflGK9Qu8Y9Ly7n1L/gXmXtH9euJ8AH5ynovbzp1ObrgcSZ1KYtoEAKq81TgdQnrIC8mTIT4DgDyPn99fO4Nc/14ruLU3AgamWO91RkbLEWsWa3dZtZA1m0v6zNdoDYQw7d0d3DnGOtbUHASsvFn9zQ0ZDRWNbZwxPQO3y9Hn8wa8h7pflLzHgpwUclLj+Kjc6mYMt9bRaBJwepJJcfiobRnGm3hzFZ/Y823mpQZJjXcT99Z9TFz3TWCEgkVbI4SDJ326Fp3BrUZH2jQ4999h8Y2QnEOqvxiAhMSkY7uOXbNY6tgPQMz0s6zt7iSYML/nsRkzwRHDfGcp373iFBz1RdborNhkAJLwc++V84hprbQmENo1EvHXcWm2fUNfeH339aacbr13chO7yhv7vamX1PnYVd7Ev543HadDePy94h7761oCxNHdPDVVrH6XaWL9Im8PhqkdA0kSjTFUNPiZkh7Pirx03t7n7dwB5duhwarZhaecAYetiYpXZFRQUGrVuBxtDTRJIo64ZNJd7V2pP4ZF8UaW+t7hYzFbrfKUvIf46nA5zMgEi6BdS9JgoVSUrPo+TDsDUnJxB63/0dJSko/tGnbNYprvQ1pj0pk0Y5G1fcpycPYa7OeMgaw5VsoQY6z5HhkzuoLFDy7O5TNzsH4lpk2zjo9NsZqc6qxf+sz9GLjs0TWTl4A4mOVppN7XQXk/aSxeK7Bu+quXT+WcmVY+rMigUtPaToJ0n5ckVrNTZ7AAxkRTVFNbkPhALXd9eAXXZVeyv7rFyhZc9BY8dB6zK/5JDak45l8F3gLY+Cu+V/YV8uo30NzWgSvQiN+ZjMQmk+LwU9s6fB3PpsUKsEvDO6H2IPjrEBNiWlyAOt8IBIvOteNPgnQtR6LBQo2+nCVdT3MzU4/t3IQsABwdrSTkzMfhSYSZF8Opn+7/+EmL4fAG2PGE1ZyUPh08VrA4baITyj7oPg6s2oWvFmoPAGKNkMo+1dqXkgNJk8gRawRTfwkHO/a+wtNx95G3+V5+UvdVcho/YFd59wjw2pYA8f3MR82TSuLsIcTlDaMfLCoa/cxylOLpaOCsGKsWt25vtf29QGagFK9zIiy8zpoP8+p/ADBZaiioaCa2o5H2mGSITSIJ/4Cd/UPhq7ea7ma0bIWSd7u2T/P4qR+RmoX976M1C6WiLP+87ucx/czePpK4NBD7P+NMe3T1Z5+CRTf0f/yq71sjsP7xZXDEwLSzINZu+mprsnJPOd3ds8TjM+xgcdBawMkVC7nLrWMSJkBKLqlBL06HsKufYDGxegNLzG5477ekNe1hpbOAlz+q7NpfF1GzaDDdnftTpJozp1uBs2wMBIuyej8TsdahyGgvJjctjnV7vdDYPVqrMXaS1Sw4+zIrLQuQIU18VNZIfKiZUGwqeJJJwD+sfRa+Omt+TVygDrb+oWv71NjWkWnCs2sWv3xpG43+sTn7fjhosFCjz+53ALqbeAbL4ejKPkvWnKMfnzwJbnkJPvYzuGMnTDq1qxmK9mar/T17IbjsZIadwaLuIKTPsLad+y1rrojTBck5OJtKmZGV0NWZ2ykcNhhfPQ3ubPheFcSmMCOhnY/Ku4NKjd1n4TOxNBtr2LARJ24JcV52gAS3s8cw1UJvy7GvNLfnRQJ/vIYf/uPDIbXhP/JOEV/80xayxQoWUlfI+XOy2HighlBD9zwQX/xk68mZX7ODcAo5rmYOVNaTSKsV2GOTiQu3UtsSGLaO+1CLl1pjB/zSzVZ/EzDZ3dpVs/jJ2r3c+/zuYXm/Puyaxa6iMnaWDjzQ4USnwUKNDRMXDv3czmAzmGABkDQRln/eGvEE4E4EBNoarGAxubtZjPgMaK2F2kKrfwOspqmpVuc2KbnQWMbi3BTeLazlUETCwbIGP4mmmbAnDWI8kJBBtqu1a74CWM1QKc4AAWccAadVq5
LsBQBcMy1ATlpcVzOUMYarf7Ox3yG4R3TwDdxFr7Puvff59lP9Z949kjf3VDMlPZ6bT421C32A82dPoDUQoq6iiFLPLHaEZ1CbZX8nU1fCLS9CSg657la8XqtPwRGfDrFJuMN+gqEgzcO0vKrDV0NBeCr11/0drvoNXPsoABOc3RMDn9payvM7y490maELWk1qCcPcvDbWaLBQY8P5dgb7zuyyx8LutyBzkMGiN4fDaooq+wACzTD5tO598enQVGr3b8zoe25KLoTa+cZZ6bgcwtee2Nb1a3Z/dTNp0oIrwa75xGeQ4WjumgkNVjNUmitAfEIykybYn2OKtVxsYvUWclLjupqhGv0dNPo7eK/oGHMe2RlyT5VCXiuo5rkdx3bTLKppZcnUNCZS13W9M6fG4nY68NUUs601k2uC95Ew79KeJyZkMcHRTFmF9X7uxPSuWlwifmqah+fG6m6roZYUkuaugtNu7Pr3y3I0U+8LUNFoTQKsamqPTpLGDuvfJ0HahndI8BijwUKNDad8HH5Q1/3r/VjEZ1g3oaTsob9/bHL32hSRwcIT0eHeWZuIlJILwCRTw0+uW8SeyiYu+8XblNT52F/VQioteJLtmk98Jimmiea2YFdTUm2rVbNwxyURn2A3h006FeZcAW//hNM9hymu9WGMocpOXVFQ0YwvcAy/yluskVWXpFWQkxrHCzsrMMYMqobR1hGirMFPfmaClUbFTvkW33yYFXlpZFNH9tSZ7Lvvcq5YOKnnyQlZpJkG3B1WnihPcmZX/1ASvmHrT/AE6mh2puFy2rczVyzEJpNKE2FD9zBf+lmXZBgYe+hsIm14tWah1Ahw9JNAcDDO+Df42E9B+stdOUixSVaOJqe7KycUAHaTENc+BjlL+55nBwsaS7lkfjZPf/ksaloCPP5+MfurW0hztOJO6gwWGSSGrP6KSrt2UdMSINnRDu4EiLUnJCZMgKt+DZ4ULmt4kub2IN7m9q4UGaGwYWc/yfxoLIXybX02m86ahaOQVXMn8M6BGm79w2Y++ZuNR/1aiuxmta5gkW03F9Ye5HOLEoiVDhbNm4fT0c93n5BFYqieVLFu0AlpmV0jzxLFT+1w3Fg7/HjCPtrc6T23J1g5wwDe2FPdtTkawSLYbuWlSpDh7bgfazRYqBPflBVw6vVHP+5I7JsYWXOt+RWd5lwO/1ELC67p/7xkO1iUbYGqXSzMTeHMGRk8t72cNwqqSKHF6tgFSMggtr0OMJTb/RbWaKiAFSzcdrBIzLKav3KXkWnPbD/gbemRIuOD4vq+ZXnjfni815BhY8BebCjHv5dVczPxBUK8udfLjpK+CQF76wwW0zPjrOasafakxz0vcOlEq8bgTp/a/8mJWcQEW5lod4ynpE7oUbPoPTGvcz2RI2prgj983BqdBtBq1Ro64jJ6HhefSULQ6mx+a5+XeZOScbscUQkWITtYJEu79lkoddLrHD47cUHffb0n90WKTwdXHGz4BTx0AZRv5xOnTqaswU+HrxEn4e5gEZ+JIxwggTYqGv2YcIhzfK+RHK7vGSw6+2DS8olvOQwYDla3UGVP+stNi+ODw32DRWnRHmip4sMDh7o3+uqQcAe7wtNwBX2cmVLfNX8DrHQjR9IZLPLi2qzJiunTYekt8NFT8Bc7QKfk9H+y/TlmiNVnkZo5obvPQnp2BhtjuPhnb7HoR6/w4GtH6MCv3Q+H1kOJnU+rxQoW4fisXu+dSVyH9R21dYRZkZ/O9MyEqASLzhncUxNDWrNQ6qTXOXw2u59gcSQiVtNMxkzr5vjXz3HpzATSnG18fIZdQ+kKFtav33RppryhDd/uV/iJ67ektZdZgSK2V7BIz8fR4WOqu4WD3laqmttIi49h0ZTUfm96zlZr/sZ9f3i2OwV4i7XtPac1wiu2bg/3XDWfKxdZI8GO1slc6G0lO9lDQrvdlJM8CT7xIFz/J2swAHTXrnqzP8cshxUs4pMyur7nie6OHjdWb3
M7hTWttASCPLX1CGnZO2dJ++0hqnbNwpHYK1jEZxAbqOeeq+bzk+sW8Y2LZjNzQiL7q/uu4He8nCErWKTHBLRmodRJr6tmMf/Ix/Xnc8/AV96Dax+BxmJS3ryLzQl38KO4J6z9nVlv7SG+0+OtmoVv57Pd12ithtM+B1f+yuqgha6lZFemNXGguoXKxnZ+5PoD5zt2UFrvJxgKd51uwmFSgtYoqSmmjKe2WDfc2ko759acCwAB716uWzaFTy21bvC9O2RrW9rZcKCm63WJt45ZmW67cxtrQiPAvKus3F5peT3nyURKsBI0znZW0EK8VUOzv+fs2J5NNvvt4LciL52yBj/+gWo8nbOk26xgEbJTfbiSew1uSMhEfLX8y8ppXLs0l5T4GGZNSKK0/gjXHgpjiDFW0Eu0R0ONlcSPw02DhVLQ3WcxlPkesYnWjXDqSivdxc6/4go04T78trW/V81iRkIbu8sacOx7iS1iB6esU6yRYEs+133ddCtYLEqo56C3hYamRq4MvMCyljcIhg0VEbmoqrzVxIt18z03vYG/binh2e1lPPv2Vmvb6cutfFfePfDub8mvWQf0rVl85+kPuenR92nrCNHgC/Ctqru4M/gINNgJEJMiRjyt+j7cvn3ggQV2EMkxlfhdKd3fgSOGmTE1Pfpg9lU1s0T2cfWsGIyBwpoBmova7ZqBXbPw26k+4u3MwV3iMyHcYWWEtc2ckIgxDO+StcHuz5CAn0AoTFPbyZmqXIOFUgALroULvgcJGUc/9kguuhvyzrHShXSugNcrWEyN9eOu/IAMGphw/r/Ct/bDRT/se63UqSAOZsd4qWhso63WumFnBKxmncO13SvsFR8+2PV8eWINpfV+vr5mO7VV1jmTcvKsgFS+DV79Adl7/gT0rFnsLm/ild1VBMOG4jofr+6uYqaUMq9tG1TutD5H50TGTkcagZbQ3TSUmTXReuJyw+TFzA/t6RoKDFBUWcsTsfdzSd2fATjobaVfncHCrlm0N3vxmVhSk3vlFOscRm2PBAOYNTHRvvYwBouO7jkznnDnCLfoNkW1tge7FpUaSdFcg1upE8fkxdbjeKXkws3Pw1s/hmo7vUSvYLFqqoP8hDo4BFOXf6LnmuKRXLGQnMusGKtZKDVQBW5IaLWamA7XtXI21q93b1kRAOG4DLJ9+3hh1j8JrLyDRYeSYWcKuOOtGe77XgIgpukwDulZs3h4fSEi1gCqoppWXvmwjOukBZpaoAgrueKxDE92x0PSZAi1I+d8s3v7lNOZWvYQDYFmjDGICJRtJZYOXL5DOOQIQ1wDPfssgi11NJJAeoK753Gp06y/DcUw4RRobyYvIYjTIcPbyR1Rs4gNW8G7tiXAjKyBThi6bz+1g7f31dDgD9ARMqz/9gVMTj3GXGrHQWsWSkVDZOqRzmARmwRON1M9fs7LaLZyWg0UKDql55HaVsoVC7O7sts6WytJcgW71p0GaPZaAcSRfw7SWML8kic4rf19HC1VVnoTsIYF26SxlKx4R4+aRUFlM8unWeXZXd7ErgNF3eWoP9RzsuJgfWk93PERzItYHHPK6bhMgDnhQup9HRhjyKqzsv066wqZkh7PQW8L4bDh928dpKSu+3N21izaW+oIBMOEffU0mAQyE3sHC3s4b0MxhEPwh4/jfvpmpmXEs7/q2INFUU1rv6v7ddjDZtudCbiCVm0oWjWLN/ZUEx/rZNXcCYTCht29cpFFmwYLpaKh88bsTuqetyFitaX7aqC+qKtP4ojS8qD+EHdePIdZsd3DZZcnN/ZohgrU2yOIFvz/7Z13eF3Vleh/615VS7J6s6qLXLFxEc0YCA5gTI0xA06ZcQphyIOETF5mIJNJJsMkL8AwSV5eSBjzwkuhJS/BE0NowYMBg3ER7ja2ZFsu6rKkq973/LHP0b2SJUu2dSVbWr/v03fP3aftfY+911l7tZVWCInHTpS+E/6lI1eAecPAdDF7Qj1VAZpFma+FGW
kxJESFsXZ7CTHdfZLinY3mFZVkNYxAsm06k4WeQsp9Nup5btd+u893nOlJ4RyqbGT9x5X88LWPWbvdn9m2rnCJGI4AABl4SURBVM6mHDlWUso/rt2NtNRRTxSJ0eG97xGdYpNS1hbDjuehbAdU7GVacjRFgyzhvLa7jLXbAzyySj7i3l++x7+8vPeUY+vr7YTdFp6It8MKi6qGNkrqWvqPhenDv7y8l2c2Hhn0uLbOLqob2/nU/AweW2lT5B+oaLACc7DYlGFChYWiBIOEKTYFuqtVuMRlQ3UR1By2xwzGxExoqmJaQij3zPUHC86PrqXypNU0yn2tSEOZNSLPvg3+4bA9r/aocx8nhUrKLOvie+m9AMyMqKawspFVazaxp8RHXXMHabER5CRO4FhNM8nS5831bDSL/ohOoS0mm4WeQioaWik4Us1Cz0E6w2LBdJM/sZ5DVY08+poVIG4ixY6ubrYetEGKKaEt/KHgBE311TRKDLGRob3vIWJ/67qjsOGHgEBTFbMTheLqJjoCPMn68pO3Cnn0tY+tV1OrD/PL61lS/0q/GomvwWo6XZGJSHcHaROEncfr+N66vaxa8yEnaptPOcfFGMPvtx7nd1uPn7JvX2k9O4/7hXWFzwr1rAntxDSfICMukn1l9Vz7xAYee/3jAe8xnARVWIjIjSJyQESKROTh0xx3p4gYEckPaPuWc94BEVk20LmKcl7iDbUTc2Qfw+ukBVC2077xxw9Bs3AD3upL7DmJNhXJ6tqf8XTtPbS2tfHq7jJSpRZPrKNBuBNl2Q5rCHaFUmgkfLXApkcBpnqrOHqymQ8P1/RMWJPiIpicaOtqXJzgJN1Lm+fU7sg6+9+jD11pFzNHiqmsb+Xg7i1MlBZk3p0A3DWlneyECT1GbrcC4QtbjtHZbL2bJtJEUnQ4UV0NzJ6S3X+6kbhsOPKu/e2mXQfA3KhaOrtNj5F7/f4KnnrH7xzQ0t5FYWUDFfVt9r6NVUh3J5lSzdGa5lPe4hsbrbAwTlDgNZMjebewmo2F1bR3dvNvbxwY8Dco9bXS1N7FwcqGU9LO//O6PXzrpd0938t8LYTQyXXb7oNf3cyMtBj+sq+CkroWfvVB8YjEdwRNWIiIF3gSWA7MBj4tIrP7OS4G+BqwOaBtNrAKmAPcCPzcuZ6iXDhc+WDPW3wPGQtt/QPTPUTNwhEWvhK7rJR+MYTFENtRSaLU8+d3NvHKrlJyw+oJjw+IpI7PgWonErpvcsboNAiJIBN/6db3i6pJxEd2WCO5SVZYzI1zhMUda2wsybnk3upDWMbF5HgqqTl5ks4jNkeVd8HnbNdbT/CH+xbzgxUXsXRmCqV1LRhj+M2mo6RF2ElVutp47vNzSQltIT0tvf+bxOX4XWfnfwaABTE+vB5h7fYSWtq7ePil3fz7mwdo7bCxF/vKbPJBwEbJN9vYlRSppb2zm7I+dovGJivQvDFWWCzJCqe6sY2Wji4WZMfxpx2lAxavOljhCBoDuwK0CGMMBysaKT7Z1BOzUV7fyv3ePxFzchfUlzA/oZ32zm5CvUJ7Vzf/7/3Bl7LOlWBqFpcCRcaYw8aYduBF4PZ+jvtX4HHoVVvyduBFY0ybMeYIUORcT1EuHOZ/unfcBPReyhmKsOhJVHjcviHHZfVK4/7Wu+9QdOwEU7uP9A4ojAvI19Q3tbrHA/G5pHVat1KvRzhc3cRjoWuYs+kbPcIiL7rVlkhNmnHmke2DEDLJ1kqvOrSNvLY9NIWn2N8mPBZqDhEfFcZnL8shNzGK0roWPjxcQ1FlI1lR/uWjGVEteDpbTtXeXNzfICYdpl4LQEJrCcvmpPLiluM8+XYRVQ1tdHQZdjtVDvc6halCPELB0VqqKq2bcprHth8NqFdCSQHddVYjC4+1rrqXTLKG9ohQD9+5xb4bbyuu6bd7hRX+aPLtAcKiurEdX0sHze1dPfmzynytrPBu7Elrsijc9uvqvGSWX5TG1iO1QQ8GDKawyA
ACF+NOOG09iMgCIMsY88qZnuucf6+IbBORbVVVVX13K8r5R8JUf2qRoRi4Xc2ipMDmZorNgvwvwNX/AMDCyAp+fHEJXtNpo6pdXNdR8Vgtoy/xk0nvLmfVJVk9qcVzpJLwusNcNyuFv182g9yIJmuQ9wRhmnCy13aX7uISzwFr9BaBxCl+jQi7LNbc3sUz7x8hJiKEhJBW3DTp1Bbbz752IRdXWORcaY+JiIPaYr5w5WR8LR387O0i5mdZQePm2tp9wkdiVBiLcuJ5fvMxHl9rtZ7ccDuxv7mvgm/8bgctbZ3w69uZX/RzAMITrFBP8/jIS4nm2hkpXJwZR1SYl23F/Ru6D1Y0khwTzpSkKHYECItA195jNVY4ldW1kOSpxzNtKQAz5CgAN81N59GV8/jd315uXZCDSDDjLPrreY/oExEP8GPg82d6bk+DMWuANQD5+fljM8ZeGVt4PNarqOSjXkFrAxI2wU50hW/a70l5MPlqu73zBb6c3Q4tH9iJMbDCnztRxmb604cEEptJ+LFNPLpyHr/YcIiXd5aS6qlDGpuZ4Onm/munwQsnh9bHsyEmjZPEcr2ngHSpgWlX2fbcq2DTkza9SEwaGU4cwdsfV/LJWSl4qpqsptBQ6hcWEQNoFq4wzlns/157hPyceB6/cx7R4SFcOyOF5f/7XQpcYVHi46KMWBblxLOluIbl08LgOCSZGsJCPPzqA3vPS1NhVXsDTjYvJPsywMaLvHjvg4SFePB6hAXZ8T3X7kthRQPTU6NZ1f6fxB3+kI6u/yLU6+nlrXX0ZDOLchKorvURTYv1aItJJ6mpiNe/fi8zUmOCLiRcgqlZnAACLWKZQGCJrhjgImCDiBQDlwPrHCP3YOcqyoXL4q/ZyoBD/U8+MdPaK7xhkJHvb0+eAcc2w+ENVqsIvJ6rTfRX3Q9s7EVrHXS0MjU5inDamUgTYOxEDDZJ30B5n84VEXwTZ3Kl13FHddxpWfR5MF2w/bcApDvCYqo5yp3Ru23yQndprsZZpx9Is0ifDyvW9NgriM+FmiOICHflW40qMszLwux4PjpWR21TOwcrGpifFcd910xl40NLWZptTaXS0czMgNu8vqmg972iU6232fHNJEaHExNhvbMW5cTzcXn9KQbsrm5DYWUjeSkxLOnezGVmFxsP2jxXhyobiQz1IuKP0m/xOfalqGS73Fixh5lpE0dMUEBwhcVWIE9EJotIGNZgvc7daYzxGWOSjDG5xphc4EPgNmPMNue4VSISLiKTgTxgSxD7qigjR971sPirQz/e9YjKWNQ7ZiF5JvicnE35X+p9Tkw6eMN7F3IKJNoJ1GusYGpKNMkSUEzJ58QYNFUFT7MAJt/0IF0zb4Ubvu8vqpQ41WpOBb8BY5g0MRQwfDXkP1m67zs2KM8VFoMtQ4nAxXdbLzCwNiLf8V4pOgDycxOobmzj5xuK6DbwmabfEPbmw1arafaXsJ0Xa82q/3jTTDwNfd5dQyIg8xI4sRW6/XaVRTnxdBtO0S6eePMAze1dXDElnrj6QsKki7e27QFsOpJpKdFMio3kmBOQ2FnvZP11hUXVAegKQonY0xA0YWGM6QQeAN4A9gO/N8bsFZFHROS2Qc7dC/we2Ae8DtxvjBnGVJGKcgHh2i1yl/RudwP/8r9wqv3D44XP/n9Y8nf9XzPayZ3UWEl2wgTSvQExFT3CojqowkJm3ox31bNWcAa+IU9fboVg3TGSn5rDipDN5Hqq8HY0WC+yU4TFAMtQfcm63Np9jvd+77x5bjqRoV6efu8ISdHhpFS8C9uftZNxs984vXJGCN+8YTpfvmoKDy2O6X3t0EjIusx6X7k2l8MbWPLnpaSGt7Nuh1+4bCuu4RcbDvHpS7NZNqkNcVK9Hzj4Ma/vKWdPiY9pKdFkJ0yg4Ggtq5/ZgqfFyQQ8Iclql13tcOQdKN8DLYMH/w0HQY2zMMa8aoyZboyZaoz5gdP2XWPMun6O/YSjVbjff+CcN8
MY81ow+6ko5zWuZuFWqXPJuwHm3Q3XPNT/eVOuOTXxn4ubAqSxnFCvh0eWBiRQ9J2w8QntjeeeWPFscF19D76OtNTyichCcr3+0qhD1iz6knMFiNeOLYDYCaGsWGh/46Uzk5GGcuhogrJdVrNwBOuCuDYeWJqHiDAjMqAuhifUCucsx2Fzx7PWH/bjP+PxHePz01p4dXcZDa1WE/jd1uNEhXn57i2zodwfSzEjwsd9zxbQ2WVYdUlWT3DknhIf12Y6wjQqCaYvs0Jj/SOw5hr47QroDH7RJY3gVpTznWnXw8xb/Ov6LjGpNgbibOwKActQALOinUhjTwjs+SP8+la7xj9ngHKywcR1KT74BgDL4kuJ7g6YnCPibP9b6wCx7rZDITzGLuUdeeeUXV+8MpeIUA+3zU0Fp0YGRzdaYZHqhIe5NT0A6gOWoUIi7GfiNJh7F3zwf+C9J+CEffddntFKS0cXL31kYzte21POzRelELnxh7DzxZ7L/NNVE/n2TbNYf6eXy5LauHJaEtNTo/nDVxazep51ZyYq2TosLPicDe4MibCZhN/+/tB+g3NAs84qyvlO+jxY9dzwXjMq2brVOvW5aay031Nm2bfdiDi4731/9b6RxEnNTvFGACIqd/XeHx4Ns26Frf/XvtGfiWvv5Kth449tLW+3hgkwLSWG3d9bRmhTOT2Ol0c/sMJiyifsdmOgsCixWYSbT0KoIyxEYMV/WCH24VM9AYE5Us6lk2fxg1f3U3C0lsa2TlZPOg5v/Zs9LzEPfMeJbCnny9fnwv9aDLNv49Y71nCrU9GQpipbvjfMERr5X4SdL8AtP7aecrVHra0kGG7ODqpZKMp4xOO1SxnuBNhYbr+78RkXrRwdQQH2zTk2E7rcFBbO5O1x3m3Dou3yG1gbxJmQs9h6W5XtOGVXqNcD9U79i5h0OLrJpkKfkGTrY5TttMtLYDUL1zMtJCBNuMdjKx42V9viS4DUFvMfn1tEVnwkr+wq5cY5acyu/LONt4nLsX2amGGX/xpKbYT/obd7GcppdtyYXdtOfA588yDMvBluegL+6ldBFRSgwkJRxi8xAUsujZX2u2sPmP/Z0esX+JeiPAEJArOcZbjwGOt5dDa4mXdPFlnB0Nc47BZLWrga2nyAsRrEJfdYW8d7T1iBUV/q5P5K8GsWLnk3+AMvE/Og5gjxUWG8/NUlfPSd63nqrhnI/ldgzgp4YCvc8hNrl6ov8bsDN1VCZUCW29O5MXtDhzUVy0CosFCU8Up0mn8dvqHc2gHm3QVX/U+bw2o0cYWFG4AYHguZzpt8eIydHO9ZD19848yuGzPJagInD1nD8Et/23t/j7D4a6tRgK05csUDMONmeP+nVsB0NFnngdgMv83CJTTCltdNmm6N6rVWAEwICyFuQphNmd7RZOM/QsKtRjAx0+b/qg3I8XTov/zbQXZjHgoqLBRlvBKdag3cne32TTk6zRqAP/ndEXlTPS2usJi+zJ+yJGexrQ/iGucz8081+g+Gx2O9rU5sg6r9UPSWdRHucpazGsqtx1RMun+pa0KC/T2mL4O2ehsECVYLm/rJ/vuw/DG4d4PNLNxU5S8H29EKG38E2Yutq61LbIZdCqwu9Ofj6iUsguvGPBTUwK0o4xV3GeqX19tlj75xHKOJG0OSPt9OnMkz7GT98LFzX5tPnAr7/mS3TRc8fxdUHYSv7/JrWB4vXHoPVOyGNJv0sCdwcNsz9jNjEcz5VP/38IbaPzf+pXijTZO+83mrvdyxprdAjsuxMSRF662Bf8o1sP05K8Q8XkezGAU35gBUWCjKeGXiJDtZ1pfC3c9aD6PzhamfhNUv29iFz/3Bb0QeDiNuYkBUe2y2TdII9q2+odQas8FqN6tf9h+bMstqHcXvWYN03BDqe7j3emEVLPmGTdsyMcO/vOYy1SYIpHKvHXvWZbBljRVWhzfYILzATMKjgAoLRRmvzFtlA9qm3+h3yTxf8Hj8E6prdB8uEq
fZz7gc60m0d619468ttprFQEWpQiOtHaJqf+8lpNOROgfu+i1seNQayNvqrbbUl9gMf7qQhMn+pa03v2OF00UrrZfVKKI2C0UZr4RH20nofBMUwcYVFpMWwPQb4JYf2e+1xdYjaeIAxZTAvxQ1VFuJiC11m3eddb2tLrQFrPrDTTEfP9kKyNgsKyiSpsMdT/efPXgEUWGhKMr4IinPZvB1J/zQSCeu4n0bSOfaS/rDFRZD1SxcMi9x4i7MwMJizh12WSznit73uOqb1m4xyugylKIo44vIOPgfH/a2AcTn9kSMDziZg+NSm3D6Y/ojM6DQ50DnxmbA3/lzRbHwr60n2EUrz+xeQUKFhaIo44++dcnjc+HYJjs5p56mhGxkvM3LdKbEpFrh1NHqN6APxpRP2L/zBBUWiqIobl3z5Jm9a4YMJ5fdB+3Nox/DcpaosFAURXGFxZkuL50JV9wfvGuPAGrgVhRF6REW/bi1KoAKC0VRFJi00FbsO0+MyecjugylKIoSEmZrgSsDopqFoiiKMigqLBRFUZRBCaqwEJEbReSAiBSJyMP97L9PRHaLyA4R2Sgis532XBFpcdp3iMhTweynoiiKcnqCZrMQES/wJHA9cALYKiLrjDH7Ag573hjzlHP8bcCPgBudfYeMMeqaoCiKch4QTM3iUqDIGHPYGNMOvAjcHniAMaY+4GsUPcV2FUVRlPOJYAqLDOB4wPcTTlsvROR+ETkEPA58LWDXZBHZLiLviMhV/d1ARO4VkW0isq2qqmo4+64oiqIEEExh0V9M+ymagzHmSWPMVOAh4J+c5jIg2xizAPgG8LyITOzn3DXGmHxjTH5y8uiWHFQURRnLBFNYnAACS0llAqWnOf5F4FMAxpg2Y8xJZ7sAOARMD1I/FUVRlEEIZlDeViBPRCYDJcAq4DOBB4hInjGm0Pl6M1DotCcDNcaYLhGZAuQBh093s4KCgmoROXoO/U0Cqs/h/AsRHfP4QMc8PjjbMecM5aCgCQtjTKeIPAC8AXiBZ4wxe0XkEWCbMWYd8ICIXAd0ALXAauf0q4FHRKQT6ALuM8bUDHK/c1qHEpFtxpj8c7nGhYaOeXygYx4fBHvMQU33YYx5FXi1T9t3A7YfHOC8PwJ/DGbfFEVRlKGjEdyKoijKoKiw8LNmtDswCuiYxwc65vFBUMcsxmgcnKIoinJ6VLNQFEVRBkWFhaIoijIo415YDJYZd6wgIsUBGX63OW0JIvIXESl0PuNHu5/niog8IyKVIrInoK3fcYrlp86z3yUiC0ev52fPAGP+noiUBGRuvilg37ecMR8QkWWj0+uzR0SyRORtEdkvIntF5EGnfaw/54HGPTLP2hgzbv+w8R+HgClAGLATmD3a/QrSWIuBpD5tjwMPO9sPA4+Ndj+HYZxXAwuBPYONE7gJeA2bmuZyYPNo938Yx/w94Jv9HDvb+XceDkx2/v17R3sMZzjedGChsx0DHHTGNdaf80DjHpFnPd41i0Ez445xbgd+7Wz/GifdyoWMMeZdoG8A50DjvB34jbF8CMSJSPrI9HT4GGDMA3E78KKxKXWOAEXY/wcXDMaYMmPMR852A7Afm6R0rD/ngcY9EMP6rMe7sBhSZtwxggHeFJECEbnXaUs1xpSB/YcIpIxa74LLQOMc68//AWfZ5ZmAJcYxNWYRyQUWAJsZR8+5z7hhBJ71eBcWQ8qMO0a40hizEFgO3C8iV492h84DxvLz/wUwFZiPzeL87077mBmziERjMz183fSujXPKof20XZBjhn7HPSLPerwLizPNjHvBYowpdT4rgbVYdbTCVcedz8rR62FQGWicY/b5G2MqjDFdxphu4Gn8yw9jYswiEoqdMJ8zxrzkNI/559zfuEfqWY93YdGTGVdEwrCZcdeNcp+GHRGJEpEYdxu4AdiDHaubvHE18KfR6WHQGWic64C/cbxlLgd87jLGhU6fNfkV2OcNdsyrRCTcyQidB2wZ6f6dCyIiwC
+B/caYHwXsGtPPeaBxj9izHm0L/2j/YT0lDmI9Bb492v0J0hinYL0idgJ73XECicB6bGr49UDCaPd1GMb6AlYV78C+WX1poHFi1fQnnWe/G8gf7f4P45h/64xplzNppAcc/21nzAeA5aPd/7MY7xLscsouYIfzd9M4eM4DjXtEnrWm+1AURVEGZbwvQymKoihDQIWFoiiKMigqLBRFUZRBUWGhKIqiDIoKC0VRFGVQVFgoyhkgIl0B2T13DGemYhHJDcwcqyjnEyGj3QFFucBoMcbMH+1OKMpIo5qFogwDTr2Qx0Rki/M3zWnPEZH1TpK39SKS7bSnishaEdnp/C12LuUVkaedegVvikjkqA1KUQJQYaEoZ0Zkn2WouwP21RtjLgV+BvzEafsZNj32POA54KdO+0+Bd4wxF2NrUex12vOAJ40xc4A6YGWQx6MoQ0IjuBXlDBCRRmNMdD/txcBSY8xhJ9lbuTEmUUSqsekXOpz2MmNMkohUAZnGmLaAa+QCfzHG5DnfHwJCjTHfD/7IFOX0qGahKMOHGWB7oGP6oy1guwu1KyrnCSosFGX4uDvgc5Oz/QE2mzHAZ4GNzvZ64CsAIuIVkYkj1UlFORv0rUVRzoxIEdkR8P11Y4zrPhsuIpuxL2Gfdtq+BjwjIn8PVAFfcNofBNaIyJewGsRXsJljFeW8RG0WijIMODaLfGNM9Wj3RVGCgS5DKYqiKIOimoWiKIoyKKpZKIqiKIOiwkJRFEUZFBUWiqIoyqCosFAURVEGRYWFoiiKMij/DUrFK9yqaoVFAAAAAElFTkSuQmCC\n",
            "text/plain": [
              "<matplotlib.figure.Figure at 0x14cadeb9e10>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "metadata": {
        "colab_type": "text",
        "id": "HH3U6FWEEsxW"
      },
      "cell_type": "markdown",
      "source": [
        "#### Confusion matrix for Neural Network Classifier"
      ]
    },
    {
      "metadata": {
        "id": "7yYrzmIS5KbB",
        "colab_type": "code",
        "outputId": "45223c06-f30e-40cc-8454-8e9fcda0465e",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Evaluate the mode in test set\n",
        "\n",
        "x_test = df_ADL_Falls_test[x_columns]\n",
        "y = df_ADL_Falls_test.loc[:,['Fall_ADL_BIN']]\n",
        "y_test = np.array(y)\n",
        "\n",
        "# Print the \"accuracy/loss\" of the model\n",
        "test_loss, test_acc = my_model_NN.evaluate(x_test, y_test)\n",
        "print('The \"accuracy\" of the model (in the Test set) is:', test_acc)\n",
        "print('The \"loss\" of the model (in the Test set) is:', test_loss)"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "1349/1349 [==============================] - 0s 24us/step\n",
            "El \"accuracy\" del modelo (en el conjunto de Test) es: 0.8272794662713121\n",
            "El \"loss\" del modelo (en el conjunto de Test) es: 0.38835879150720065\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "gr0kuQwI5KbE",
        "colab_type": "code",
        "outputId": "6e96834c-e10b-4d00-c5ac-9b9bd0a22939",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# In this cells, we make the predictions and check the accuracy:\n",
        "\n",
        "y_pred = my_model_NN.predict(x_test)\n",
        "Y_pred = np.argmax(y_pred, axis=1)\n",
        "\n",
        "cm = tf.confusion_matrix(y_test, Y_pred,2)\n",
        "\n",
        "import tensorflow as tf\n",
        "\n",
        "# initialize the variable\n",
        "init_op = tf.initialize_all_variables()\n",
        "\n",
        "print(\"Confusion matrix:\")\n",
        "#run the graph\n",
        "with tf.Session() as sess:\n",
        "    sess.run(init_op) #execute init_op\n",
        "    #print the confussion matrix\n",
        "    array = cm.eval(session=sess)\n",
        "    print(array)"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Confusion matrix:\n",
            "[[696 114]\n",
            " [119 420]]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "va95CuG1zSdu",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "#### Sensitivity, Specificity, Precision and Accuracy"
      ]
    },
    {
      "metadata": {
        "id": "IrKSf0jYeu7u",
        "colab_type": "code",
        "outputId": "8841e8f7-749a-4992-c1c8-eac3e290be3a",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 119
        }
      },
      "cell_type": "code",
      "source": [
        "# calculations of measurements of performance\n",
        "\n",
        "n_TP = array[0,0]  \n",
        "n_FP = array[1,0] \n",
        "n_TN = array[1,1]\n",
        "n_FN = array[0,1]\n",
        "\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "NN_Sensitivity = n_TP / (n_TP + n_FN)\n",
        "print(\"NN_Sensitivity = \"+ str(NN_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "NN_Specificity = n_TN / (n_FP + n_TN)\n",
        "print(\"NN_Specificity = \"+ str(NN_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "NN_Precision = n_TP / (n_TP + n_FP)\n",
        "print(\"NN_Precision = \"+ str(NN_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "NN_Accuracy = (n_TP + n_TN) / (n_TP + n_FP + n_TN + n_FN)\n",
        "print(\"NN_Accuracy = \"+ str(NN_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "-----------------------------\n",
            "-----------------------------\n",
            "NN_Sensitivity = 0.8299383543285982\n",
            "NN_Specificity = 0.8748147149979787\n",
            "NN_Precision = 0.8695591126088178\n",
            "NN_Accuracy = 0.8523147214943224\n"
          ],
          "name": "stdout"
        }
      ]
    }
  ]
}