{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "BioBot_FDS_06_Models_Using_Balanced_Ds_Moving_Window_181120_03.ipynb",
      "version": "0.3.2",
      "provenance": [],
      "collapsed_sections": [],
      "toc_visible": true
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "metadata": {
        "id": "IjyaQqpewlW4",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "# BioBot_FDS_06_Models_Using_Balanced_Ds_Moving_Window\n",
        "## Deliverable_06: Implementing several models (KNN, SVC, NN/TF-Keras) using a balanced dataset and moving windows\n",
        "Author/code developer: Yan Bello. 20/11/2018. As part of the Master in Artificial Intelligence (UNIR). \n",
        "This file/code is part of the development and exploration/experimentation on a Fall Detection System (FDS). \n",
        "\n",
        "---\n",
        "\n",
        "\n",
        "In the following sections, we used this dataset: \n",
        "SisFall: A Fall and Movement Dataset. \n",
        "Created by: A. Sucerquia, J.D. López, J.F. Vargas-Bonilla\n",
        "SISTEMIC, Faculty of Engineering, Universidad de Antioquia UDEA.\n",
        "Detailed information about this dataset can be found in this website: http://sistemic.udea.edu.co/en/investigacion/proyectos/english-falls/.\n",
        "Reference paper: Sucerquia A, López JD, Vargas-Bonilla JF. SisFall: A Fall and Movement Dataset. Sensors (Basel). 2017;17(1):198. Published 2017 Jan 20. doi:10.3390/s17010198\n",
        "\n",
        "---\n",
        "\n"
      ]
    },
    {
      "metadata": {
        "id": "3zwtaJ0RwiwC",
        "colab_type": "code",
        "outputId": "fbe96559-0d59-4d47-9f21-ab81e8f10e44",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        }
      },
      "cell_type": "code",
      "source": [
        "# Preliminary step 0. We need to establish/select our working folders. First, ensure the previous dataset files are available.\n",
        "# The code below is prepared to work with two options: local drive or mounting a Google Drive for Colab.\n",
        "# Select the appropriate configuration for your environment by commenting/un-commenting the following lines:\n",
        "\n",
        "# To work with Google Colab and Google Drive:\n",
        "from google.colab import drive\n",
        "drive.mount('/content/gdrive')\n",
        "FILE_DIRECTORY = \"gdrive/My Drive/Colab Notebooks/\"\n",
        "SisFall_ALL_DIRECTORY = FILE_DIRECTORY + \"SisFall_dataset_ALL/\"\n",
        "\n",
        "# To work with a local drive, comment the Colab lines above and uncomment these lines\n",
        "# (note: os must be imported for os.getcwd() to work):\n",
        "# import os\n",
        "# FILE_DIRECTORY = os.getcwd() + \"\\\\\"\n",
        "# SisFall_ALL_DIRECTORY = FILE_DIRECTORY + \"SisFall_dataset_ALL\\\\\""
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Drive already mounted at /content/gdrive; to attempt to forcibly remount, call drive.mount(\"/content/gdrive\", force_remount=True).\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "_uBcnFPnxamQ",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 6.1 Load a dataframe with prepared info from ADL/Falls dataset"
      ]
    },
    {
      "metadata": {
        "id": "AX1kSYtOeokk",
        "colab_type": "code",
        "outputId": "f4913a80-04b5-47fa-a284-a1344c80de56",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1003
        }
      },
      "cell_type": "code",
      "source": [
        "# In these cells, we are using a balanced dataset, prepared in advance to include\n",
        "# moving windows/slices of the ADL/Fall data. The window sizes are explained in the preparation code file.\n",
        "my_data_file_name = FILE_DIRECTORY + \"Unified_ADL_Falls_BAL-completo.txt\"\n",
        "\n",
        "import pandas as pd\n",
        "\n",
        "# Load the prepared data (read_csv already returns a DataFrame; no extra wrapper needed)\n",
        "df_ADL_Falls = pd.read_csv(my_data_file_name, sep=',')\n",
        "\n",
        "# Drop the leftover index column ('0') written by the preparation step;\n",
        "# reassign instead of inplace=True so the cell stays idempotent on re-run\n",
        "df_ADL_Falls = df_ADL_Falls.drop('0', axis=1)\n",
        "\n",
        "# Split into ADLs (label \\\"D\\\") and Falls (label \\\"F\\\")\n",
        "df_only_ADLs = df_ADL_Falls[df_ADL_Falls.Fall_ADL == \"D\"]\n",
        "df_only_Falls = df_ADL_Falls[df_ADL_Falls.Fall_ADL == \"F\"]\n",
        "\n",
        "# Show part of each data frame\n",
        "print(df_only_ADLs.tail())\n",
        "print(df_only_Falls.tail())"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "73877        36      D14      SA        D  D14_SA17_R04.txt      -0.210534   \n",
            "73878        37      D14      SA        D  D14_SA17_R04.txt      -0.447829   \n",
            "73879        38      D14      SA        D  D14_SA17_R04.txt      -0.284214   \n",
            "73880        39      D14      SA        D  D14_SA17_R04.txt      -0.346115   \n",
            "73881        40      D14      SA        D  D14_SA17_R04.txt      -0.435528   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "73877       347    184.245        42         305    ...           1.657134   \n",
            "73878       347    190.090        42         305    ...           1.657134   \n",
            "73879       347    198.075        42         305    ...           1.657134   \n",
            "73880       347    207.485        49         298    ...           1.657134   \n",
            "73881       336    208.140        49         287    ...           1.657134   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "73877           1.817618      0.248727      0.061865  0.634564  0.635461   \n",
            "73878           1.676049      0.248163      0.061585  0.584440  0.581614   \n",
            "73879           2.034859      0.232242      0.053936  0.558561  0.556817   \n",
            "73880           2.236304      0.222970      0.049716  0.570966  0.569200   \n",
            "73881           2.236037      0.182395      0.033268  0.406767  0.404551   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "73877  0.999159  0.206554 -0.088792 -0.079303  \n",
            "73878  0.999086  0.319084 -0.174252 -0.187629  \n",
            "73879  0.998980  0.391342 -0.269796 -0.251746  \n",
            "73880  0.998893  0.421665 -0.312372 -0.302835  \n",
            "73881  0.998343  0.488371 -0.518035 -0.317396  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "        0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "147595        36      F15      SE        F  F15_SE06_R05.txt       4.111592   \n",
            "147596        37      F15      SE        F  F15_SE06_R05.txt       4.244877   \n",
            "147597        38      F15      SE        F  F15_SE06_R05.txt       5.183164   \n",
            "147598        39      F15      SE        F  F15_SE06_R05.txt       7.486561   \n",
            "147599        40      F15      SE        F  F15_SE06_R05.txt      22.411060   \n",
            "\n",
            "        max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "147595       154   -219.865      -944        1098    ...           3.884620   \n",
            "147596       154   -224.050      -944        1098    ...           3.884620   \n",
            "147597       -23   -237.650      -944         921    ...           3.704261   \n",
            "147598       -50   -229.520      -944         894    ...           3.429267   \n",
            "147599       -50   -202.230      -944         894    ...           3.429267   \n",
            "\n",
            "        skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "147595           2.456030      0.711003      0.505525  0.789523  0.795165   \n",
            "147596           2.523805      0.701421      0.491991  0.797671  0.801252   \n",
            "147597           2.709271      0.681044      0.463822  0.788771  0.791569   \n",
            "147598           3.164896      0.635677      0.404085  0.838269  0.839009   \n",
            "147599           5.262582      0.435109      0.189319  0.784909  0.786527   \n",
            "\n",
            "         corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "147595  0.996670  0.743355 -0.502157 -0.256155  \n",
            "147596  0.996602  0.744948 -0.497394 -0.130966  \n",
            "147597  0.997573  0.769560 -0.397485 -0.082333  \n",
            "147598  0.997839  0.788514 -0.478965 -0.174934  \n",
            "147599  0.996057  0.692815 -0.406540 -0.065363  \n",
            "\n",
            "[5 rows x 59 columns]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "colab_type": "text",
        "id": "WBxuHWyIKrW6"
      },
      "cell_type": "markdown",
      "source": [
        "## 6.2 Prepare ADL/FALLs data for TRAINING/VAL/TEST sets "
      ]
    },
    {
      "metadata": {
        "id": "yQvWjvHJLn5I",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Split FALLs data into TRAINING/VAL/TEST sets"
      ]
    },
    {
      "metadata": {
        "id": "VXdxVtQ2eolr",
        "colab_type": "code",
        "outputId": "bab81529-b40e-4a33-dcb7-ee5e1c59eb76",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 2261
        }
      },
      "cell_type": "code",
      "source": [
        "\"\"\"\n",
        "    Preparing the FALLS DATASETs\n",
        "    In this cell, we prepare the TRAINING/VAL/TEST DATASET splits for the falls data.\n",
        "    Using a 70%-20%-10% split\n",
        "\"\"\"\n",
        "\n",
        "import random\n",
        "import math\n",
        "from numpy.random import permutation\n",
        "\n",
        "# Unique list of Fall recording file names. The split is done per FILE (recording),\n",
        "# not per window, so windows of one recording never leak across train/val/test.\n",
        "df_filenames = df_only_Falls.File\n",
        "df_filenames = pd.DataFrame(list(set(df_filenames)))\n",
        "\n",
        "\n",
        "# Randomly shuffle the index of the Falls file names\n",
        "# -------------------------------------------------------\n",
        "# NOTE(review): no random seed is set, so the split differs on every run;\n",
        "# consider numpy.random.seed(...) for reproducibility.\n",
        "random_indices = permutation(df_filenames.index)\n",
        "# Compute the 70% and 20% split sizes (the remainder, ~10%, goes to the test set)\n",
        "train_split = math.floor(len(df_filenames)*0.7)\n",
        "val_split = math.floor(len(df_filenames)*0.2)\n",
        "# Train set with 70% of the items.\n",
        "df_only_Falls_train_FN = df_filenames.loc[random_indices[0:train_split]]\n",
        "# Validation set with the next 20% of items\n",
        "df_only_Falls_val_FN = df_filenames.loc[random_indices[train_split:train_split+val_split]]\n",
        "# Test set with the remaining ~10% of items\n",
        "df_only_Falls_test_FN = df_filenames.loc[random_indices[train_split+val_split:]]\n",
        "\n",
        "print(\"-------TOTAL--------------------\")\n",
        "print(str(len(df_filenames)))\n",
        "print(\"-------train_split--------------------\")\n",
        "print(str(train_split))\n",
        "print(str(len(df_only_Falls_train_FN)))\n",
        "print(\"-------VAL_split--------------------\")\n",
        "print(str(val_split))\n",
        "print(str(len(df_only_Falls_val_FN)))\n",
        "print(\"-------Test_split--------------------\")\n",
        "test_split = len(df_filenames)-(train_split+val_split)\n",
        "print(str(test_split))\n",
        "print(str(len(df_only_Falls_test_FN)))\n",
        "print(\"-------SUMA de_splits--------------------\")\n",
        "print(str(test_split+train_split+val_split))\n",
        "\n",
        "print(\"-------HEADS: train + val + test --------------------\")\n",
        "print(df_only_Falls_train_FN.head())\n",
        "print(df_only_Falls_val_FN.head())\n",
        "print(df_only_Falls_test_FN.head())\n",
        "print(\"---------------------------\")\n",
        "\n",
        "print(\"-------% train  --------------------\")\n",
        "print(len(df_only_Falls_train_FN)/(len(df_only_Falls_train_FN)+len(df_only_Falls_val_FN)+len(df_only_Falls_test_FN)))\n",
        "print(\"-------% val  --------------------\")\n",
        "print(len(df_only_Falls_val_FN)/(len(df_only_Falls_train_FN)+len(df_only_Falls_val_FN)+len(df_only_Falls_test_FN)))\n",
        "print(\"-------% test  --------------------\")\n",
        "print(len(df_only_Falls_test_FN)/(len(df_only_Falls_train_FN)+len(df_only_Falls_val_FN)+len(df_only_Falls_test_FN)))\n",
        "\n",
        "\n",
        "# Select the window rows whose source file fell into each file-name split.\n",
        "# Test set with 10% of items\n",
        "df_only_Falls_test = df_only_Falls[df_only_Falls.File.isin(df_only_Falls_test_FN[0])]\n",
        "# Val set with 20% of items\n",
        "df_only_Falls_val = df_only_Falls[df_only_Falls.File.isin(df_only_Falls_val_FN[0])]\n",
        "# Train set with 70% of the items.\n",
        "df_only_Falls_train = df_only_Falls[df_only_Falls.File.isin(df_only_Falls_train_FN[0])]\n",
        "\n",
        "\n",
        "print(\"---------------------------\")\n",
        "print(\"---------------------------\")\n",
        "print(df_only_Falls_test.head())\n",
        "print(len(df_only_Falls_test))\n",
        "print(df_only_Falls_val.head())\n",
        "print(len(df_only_Falls_val))\n",
        "print(df_only_Falls_train.head())\n",
        "print(len(df_only_Falls_train))\n",
        "\n",
        "print(\"---------------------------\")\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "-------TOTAL--------------------\n",
            "1798\n",
            "-------train_split--------------------\n",
            "1258\n",
            "1258\n",
            "-------VAL_split--------------------\n",
            "359\n",
            "359\n",
            "-------Test_split--------------------\n",
            "181\n",
            "181\n",
            "-------SUMA de_splits--------------------\n",
            "1798\n",
            "-------HEADS: train + val + test --------------------\n",
            "                     0\n",
            "380   F01_SA02_R01.txt\n",
            "578   F05_SA02_R04.txt\n",
            "203   F06_SA20_R01.txt\n",
            "734   F01_SE06_R05.txt\n",
            "1040  F07_SA08_R04.txt\n",
            "                     0\n",
            "1152  F10_SA22_R01.txt\n",
            "486   F10_SE06_R03.txt\n",
            "1022  F01_SA19_R04.txt\n",
            "1441  F02_SA16_R01.txt\n",
            "887   F02_SA04_R05.txt\n",
            "                     0\n",
            "681   F14_SA08_R02.txt\n",
            "706   F07_SA13_R05.txt\n",
            "1134  F12_SA22_R03.txt\n",
            "543   F14_SA03_R04.txt\n",
            "1627  F05_SA07_R05.txt\n",
            "---------------------------\n",
            "-------% train  --------------------\n",
            "0.699666295884316\n",
            "-------% val  --------------------\n",
            "0.19966629588431592\n",
            "-------% test  --------------------\n",
            "0.10066740823136819\n",
            "---------------------------\n",
            "---------------------------\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "74210         0      F01      SA        F  F01_SA02_R04.txt       1.372828   \n",
            "74211         1      F01      SA        F  F01_SA02_R04.txt       0.236132   \n",
            "74212         2      F01      SA        F  F01_SA02_R04.txt       0.082957   \n",
            "74213         3      F01      SA        F  F01_SA02_R04.txt       0.036610   \n",
            "74214         4      F01      SA        F  F01_SA02_R04.txt      -0.081237   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "74210       113     14.660      -109         222    ...           0.549762   \n",
            "74211       113     18.515       -81         194    ...           0.549762   \n",
            "74212       113     17.510       -35         148    ...           0.549762   \n",
            "74213       113     14.995       -44         157    ...           0.549762   \n",
            "74214       113     13.875       -44         157    ...           0.549762   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "74210           0.613749      0.140211      0.019659  0.374884  0.996669   \n",
            "74211           0.812078      0.131952      0.017411  0.287184  0.996819   \n",
            "74212           0.930072      0.131743      0.017356  0.300993  0.996938   \n",
            "74213           1.004197      0.130710      0.017085  0.324425  0.996974   \n",
            "74214           1.037387      0.127712      0.016310  0.368552  0.997010   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "74210  0.431172  0.100111  0.000433 -0.101398  \n",
            "74211  0.343333 -0.222682  0.127680 -0.028122  \n",
            "74212  0.355332 -0.313782  0.120176 -0.060068  \n",
            "74213  0.377679 -0.313504  0.137816 -0.023554  \n",
            "74214  0.419866 -0.325779  0.159143 -0.034850  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "7421\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "73882         0      F01      SA        F  F01_SA01_R01.txt       1.171828   \n",
            "73883         1      F01      SA        F  F01_SA01_R01.txt       1.165338   \n",
            "73884         2      F01      SA        F  F01_SA01_R01.txt       1.252298   \n",
            "73885         3      F01      SA        F  F01_SA01_R01.txt       1.573220   \n",
            "73886         4      F01      SA        F  F01_SA01_R01.txt       2.841073   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "73882        85     -2.090      -114         199    ...           0.554633   \n",
            "73883        85     -2.670      -114         199    ...           0.554633   \n",
            "73884        85     -2.095      -114         199    ...           0.554633   \n",
            "73885        85      0.385      -114         199    ...           0.554633   \n",
            "73886        85      4.115      -114         199    ...           0.554633   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "73882           0.446500      0.137164      0.018814  0.301081  0.996504   \n",
            "73883           0.413361      0.138937      0.019304  0.325666  0.996664   \n",
            "73884           0.412581      0.140126      0.019635  0.349794  0.996720   \n",
            "73885           0.584479      0.138831      0.019274  0.392907  0.996872   \n",
            "73886           0.730946      0.138403      0.019156  0.367269  0.996289   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "73882  0.335763  0.263937 -0.062947  0.245159  \n",
            "73883  0.359071  0.251497 -0.069058  0.238255  \n",
            "73884  0.382485  0.260917 -0.059155  0.248694  \n",
            "73885  0.425018  0.300276  0.016490  0.236701  \n",
            "73886  0.403227  0.146171 -0.003278  0.237217  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "14719\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "74005         0      F01      SA        F  F01_SA01_R04.txt       2.378725   \n",
            "74006         1      F01      SA        F  F01_SA01_R04.txt       1.758380   \n",
            "74007         2      F01      SA        F  F01_SA01_R04.txt       1.259635   \n",
            "74008         3      F01      SA        F  F01_SA01_R04.txt       1.228117   \n",
            "74009         4      F01      SA        F  F01_SA01_R04.txt       1.033344   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "74005       114     12.435      -181         295    ...           1.137148   \n",
            "74006       114     10.215      -181         295    ...           1.137148   \n",
            "74007       114     14.545      -181         295    ...           1.137148   \n",
            "74008       114     17.245      -181         295    ...           1.105898   \n",
            "74009       114     16.290      -181         295    ...           1.124796   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "74005           2.095565      0.187731      0.035243  0.490460  0.995227   \n",
            "74006           1.964441      0.188469      0.035521  0.478774  0.994868   \n",
            "74007           1.820719      0.187108      0.035009  0.414585  0.992573   \n",
            "74008           2.009225      0.179511      0.032224  0.414591  0.992878   \n",
            "74009           1.910983      0.182354      0.033253  0.436105  0.993287   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "74005  0.553179  0.047076  0.530257  0.288389  \n",
            "74006  0.543768  0.022320  0.534386  0.291252  \n",
            "74007  0.496643  0.088437  0.466868  0.261847  \n",
            "74008  0.494766  0.119705  0.479750  0.256974  \n",
            "74009  0.512598  0.093130  0.479493  0.279663  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "51578\n",
            "---------------------------\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "8g__pIq7K9fy",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Split ADLs data into TRAINING/VAL/TEST sets "
      ]
    },
    {
      "metadata": {
        "id": "Bno5z0-8eomc",
        "colab_type": "code",
        "outputId": "ae626dc4-be00-4759-cbd2-5d7cf3187152",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 2261
        }
      },
      "cell_type": "code",
      "source": [
        "\"\"\"\n",
        "    Preparing the ADLs DATASETs\n",
        "    In this cell, we prepare the TRAINING/VAL/TEST DATASET splits for the ADLs data.\n",
        "    Using a 70%-20%-10% split\n",
        "\"\"\"\n",
        "\n",
        "import random\n",
        "import math\n",
        "from numpy.random import permutation\n",
        "\n",
        "# Unique list of ADL recording file names. The split is done per FILE (recording),\n",
        "# not per window, so windows of one recording never leak across train/val/test.\n",
        "# NOTE(review): this cell duplicates the Falls-split cell above with only the\n",
        "# variable prefix changed; a shared helper function would avoid the copy-paste.\n",
        "df_filenames = df_only_ADLs.File\n",
        "df_filenames = pd.DataFrame(list(set(df_filenames)))\n",
        "\n",
        "\n",
        "# Randomly shuffle the index of the ADL file names\n",
        "# -------------------------------------------------------\n",
        "# NOTE(review): no random seed is set, so the split differs on every run;\n",
        "# consider numpy.random.seed(...) for reproducibility.\n",
        "random_indices = permutation(df_filenames.index)\n",
        "# Compute the 70% and 20% split sizes (the remainder, ~10%, goes to the test set)\n",
        "train_split = math.floor(len(df_filenames)*0.7)\n",
        "val_split = math.floor(len(df_filenames)*0.2)\n",
        "# Train set with 70% of the items.\n",
        "df_only_ADLs_train_FN = df_filenames.loc[random_indices[0:train_split]]\n",
        "# Validation set with the next 20% of items\n",
        "df_only_ADLs_val_FN = df_filenames.loc[random_indices[train_split:train_split+val_split]]\n",
        "# Test set with the remaining ~10% of items\n",
        "df_only_ADLs_test_FN = df_filenames.loc[random_indices[train_split+val_split:]]\n",
        "\n",
        "print(\"-------TOTAL--------------------\")\n",
        "print(str(len(df_filenames)))\n",
        "print(\"-------train_split--------------------\")\n",
        "print(str(train_split))\n",
        "print(str(len(df_only_ADLs_train_FN)))\n",
        "print(\"-------VAL_split--------------------\")\n",
        "print(str(val_split))\n",
        "print(str(len(df_only_ADLs_val_FN)))\n",
        "print(\"-------Test_split--------------------\")\n",
        "test_split = len(df_filenames)-(train_split+val_split)\n",
        "print(str(test_split))\n",
        "print(str(len(df_only_ADLs_test_FN)))\n",
        "print(\"-------SUMA de_splits--------------------\")\n",
        "print(str(test_split+train_split+val_split))\n",
        "\n",
        "print(\"-------HEADS: train + val + test --------------------\")\n",
        "print(df_only_ADLs_train_FN.head())\n",
        "print(df_only_ADLs_val_FN.head())\n",
        "print(df_only_ADLs_test_FN.head())\n",
        "print(\"---------------------------\")\n",
        "\n",
        "print(\"-------% train  --------------------\")\n",
        "print(len(df_only_ADLs_train_FN)/(len(df_only_ADLs_train_FN)+len(df_only_ADLs_val_FN)+len(df_only_ADLs_test_FN)))\n",
        "print(\"-------% val  --------------------\")\n",
        "print(len(df_only_ADLs_val_FN)/(len(df_only_ADLs_train_FN)+len(df_only_ADLs_val_FN)+len(df_only_ADLs_test_FN)))\n",
        "print(\"-------% test  --------------------\")\n",
        "print(len(df_only_ADLs_test_FN)/(len(df_only_ADLs_train_FN)+len(df_only_ADLs_val_FN)+len(df_only_ADLs_test_FN)))\n",
        "\n",
        "\n",
        "# Select the window rows whose source file fell into each file-name split.\n",
        "# Test set with 10% of items\n",
        "df_only_ADLs_test = df_only_ADLs[df_only_ADLs.File.isin(df_only_ADLs_test_FN[0])]\n",
        "# Val set with 20% of items\n",
        "df_only_ADLs_val = df_only_ADLs[df_only_ADLs.File.isin(df_only_ADLs_val_FN[0])]\n",
        "# Train set with 70% of the items.\n",
        "df_only_ADLs_train = df_only_ADLs[df_only_ADLs.File.isin(df_only_ADLs_train_FN[0])]\n",
        "\n",
        "\n",
        "print(\"---------------------------\")\n",
        "print(\"---------------------------\")\n",
        "print(df_only_ADLs_test.head())\n",
        "print(len(df_only_ADLs_test))\n",
        "print(df_only_ADLs_val.head())\n",
        "print(len(df_only_ADLs_val))\n",
        "print(df_only_ADLs_train.head())\n",
        "print(len(df_only_ADLs_train))\n",
        "\n",
        "print(\"---------------------------\")\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "-------TOTAL--------------------\n",
            "1798\n",
            "-------train_split--------------------\n",
            "1258\n",
            "1258\n",
            "-------VAL_split--------------------\n",
            "359\n",
            "359\n",
            "-------Test_split--------------------\n",
            "181\n",
            "181\n",
            "-------SUMA de_splits--------------------\n",
            "1798\n",
            "-------HEADS: train + val + test --------------------\n",
            "                     0\n",
            "972   D15_SA05_R02.txt\n",
            "300   D17_SA14_R02.txt\n",
            "1608  D17_SA13_R03.txt\n",
            "732   D14_SE01_R01.txt\n",
            "1561  D09_SA19_R04.txt\n",
            "                     0\n",
            "196   D13_SA12_R03.txt\n",
            "730   D17_SA12_R03.txt\n",
            "1115  D15_SA14_R05.txt\n",
            "1472  D18_SA04_R05.txt\n",
            "1189  D09_SA15_R05.txt\n",
            "                     0\n",
            "1247  D05_SA13_R03.txt\n",
            "367   D16_SA20_R01.txt\n",
            "365   D05_SE07_R03.txt\n",
            "58    D12_SA05_R05.txt\n",
            "1423  D09_SE01_R02.txt\n",
            "---------------------------\n",
            "-------% train  --------------------\n",
            "0.699666295884316\n",
            "-------% val  --------------------\n",
            "0.19966629588431592\n",
            "-------% test  --------------------\n",
            "0.10066740823136819\n",
            "---------------------------\n",
            "---------------------------\n",
            "     0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "656         0      D04      SA        D  D04_SA15_R01.txt       1.976798   \n",
            "657         1      D04      SA        D  D04_SA15_R01.txt       1.794956   \n",
            "658         2      D04      SA        D  D04_SA15_R01.txt       1.822103   \n",
            "659         3      D04      SA        D  D04_SA15_R01.txt       2.207989   \n",
            "660         4      D04      SA        D  D04_SA15_R01.txt       2.853556   \n",
            "\n",
            "     max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "656       602    -14.525      -495        1097    ...            2.32987   \n",
            "657       602    -10.275      -495        1097    ...            2.32987   \n",
            "658       602    -16.935      -495        1097    ...            2.32987   \n",
            "659       602    -23.910      -495        1097    ...            2.32987   \n",
            "660       602    -35.175      -495        1097    ...            2.32987   \n",
            "\n",
            "     skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "656           0.840731      0.531191      0.282164  0.761325  0.989170   \n",
            "657           0.782319      0.517529      0.267836  0.770340  0.989002   \n",
            "658           0.797434      0.515261      0.265494  0.758612  0.989219   \n",
            "659           0.807469      0.521031      0.271474  0.769937  0.989886   \n",
            "660           0.902939      0.513197      0.263371  0.784914  0.990043   \n",
            "\n",
            "      corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "656  0.812910  0.074325 -0.261369 -0.528707  \n",
            "657  0.821536  0.063637 -0.240133 -0.550371  \n",
            "658  0.810495  0.030608 -0.224860 -0.555241  \n",
            "659  0.818470 -0.023804 -0.196976 -0.534093  \n",
            "660  0.831014 -0.053679 -0.115764 -0.567833  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "7421\n",
            "     0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "451         0      D09      SA        D  D09_SA16_R02.txt      -0.501160   \n",
            "452         1      D09      SA        D  D09_SA16_R02.txt      -0.321327   \n",
            "453         2      D09      SA        D  D09_SA16_R02.txt      -0.200045   \n",
            "454         3      D09      SA        D  D09_SA16_R02.txt      -0.303500   \n",
            "455         4      D09      SA        D  D09_SA16_R02.txt      -0.452534   \n",
            "\n",
            "     max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "451        24       8.57       -11          35    ...           0.581810   \n",
            "452        24       8.06       -13          37    ...           0.594688   \n",
            "453        24       7.72       -14          38    ...           0.586584   \n",
            "454        24       7.90       -14          38    ...           0.641894   \n",
            "455        24       7.67       -14          38    ...           0.641894   \n",
            "\n",
            "     skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "451           0.385679      0.185175      0.034290  0.312680  0.999821   \n",
            "452           0.246218      0.191359      0.036618  0.499402  0.999834   \n",
            "453           0.104850      0.192247      0.036959  0.618865  0.999832   \n",
            "454          -0.010624      0.196151      0.038475  0.697673  0.999830   \n",
            "455          -0.169098      0.194311      0.037757  0.772289  0.999843   \n",
            "\n",
            "      corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "451  0.316069  0.013156 -0.159564 -0.740315  \n",
            "452  0.504183  0.093951 -0.190170 -0.712571  \n",
            "453  0.623152  0.044874 -0.128653 -0.703720  \n",
            "454  0.701761  0.109529 -0.191202 -0.706882  \n",
            "455  0.775232  0.080999 -0.137352 -0.671835  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "14719\n",
            "   0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "0         0      D08      SA        D  D08_SA06_R02.txt       6.620007   \n",
            "1         1      D08      SA        D  D08_SA06_R02.txt       6.485798   \n",
            "2         2      D08      SA        D  D08_SA06_R02.txt       6.611025   \n",
            "3         3      D08      SA        D  D08_SA06_R02.txt       6.466465   \n",
            "4         4      D08      SA        D  D08_SA06_R02.txt       6.697783   \n",
            "\n",
            "   max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "0        20      6.900         0          20    ...           0.136842   \n",
            "1        20      6.910         0          20    ...           0.136842   \n",
            "2        20      6.920         0          20    ...           0.136842   \n",
            "3        20      6.925         0          20    ...           0.136842   \n",
            "4        20      6.840         0          20    ...           0.136842   \n",
            "\n",
            "   skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "0           0.710918      0.015923      0.000254 -0.252352  0.999241   \n",
            "1           0.595432      0.015770      0.000249 -0.236370  0.999225   \n",
            "2           0.453075      0.016360      0.000268 -0.270590  0.999280   \n",
            "3           0.372169      0.017480      0.000306 -0.175740  0.999289   \n",
            "4           0.316928      0.018029      0.000325 -0.124505  0.999225   \n",
            "\n",
            "    corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "0 -0.239644  0.117341 -0.116463 -0.386754  \n",
            "1 -0.223211  0.120348 -0.124524 -0.371086  \n",
            "2 -0.257421  0.087563 -0.147341 -0.420663  \n",
            "3 -0.164122  0.088369 -0.107984 -0.345380  \n",
            "4 -0.113434  0.199635 -0.101410 -0.293257  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "51742\n",
            "---------------------------\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "3jJrDwJeLGwj",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Concat ADL+ FALLs data for Test Set"
      ]
    },
    {
      "metadata": {
        "id": "gl2R07zneomw",
        "colab_type": "code",
        "outputId": "0e4d4608-b601-4fbe-9a01-b4ba996d03de",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1054
        }
      },
      "cell_type": "code",
      "source": [
        "# Prepare dataset with Test examplars\n",
        "\n",
        "frames = [df_only_Falls_test, df_only_ADLs_test]\n",
        "df_ADL_Falls_test = pd.concat(frames)\n",
        "print(\"Test ADLs: \"+ str(len(df_only_ADLs_test)))\n",
        "print(\"Test Falls: \"+ str(len(df_only_Falls_test)))\n",
        "print(\"Test ALL: \"+ str(len(df_ADL_Falls_test)))\n",
        "\n",
        "print(df_ADL_Falls_test.head())\n",
        "print(df_ADL_Falls_test.tail())\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Test ADLs: 7421\n",
            "Test Falls: 7421\n",
            "Test ALL: 14842\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "74210         0      F01      SA        F  F01_SA02_R04.txt       1.372828   \n",
            "74211         1      F01      SA        F  F01_SA02_R04.txt       0.236132   \n",
            "74212         2      F01      SA        F  F01_SA02_R04.txt       0.082957   \n",
            "74213         3      F01      SA        F  F01_SA02_R04.txt       0.036610   \n",
            "74214         4      F01      SA        F  F01_SA02_R04.txt      -0.081237   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "74210       113     14.660      -109         222    ...           0.549762   \n",
            "74211       113     18.515       -81         194    ...           0.549762   \n",
            "74212       113     17.510       -35         148    ...           0.549762   \n",
            "74213       113     14.995       -44         157    ...           0.549762   \n",
            "74214       113     13.875       -44         157    ...           0.549762   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "74210           0.613749      0.140211      0.019659  0.374884  0.996669   \n",
            "74211           0.812078      0.131952      0.017411  0.287184  0.996819   \n",
            "74212           0.930072      0.131743      0.017356  0.300993  0.996938   \n",
            "74213           1.004197      0.130710      0.017085  0.324425  0.996974   \n",
            "74214           1.037387      0.127712      0.016310  0.368552  0.997010   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "74210  0.431172  0.100111  0.000433 -0.101398  \n",
            "74211  0.343333 -0.222682  0.127680 -0.028122  \n",
            "74212  0.355332 -0.313782  0.120176 -0.060068  \n",
            "74213  0.377679 -0.313504  0.137816 -0.023554  \n",
            "74214  0.419866 -0.325779  0.159143 -0.034850  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "73549        36      D10      SE        D  D10_SE11_R04.txt       0.426333   \n",
            "73550        37      D10      SE        D  D10_SE11_R04.txt       1.048984   \n",
            "73551        38      D10      SE        D  D10_SE11_R04.txt       1.696184   \n",
            "73552        39      D10      SE        D  D10_SE11_R04.txt       2.870866   \n",
            "73553        40      D10      SE        D  D10_SE11_R04.txt       2.702809   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "73549        25     -7.990       -46          71    ...           0.745954   \n",
            "73550        25     -7.090       -46          71    ...           0.745954   \n",
            "73551        25     -6.470       -46          71    ...           0.745954   \n",
            "73552        25     -4.915       -45          70    ...           0.745954   \n",
            "73553        25     -4.120       -34          59    ...           0.745954   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "73549           0.949921      0.104893      0.011002  0.457772  0.999941   \n",
            "73550           1.199228      0.098327      0.009668  0.446256  0.999944   \n",
            "73551           1.395630      0.096099      0.009235  0.442326  0.999950   \n",
            "73552           1.475837      0.095552      0.009130  0.439763  0.999968   \n",
            "73553           1.532257      0.093481      0.008739  0.596511  0.999966   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "73549  0.458179 -0.052097  0.469280  0.282204  \n",
            "73550  0.446181 -0.078424  0.439495  0.274793  \n",
            "73551  0.442016 -0.097364  0.465092  0.279252  \n",
            "73552  0.439022 -0.156866  0.267743  0.290371  \n",
            "73553  0.596228 -0.316924  0.222387  0.430219  \n",
            "\n",
            "[5 rows x 59 columns]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "ovTY8Ba6LQ6w",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Concat ADL+ FALLs data for VALIDATION Set"
      ]
    },
    {
      "metadata": {
        "id": "pWfSH0rYeoni",
        "colab_type": "code",
        "outputId": "71e2471c-84da-4fe9-ae11-315338b1e5d3",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1054
        }
      },
      "cell_type": "code",
      "source": [
        "# Prepare dataset with VALIDATION examplars\n",
        "\n",
        "frames = [df_only_Falls_val, df_only_ADLs_val]\n",
        "df_ADL_Falls_val = pd.concat(frames)\n",
        "print(\"VAL ADLs: \"+ str(len(df_only_ADLs_val)))\n",
        "print(\"VAL Falls: \"+ str(len(df_only_Falls_val)))\n",
        "print(\"VAL ALL: \"+ str(len(df_ADL_Falls_val)))\n",
        "\n",
        "print(df_ADL_Falls_val.head())\n",
        "print(df_ADL_Falls_val.tail())\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "VAL ADLs: 14719\n",
            "VAL Falls: 14719\n",
            "VAL ALL: 29438\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "73882         0      F01      SA        F  F01_SA01_R01.txt       1.171828   \n",
            "73883         1      F01      SA        F  F01_SA01_R01.txt       1.165338   \n",
            "73884         2      F01      SA        F  F01_SA01_R01.txt       1.252298   \n",
            "73885         3      F01      SA        F  F01_SA01_R01.txt       1.573220   \n",
            "73886         4      F01      SA        F  F01_SA01_R01.txt       2.841073   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "73882        85     -2.090      -114         199    ...           0.554633   \n",
            "73883        85     -2.670      -114         199    ...           0.554633   \n",
            "73884        85     -2.095      -114         199    ...           0.554633   \n",
            "73885        85      0.385      -114         199    ...           0.554633   \n",
            "73886        85      4.115      -114         199    ...           0.554633   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "73882           0.446500      0.137164      0.018814  0.301081  0.996504   \n",
            "73883           0.413361      0.138937      0.019304  0.325666  0.996664   \n",
            "73884           0.412581      0.140126      0.019635  0.349794  0.996720   \n",
            "73885           0.584479      0.138831      0.019274  0.392907  0.996872   \n",
            "73886           0.730946      0.138403      0.019156  0.367269  0.996289   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "73882  0.335763  0.263937 -0.062947  0.245159  \n",
            "73883  0.359071  0.251497 -0.069058  0.238255  \n",
            "73884  0.382485  0.260917 -0.059155  0.248694  \n",
            "73885  0.425018  0.300276  0.016490  0.236701  \n",
            "73886  0.403227  0.146171 -0.003278  0.237217  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "73754        36      D10      SA        D  D10_SA14_R04.txt       1.262008   \n",
            "73755        37      D10      SA        D  D10_SA14_R04.txt       1.300664   \n",
            "73756        38      D10      SA        D  D10_SA14_R04.txt       1.833354   \n",
            "73757        39      D10      SA        D  D10_SA14_R04.txt       2.259722   \n",
            "73758        40      D10      SA        D  D10_SA14_R04.txt       2.880143   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "73754        70     10.515       -42         112    ...           0.360769   \n",
            "73755        52      9.205       -42          94    ...           0.271889   \n",
            "73756        52      8.005       -42          94    ...           0.271889   \n",
            "73757        52      7.050       -42          94    ...           0.271889   \n",
            "73758        31      5.750       -42          73    ...           0.271889   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "73754          -0.267618      0.058477      0.003420 -0.148061  0.999903   \n",
            "73755          -0.827335      0.052459      0.002752 -0.250835  0.999934   \n",
            "73756          -0.893452      0.050944      0.002595 -0.301442  0.999936   \n",
            "73757          -1.085498      0.049826      0.002483 -0.243876  0.999924   \n",
            "73758          -1.248398      0.048019      0.002306 -0.127822  0.999929   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "73754 -0.143331 -0.455102 -0.542373  0.774255  \n",
            "73755 -0.250181 -0.403609 -0.450076  0.751024  \n",
            "73756 -0.302145 -0.319018 -0.323983  0.712290  \n",
            "73757 -0.244635 -0.243005 -0.234489  0.634301  \n",
            "73758 -0.128748 -0.080378 -0.021972  0.470435  \n",
            "\n",
            "[5 rows x 59 columns]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "CRFJgbUyLU7d",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Concat ADL+ FALLs data for TRAINING Set"
      ]
    },
    {
      "metadata": {
        "id": "hj9T1YLNeoof",
        "colab_type": "code",
        "outputId": "54208aa3-91df-4f01-b636-90da16ab047c",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1054
        }
      },
      "cell_type": "code",
      "source": [
        "# Prepare dataset with Train examplars\n",
        "\n",
        "frames = [df_only_Falls_train, df_only_ADLs_train]\n",
        "df_ADL_Falls_train = pd.concat(frames)\n",
        "print(\"train ADLs: \"+ str(len(df_only_ADLs_train)))\n",
        "print(\"train Falls: \"+ str(len(df_only_Falls_train)))\n",
        "print(\"train ALL: \"+ str(len(df_ADL_Falls_train)))\n",
        "\n",
        "print(df_ADL_Falls_train.head())\n",
        "print(df_ADL_Falls_train.tail())\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "train ADLs: 51742\n",
            "train Falls: 51578\n",
            "train ALL: 103320\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "74005         0      F01      SA        F  F01_SA01_R04.txt       2.378725   \n",
            "74006         1      F01      SA        F  F01_SA01_R04.txt       1.758380   \n",
            "74007         2      F01      SA        F  F01_SA01_R04.txt       1.259635   \n",
            "74008         3      F01      SA        F  F01_SA01_R04.txt       1.228117   \n",
            "74009         4      F01      SA        F  F01_SA01_R04.txt       1.033344   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "74005       114     12.435      -181         295    ...           1.137148   \n",
            "74006       114     10.215      -181         295    ...           1.137148   \n",
            "74007       114     14.545      -181         295    ...           1.137148   \n",
            "74008       114     17.245      -181         295    ...           1.105898   \n",
            "74009       114     16.290      -181         295    ...           1.124796   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "74005           2.095565      0.187731      0.035243  0.490460  0.995227   \n",
            "74006           1.964441      0.188469      0.035521  0.478774  0.994868   \n",
            "74007           1.820719      0.187108      0.035009  0.414585  0.992573   \n",
            "74008           2.009225      0.179511      0.032224  0.414591  0.992878   \n",
            "74009           1.910983      0.182354      0.033253  0.436105  0.993287   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "74005  0.553179  0.047076  0.530257  0.288389  \n",
            "74006  0.543768  0.022320  0.534386  0.291252  \n",
            "74007  0.496643  0.088437  0.466868  0.261847  \n",
            "74008  0.494766  0.119705  0.479750  0.256974  \n",
            "74009  0.512598  0.093130  0.479493  0.279663  \n",
            "\n",
            "[5 rows x 59 columns]\n",
            "       0_Win_ID Act_Type Age_Cat Fall_ADL              File  kurtosis_S1_X  \\\n",
            "73877        36      D14      SA        D  D14_SA17_R04.txt      -0.210534   \n",
            "73878        37      D14      SA        D  D14_SA17_R04.txt      -0.447829   \n",
            "73879        38      D14      SA        D  D14_SA17_R04.txt      -0.284214   \n",
            "73880        39      D14      SA        D  D14_SA17_R04.txt      -0.346115   \n",
            "73881        40      D14      SA        D  D14_SA17_R04.txt      -0.435528   \n",
            "\n",
            "       max_S1_X  mean_S1_X  min_S1_X  range_S1_X    ...     range_S1_N_VER  \\\n",
            "73877       347    184.245        42         305    ...           1.657134   \n",
            "73878       347    190.090        42         305    ...           1.657134   \n",
            "73879       347    198.075        42         305    ...           1.657134   \n",
            "73880       347    207.485        49         298    ...           1.657134   \n",
            "73881       336    208.140        49         287    ...           1.657134   \n",
            "\n",
            "       skewness_S1_N_VER  std_S1_N_VER  var_S1_N_VER   corr_HV   corr_NH  \\\n",
            "73877           1.817618      0.248727      0.061865  0.634564  0.635461   \n",
            "73878           1.676049      0.248163      0.061585  0.584440  0.581614   \n",
            "73879           2.034859      0.232242      0.053936  0.558561  0.556817   \n",
            "73880           2.236304      0.222970      0.049716  0.570966  0.569200   \n",
            "73881           2.236037      0.182395      0.033268  0.406767  0.404551   \n",
            "\n",
            "        corr_NV   corr_XY   corr_XZ   corr_YZ  \n",
            "73877  0.999159  0.206554 -0.088792 -0.079303  \n",
            "73878  0.999086  0.319084 -0.174252 -0.187629  \n",
            "73879  0.998980  0.391342 -0.269796 -0.251746  \n",
            "73880  0.998893  0.421665 -0.312372 -0.302835  \n",
            "73881  0.998343  0.488371 -0.518035 -0.317396  \n",
            "\n",
            "[5 rows x 59 columns]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "FQxepAMiL_R1",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 6.3 Define and train the K-Neighbors Classifier"
      ]
    },
    {
      "metadata": {
        "id": "f1l-kg8heoqI",
        "colab_type": "code",
        "outputId": "904114d9-f174-430f-8850-74fc9774fe89",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "import numpy as np\n",
        "\n",
        "# The columns that we will be making predictions with.\n",
        "x_columns = ['kurtosis_S1_X','max_S1_X','mean_S1_X','min_S1_X','range_S1_X','skewness_S1_X','std_S1_X','var_S1_X',\n",
        "             'kurtosis_S1_Y','max_S1_Y','mean_S1_Y','min_S1_Y','range_S1_Y','skewness_S1_Y','std_S1_Y','var_S1_Y',\n",
        "             'kurtosis_S1_Z','max_S1_Z','mean_S1_Z','min_S1_Z','range_S1_Z','skewness_S1_Z','std_S1_Z','var_S1_Z',\n",
        "             'kurtosis_S1_N_XYZ','max_S1_N_XYZ','mean_S1_N_XYZ','min_S1_N_XYZ','range_S1_N_XYZ','skewness_S1_N_XYZ','std_S1_N_XYZ','var_S1_N_XYZ',\n",
        "             'kurtosis_S1_N_HOR','max_S1_N_HOR','mean_S1_N_HOR','min_S1_N_HOR','range_S1_N_HOR','skewness_S1_N_HOR','std_S1_N_HOR','var_S1_N_HOR',\n",
        "             'kurtosis_S1_N_VER','max_S1_N_VER','mean_S1_N_VER','min_S1_N_VER','range_S1_N_VER','skewness_S1_N_VER','std_S1_N_VER','var_S1_N_VER',\n",
        "             'corr_HV','corr_NH','corr_NV','corr_XY','corr_XZ','corr_YZ']\n",
        "# The column that we want to predict.\n",
        "y_column = [\"Fall_ADL\"]\n",
        "\n",
        "from sklearn.neighbors import KNeighborsClassifier\n",
        "# Create the knn model.\n",
        "# Look at the five closest neighbors.\n",
        "knn = KNeighborsClassifier(n_neighbors=5)\n",
        "# Fit the model on the training data.\n",
        "y = df_ADL_Falls_train.loc[:,['Fall_ADL']]\n",
        "train_y = np.array(y)\n",
        "knn.fit(df_ADL_Falls_train[x_columns], train_y.ravel())\n",
        "\n",
        "# Make point predictions on the test set using the fit model.\n",
        "predictions = knn.predict(df_ADL_Falls_val[x_columns])\n",
        "\n",
        "print(\"Performance on VALIDATION set:\")\n",
        "print(predictions)\n",
        "print(knn.score(df_ADL_Falls_val[x_columns], df_ADL_Falls_val[y_column]))\n",
        "\n",
        "# Make point predictions on the TRAINING set using the fit model.\n",
        "predictions = knn.predict(df_ADL_Falls_train[x_columns])\n",
        "\n",
        "print(\"Performance on TRAINING set:\")\n",
        "print(predictions)\n",
        "print(knn.score(df_ADL_Falls_train[x_columns], df_ADL_Falls_train[y_column]))\n",
        "\n",
        "# Make point predictions on the TEST set using the fit model.\n",
        "predictions = knn.predict(df_ADL_Falls_test[x_columns])\n",
        "\n",
        "print(\"Performance on TEST set:\")\n",
        "print(predictions)\n",
        "print(knn.score(df_ADL_Falls_test[x_columns], df_ADL_Falls_test[y_column]))\n",
        "\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Performance on VALIDATION set:\n",
            "['F' 'F' 'F' ... 'D' 'D' 'D']\n",
            "0.7702629254704804\n",
            "Performance on TRAINING set:\n",
            "['F' 'F' 'F' ... 'D' 'D' 'D']\n",
            "0.9074912891986062\n",
            "Performance on TEST set:\n",
            "['D' 'F' 'F' ... 'D' 'F' 'F']\n",
            "0.7910658940843552\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "JI0KyrCSMH7O",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Confusion matrix for KNN Classifier"
      ]
    },
    {
      "metadata": {
        "id": "ZNnzqeRzeoq7",
        "colab_type": "code",
        "outputId": "9a923ca4-ddbb-4a24-e693-d6149f173b35",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "from sklearn.metrics import confusion_matrix\n",
        "cm = confusion_matrix(df_ADL_Falls_test[y_column], predictions, labels=[\"D\", \"F\"])\n",
        "print(\"Confusion Matrix:\")\n",
        "print(\"-----------------\")\n",
        "print(cm)\n",
        "print(\"-----------------\")\n",
        "cm_norm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
        "print(\"Confusion Matrix (Normalized):\")\n",
        "print(\"-----------------------------\")\n",
        "print(cm_norm)\n",
        "print(\"-----------------------------\")"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Confusion Matrix:\n",
            "-----------------\n",
            "[[5988 1433]\n",
            " [1668 5753]]\n",
            "-----------------\n",
            "Confusion Matrix (Normalized):\n",
            "-----------------------------\n",
            "[[0.80689934 0.19310066]\n",
            " [0.22476755 0.77523245]]\n",
            "-----------------------------\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "WCFfmXlBMOf3",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Performance measurements for KNN Classifier"
      ]
    },
    {
      "metadata": {
        "id": "q7w56Weveorq",
        "colab_type": "code",
        "outputId": "6e81f4a4-ea19-4039-c5fb-cef7e1255ab8",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# calculations of measurements of performance\n",
        "\n",
        "knn_TP = cm[1,1]\n",
        "knn_FP = cm[1,0]\n",
        "knn_TN = cm[0,0]\n",
        "knn_FN = cm[0,1]\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "knn_Sensitivity = knn_TP / (knn_TP + knn_FN)\n",
        "print(\"knn_Sensitivity = \"+ str(knn_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "knn_Specificity = knn_TN / (knn_FP + knn_TN)\n",
        "print(\"knn_Specificity = \"+ str(knn_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "knn_Precision = knn_TP / (knn_TP + knn_FP)\n",
        "print(\"knn_Precision = \"+ str(knn_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "knn_Accuracy = (knn_TP + knn_TN) / (knn_TP + knn_FP + knn_TN + knn_FN)\n",
        "print(\"knn_Accuracy = \"+ str(knn_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "knn_Sensitivity = 0.8005844698023935\n",
            "knn_Specificity = 0.7821316614420063\n",
            "knn_Precision = 0.7752324484570813\n",
            "knn_Accuracy = 0.7910658940843552\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "Zmto4zTUMZwn",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 6.4 Another KNN Model with adjusted parameters"
      ]
    },
    {
      "metadata": {
        "id": "5a2QvVsBeosd",
        "colab_type": "code",
        "outputId": "ca0be370-27b3-44c7-fc6e-903cc6ac7235",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "\"\"\"\n",
        "KNN MODEL WITH K = 15\n",
        "\n",
        "\"\"\"\n",
        "\n",
        "# The columns that we will be making predictions with.\n",
        "x_columns = ['kurtosis_S1_X','max_S1_X','mean_S1_X','min_S1_X','range_S1_X','skewness_S1_X','std_S1_X','var_S1_X',\n",
        "             'kurtosis_S1_Y','max_S1_Y','mean_S1_Y','min_S1_Y','range_S1_Y','skewness_S1_Y','std_S1_Y','var_S1_Y',\n",
        "             'kurtosis_S1_Z','max_S1_Z','mean_S1_Z','min_S1_Z','range_S1_Z','skewness_S1_Z','std_S1_Z','var_S1_Z',\n",
        "             'kurtosis_S1_N_XYZ','max_S1_N_XYZ','mean_S1_N_XYZ','min_S1_N_XYZ','range_S1_N_XYZ','skewness_S1_N_XYZ','std_S1_N_XYZ','var_S1_N_XYZ',\n",
        "             'kurtosis_S1_N_HOR','max_S1_N_HOR','mean_S1_N_HOR','min_S1_N_HOR','range_S1_N_HOR','skewness_S1_N_HOR','std_S1_N_HOR','var_S1_N_HOR',\n",
        "             'kurtosis_S1_N_VER','max_S1_N_VER','mean_S1_N_VER','min_S1_N_VER','range_S1_N_VER','skewness_S1_N_VER','std_S1_N_VER','var_S1_N_VER',\n",
        "             'corr_HV','corr_NH','corr_NV','corr_XY','corr_XZ','corr_YZ']\n",
        "# The column that we want to predict.\n",
        "y_column = [\"Fall_ADL\"]\n",
        "\n",
        "from sklearn.neighbors import KNeighborsClassifier\n",
        "# Create the knn model.\n",
        "# Look at the five closest neighbors.\n",
        "knn = KNeighborsClassifier(n_neighbors=15)\n",
        "# Fit the model on the training data.\n",
        "y = df_ADL_Falls_train.loc[:,['Fall_ADL']]\n",
        "train_y = np.array(y)\n",
        "knn.fit(df_ADL_Falls_train[x_columns], train_y.ravel())\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "KNeighborsClassifier(algorithm='auto', leaf_size=30, metric='minkowski',\n",
              "           metric_params=None, n_jobs=1, n_neighbors=15, p=2,\n",
              "           weights='uniform')"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 65
        }
      ]
    },
    {
      "metadata": {
        "id": "kGdDqH1XMjXS",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Confusion matrix and performance measurements on TRAINING SET"
      ]
    },
    {
      "metadata": {
        "id": "Z4MQ6iIneotU",
        "colab_type": "code",
        "outputId": "b08bc3f3-fd62-4a88-85d4-996c00ecab60",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Make point predictions on the TRAINING set using the fit model.\n",
        "predictions = knn.predict(df_ADL_Falls_train[x_columns])\n",
        "\n",
        "print(\"KNN Performance on TRAINING set:\")\n",
        "print(\"-----------------------------\")\n",
        "print(predictions)\n",
        "print(knn.score(df_ADL_Falls_train[x_columns], df_ADL_Falls_train[y_column]))\n",
        "\n",
        "from sklearn.metrics import confusion_matrix\n",
        "\n",
        "cm = confusion_matrix(df_ADL_Falls_train[y_column], predictions, labels=[\"D\", \"F\"])\n",
        "print(\"Confusion Matrix:\")\n",
        "print(\"-----------------\")\n",
        "print(cm)\n",
        "print(\"-----------------\")\n",
        "cm_norm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
        "print(\"Confusion Matrix (Normalized):\")\n",
        "print(\"-----------------------------\")\n",
        "print(cm_norm)\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "# calculations of measurements of performance\n",
        "\n",
        "knn_TP = cm[1,1]\n",
        "knn_FP = cm[1,0]\n",
        "knn_TN = cm[0,0]\n",
        "knn_FN = cm[0,1]\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "knn_Sensitivity = knn_TP / (knn_TP + knn_FN)\n",
        "print(\"knn_Sensitivity = \"+ str(knn_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "knn_Specificity = knn_TN / (knn_FP + knn_TN)\n",
        "print(\"knn_Specificity = \"+ str(knn_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "knn_Precision = knn_TP / (knn_TP + knn_FP)\n",
        "print(\"knn_Precision = \"+ str(knn_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "knn_Accuracy = (knn_TP + knn_TN) / (knn_TP + knn_FP + knn_TN + knn_FN)\n",
        "print(\"knn_Accuracy = \"+ str(knn_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Performance on TRAINING set:\n",
            "-----------------------------\n",
            "['F' 'F' 'F' ... 'D' 'D' 'D']\n",
            "0.8608207510646535\n",
            "Confusion Matrix:\n",
            "-----------------\n",
            "[[46175  5567]\n",
            " [ 8813 42765]]\n",
            "-----------------\n",
            "Confusion Matrix (Normalized):\n",
            "-----------------------------\n",
            "[[0.89240849 0.10759151]\n",
            " [0.17086742 0.82913258]]\n",
            "-----------------------------\n",
            "-----------------------------\n",
            "knn_Sensitivity = 0.8848175122072333\n",
            "knn_Specificity = 0.8397286680730341\n",
            "knn_Precision = 0.8291325759044554\n",
            "knn_Accuracy = 0.8608207510646535\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "-5jaZA23Muua",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Confusion matrix and performance measurements on VALIDATION SET"
      ]
    },
    {
      "metadata": {
        "id": "gieeuvVQeqN0",
        "colab_type": "code",
        "outputId": "13112c57-0e1b-4aca-b0a0-6ceccd92e4c9",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Make point predictions on the VALIDATION set using the fit model.\n",
        "predictions = knn.predict(df_ADL_Falls_val[x_columns])\n",
        "\n",
        "print(\"KNN Performance on VALIDATION set:\")\n",
        "print(\"-----------------------------\")\n",
        "print(predictions)\n",
        "print(knn.score(df_ADL_Falls_val[x_columns], df_ADL_Falls_val[y_column]))\n",
        "\n",
        "from sklearn.metrics import confusion_matrix\n",
        "\n",
        "cm = confusion_matrix(df_ADL_Falls_val[y_column], predictions, labels=[\"D\", \"F\"])\n",
        "print(\"Confusion Matrix:\")\n",
        "print(\"-----------------\")\n",
        "print(cm)\n",
        "print(\"-----------------\")\n",
        "cm_norm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
        "print(\"Confusion Matrix (Normalized):\")\n",
        "print(\"-----------------------------\")\n",
        "print(cm_norm)\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "# calculations of measurements of performance\n",
        "\n",
        "knn_TP = cm[1,1]\n",
        "knn_FP = cm[1,0]\n",
        "knn_TN = cm[0,0]\n",
        "knn_FN = cm[0,1]\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "knn_Sensitivity = knn_TP / (knn_TP + knn_FN)\n",
        "print(\"knn_Sensitivity = \"+ str(knn_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "knn_Specificity = knn_TN / (knn_FP + knn_TN)\n",
        "print(\"knn_Specificity = \"+ str(knn_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "knn_Precision = knn_TP / (knn_TP + knn_FP)\n",
        "print(\"knn_Precision = \"+ str(knn_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "knn_Accuracy = (knn_TP + knn_TN) / (knn_TP + knn_FP + knn_TN + knn_FN)\n",
        "print(\"knn_Accuracy = \"+ str(knn_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Performance on VALIDATION set:\n",
            "-----------------------------\n",
            "['F' 'F' 'F' ... 'D' 'D' 'D']\n",
            "0.776649228887832\n",
            "Confusion Matrix:\n",
            "-----------------\n",
            "[[11669  3050]\n",
            " [ 3525 11194]]\n",
            "-----------------\n",
            "Confusion Matrix (Normalized):\n",
            "-----------------------------\n",
            "[[0.79278484 0.20721516]\n",
            " [0.23948638 0.76051362]]\n",
            "-----------------------------\n",
            "-----------------------------\n",
            "knn_Sensitivity = 0.7858747542825049\n",
            "knn_Specificity = 0.7680005265236277\n",
            "knn_Precision = 0.7605136218493104\n",
            "knn_Accuracy = 0.776649228887832\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "rOfhlvhzM2KS",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Confusion matrix and performance measurements on TEST SET"
      ]
    },
    {
      "metadata": {
        "id": "h5hJrmUIeqN9",
        "colab_type": "code",
        "outputId": "83bc2e20-fc99-4840-fee7-e0d929d1416f",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Make point predictions on the TEST set using the fit model.\n",
        "predictions = knn.predict(df_ADL_Falls_test[x_columns])\n",
        "\n",
        "print(\"KNN Performance on TEST set:\")\n",
        "print(\"-----------------------------\")\n",
        "print(predictions)\n",
        "print(knn.score(df_ADL_Falls_test[x_columns], df_ADL_Falls_test[y_column]))\n",
        "\n",
        "from sklearn.metrics import confusion_matrix\n",
        "\n",
        "cm = confusion_matrix(df_ADL_Falls_test[y_column], predictions, labels=[\"D\", \"F\"])\n",
        "print(\"Confusion Matrix:\")\n",
        "print(\"-----------------\")\n",
        "print(cm)\n",
        "print(\"-----------------\")\n",
        "cm_norm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
        "print(\"Confusion Matrix (Normalized):\")\n",
        "print(\"-----------------------------\")\n",
        "print(cm_norm)\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "# calculations of measurements of performance\n",
        "\n",
        "knn_TP = cm[1,1]\n",
        "knn_FP = cm[1,0]\n",
        "knn_TN = cm[0,0]\n",
        "knn_FN = cm[0,1]\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "knn_Sensitivity = knn_TP / (knn_TP + knn_FN)\n",
        "print(\"knn_Sensitivity = \"+ str(knn_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "knn_Specificity = knn_TN / (knn_FP + knn_TN)\n",
        "print(\"knn_Specificity = \"+ str(knn_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "knn_Precision = knn_TP / (knn_TP + knn_FP)\n",
        "print(\"knn_Precision = \"+ str(knn_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "knn_Accuracy = (knn_TP + knn_TN) / (knn_TP + knn_FP + knn_TN + knn_FN)\n",
        "print(\"knn_Accuracy = \"+ str(knn_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "KNN Performance on TEST set:\n",
            "-----------------------------\n",
            "['D' 'D' 'F' ... 'D' 'F' 'D']\n",
            "0.7864169249427301\n",
            "Confusion Matrix:\n",
            "-----------------\n",
            "[[6126 1295]\n",
            " [1875 5546]]\n",
            "-----------------\n",
            "Confusion Matrix (Normalized):\n",
            "-----------------------------\n",
            "[[0.82549522 0.17450478]\n",
            " [0.25266137 0.74733863]]\n",
            "-----------------------------\n",
            "-----------------------------\n",
            "knn_Sensitivity = 0.8107001900306973\n",
            "knn_Specificity = 0.7656542932133483\n",
            "knn_Precision = 0.7473386336073305\n",
            "knn_Accuracy = 0.7864169249427301\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "colab_type": "text",
        "id": "gvdG8GzIyQN_"
      },
      "cell_type": "markdown",
      "source": [
        "## 6.5 Define and train a Support Vector Machine (SVM)-like Classifier\n",
        "Below we use LinearSVC from sklearn.svm, experimenting with various parameter settings. "
      ]
    },
    {
      "metadata": {
        "id": "7zDb-ARCerqn",
        "colab_type": "code",
        "outputId": "81e31cf5-f6eb-4534-8d22-a7c44f0cd68d",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# Here we use LinearSVC\n",
        "from sklearn.svm import LinearSVC\n",
        "\n",
        "# define the classifier\n",
        "clf = LinearSVC(random_state=0, tol=1e-8, dual=False)\n",
        "\n",
        "# prepare/get the columns\n",
        "X= df_ADL_Falls_train[x_columns]\n",
        "y= train_y.ravel()\n",
        "\n",
        "# Fit/train classifier\n",
        "clf.fit(X, y)\n",
        "\n",
        "print(clf.coef_)\n",
        "print(clf.intercept_)"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "[[-1.05033505e-02 -4.23368764e-04  1.76724819e-03  8.85471908e-06\n",
            "  -4.32237435e-04 -2.68149378e-03  2.38105252e-02 -7.41567816e-05\n",
            "  -4.57191831e-03  9.99616917e-04 -1.08029026e-03  5.86370072e-04\n",
            "   4.13282660e-04  2.90590822e-02 -2.52766703e-03  2.03523873e-05\n",
            "  -4.06791563e-03 -2.91022656e-04 -2.08835661e-03  3.56189983e-04\n",
            "  -6.47188459e-04  7.24611850e-02  1.24560919e-02 -3.33316071e-06\n",
            "   9.33068137e-03  4.51149369e-01 -5.70279625e+00  7.27338036e-01\n",
            "  -2.76188667e-01 -1.83816718e-01  4.96369909e+00  1.69120970e+00\n",
            "   1.58510370e-03  1.34567746e-01 -1.83309225e-01  2.45237571e-01\n",
            "  -1.10669824e-01  2.96255923e-02 -4.45931857e+00 -3.03663876e+00\n",
            "  -8.65358579e-03 -2.15839802e-01  1.00843876e+00 -5.79099244e-01\n",
            "   3.63259442e-01  4.81706869e-02 -2.19374205e+00  2.08366538e+00\n",
            "  -1.21374764e+00  2.55618706e+00  8.92331323e-01  4.80117616e-02\n",
            "  -8.00491181e-02  1.24074230e-01]]\n",
            "[1.13719496]\n",
            "['D' 'D' 'D' ... 'D' 'D' 'D']\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "colab_type": "text",
        "id": "6k4WxGf4N9JA"
      },
      "cell_type": "markdown",
      "source": [
        "### Confusion matrix and performance measurements on TRAINING SET"
      ]
    },
    {
      "metadata": {
        "id": "6dgegknMerq0",
        "colab_type": "code",
        "outputId": "9b936cf8-66e3-4234-c50e-f658e6fc7084",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "from sklearn.metrics import confusion_matrix\n",
        "\n",
        "print(\"SVC PREDICTIONS ON THE TRAINING SET:\")\n",
        "predictions_SVC = clf.predict(df_ADL_Falls_train[x_columns])\n",
        "print(predictions_SVC)\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "cm = confusion_matrix(df_ADL_Falls_train[y_column], predictions_SVC, labels=[\"D\", \"F\"])\n",
        "print(\"Confusion Matrix:\")\n",
        "print(\"-----------------\")\n",
        "print(cm)\n",
        "print(\"-----------------\")\n",
        "cm_norm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
        "print(\"Confusion Matrix (Normalized):\")\n",
        "print(\"-----------------------------\")\n",
        "print(cm_norm)\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "# calculations of measurements of performance\n",
        "\n",
        "n_TP = cm[1,1]\n",
        "n_FP = cm[1,0]\n",
        "n_TN = cm[0,0]\n",
        "n_FN = cm[0,1]\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "svc_Sensitivity = n_TP / (n_TP + n_FN)\n",
        "print(\"svc_Sensitivity = \"+ str(svc_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "svc_Specificity = n_TN / (n_FP + n_TN)\n",
        "print(\"svc_Specificity = \"+ str(svc_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "svc_Precision = n_TP / (n_TP + n_FP)\n",
        "print(\"svc_Precision = \"+ str(svc_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "svc_Accuracy = (n_TP + n_TN) / (n_TP + n_FP + n_TN + n_FN)\n",
        "print(\"svc_Accuracy = \"+ str(svc_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "SVC PREDICTIONS ON THE TRAINING SET:\n",
            "['D' 'D' 'D' ... 'D' 'D' 'D']\n",
            "-----------------------------\n",
            "Confusion Matrix:\n",
            "-----------------\n",
            "[[45025  6717]\n",
            " [12446 39132]]\n",
            "-----------------\n",
            "Confusion Matrix (Normalized):\n",
            "-----------------------------\n",
            "[[0.87018283 0.12981717]\n",
            " [0.24130443 0.75869557]]\n",
            "-----------------------------\n",
            "-----------------------------\n",
            "svc_Sensitivity = 0.8534973499967284\n",
            "svc_Specificity = 0.7834386038175776\n",
            "svc_Precision = 0.7586955678777774\n",
            "svc_Accuracy = 0.8145276809910956\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "colab_type": "text",
        "id": "Oj8ocNZ5OAWw"
      },
      "cell_type": "markdown",
      "source": [
        "### Confusion matrix and performance measurements on VALIDATION SET"
      ]
    },
    {
      "metadata": {
        "id": "2en2uRuHes0t",
        "colab_type": "code",
        "outputId": "646a5c88-6a8d-4b4b-930d-6c98e107af9e",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "from sklearn.metrics import confusion_matrix\n",
        "\n",
        "print(\"SVC PREDICTIONS ON THE VALIDATION SET:\")\n",
        "predictions_SVC = clf.predict(df_ADL_Falls_val[x_columns])\n",
        "print(predictions_SVC)\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "cm = confusion_matrix(df_ADL_Falls_val[y_column], predictions_SVC, labels=[\"D\", \"F\"])\n",
        "print(\"Confusion Matrix:\")\n",
        "print(\"-----------------\")\n",
        "print(cm)\n",
        "print(\"-----------------\")\n",
        "cm_norm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
        "print(\"Confusion Matrix (Normalized):\")\n",
        "print(\"-----------------------------\")\n",
        "print(cm_norm)\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "# calculations of measurements of performance\n",
        "\n",
        "n_TP = cm[1,1]\n",
        "n_FP = cm[1,0]\n",
        "n_TN = cm[0,0]\n",
        "n_FN = cm[0,1]\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "svc_Sensitivity = n_TP / (n_TP + n_FN)\n",
        "print(\"svc_Sensitivity = \"+ str(svc_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "svc_Specificity = n_TN / (n_FP + n_TN)\n",
        "print(\"svc_Specificity = \"+ str(svc_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "svc_Precision = n_TP / (n_TP + n_FP)\n",
        "print(\"svc_Precision = \"+ str(svc_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "svc_Accuracy = (n_TP + n_TN) / (n_TP + n_FP + n_TN + n_FN)\n",
        "print(\"svc_Accuracy = \"+ str(svc_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "SVC PREDICTIONS ON THE VALIDATION SET:\n",
            "['D' 'D' 'D' ... 'D' 'D' 'D']\n",
            "-----------------------------\n",
            "Confusion Matrix:\n",
            "-----------------\n",
            "[[12654  2065]\n",
            " [ 3436 11283]]\n",
            "-----------------\n",
            "Confusion Matrix (Normalized):\n",
            "-----------------------------\n",
            "[[0.85970514 0.14029486]\n",
            " [0.23343977 0.76656023]]\n",
            "-----------------------------\n",
            "-----------------------------\n",
            "svc_Sensitivity = 0.8452951753071621\n",
            "svc_Specificity = 0.7864512119328776\n",
            "svc_Precision = 0.7665602282763775\n",
            "svc_Accuracy = 0.8131326856444052\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "colab_type": "text",
        "id": "zK7QzrchOEf_"
      },
      "cell_type": "markdown",
      "source": [
        "### Confusion matrix and performance measurements on TEST SET"
      ]
    },
    {
      "metadata": {
        "id": "SlivLaibes02",
        "colab_type": "code",
        "outputId": "3599f5fa-eec1-4d27-bca9-522cada2347a",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "from sklearn.metrics import confusion_matrix\n",
        "\n",
        "print(\"SVC PREDICTIONS ON THE TEST SET:\")\n",
        "predictions_SVC = clf.predict(df_ADL_Falls_test[x_columns])\n",
        "print(predictions_SVC)\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "cm = confusion_matrix(df_ADL_Falls_test[y_column], predictions_SVC, labels=[\"D\", \"F\"])\n",
        "print(\"Confusion Matrix:\")\n",
        "print(\"-----------------\")\n",
        "print(cm)\n",
        "print(\"-----------------\")\n",
        "cm_norm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n",
        "print(\"Confusion Matrix (Normalized):\")\n",
        "print(\"-----------------------------\")\n",
        "print(cm_norm)\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "# calculations of measurements of performance\n",
        "\n",
        "n_TP = cm[1,1]\n",
        "n_FP = cm[1,0]\n",
        "n_TN = cm[0,0]\n",
        "n_FN = cm[0,1]\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "svc_Sensitivity = n_TP / (n_TP + n_FN)\n",
        "print(\"svc_Sensitivity = \"+ str(svc_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "svc_Specificity = n_TN / (n_FP + n_TN)\n",
        "print(\"svc_Specificity = \"+ str(svc_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "svc_Precision = n_TP / (n_TP + n_FP)\n",
        "print(\"svc_Precision = \"+ str(svc_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "svc_Accuracy = (n_TP + n_TN) / (n_TP + n_FP + n_TN + n_FN)\n",
        "print(\"svc_Accuracy = \"+ str(svc_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "SVC PREDICTIONS ON THE TEST SET:\n",
            "['D' 'D' 'D' ... 'D' 'D' 'D']\n",
            "-----------------------------\n",
            "Confusion Matrix:\n",
            "-----------------\n",
            "[[6598  823]\n",
            " [1809 5612]]\n",
            "-----------------\n",
            "Confusion Matrix (Normalized):\n",
            "-----------------------------\n",
            "[[0.8890985  0.1109015 ]\n",
            " [0.24376769 0.75623231]]\n",
            "-----------------------------\n",
            "-----------------------------\n",
            "svc_Sensitivity = 0.8721056721056721\n",
            "svc_Specificity = 0.7848221719995242\n",
            "svc_Precision = 0.7562323137043525\n",
            "svc_Accuracy = 0.8226654089745318\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "_HX8lpYOAenz",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 6.6 Utility function to display training/validation performance data\n",
        "Note: the code in the following cell was adapted/extended from a code sample shared in class."
      ]
    },
    {
      "metadata": {
        "id": "-gcu5y6C5Kar",
        "colab_type": "code",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "import matplotlib\n",
        "import matplotlib.pyplot as plt\n",
        "\n",
        "def plot_acc(history, title=\"Model Accuracy\"):\n",
        "    \"\"\"Displays a graph with the accuracy per epoch, obtained in a training session\"\"\"\n",
        "    plt.plot(history.history['acc'])\n",
        "    plt.plot(history.history['val_acc'])\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Accuracy')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train', 'Val'], loc='upper left')\n",
        "    plt.show()\n",
        "    \n",
        "def plot_loss(history, title=\"Model Loss\"):\n",
        "    \"\"\"Displays a graph with the loss per epoch, obtained in a training session\"\"\"\n",
        "    plt.plot(history.history['loss'])\n",
        "    plt.plot(history.history['val_loss'])\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Loss')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train', 'Val'], loc='upper right')\n",
        "    plt.show()\n",
        "    \n",
        "def plot_compare_losses(history1, history2, name1=\"Red 1\",\n",
        "                        name2=\"Red 2\", title=\"Graph title\"):\n",
        "    \"\"\"Compares the losses of two trainings: name1 and name2\"\"\"\n",
        "    # Note: use a plain '--' (dashed) format spec; the color is given by the\n",
        "    # color= keyword (a format spec like 'r--' would be silently overridden).\n",
        "    plt.plot(history1.history['loss'], color=\"green\")\n",
        "    plt.plot(history1.history['val_loss'], '--', color=\"green\")\n",
        "    plt.plot(history2.history['loss'], color=\"blue\")\n",
        "    plt.plot(history2.history['val_loss'], '--', color=\"blue\")\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Loss')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train ' + name1, 'Val ' + name1, \n",
        "                'Train ' + name2, 'Val ' + name2],\n",
        "               loc='upper right')\n",
        "    plt.show()\n",
        "    \n",
        "def plot_compare_accs(history1, history2, name1=\"Red 1\",\n",
        "                      name2=\"Red 2\", title=\"Graph title\"):\n",
        "    \"\"\"Compares the accuracies of two trainings: name1 and name2\"\"\"\n",
        "    plt.plot(history1.history['acc'], color=\"green\")\n",
        "    plt.plot(history1.history['val_acc'], '--', color=\"green\")\n",
        "    plt.plot(history2.history['acc'], color=\"blue\")\n",
        "    plt.plot(history2.history['val_acc'], '--', color=\"blue\")\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Accuracy')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train ' + name1, 'Val ' + name1, \n",
        "                'Train ' + name2, 'Val ' + name2], \n",
        "               loc='lower right')\n",
        "    plt.show()\n",
        "    \n",
        "    \n",
        "def plot_compare_losses3(history1, history2, history3, name1=\"Red 1\",\n",
        "                        name2=\"Red 2\", name3=\"Red 3\", title=\"Graph title\"):\n",
        "    \"\"\"Compares the losses of three trainings: name1, name2 and name3\"\"\"\n",
        "    plt.plot(history1.history['loss'], color=\"green\")\n",
        "    plt.plot(history1.history['val_loss'], '--', color=\"green\")\n",
        "    plt.plot(history2.history['loss'], color=\"blue\")\n",
        "    plt.plot(history2.history['val_loss'], '--', color=\"blue\")\n",
        "    plt.plot(history3.history['loss'], color=\"purple\")\n",
        "    plt.plot(history3.history['val_loss'], '--', color=\"purple\")\n",
        "\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Loss')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train ' + name1, 'Val ' + name1, \n",
        "                'Train ' + name2, 'Val ' + name2,\n",
        "                'Train ' + name3, 'Val ' + name3],               \n",
        "               loc='upper right')\n",
        "    plt.show()\n",
        "    \n",
        "def plot_compare_accs3(history1, history2, history3, name1=\"Red 1\",\n",
        "                        name2=\"Red 2\", name3=\"Red 3\", title=\"Graph title\"):\n",
        "    \"\"\"Compares the accuracies of three trainings: name1, name2 and name3\"\"\"\n",
        "    plt.plot(history1.history['acc'], color=\"green\")\n",
        "    plt.plot(history1.history['val_acc'], '--', color=\"green\")\n",
        "    plt.plot(history2.history['acc'], color=\"blue\")\n",
        "    plt.plot(history2.history['val_acc'], '--', color=\"blue\")\n",
        "    plt.plot(history3.history['acc'], color=\"purple\")\n",
        "    plt.plot(history3.history['val_acc'], '--', color=\"purple\")\n",
        "    plt.title(title)\n",
        "    plt.ylabel('Accuracy')\n",
        "    plt.xlabel('Epoch')\n",
        "    plt.legend(['Train ' + name1, 'Val ' + name1, \n",
        "                'Train ' + name2, 'Val ' + name2, \n",
        "               'Train ' + name3, 'Val ' + name3], \n",
        "               loc='lower right')\n",
        "    plt.show()"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "metadata": {
        "id": "uWyjc_UhOhbr",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "## 6.7 Define, compile and train a Neural Model using TensorFlow and Keras"
      ]
    },
    {
      "metadata": {
        "id": "g6clVRfkes1C",
        "colab_type": "code",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# import TensorFlow & tf.keras\n",
        "import tensorflow as tf\n",
        "from tensorflow import keras\n",
        "\n",
        "# The columns that we will be making predictions with.\n",
        "x_columns = ['kurtosis_S1_X','max_S1_X','mean_S1_X','min_S1_X','range_S1_X','skewness_S1_X','std_S1_X','var_S1_X',\n",
        "             'kurtosis_S1_Y','max_S1_Y','mean_S1_Y','min_S1_Y','range_S1_Y','skewness_S1_Y','std_S1_Y','var_S1_Y',\n",
        "             'kurtosis_S1_Z','max_S1_Z','mean_S1_Z','min_S1_Z','range_S1_Z','skewness_S1_Z','std_S1_Z','var_S1_Z',\n",
        "             'kurtosis_S1_N_XYZ','max_S1_N_XYZ','mean_S1_N_XYZ','min_S1_N_XYZ','range_S1_N_XYZ','skewness_S1_N_XYZ','std_S1_N_XYZ','var_S1_N_XYZ',\n",
        "             'kurtosis_S1_N_HOR','max_S1_N_HOR','mean_S1_N_HOR','min_S1_N_HOR','range_S1_N_HOR','skewness_S1_N_HOR','std_S1_N_HOR','var_S1_N_HOR',\n",
        "             'kurtosis_S1_N_VER','max_S1_N_VER','mean_S1_N_VER','min_S1_N_VER','range_S1_N_VER','skewness_S1_N_VER','std_S1_N_VER','var_S1_N_VER',\n",
        "             'corr_HV','corr_NH','corr_NV','corr_XY','corr_XZ','corr_YZ']\n",
        "\n",
        "# The column that we want to predict.\n",
        "y_column = [\"Fall_ADL_BIN\"]\n",
        "\n",
        "# prepare several columns we need for the training:\n",
        "fn_Fall_ADL_BIN = lambda row: (1) if row.Fall_ADL == \"F\" else (0)\n",
        "col = df_ADL_Falls_train.apply(fn_Fall_ADL_BIN, axis=1) \n",
        "df_ADL_Falls_train = df_ADL_Falls_train.assign(Fall_ADL_BIN=col.values) \n",
        "\n",
        "col = df_ADL_Falls_val.apply(fn_Fall_ADL_BIN, axis=1) \n",
        "df_ADL_Falls_val = df_ADL_Falls_val.assign(Fall_ADL_BIN=col.values) \n",
        "\n",
        "col = df_ADL_Falls_test.apply(fn_Fall_ADL_BIN, axis=1) \n",
        "df_ADL_Falls_test = df_ADL_Falls_test.assign(Fall_ADL_BIN=col.values) \n",
        "\n",
        "# define the neural network\n",
        "my_model_NN = keras.Sequential()\n",
        "my_model_NN.add(keras.layers.Dense(32, input_shape=(len(x_columns),)))\n",
        "my_model_NN.add(keras.layers.Dropout(0.2))\n",
        "my_model_NN.add(keras.layers.Dense(128, activation=tf.nn.relu))\n",
        "my_model_NN.add(keras.layers.BatchNormalization())\n",
        "my_model_NN.add(keras.layers.Dropout(0.2))\n",
        "my_model_NN.add(keras.layers.Dense(256, activation=tf.nn.relu))\n",
        "my_model_NN.add(keras.layers.BatchNormalization())\n",
        "my_model_NN.add(keras.layers.Dense(1024, activation=tf.nn.relu))\n",
        "my_model_NN.add(keras.layers.BatchNormalization())\n",
        "\n",
        "my_model_NN.add(keras.layers.Dense(2, activation='softmax'))\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "metadata": {
        "id": "yODvj8NPet-5",
        "colab_type": "code",
        "outputId": "ddc2fb0f-2590-40eb-cbdd-1674a4a91fff",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 476
        }
      },
      "cell_type": "code",
      "source": [
        "# See/display our net architecture\n",
        "my_model_NN.summary()"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "_________________________________________________________________\n",
            "Layer (type)                 Output Shape              Param #   \n",
            "=================================================================\n",
            "dense (Dense)                (None, 32)                1760      \n",
            "_________________________________________________________________\n",
            "dropout (Dropout)            (None, 32)                0         \n",
            "_________________________________________________________________\n",
            "dense_1 (Dense)              (None, 128)               4224      \n",
            "_________________________________________________________________\n",
            "batch_normalization (BatchNo (None, 128)               512       \n",
            "_________________________________________________________________\n",
            "dropout_1 (Dropout)          (None, 128)               0         \n",
            "_________________________________________________________________\n",
            "dense_2 (Dense)              (None, 256)               33024     \n",
            "_________________________________________________________________\n",
            "batch_normalization_1 (Batch (None, 256)               1024      \n",
            "_________________________________________________________________\n",
            "dense_3 (Dense)              (None, 1024)              263168    \n",
            "_________________________________________________________________\n",
            "batch_normalization_2 (Batch (None, 1024)              4096      \n",
            "_________________________________________________________________\n",
            "dense_4 (Dense)              (None, 2)                 2050      \n",
            "=================================================================\n",
            "Total params: 309,858\n",
            "Trainable params: 307,042\n",
            "Non-trainable params: 2,816\n",
            "_________________________________________________________________\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "_U-LnGgXet_B",
        "colab_type": "code",
        "colab": {}
      },
      "cell_type": "code",
      "source": [
        "# In this case, we use optimizer SGD with Momentum\n",
        "sgdMN = keras.optimizers.SGD(lr=0.01, decay=1e-8, momentum=0.9, nesterov=False)\n",
        "\n",
        "\n",
        "# compile the model\n",
        "my_model_NN.compile(optimizer=sgdMN,  # use the momentum SGD defined above, not the default 'SGD' string\n",
        "              loss='sparse_categorical_crossentropy',\n",
        "              metrics=['accuracy'])\n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "metadata": {
        "id": "L6tS2tNWet_J",
        "colab_type": "code",
        "outputId": "1f21bd1f-b7cf-46ca-a135-6334fd1e8dd8",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 136
        }
      },
      "cell_type": "code",
      "source": [
        "# for clarity, lets name the training, val, test sets:\n",
        "import numpy as np\n",
        "\n",
        "x_train = df_ADL_Falls_train[x_columns]\n",
        "y_train = np.array(df_ADL_Falls_train.loc[:, ['Fall_ADL_BIN']])\n",
        "\n",
        "x_val = df_ADL_Falls_val[x_columns]\n",
        "y_val = np.array(df_ADL_Falls_val.loc[:, ['Fall_ADL_BIN']])\n",
        "\n",
        "\n",
        "print(y_val)"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "[[1]\n",
            " [1]\n",
            " [1]\n",
            " ...\n",
            " [0]\n",
            " [0]\n",
            " [0]]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "f967RhjOet_P",
        "colab_type": "code",
        "outputId": "574109c5-b0f6-4b5f-d240-cfb78bad7dec",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 11934
        }
      },
      "cell_type": "code",
      "source": [
        "# Train the model \"n\" epochs (in this case n=350 )\n",
        "history = my_model_NN.fit(x_train, y_train, epochs=350, batch_size=256, \n",
        "                          validation_data=(x_val, y_val))  # using x_val/y_val for validation"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Train on 103279 samples, validate on 29438 samples\n",
            "Epoch 1/350\n",
            "103279/103279 [==============================] - 4s 39us/step - loss: 0.2474 - acc: 0.8880 - val_loss: 0.3781 - val_acc: 0.8247\n",
            "Epoch 2/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2472 - acc: 0.8892 - val_loss: 0.3557 - val_acc: 0.8261\n",
            "Epoch 3/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2442 - acc: 0.8906 - val_loss: 0.3266 - val_acc: 0.8543\n",
            "Epoch 4/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2472 - acc: 0.8890 - val_loss: 0.3239 - val_acc: 0.8557\n",
            "Epoch 5/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2474 - acc: 0.8875 - val_loss: 0.3487 - val_acc: 0.8447\n",
            "Epoch 6/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2450 - acc: 0.8891 - val_loss: 0.3554 - val_acc: 0.8460\n",
            "Epoch 7/350\n",
            "103279/103279 [==============================] - 4s 40us/step - loss: 0.2472 - acc: 0.8874 - val_loss: 0.3379 - val_acc: 0.8402\n",
            "Epoch 8/350\n",
            "103279/103279 [==============================] - 4s 40us/step - loss: 0.2468 - acc: 0.8884 - val_loss: 0.3432 - val_acc: 0.8465\n",
            "Epoch 9/350\n",
            "103279/103279 [==============================] - 4s 40us/step - loss: 0.2456 - acc: 0.8891 - val_loss: 0.3154 - val_acc: 0.8578\n",
            "Epoch 10/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2462 - acc: 0.8881 - val_loss: 0.3284 - val_acc: 0.8547\n",
            "Epoch 11/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2455 - acc: 0.8883 - val_loss: 0.3277 - val_acc: 0.8556\n",
            "Epoch 12/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2452 - acc: 0.8896 - val_loss: 0.3293 - val_acc: 0.8545\n",
            "Epoch 13/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2453 - acc: 0.8895 - val_loss: 0.3518 - val_acc: 0.8391\n",
            "Epoch 14/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2458 - acc: 0.8896 - val_loss: 0.4352 - val_acc: 0.7930\n",
            "Epoch 15/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2479 - acc: 0.8880 - val_loss: 0.3742 - val_acc: 0.8194\n",
            "Epoch 16/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2448 - acc: 0.8895 - val_loss: 0.3695 - val_acc: 0.8451\n",
            "Epoch 17/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2460 - acc: 0.8899 - val_loss: 0.3243 - val_acc: 0.8570\n",
            "Epoch 18/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2460 - acc: 0.8890 - val_loss: 0.3780 - val_acc: 0.8165\n",
            "Epoch 19/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2463 - acc: 0.8888 - val_loss: 0.3422 - val_acc: 0.8446\n",
            "Epoch 20/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2458 - acc: 0.8885 - val_loss: 0.3272 - val_acc: 0.8547\n",
            "Epoch 21/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2450 - acc: 0.8882 - val_loss: 0.3540 - val_acc: 0.8420\n",
            "Epoch 22/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2459 - acc: 0.8887 - val_loss: 0.3393 - val_acc: 0.8498\n",
            "Epoch 23/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2462 - acc: 0.8884 - val_loss: 0.3916 - val_acc: 0.8010\n",
            "Epoch 24/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2453 - acc: 0.8889 - val_loss: 0.3281 - val_acc: 0.8564\n",
            "Epoch 25/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2446 - acc: 0.8899 - val_loss: 0.3466 - val_acc: 0.8462\n",
            "Epoch 26/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2440 - acc: 0.8903 - val_loss: 0.3627 - val_acc: 0.8316\n",
            "Epoch 27/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2457 - acc: 0.8890 - val_loss: 0.3919 - val_acc: 0.8069\n",
            "Epoch 28/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2433 - acc: 0.8908 - val_loss: 0.3255 - val_acc: 0.8563\n",
            "Epoch 29/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2453 - acc: 0.8895 - val_loss: 0.3370 - val_acc: 0.8545\n",
            "Epoch 30/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2465 - acc: 0.8889 - val_loss: 0.3194 - val_acc: 0.8594\n",
            "Epoch 31/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2431 - acc: 0.8896 - val_loss: 0.3831 - val_acc: 0.8291\n",
            "Epoch 32/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2436 - acc: 0.8901 - val_loss: 0.3807 - val_acc: 0.8374\n",
            "Epoch 33/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2451 - acc: 0.8899 - val_loss: 0.3576 - val_acc: 0.8478\n",
            "Epoch 34/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2454 - acc: 0.8900 - val_loss: 0.3999 - val_acc: 0.8096\n",
            "Epoch 35/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2460 - acc: 0.8894 - val_loss: 0.3508 - val_acc: 0.8478\n",
            "Epoch 36/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2448 - acc: 0.8903 - val_loss: 0.3387 - val_acc: 0.8503\n",
            "Epoch 37/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2431 - acc: 0.8905 - val_loss: 0.4517 - val_acc: 0.8259\n",
            "Epoch 38/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2421 - acc: 0.8913 - val_loss: 0.3310 - val_acc: 0.8545\n",
            "Epoch 39/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2457 - acc: 0.8894 - val_loss: 0.3606 - val_acc: 0.8284\n",
            "Epoch 40/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2448 - acc: 0.8899 - val_loss: 0.3554 - val_acc: 0.8375\n",
            "Epoch 41/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2455 - acc: 0.8899 - val_loss: 0.3196 - val_acc: 0.8543\n",
            "Epoch 42/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2456 - acc: 0.8886 - val_loss: 0.4018 - val_acc: 0.8370\n",
            "Epoch 43/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2445 - acc: 0.8890 - val_loss: 0.3433 - val_acc: 0.8433\n",
            "Epoch 44/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2440 - acc: 0.8902 - val_loss: 0.4247 - val_acc: 0.8197\n",
            "Epoch 45/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2447 - acc: 0.8903 - val_loss: 0.3477 - val_acc: 0.8464\n",
            "Epoch 46/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2442 - acc: 0.8906 - val_loss: 0.3226 - val_acc: 0.8540\n",
            "Epoch 47/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2463 - acc: 0.8892 - val_loss: 0.3797 - val_acc: 0.8134\n",
            "Epoch 48/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2430 - acc: 0.8904 - val_loss: 0.3724 - val_acc: 0.8288\n",
            "Epoch 49/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2459 - acc: 0.8894 - val_loss: 0.3539 - val_acc: 0.8422\n",
            "Epoch 50/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2438 - acc: 0.8897 - val_loss: 0.3328 - val_acc: 0.8492\n",
            "Epoch 51/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2442 - acc: 0.8902 - val_loss: 0.3328 - val_acc: 0.8538\n",
            "Epoch 52/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2427 - acc: 0.8913 - val_loss: 0.3337 - val_acc: 0.8541\n",
            "Epoch 53/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2416 - acc: 0.8915 - val_loss: 0.3147 - val_acc: 0.8610\n",
            "Epoch 54/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2438 - acc: 0.8909 - val_loss: 0.3826 - val_acc: 0.8352\n",
            "Epoch 55/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2404 - acc: 0.8918 - val_loss: 0.3412 - val_acc: 0.8532\n",
            "Epoch 56/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2413 - acc: 0.8919 - val_loss: 0.3984 - val_acc: 0.8164\n",
            "Epoch 57/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2433 - acc: 0.8905 - val_loss: 0.3745 - val_acc: 0.8373\n",
            "Epoch 58/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2434 - acc: 0.8914 - val_loss: 0.3345 - val_acc: 0.8437\n",
            "Epoch 59/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2424 - acc: 0.8917 - val_loss: 0.3308 - val_acc: 0.8581\n",
            "Epoch 60/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2446 - acc: 0.8902 - val_loss: 0.3983 - val_acc: 0.8300\n",
            "Epoch 61/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2428 - acc: 0.8911 - val_loss: 0.3249 - val_acc: 0.8543\n",
            "Epoch 62/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2433 - acc: 0.8899 - val_loss: 0.3514 - val_acc: 0.8457\n",
            "Epoch 63/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2447 - acc: 0.8889 - val_loss: 0.3464 - val_acc: 0.8456\n",
            "Epoch 64/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2415 - acc: 0.8917 - val_loss: 0.3264 - val_acc: 0.8575\n",
            "Epoch 65/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2429 - acc: 0.8905 - val_loss: 0.3760 - val_acc: 0.8225\n",
            "Epoch 66/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2436 - acc: 0.8899 - val_loss: 0.3624 - val_acc: 0.8399\n",
            "Epoch 67/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2426 - acc: 0.8916 - val_loss: 0.3515 - val_acc: 0.8362\n",
            "Epoch 68/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2447 - acc: 0.8896 - val_loss: 0.3212 - val_acc: 0.8596\n",
            "Epoch 69/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2429 - acc: 0.8908 - val_loss: 0.3714 - val_acc: 0.8417\n",
            "Epoch 70/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2417 - acc: 0.8920 - val_loss: 0.3565 - val_acc: 0.8462\n",
            "Epoch 71/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2423 - acc: 0.8906 - val_loss: 0.4921 - val_acc: 0.8136\n",
            "Epoch 72/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2424 - acc: 0.8911 - val_loss: 0.3660 - val_acc: 0.8482\n",
            "Epoch 73/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2441 - acc: 0.8897 - val_loss: 0.3308 - val_acc: 0.8548\n",
            "Epoch 74/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2414 - acc: 0.8913 - val_loss: 0.3519 - val_acc: 0.8412\n",
            "Epoch 75/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2441 - acc: 0.8893 - val_loss: 0.3407 - val_acc: 0.8438\n",
            "Epoch 76/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2423 - acc: 0.8906 - val_loss: 0.3984 - val_acc: 0.8147\n",
            "Epoch 77/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2404 - acc: 0.8921 - val_loss: 0.3421 - val_acc: 0.8457\n",
            "Epoch 78/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2423 - acc: 0.8907 - val_loss: 0.3519 - val_acc: 0.8376\n",
            "Epoch 79/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2428 - acc: 0.8903 - val_loss: 0.3147 - val_acc: 0.8594\n",
            "Epoch 80/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2407 - acc: 0.8919 - val_loss: 0.3608 - val_acc: 0.8406\n",
            "Epoch 81/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2424 - acc: 0.8908 - val_loss: 0.3904 - val_acc: 0.8322\n",
            "Epoch 82/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2431 - acc: 0.8918 - val_loss: 0.3561 - val_acc: 0.8426\n",
            "Epoch 83/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2418 - acc: 0.8911 - val_loss: 0.3673 - val_acc: 0.8294\n",
            "Epoch 84/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2422 - acc: 0.8910 - val_loss: 0.3362 - val_acc: 0.8526\n",
            "Epoch 85/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2435 - acc: 0.8910 - val_loss: 0.3231 - val_acc: 0.8548\n",
            "Epoch 86/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2405 - acc: 0.8926 - val_loss: 0.3742 - val_acc: 0.8380\n",
            "Epoch 87/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2406 - acc: 0.8927 - val_loss: 0.3444 - val_acc: 0.8522\n",
            "Epoch 88/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2408 - acc: 0.8919 - val_loss: 0.3581 - val_acc: 0.8429\n",
            "Epoch 89/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2416 - acc: 0.8920 - val_loss: 0.3494 - val_acc: 0.8486\n",
            "Epoch 90/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2412 - acc: 0.8920 - val_loss: 0.3587 - val_acc: 0.8455\n",
            "Epoch 91/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2422 - acc: 0.8912 - val_loss: 0.3448 - val_acc: 0.8452\n",
            "Epoch 92/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2426 - acc: 0.8911 - val_loss: 0.3361 - val_acc: 0.8478\n",
            "Epoch 93/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2424 - acc: 0.8910 - val_loss: 0.3544 - val_acc: 0.8466\n",
            "Epoch 94/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2414 - acc: 0.8924 - val_loss: 0.3215 - val_acc: 0.8565\n",
            "Epoch 95/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2411 - acc: 0.8923 - val_loss: 0.3908 - val_acc: 0.8157\n",
            "Epoch 96/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2399 - acc: 0.8918 - val_loss: 0.3392 - val_acc: 0.8541\n",
            "Epoch 97/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2427 - acc: 0.8908 - val_loss: 0.3142 - val_acc: 0.8596\n",
            "Epoch 98/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2418 - acc: 0.8920 - val_loss: 0.3594 - val_acc: 0.8455\n",
            "Epoch 99/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2411 - acc: 0.8921 - val_loss: 0.3288 - val_acc: 0.8557\n",
            "Epoch 100/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2407 - acc: 0.8919 - val_loss: 0.3297 - val_acc: 0.8553\n",
            "Epoch 101/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2409 - acc: 0.8919 - val_loss: 0.3787 - val_acc: 0.8326\n",
            "Epoch 102/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2419 - acc: 0.8914 - val_loss: 0.3461 - val_acc: 0.8535\n",
            "Epoch 103/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2410 - acc: 0.8908 - val_loss: 0.3203 - val_acc: 0.8594\n",
            "Epoch 104/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2417 - acc: 0.8912 - val_loss: 0.3519 - val_acc: 0.8440\n",
            "Epoch 105/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2401 - acc: 0.8931 - val_loss: 0.3496 - val_acc: 0.8422\n",
            "Epoch 106/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2401 - acc: 0.8907 - val_loss: 0.3323 - val_acc: 0.8527\n",
            "Epoch 107/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2415 - acc: 0.8909 - val_loss: 0.3444 - val_acc: 0.8475\n",
            "Epoch 108/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2407 - acc: 0.8921 - val_loss: 0.3627 - val_acc: 0.8368\n",
            "Epoch 109/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2414 - acc: 0.8914 - val_loss: 0.3329 - val_acc: 0.8543\n",
            "Epoch 110/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2402 - acc: 0.8921 - val_loss: 0.3636 - val_acc: 0.8416\n",
            "Epoch 111/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2422 - acc: 0.8915 - val_loss: 0.3299 - val_acc: 0.8531\n",
            "Epoch 112/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2402 - acc: 0.8923 - val_loss: 0.3446 - val_acc: 0.8433\n",
            "Epoch 113/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2410 - acc: 0.8927 - val_loss: 0.3361 - val_acc: 0.8535\n",
            "Epoch 114/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2422 - acc: 0.8910 - val_loss: 0.3545 - val_acc: 0.8409\n",
            "Epoch 115/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2408 - acc: 0.8910 - val_loss: 0.3271 - val_acc: 0.8559\n",
            "Epoch 116/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2403 - acc: 0.8914 - val_loss: 0.4045 - val_acc: 0.8144\n",
            "Epoch 117/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2407 - acc: 0.8926 - val_loss: 0.3524 - val_acc: 0.8424\n",
            "Epoch 118/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2402 - acc: 0.8919 - val_loss: 0.3789 - val_acc: 0.8429\n",
            "Epoch 119/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2416 - acc: 0.8908 - val_loss: 0.4045 - val_acc: 0.8195\n",
            "Epoch 120/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2392 - acc: 0.8930 - val_loss: 0.3859 - val_acc: 0.8210\n",
            "Epoch 121/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2384 - acc: 0.8918 - val_loss: 0.3387 - val_acc: 0.8528\n",
            "Epoch 122/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2397 - acc: 0.8923 - val_loss: 0.3440 - val_acc: 0.8488\n",
            "Epoch 123/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2403 - acc: 0.8928 - val_loss: 0.3659 - val_acc: 0.8199\n",
            "Epoch 124/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2393 - acc: 0.8926 - val_loss: 0.3439 - val_acc: 0.8494\n",
            "Epoch 125/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2397 - acc: 0.8922 - val_loss: 0.3173 - val_acc: 0.8592\n",
            "Epoch 126/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2415 - acc: 0.8916 - val_loss: 0.3225 - val_acc: 0.8532\n",
            "Epoch 127/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2412 - acc: 0.8917 - val_loss: 0.3447 - val_acc: 0.8503\n",
            "Epoch 128/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2415 - acc: 0.8923 - val_loss: 0.3672 - val_acc: 0.8284\n",
            "Epoch 129/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2388 - acc: 0.8931 - val_loss: 0.3404 - val_acc: 0.8472\n",
            "Epoch 130/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2413 - acc: 0.8916 - val_loss: 0.4345 - val_acc: 0.8242\n",
            "Epoch 131/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2401 - acc: 0.8921 - val_loss: 0.3961 - val_acc: 0.8199\n",
            "Epoch 132/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2377 - acc: 0.8935 - val_loss: 0.3858 - val_acc: 0.8364\n",
            "Epoch 133/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2396 - acc: 0.8930 - val_loss: 0.3134 - val_acc: 0.8603\n",
            "Epoch 134/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2412 - acc: 0.8914 - val_loss: 0.3279 - val_acc: 0.8569\n",
            "Epoch 135/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2398 - acc: 0.8928 - val_loss: 0.3561 - val_acc: 0.8370\n",
            "Epoch 136/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2395 - acc: 0.8928 - val_loss: 0.3624 - val_acc: 0.8429\n",
            "Epoch 137/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2404 - acc: 0.8924 - val_loss: 0.3459 - val_acc: 0.8454\n",
            "Epoch 138/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2409 - acc: 0.8917 - val_loss: 0.3257 - val_acc: 0.8610\n",
            "Epoch 139/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2398 - acc: 0.8909 - val_loss: 0.3602 - val_acc: 0.8426\n",
            "Epoch 140/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2383 - acc: 0.8938 - val_loss: 0.3243 - val_acc: 0.8528\n",
            "Epoch 141/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2399 - acc: 0.8928 - val_loss: 0.3436 - val_acc: 0.8512\n",
            "Epoch 142/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2397 - acc: 0.8930 - val_loss: 0.4014 - val_acc: 0.8365\n",
            "Epoch 143/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2410 - acc: 0.8913 - val_loss: 0.3550 - val_acc: 0.8490\n",
            "Epoch 144/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2380 - acc: 0.8931 - val_loss: 0.3672 - val_acc: 0.8357\n",
            "Epoch 145/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2391 - acc: 0.8933 - val_loss: 0.3448 - val_acc: 0.8515\n",
            "Epoch 146/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2392 - acc: 0.8925 - val_loss: 0.4020 - val_acc: 0.8133\n",
            "Epoch 147/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2407 - acc: 0.8918 - val_loss: 0.3837 - val_acc: 0.8176\n",
            "Epoch 148/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2411 - acc: 0.8914 - val_loss: 0.3763 - val_acc: 0.8206\n",
            "Epoch 149/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2391 - acc: 0.8934 - val_loss: 0.3354 - val_acc: 0.8452\n",
            "Epoch 150/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2380 - acc: 0.8931 - val_loss: 0.3709 - val_acc: 0.8379\n",
            "Epoch 151/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2387 - acc: 0.8928 - val_loss: 0.3385 - val_acc: 0.8512\n",
            "Epoch 152/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2405 - acc: 0.8911 - val_loss: 0.3747 - val_acc: 0.8319\n",
            "Epoch 153/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2391 - acc: 0.8935 - val_loss: 0.3575 - val_acc: 0.8445\n",
            "Epoch 154/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2387 - acc: 0.8933 - val_loss: 0.3508 - val_acc: 0.8445\n",
            "Epoch 155/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2386 - acc: 0.8927 - val_loss: 0.3869 - val_acc: 0.8296\n",
            "Epoch 156/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2399 - acc: 0.8927 - val_loss: 0.3077 - val_acc: 0.8611\n",
            "Epoch 157/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2401 - acc: 0.8923 - val_loss: 0.3491 - val_acc: 0.8479\n",
            "Epoch 158/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2381 - acc: 0.8939 - val_loss: 0.3792 - val_acc: 0.8258\n",
            "Epoch 159/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2383 - acc: 0.8930 - val_loss: 0.4038 - val_acc: 0.8188\n",
            "Epoch 160/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2409 - acc: 0.8919 - val_loss: 0.3340 - val_acc: 0.8514\n",
            "Epoch 161/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2406 - acc: 0.8918 - val_loss: 0.3429 - val_acc: 0.8519\n",
            "Epoch 162/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2370 - acc: 0.8922 - val_loss: 0.3371 - val_acc: 0.8521\n",
            "Epoch 163/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2378 - acc: 0.8924 - val_loss: 0.3226 - val_acc: 0.8563\n",
            "Epoch 164/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2380 - acc: 0.8933 - val_loss: 0.3509 - val_acc: 0.8458\n",
            "Epoch 165/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2397 - acc: 0.8917 - val_loss: 0.3517 - val_acc: 0.8430\n",
            "Epoch 166/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2404 - acc: 0.8920 - val_loss: 0.3544 - val_acc: 0.8368\n",
            "Epoch 167/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2381 - acc: 0.8945 - val_loss: 0.3485 - val_acc: 0.8419\n",
            "Epoch 168/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2393 - acc: 0.8939 - val_loss: 0.3831 - val_acc: 0.8424\n",
            "Epoch 169/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2394 - acc: 0.8924 - val_loss: 0.3858 - val_acc: 0.8175\n",
            "Epoch 170/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2383 - acc: 0.8933 - val_loss: 0.3534 - val_acc: 0.8479\n",
            "Epoch 171/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2365 - acc: 0.8943 - val_loss: 0.3158 - val_acc: 0.8580\n",
            "Epoch 172/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2376 - acc: 0.8935 - val_loss: 0.3471 - val_acc: 0.8485\n",
            "Epoch 173/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2386 - acc: 0.8935 - val_loss: 0.3680 - val_acc: 0.8341\n",
            "Epoch 174/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2374 - acc: 0.8932 - val_loss: 0.3451 - val_acc: 0.8442\n",
            "Epoch 175/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2373 - acc: 0.8940 - val_loss: 0.3669 - val_acc: 0.8295\n",
            "Epoch 176/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2381 - acc: 0.8933 - val_loss: 0.4077 - val_acc: 0.8177\n",
            "Epoch 177/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2396 - acc: 0.8913 - val_loss: 0.3771 - val_acc: 0.8355\n",
            "Epoch 178/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2369 - acc: 0.8939 - val_loss: 0.3449 - val_acc: 0.8526\n",
            "Epoch 179/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2380 - acc: 0.8926 - val_loss: 0.4014 - val_acc: 0.8268\n",
            "Epoch 180/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2374 - acc: 0.8930 - val_loss: 0.3528 - val_acc: 0.8473\n",
            "Epoch 181/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2373 - acc: 0.8942 - val_loss: 0.3683 - val_acc: 0.8379\n",
            "Epoch 182/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2379 - acc: 0.8930 - val_loss: 0.3376 - val_acc: 0.8455\n",
            "Epoch 183/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2351 - acc: 0.8945 - val_loss: 0.3699 - val_acc: 0.8346\n",
            "Epoch 184/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2356 - acc: 0.8951 - val_loss: 0.3331 - val_acc: 0.8529\n",
            "Epoch 185/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2376 - acc: 0.8937 - val_loss: 0.3599 - val_acc: 0.8344\n",
            "Epoch 186/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2359 - acc: 0.8934 - val_loss: 0.4621 - val_acc: 0.8077\n",
            "Epoch 187/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2364 - acc: 0.8940 - val_loss: 0.3823 - val_acc: 0.8188\n",
            "Epoch 188/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2362 - acc: 0.8941 - val_loss: 0.4466 - val_acc: 0.8307\n",
            "Epoch 189/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2356 - acc: 0.8948 - val_loss: 0.3556 - val_acc: 0.8345\n",
            "Epoch 190/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2362 - acc: 0.8945 - val_loss: 0.3857 - val_acc: 0.8397\n",
            "Epoch 191/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2390 - acc: 0.8921 - val_loss: 0.3354 - val_acc: 0.8537\n",
            "Epoch 192/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2366 - acc: 0.8936 - val_loss: 0.3657 - val_acc: 0.8366\n",
            "Epoch 193/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2378 - acc: 0.8933 - val_loss: 0.3820 - val_acc: 0.8294\n",
            "Epoch 194/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2364 - acc: 0.8941 - val_loss: 0.3621 - val_acc: 0.8448\n",
            "Epoch 195/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2355 - acc: 0.8945 - val_loss: 0.3425 - val_acc: 0.8521\n",
            "Epoch 196/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2367 - acc: 0.8941 - val_loss: 0.3867 - val_acc: 0.8305\n",
            "Epoch 197/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2363 - acc: 0.8932 - val_loss: 0.3406 - val_acc: 0.8505\n",
            "Epoch 198/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2382 - acc: 0.8933 - val_loss: 0.3372 - val_acc: 0.8525\n",
            "Epoch 199/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2373 - acc: 0.8947 - val_loss: 0.3409 - val_acc: 0.8463\n",
            "Epoch 200/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2364 - acc: 0.8940 - val_loss: 0.3650 - val_acc: 0.8287\n",
            "Epoch 201/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2361 - acc: 0.8938 - val_loss: 0.3249 - val_acc: 0.8556\n",
            "Epoch 202/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2364 - acc: 0.8949 - val_loss: 0.4468 - val_acc: 0.8075\n",
            "Epoch 203/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2362 - acc: 0.8943 - val_loss: 0.3609 - val_acc: 0.8413\n",
            "Epoch 204/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2375 - acc: 0.8944 - val_loss: 0.3891 - val_acc: 0.8184\n",
            "Epoch 205/350\n",
            "103279/103279 [==============================] - 4s 41us/step - loss: 0.2357 - acc: 0.8942 - val_loss: 0.3368 - val_acc: 0.8501\n",
            "Epoch 206/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2394 - acc: 0.8928 - val_loss: 0.3628 - val_acc: 0.8481\n",
            "Epoch 207/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2378 - acc: 0.8933 - val_loss: 0.4278 - val_acc: 0.7974\n",
            "Epoch 208/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2373 - acc: 0.8927 - val_loss: 0.3268 - val_acc: 0.8567\n",
            "Epoch 209/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2350 - acc: 0.8948 - val_loss: 0.3544 - val_acc: 0.8443\n",
            "Epoch 210/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2367 - acc: 0.8941 - val_loss: 0.3359 - val_acc: 0.8544\n",
            "Epoch 211/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2374 - acc: 0.8936 - val_loss: 0.3693 - val_acc: 0.8334\n",
            "Epoch 212/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2377 - acc: 0.8930 - val_loss: 0.3128 - val_acc: 0.8614\n",
            "Epoch 213/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2342 - acc: 0.8958 - val_loss: 0.3428 - val_acc: 0.8510\n",
            "Epoch 214/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2375 - acc: 0.8938 - val_loss: 0.3604 - val_acc: 0.8469\n",
            "Epoch 215/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2358 - acc: 0.8945 - val_loss: 0.3672 - val_acc: 0.8384\n",
            "Epoch 216/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2354 - acc: 0.8946 - val_loss: 0.3419 - val_acc: 0.8490\n",
            "Epoch 217/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2357 - acc: 0.8935 - val_loss: 0.3494 - val_acc: 0.8459\n",
            "Epoch 218/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2362 - acc: 0.8942 - val_loss: 0.3238 - val_acc: 0.8556\n",
            "Epoch 219/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2376 - acc: 0.8936 - val_loss: 0.3378 - val_acc: 0.8558\n",
            "Epoch 220/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2360 - acc: 0.8948 - val_loss: 0.3772 - val_acc: 0.8356\n",
            "Epoch 221/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2363 - acc: 0.8932 - val_loss: 0.3516 - val_acc: 0.8474\n",
            "Epoch 222/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2355 - acc: 0.8941 - val_loss: 0.3498 - val_acc: 0.8457\n",
            "Epoch 223/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2355 - acc: 0.8939 - val_loss: 0.3624 - val_acc: 0.8396\n",
            "Epoch 224/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2354 - acc: 0.8948 - val_loss: 0.3784 - val_acc: 0.8349\n",
            "Epoch 225/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2360 - acc: 0.8950 - val_loss: 0.3811 - val_acc: 0.8355\n",
            "Epoch 226/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2358 - acc: 0.8938 - val_loss: 0.3597 - val_acc: 0.8444\n",
            "Epoch 227/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2351 - acc: 0.8943 - val_loss: 0.3141 - val_acc: 0.8628\n",
            "Epoch 228/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2345 - acc: 0.8940 - val_loss: 0.3208 - val_acc: 0.8612\n",
            "Epoch 229/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2348 - acc: 0.8954 - val_loss: 0.3490 - val_acc: 0.8458\n",
            "Epoch 230/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2358 - acc: 0.8948 - val_loss: 0.3810 - val_acc: 0.8300\n",
            "Epoch 231/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2360 - acc: 0.8938 - val_loss: 0.3657 - val_acc: 0.8387\n",
            "Epoch 232/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2367 - acc: 0.8943 - val_loss: 0.3542 - val_acc: 0.8453\n",
            "Epoch 233/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2354 - acc: 0.8936 - val_loss: 0.3367 - val_acc: 0.8498\n",
            "Epoch 234/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2358 - acc: 0.8943 - val_loss: 0.3521 - val_acc: 0.8446\n",
            "Epoch 235/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2345 - acc: 0.8948 - val_loss: 0.3855 - val_acc: 0.8183\n",
            "Epoch 236/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2347 - acc: 0.8961 - val_loss: 0.3960 - val_acc: 0.8150\n",
            "Epoch 237/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2348 - acc: 0.8946 - val_loss: 0.3411 - val_acc: 0.8457\n",
            "Epoch 238/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2356 - acc: 0.8944 - val_loss: 0.3626 - val_acc: 0.8404\n",
            "Epoch 239/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2345 - acc: 0.8949 - val_loss: 0.3520 - val_acc: 0.8463\n",
            "Epoch 240/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2345 - acc: 0.8950 - val_loss: 0.3648 - val_acc: 0.8302\n",
            "Epoch 241/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2350 - acc: 0.8951 - val_loss: 0.4065 - val_acc: 0.8217\n",
            "Epoch 242/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2337 - acc: 0.8961 - val_loss: 0.3418 - val_acc: 0.8422\n",
            "Epoch 243/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2350 - acc: 0.8955 - val_loss: 0.3714 - val_acc: 0.8368\n",
            "Epoch 244/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2352 - acc: 0.8940 - val_loss: 0.3407 - val_acc: 0.8499\n",
            "Epoch 245/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2335 - acc: 0.8957 - val_loss: 0.3893 - val_acc: 0.8293\n",
            "Epoch 246/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2345 - acc: 0.8948 - val_loss: 0.4317 - val_acc: 0.8207\n",
            "Epoch 247/350\n",
            "103279/103279 [==============================] - 4s 44us/step - loss: 0.2342 - acc: 0.8944 - val_loss: 0.3749 - val_acc: 0.8312\n",
            "Epoch 248/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2379 - acc: 0.8930 - val_loss: 0.3473 - val_acc: 0.8486\n",
            "Epoch 249/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2348 - acc: 0.8948 - val_loss: 0.3989 - val_acc: 0.8145\n",
            "Epoch 250/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2348 - acc: 0.8949 - val_loss: 0.3533 - val_acc: 0.8362\n",
            "Epoch 251/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2355 - acc: 0.8942 - val_loss: 0.3383 - val_acc: 0.8504\n",
            "Epoch 252/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2353 - acc: 0.8958 - val_loss: 0.3602 - val_acc: 0.8391\n",
            "Epoch 253/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2348 - acc: 0.8951 - val_loss: 0.3712 - val_acc: 0.8406\n",
            "Epoch 254/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2339 - acc: 0.8949 - val_loss: 0.3332 - val_acc: 0.8562\n",
            "Epoch 255/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2340 - acc: 0.8955 - val_loss: 0.4097 - val_acc: 0.8175\n",
            "Epoch 256/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2342 - acc: 0.8957 - val_loss: 0.3505 - val_acc: 0.8398\n",
            "Epoch 257/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2340 - acc: 0.8961 - val_loss: 0.3931 - val_acc: 0.8195\n",
            "Epoch 258/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2349 - acc: 0.8942 - val_loss: 0.3248 - val_acc: 0.8572\n",
            "Epoch 259/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2338 - acc: 0.8949 - val_loss: 0.3540 - val_acc: 0.8478\n",
            "Epoch 260/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2330 - acc: 0.8958 - val_loss: 0.4138 - val_acc: 0.8065\n",
            "Epoch 261/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2338 - acc: 0.8962 - val_loss: 0.3880 - val_acc: 0.8280\n",
            "Epoch 262/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2352 - acc: 0.8939 - val_loss: 0.3545 - val_acc: 0.8508\n",
            "Epoch 263/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2367 - acc: 0.8939 - val_loss: 0.3272 - val_acc: 0.8572\n",
            "Epoch 264/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2330 - acc: 0.8960 - val_loss: 0.3757 - val_acc: 0.8313\n",
            "Epoch 265/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2336 - acc: 0.8961 - val_loss: 0.3484 - val_acc: 0.8454\n",
            "Epoch 266/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2336 - acc: 0.8958 - val_loss: 0.3776 - val_acc: 0.8202\n",
            "Epoch 267/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2341 - acc: 0.8945 - val_loss: 0.3508 - val_acc: 0.8491\n",
            "Epoch 268/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2345 - acc: 0.8960 - val_loss: 0.3712 - val_acc: 0.8261\n",
            "Epoch 269/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2347 - acc: 0.8951 - val_loss: 0.3350 - val_acc: 0.8521\n",
            "Epoch 270/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2312 - acc: 0.8970 - val_loss: 0.3485 - val_acc: 0.8493\n",
            "Epoch 271/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2350 - acc: 0.8948 - val_loss: 0.3377 - val_acc: 0.8569\n",
            "Epoch 272/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2350 - acc: 0.8944 - val_loss: 0.3718 - val_acc: 0.8320\n",
            "Epoch 273/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2356 - acc: 0.8945 - val_loss: 0.3421 - val_acc: 0.8408\n",
            "Epoch 274/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2325 - acc: 0.8960 - val_loss: 0.3589 - val_acc: 0.8449\n",
            "Epoch 275/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2341 - acc: 0.8955 - val_loss: 0.3771 - val_acc: 0.8313\n",
            "Epoch 276/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2328 - acc: 0.8961 - val_loss: 0.3763 - val_acc: 0.8245\n",
            "Epoch 277/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2338 - acc: 0.8946 - val_loss: 0.3138 - val_acc: 0.8604\n",
            "Epoch 278/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2333 - acc: 0.8950 - val_loss: 0.3307 - val_acc: 0.8533\n",
            "Epoch 279/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2358 - acc: 0.8949 - val_loss: 0.3473 - val_acc: 0.8460\n",
            "Epoch 280/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2336 - acc: 0.8955 - val_loss: 0.3507 - val_acc: 0.8455\n",
            "Epoch 281/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2325 - acc: 0.8962 - val_loss: 0.3504 - val_acc: 0.8413\n",
            "Epoch 282/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2338 - acc: 0.8954 - val_loss: 0.3526 - val_acc: 0.8469\n",
            "Epoch 283/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2322 - acc: 0.8962 - val_loss: 0.3818 - val_acc: 0.8320\n",
            "Epoch 284/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2339 - acc: 0.8953 - val_loss: 0.3738 - val_acc: 0.8238\n",
            "Epoch 285/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2333 - acc: 0.8961 - val_loss: 0.3348 - val_acc: 0.8516\n",
            "Epoch 286/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2326 - acc: 0.8975 - val_loss: 0.3442 - val_acc: 0.8446\n",
            "Epoch 287/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2314 - acc: 0.8966 - val_loss: 0.3992 - val_acc: 0.8349\n",
            "Epoch 288/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2342 - acc: 0.8948 - val_loss: 0.4078 - val_acc: 0.8029\n",
            "Epoch 289/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2329 - acc: 0.8961 - val_loss: 0.3697 - val_acc: 0.8399\n",
            "Epoch 290/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2323 - acc: 0.8964 - val_loss: 0.3520 - val_acc: 0.8448\n",
            "Epoch 291/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2354 - acc: 0.8945 - val_loss: 0.3860 - val_acc: 0.8217\n",
            "Epoch 292/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2320 - acc: 0.8958 - val_loss: 0.4037 - val_acc: 0.8156\n",
            "Epoch 293/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2336 - acc: 0.8950 - val_loss: 0.3452 - val_acc: 0.8499\n",
            "Epoch 294/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2333 - acc: 0.8955 - val_loss: 0.3304 - val_acc: 0.8472\n",
            "Epoch 295/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2326 - acc: 0.8965 - val_loss: 0.3247 - val_acc: 0.8539\n",
            "Epoch 296/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2318 - acc: 0.8966 - val_loss: 0.3804 - val_acc: 0.8255\n",
            "Epoch 297/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2329 - acc: 0.8956 - val_loss: 0.3433 - val_acc: 0.8505\n",
            "Epoch 298/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2331 - acc: 0.8949 - val_loss: 0.3429 - val_acc: 0.8507\n",
            "Epoch 299/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2339 - acc: 0.8952 - val_loss: 0.3816 - val_acc: 0.8446\n",
            "Epoch 300/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2321 - acc: 0.8966 - val_loss: 0.3381 - val_acc: 0.8531\n",
            "Epoch 301/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2309 - acc: 0.8972 - val_loss: 0.3996 - val_acc: 0.8132\n",
            "Epoch 302/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2320 - acc: 0.8969 - val_loss: 0.3317 - val_acc: 0.8512\n",
            "Epoch 303/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2337 - acc: 0.8955 - val_loss: 0.3259 - val_acc: 0.8530\n",
            "Epoch 304/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2315 - acc: 0.8971 - val_loss: 0.3586 - val_acc: 0.8426\n",
            "Epoch 305/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2341 - acc: 0.8955 - val_loss: 0.3548 - val_acc: 0.8458\n",
            "Epoch 306/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2322 - acc: 0.8972 - val_loss: 0.3547 - val_acc: 0.8453\n",
            "Epoch 307/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2323 - acc: 0.8954 - val_loss: 0.3637 - val_acc: 0.8399\n",
            "Epoch 308/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2330 - acc: 0.8959 - val_loss: 0.3860 - val_acc: 0.8368\n",
            "Epoch 309/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2315 - acc: 0.8977 - val_loss: 0.3682 - val_acc: 0.8435\n",
            "Epoch 310/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2334 - acc: 0.8961 - val_loss: 0.3730 - val_acc: 0.8317\n",
            "Epoch 311/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2323 - acc: 0.8962 - val_loss: 0.3729 - val_acc: 0.8248\n",
            "Epoch 312/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2319 - acc: 0.8965 - val_loss: 0.3254 - val_acc: 0.8570\n",
            "Epoch 313/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2318 - acc: 0.8957 - val_loss: 0.3796 - val_acc: 0.8290\n",
            "Epoch 314/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2326 - acc: 0.8948 - val_loss: 0.4044 - val_acc: 0.8361\n",
            "Epoch 315/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2320 - acc: 0.8964 - val_loss: 0.4026 - val_acc: 0.8330\n",
            "Epoch 316/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2316 - acc: 0.8967 - val_loss: 0.3564 - val_acc: 0.8467\n",
            "Epoch 317/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2317 - acc: 0.8968 - val_loss: 0.3844 - val_acc: 0.8299\n",
            "Epoch 318/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2330 - acc: 0.8961 - val_loss: 0.3953 - val_acc: 0.8304\n",
            "Epoch 319/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2326 - acc: 0.8953 - val_loss: 0.3470 - val_acc: 0.8480\n",
            "Epoch 320/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2314 - acc: 0.8964 - val_loss: 0.3992 - val_acc: 0.8281\n",
            "Epoch 321/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2337 - acc: 0.8946 - val_loss: 0.3589 - val_acc: 0.8452\n",
            "Epoch 322/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2324 - acc: 0.8961 - val_loss: 0.3394 - val_acc: 0.8549\n",
            "Epoch 323/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2320 - acc: 0.8967 - val_loss: 0.3386 - val_acc: 0.8499\n",
            "Epoch 324/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2313 - acc: 0.8966 - val_loss: 0.3857 - val_acc: 0.8384\n",
            "Epoch 325/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2328 - acc: 0.8960 - val_loss: 0.3358 - val_acc: 0.8506\n",
            "Epoch 326/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2325 - acc: 0.8960 - val_loss: 0.3534 - val_acc: 0.8426\n",
            "Epoch 327/350\n",
            "103279/103279 [==============================] - 4s 42us/step - loss: 0.2317 - acc: 0.8956 - val_loss: 0.3410 - val_acc: 0.8516\n",
            "Epoch 328/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2316 - acc: 0.8971 - val_loss: 0.3533 - val_acc: 0.8342\n",
            "Epoch 329/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2325 - acc: 0.8969 - val_loss: 0.3591 - val_acc: 0.8409\n",
            "Epoch 330/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2307 - acc: 0.8975 - val_loss: 0.3656 - val_acc: 0.8427\n",
            "Epoch 331/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2317 - acc: 0.8962 - val_loss: 0.3714 - val_acc: 0.8351\n",
            "Epoch 332/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2327 - acc: 0.8956 - val_loss: 0.3238 - val_acc: 0.8575\n",
            "Epoch 333/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2319 - acc: 0.8967 - val_loss: 0.3831 - val_acc: 0.8313\n",
            "Epoch 334/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2318 - acc: 0.8959 - val_loss: 0.3475 - val_acc: 0.8564\n",
            "Epoch 335/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2308 - acc: 0.8983 - val_loss: 0.3503 - val_acc: 0.8471\n",
            "Epoch 336/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2296 - acc: 0.8986 - val_loss: 0.3647 - val_acc: 0.8430\n",
            "Epoch 337/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2314 - acc: 0.8971 - val_loss: 0.3860 - val_acc: 0.8305\n",
            "Epoch 338/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2309 - acc: 0.8962 - val_loss: 0.3368 - val_acc: 0.8532\n",
            "Epoch 339/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2307 - acc: 0.8964 - val_loss: 0.3683 - val_acc: 0.8343\n",
            "Epoch 340/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2313 - acc: 0.8975 - val_loss: 0.3471 - val_acc: 0.8468\n",
            "Epoch 341/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2290 - acc: 0.8986 - val_loss: 0.3360 - val_acc: 0.8548\n",
            "Epoch 342/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2319 - acc: 0.8964 - val_loss: 0.3324 - val_acc: 0.8550\n",
            "Epoch 343/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2313 - acc: 0.8967 - val_loss: 0.3497 - val_acc: 0.8506\n",
            "Epoch 344/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2308 - acc: 0.8969 - val_loss: 0.3802 - val_acc: 0.8348\n",
            "Epoch 345/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2306 - acc: 0.8967 - val_loss: 0.3657 - val_acc: 0.8351\n",
            "Epoch 346/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2297 - acc: 0.8976 - val_loss: 0.3520 - val_acc: 0.8492\n",
            "Epoch 347/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2308 - acc: 0.8965 - val_loss: 0.4356 - val_acc: 0.7872\n",
            "Epoch 348/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2335 - acc: 0.8960 - val_loss: 0.3754 - val_acc: 0.8359\n",
            "Epoch 349/350\n",
            "103279/103279 [==============================] - 5s 44us/step - loss: 0.2320 - acc: 0.8967 - val_loss: 0.3760 - val_acc: 0.8227\n",
            "Epoch 350/350\n",
            "103279/103279 [==============================] - 4s 43us/step - loss: 0.2309 - acc: 0.8980 - val_loss: 0.3446 - val_acc: 0.8483\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "H5mGILuHP4sA",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Display training history: accuracy & loss"
      ]
    },
    {
      "metadata": {
        "id": "0WWjBfcFet_V",
        "colab_type": "code",
        "outputId": "a7a44b59-9730-4703-824f-19ae69385b12",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 735
        }
      },
      "cell_type": "code",
      "source": [
        "# visualize  results (history: accuracy & loss)\n",
        "\n",
        "plot_acc(history, \"Model Accuracy\")\n",
        "plot_loss(history, \"Model Loss\")"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfUAAAFnCAYAAAC/5tBZAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsnXdgleXZ/z9nZc+TTRYhrJCwwzJQ\nZIQhKtaJrRNb+zpe3ravP/tWW7VaV1utratu60KsgqLIBmXICiGETEL23jnJSU7O/v1xcp6chJOQ\nQELwcH/+Sc4z7ue+n/W9r+u67+uRWa1WKwKBQCAQCH70yEe6AgKBQCAQCIYGIeoCgUAgELgIQtQF\nAoFAIHARhKgLBAKBQOAiCFEXCAQCgcBFEKIuEAgEAoGLIERdILjEmDBhAuvWrTtr+aOPPsqECRMG\nXd6jjz7Kyy+/3O82Gzdu5K677upzvclkYsWKFaxdu3bQxxcIBBcPIeoCwSVIfn4+Wq1W+m0wGDh1\n6tSI1Wf//v3MnTuXxsZGamtrR6weAoGgf4SoCwSXIHPmzGHnzp3S7wMHDjB58uQe22zdupWrr76a\nFStWcMcdd1BWVgZAc3Mza9euZfHixdx77720tbVJ+5w5c4bbbruN5cuXc8011wy4o7Bp0yZWrFjB\nVVddxVdffdVj3ZtvvsmSJUtYvnw5zz77LPZ8Vs6W9/YIOP7+v//7P5599lmuueYatm7dik6n49e/\n/jXLly9n8eLFPP/889J+5eXl/PznPyc1NZUbbriB7OxsPv74Y371q19J21gsFq644gpyc3MH1EaB\nwBUQoi4QXIKsXLmSb775Rvq9ZcsWVqxYIf2uqqrij3/8I6+++irbtm3jyiuv5LHHHgPgrbfeIjAw\nkD179vDYY49x4MABwCZyDzzwAKtXr2b79u088cQT3H///ZhMpn7r0tLSQl5eHnPmzOHqq6/m66+/\nltalpaXx+eef89VXX/H1119z/Phxtm3b1ufyc3Ho0CE+//xzVq5cyfr162lvb2fbtm1s2rSJjRs3\nkpaWBsAf//hHVq1axc6dO7nvvvt4+OGHWbFiBYcPH6a5uRmA9PR0/Pz8SEhIGOBZFwh+/AhRFwgu\nQWbPnk1BQQGNjY3odDpOnDjBvHnzpPUHDx5kzpw5xMbGAnDTTTdx5MgRTCYTaWlprFy5EoCoqChm\nz54NQFFREY2Njdx4440AzJw5E7VazYkTJ/qty5YtW1i2bBkymYzIyEj8/f3JysoCYN++fSxcuBAf\nHx/c3Nz48MMPWbZsWZ/Lz8W8efNwd3cHYO3atbz22mvIZDL8/f0ZN24cFRUV6PV6jhw5wtVXXw3A\nkiVL+OyzzwgKCiI5OZnt27cDsHPnTq666qoBn3OBwBVQjnQFBALB2SgUCpYtW8bWrVtRq9XMnz8f\npbL7cW1ubsbPz0/67evri9Vqpbm5GY1Gg6+vr7TOvl1rayudnZ2S4ANotVpaWlr6rcumTZsoKiri\n008/BcBoNPLll1+SlJREc3MzoaGh0raenp5S/ZwtPxf+/v7S/yUlJTz33HMUFRUhl8upqanh+uuv\np6WlBYvFIrVRJpPh7e0NwKpVq9i4cSNr1qxh9+7d/Otf/xrQcQUCV0GIukBwiXLVVVfx97//ncDA\nQH72s5/1WBcUFNTDwtZoNMjlcgIDA/Hz8+sRR29qaiI6OprQ0FC8vb2dusE3btzotA6FhYVotVrS\n09N7lHfttdfyu9/9jsDAQMndDUj/97VcLpdjNpul5a2trX22/8knnyQxMZFXX30VhULBmjVrpLJl\nMhnNzc2o1WqsVitlZWXExMSQmprKk08+yffff4+npydjx47ts3yBwBUR7neB4BJl+vTp1NXVUVBQ\nILnQ7aSkpJCWlkZ5eTkAn376
KSkpKSiVSqZNm8auXbsAKCsr4/jx4wBERkYSHh4uiXpTUxO//e1v\n6ejo6LMOGzduZOnSpT2WqdVqRo8ezb59+1i8eDF79uxBo9FgMpl44IEHOHDgQJ/LQ0NDKS4uRq/X\no9Pp+o2zNzY2kpCQgEKh4ODBg5SWltLR0YGbmxspKSls2rQJsI3Mv/fee5HJZPj6+rJgwQL+9Kc/\n9fBICASXC8JSFwguUWQyGampqeh0OuTynv3v8PBw/vznP3P//fdjNBqJioriqaeeAuBXv/oVv/nN\nb1i8eDHx8fFSLFsmk/Hiiy/yxBNP8NJLLyGXy7n77rvx8vJyenyz2czmzZudznFfunQpX331Ff/8\n5z+55557uO6663Bzc2PBggVcffXVyGQyp8stFgtTp05l+fLlREVFsWTJEg4ePOj0+Pfddx/PPvss\nr732GkuWLOHBBx/kn//8JwkJCTz99NM89NBDfPLJJ/j7+/O3v/1N2m/VqlXs2LFDxNMFlyUy8T11\ngUDgSmRmZvLkk0/y+eefj3RVBIKLjnC/CwQCl8FkMvHqq69y++23j3RVBIIRQYi6QCBwCXJyckhN\nTSU0NJRrr712pKsjEIwIwv0uEAgEAoGLICx1gUAgEAhcBCHqAoFAIBC4CD/6KW319W3n3mgQBAZ6\n0dzc97xdV0G00/W4XNoq2ulaiHYOnpAQ3z7XCUu9F0qlYqSrcFEQ7XQ9Lpe2ina6FqKdQ4sQdYFA\nIBAIXAQh6gKBQCAQuAhC1AUCgUAgcBGEqAsEAoFA4CIMq6g/88wz3HLLLaxZs4bMzMwe63bt2sUN\nN9zArbfeykcffTSgfQQCgUAgEPTNsE1pO3r0KKWlpWzYsIHCwkIeeeQRNmzYAIDFYuGpp55i06ZN\nBAQE8Mtf/pKlS5dSVlbW5z4CgUAgEAj6Z9hE/dChQ9J3mOPj49FoNGi1Wnx8fGhubsbPzw+1Wg3A\n3Llz+eGHHygvL+9zH4FAIBAIBP0zbKLe0NBAYmKi9FutVlNfX4+Pjw9qtZr29nZKSkqIjIzkyJEj\nzJ49u999fky8/PLfyc/Ppampkc7OTkaNisTPz59nnvlrv/t9++3XeHv7sHDhootUU4FAIBC4Ehct\no5zjd2NkMhnPPfccjzzyCL6+vkRFRZ1zn74IDPQa8kn9/WXrGQhPPvkYABs3bqSgoIDf/e53A9rv\nzjt/dkHHHSwX2s4fC5dLO+Hyaatop2sh2jl0DJuoh4aG0tDQIP2uq6sjJCRE+j179mw++eQTAF54\n4QUiIyPR6/X97uOMoU4vGBLiO2SpZ9vaOunoMFBf30Z6ehqffvoRHR0dPPjgbzhx4jjffbcbi8XC\nvHkprF17L++88wYBAQHExcWzceNnyGRySkuLufLKJaxde++Q1MnOULbzUuZyaSdcPm0V7XQtRDvP\nr6y+GDZRT0lJ4eWXX2bNmjVkZ2cTGhraw43+i1/8gueffx5PT0/27t3L3XffTURERL/7nA+f7TnD\nsby6AW+vUMgwm/v3EMyaGMrNi8cOui6FhWdYv34jbm5unDhxnNdeexu5XM7NN6/mllt6Wuk5Odl8\n8skXWCwWbrrpmiEXdYFAIBD0j8VqpbiqlZgwX1TK7sliBqOZY3l1zJkUhlJxac0MHzZRnzFjBomJ\niaxZswaZTMbjjz/Oxo0b8fX1JTU1lZtvvpm1a9cik8m49957UavVqNXqs/ZxJcaOHYebmxsAHh4e\nPPjgvSgUClpaWmhtbe2x7YQJE/Hw8BiJagoEAsFlj1Zn5IVPMyitbWPl3BhuurLbkPv6hxK2HCoF\nIGVyxEhV0SnDGlN/6KGHevyeOHGi9P+yZctYtmzZOfe5UG5ePHZQVvVwuoJUKhUANTXVbNjwMe++\n+zFeXl7cfvvNZ22rUFweHzkQCASC/rBarZgt1otuEe/PrKK01qYFR3NquXFhPDKZDKPJzPcZVQ
AU\nVrX2K+pWq5W3vs4hOMCTX90w9aLU+9LyG1wmtLS0EBgYiJeXF/n5edTU1GA0Gke6WgKBQDAgtDoj\nn+09g0arv6ByLBbrOQdEbz5Ywm9ePoCm3XBBx+qLToOJL74vpKJe22N5SbVN0OMj/Whs1VNWa1t/\nLK8Orc72vi6q0vTY5+Cpap796Li0vqxWy+GcWmqbLt6nZYWojwDjxo3H09OL++5by+7dO1i9+npe\neOH5ka6WQCBwcfLLmqmo0557w3OwK62cbUfK+HhXwXmXodObeOK9Y6x74Tsq67VUN7bzysZTVDe2\n87+vHmTzgWIADmfX0N5p4kRBfb/l6Y3mQR2/urGdj3ec5qX/ZLLlUCmff1fYY31pTRveHkqWz4oB\nIC3fNjZrT3olMiDY34OKunYMRjM6vYmm1k4+2VVAQYWGPekVABzKrgFg7qSwQdXtQrhoU9ouR666\n6hrp/xkzkpkxIxmwudZffPGVfve1bwuwZcvu4amgQCC4bNAbzLyw4SThai+evGf2eZdjtVo5klML\nQFpeHafLWxgfHdDn9nXNHWScaSTQ151ZE0Ol5R9sz5es4+c/OcHU+CDST9fT2NpJc5ueQzm1zEsK\np7ZZB0BGQQNXToukrrmDQF8PaeDakZxavjxQTG1TB3eumMDCaZGYLRZkyJDLZTbx3nkaP283rpoT\nS1SobfD1pv3FpDkMos4qaqK13cDh7BrS8uupa9GRODqQyWOC8HRX8n1GFRNjAymqamXa2GCCAzzY\nlVbBw6//QHunCbOl2+OwK62CpTOjOJJTi7eHksnxQed9vgeLEHWBQCAYIgxGM2cqNUwarT5rXXun\nkbzSFqaPD0Yuk51X+VarlVNFTUSFeKP2sw2kbWrt5GhuHYtnROKmUqDVGSmq0uDtoSI+0h+wWcWF\nlRpMZguV9Vo6Ok14efR8/Wu0etxUCjzdu5ebLRb2nawmKU5NbXMHJrOVQB93apt1RAR5Ud3YwfcZ\nlQT4uvPh9nw6Oo38/raZUvxbpzfx1L/TaO80ARB61yxiw30pr9NyJKeWMaP8mDQmiG8OFPNDls2q\nLa2xub1rmzo4kFkt1SWnpIlXN53ieH4908YG8983TGZnWgWf7i7ATSnHTSXns71nmDo2mLe/yUGr\ns9XltU1ZVDa0A5BX2sxDa6YT4ONG5pkGQgM8uX7hGBo0nXz+XSH7Tlbx7eFSOg02qz823A93NwWr\n5sXy+XeFvLLxFACLZ0ai7bC52Fs7jMR0dRTc3RTER/qz7UgZj797DE27gWWzoi/qeAAh6gKBwKUx\nWyxsO1LGFUkRBPq6D+uxNu4rYsexcn5781SSxvS0zj7dVcDBrBpWz49j9fw4GjQ6vD1UPUTUkcwz\n9bRqdEyICQTAaDKzfvcZvjtRybSxway7cQoVdVoee/coAL5eKq5ICudP7x2jsbUTsE2/XZocxXMf\npxPi7wmAFSiubiUxTk15nZYXNmRwRVI4e9MriY/046E106U6bD1cxsZ9RYSpvWhq7cRisTIx1laf\n638Szwfb88gpbaawMoO6FptFffJMI3mlzay6IpYfsmyu88Q4NdnFTWw+WMx/3zCFwzk2AV85J4YJ\nY4L55kAxziLrX/9QAsC8xDAOZddyPL8eN6WcjDMNpOXXs+d4Be5uCh6/axZ5pc18sD2ff36eSUlX\nx+Cj7flUNrSzeEYkgb7ufPF9EX94+4hU/uxJYcxOCKO13cCmfUV8ub8Yi0OMPybMJtZLZ0bxfUYl\nDZpO5iWGMWm0Gr3BzNxJYSTGqbkiKRxZV0dNbzCTX9ZCcXUrYWovrlsQ188dM/QIURcIBC5NVlET\nX3xfRHObntuWTbigsvLLmsksauSnC8acZX2ZzBbJ2kzLr+sh6i1aPYe7XNabDxQTF+HL619lExXi\nze9vm3mW5W40WXj09R8AeOyuZGTIeGVjJo2ttoFpGWcaMF
ssvLMlV9qnuLqVsZH+NLZ2Ej/KD4vV\nyrG8OsrqtFitSKILUFilITFOzcFT1bS2G9h2pAyA3JJmWrR6AnzcKatt46uuuLbjQK/s4iZiwnyY\nNi6IY3mBHM21ubBVSjlGk4X3t+bS3mmipqmdsjotnu4K7ludxEv/OcmJggYKKlo4mlOLp7uCKfFB\nRIT7SVZ/VIgPFfVawtVe1HQdMz7SjztWTGTq2GCC/D3w9lDx2DtH+ffWPDr0JpInhhKu9iI00JOD\np6oprOqeHnwwqwalQs4NC+PxcFMgk8koqmol/bQtPj+7Kxzg5+3GmiXj+HjnaWQyuGXxOI7l1pIY\nZ/O4uKkUPHH3bMwWKz6etllMnu5K7r22O625HXc3Bf9z0xS2HSljwZQIPNwurswqnnjiiScu6hGH\nmI6OoR0R6e3tPuRlXoqIdroel1pbzRYL73+bR12LjrFdbmA7Ta2dKBUyFPLBuyUzixpp7zDi7+3m\ndL3VasUKkuV0PL+O3NJmjCYri2ZEOt1HbzAjl8mkfeyCuOVQKaOCvfH1sh3r9S+zOJZXjxVI6LJY\n7Zw808CBUzZ3cVObnuWzYiio0NhcuodKaWzVM2dSGBX17eSXt6DVGWlu0xMa6El0qC1DWEenkaKq\nVirr2yWxPHmmgWN5dTS36lkxO4YAHzeqGjswmS2k5dczc3wIdV1xZ39vN04UNLBiTgyLZkTyfUYV\n7brumTURQV5odUbcVArmTAqzucz1JmJCfQhXe9HQ2kltk47S2jY2HyxBqzOy9qoEzlRqGBcVgJ+3\nCo3WwLobp6D29aC908jJM40A/Ne1iRzLq8NosgBQ39KJwWjhhoXxJMQGEh7kxYHMak6crkfTbmTO\npDCSJ4bh7e2O2WimRavnF6smUVDRwpql48gvb8FNpeDhW6fj7akiMsQHta8HPp4qdAYTuaXNAFw9\nzxYnl8lkjA73Zd/Jany9VOiNtnpMiQ9i/pQIZDIZ46ICmJ0QRkJsIAmjAyXRBoiL8EUulzF5TBAr\nZsfwk6mjcHNIQa5SynFTDWyqsbtKQWKcWrpvYGifT2/vvj1OwlIXCAQS5XVa6po7mDkh9Nwbn4ON\n+4o4cKoad5WCK6dH4t71Qjxd3sJfPjmB2s+dX61OJH5Ut+BbrVbyylqIDvWRLKKjubVs/L6I/7lp\nChYrPPP+Mbw9lDz8sxkE+XmcFRv+yycnMFusPPyz6SgVcqq64qmV9VqyihrJL28hyN+DK6fZBL5F\nq+cPbx1hwdQIblk8DoCth0v54vsiqcx7r02koUVHcdc0py2HSpg5PoTYcJsY6/QmvuyyauNH+VFY\n1cr7W/M4eKpaciuPGeXHnSsmkFvSRHObzeKWyWD9rgKiQnxo7zTx4oaMHnOyp40N5lRRI2aLlevm\nx3Ht/DgOnqomLb+erYfLUMhl3LQonnqNjrJaLafLWwAYG+lPbJivZAHPTghFIZcxOyGM9bsKyC1t\nZvPBEho0ncxOCOW/VifR3Kbnf189SMaZBjhjq/PKOTHMnxLBzAkhqJRyOg1mNFo9kSE2t3RC19iB\n0ABPZk4IkSzssZH+1LfomD8lgmWzogEYFxXA/MkRHDhVTUSQF6vnd7ulUyZHSPO9n7xnDgCP3zUL\npULuNDxx1dxYvs+owmA0M8VhEFpMmC//d9sMfL1U/OWTEzS36Zkx/uxU484G9slkMq5Nubiu8uFA\niLpAIJD411dZVDd2cM+qBFImR2A0WXj7mxwmj7FZO2ATXk27gQCfs62F6sZ2vj1cypXTItl62ObS\n1RvNnDzTwOyEMMrrtLz3bS5Wq5XG1k4+2XmaP945C7DFjN/ZksvR3DqC/NxZd+NUokN92JlWTl2L\njg+35xMR7A1Ae6eJx989igxIHKPmvtVJeLoraW03kN8lbF8dKOaGhfHSICkr8OJnJ6W6+nu7kVvS\nDDLo0Js4nFPLFUkRVD
e2s/VwGd4eSto7TaSfrueVjackl+2cSWEcyanlox35AFwxOYKMggbK67Rc\nOW0UKVMieO6jdA6cqsbbQ8naqxIYE+kveRamjQth38kqfL1U3LxoLO9uyeXFDRlMilNjtlgJ8nOn\nsVWPt4eS+3+aRH2LjpKaNuZ0TYua4CBIq+bFEhroRVyEH2W1WvZnVqNSyonuslwXTovk090FLJg6\nisQuAbZa4V+bsyTXur0DF+jrTvKEECob2vnpgjEYTGbpmHZh9fGUS50tsIn57csnEBnsjUwmIyE2\nkJqmDq5JGU1SnFryfNj5eep4JsYGMH1cSJ9jCew4Wrm98fFUse6GyXToTXh5qHqss3uFZk0M5Yes\nGqaNC+73OK6GcL/3YihcJL/61d2MGzeB4ODuHuK//vUKBQWnmTx5So9t09PTePXVl1i8OPWCjjlY\nLjVX7XBxqbXTYrWe9aJzxGS28PaWHORyGRFB3oMqu6+2tmj1PPbOUdxUckaH+/VdN4uVT7rmHZ8q\naiJlcjiHsmvYfrSc7JIm5iWG4+WhZOexcl7YcJKwQE+iQn14ZeMpTpe3MCU+mE92FXAou5bMwkb0\nRjMr58ZwpkKDwWShpqmDNzZn095pYtmsaMxmKxX1WpYmR9PeaWT70TL2nqgiTO1FfUsnuSVNTBsb\nzGd7bfOHGzSdlFa3EeTvweqU0aiUclRKOQUVGsLUnsSG+ZJd0iS5rQsqNHi6KTiaV4cMm5gBzJ8c\nQVmdlmO5dRRVt1LUFYPVG8wcyqrhSE4tRrOFa64YzcSYALJLmqlu7I4pP3zrdEqqWymo0NDcpiez\nsJG6Zh1JcWp+ee0kgvw8mZsYjp+XGzddGc/4mEA83LrdtnI5HM6pJXlCKNctGIPeaCa7pJmK+na8\nPZT8320zOZJTy/xpkUwZE4Svl5sk0mAT2JNnGgkO8GDtVQnI5TI0Wr3kBo8f5cfCLi9E3Cg/5iaG\nMyai+7qHB3kxY3wIQX4ezEkIY86kMKns5ImhLJkZzahgb6JDfQc0Sj8uwo8gfw/peGMi/Jg2Ntjp\nfa5UyIkO7ZlH/Xyf0WB/z36fkUmj1SxNjjpn5+FicbHc70LUezEUJ16v7yQr6xSzZs2Rlv3tb8/x\n4IP/g7d3z5uwurqK3NxsIerDxKXUzoyCBp78dxrhgV6MCnb+MiqubmX9rgKO5taxal4scrntxdig\n0ZFb2kxIgIcUh84taWLnsXKiQn3wdFfi4enG/768n53HygnwcZeOcfCUTagKyjX8ZNqoHnFBq9VK\nTmkzvp4qGjSd7EmvBGydD28PJduOlmE0WTCbrfyQVU1WUSOHcmoxm63klTUzdWwwn+w8TWmNllkJ\noazfXYDFYkVvNKNUyFh3w1SyS5o4Xd5CfnkLwf4e3LFiAqnJ0ZTUtFFc3UZuaRMb9pzhTEUrAb7u\nPHXPHNo7TWQXN1FY1Upzm55rrhiNl7uSBk0nt62YyMKpo5gzKYyE0YHsPl6BxWLF19uNkwUNlNS0\n8fPU8ZTVtXE8vx6Lxcr0cSHo9CZmJ4Rxx4oJHDxVjU5vxl2lwGyx4tcVgzVbrMSE+RAb7stNi8YS\nrvbm+4xKJo1Wc92COOZOCmN0hB+jgr3JKWlizqQwSmva8PNS8b9rpksC4u2hYnx0AH5O4v6hgZ6o\n/Ty4cnoknu5K1H7u0nmfNjaYK6dHsnhmFItnx6LTnX3vymQyfjJ1FPOnREhuen9vd/LKmvHxUrFy\nbiyRXddeJpP1sKzt+Hq5MS4qgNhw3x7i21+HcyC4qxSM6rLaB8pwPaMymQzFJfSxFRFT/xGzZMky\n7rvvHu6/fx0AeXm5hISEUFJSzB/+8DtUKhW+vr48+eRzI1zTy49XN56itcPA72+b2e92JrOlz7ml\n9S061u8q4Nal4wgJ8KS6sR2rlbOE2mS28NzH6QT5eXDfdUl8l1GJ3mDmza+zUft5EBLg
QU1TB8H+\nntJUq6rGdmn/t7/JITFOjUoh582vcwBYND2S25fbRnBv3F9EYWUrB7NqWLNkLDMTI6T48etfZrHu\nxilMHRtsi5FiczFvPVzKTYu6v4VwJLeWNzfnkJocTVyELT68al4s3x4qZfPBEswWK9emjKaqsYPC\nSg15ZTbX9oToAPLLW/hgWx5g6wT87dMMjCaL5LZOigvCy0PJf18/mbe/yaG8Tsu6G6cQ1RWPje2a\nLmSPU1usVm5ZPBZ3NwVXz4vlQGY1RVWtuKsULJ4Zhb+3G1arldBQP+n7DGGBXkSFeJNZ2EhmYaPU\nrvmTI5gYG8gfu6YvxYT5cP91Schktpf9DT+J57uMStZelcC+k1UkTwzlzx+k4aZU8NtbpuHX5fr1\ndFfytwdS8PJQ9rBa4yL8+Mt9VwBwRVI4ft5uTgXcGXZRthMR5M3ocF9KatpIGmNzkburFFKHzhm2\ndd3rA33deeLu808oI3AdXF7UN575hhN1pwa8vUIu65EZyBnTQydz/dir+1wfGKhm1KhIcnKymDQp\niT17dpKauoK2tjYef/zPjBoVyVNPPcaRI4fw8vIacN0uJ3YeK2d/ZhVeHip+e/NUp6NOTWYLTW16\n1L7uA0ru0GkwcaKgAYvVilZndGrBWCxWPtyRzw9ZNTx5z2zCAm3XZ+uRUkxmK9dcMZqDp6rJONNA\noK87C6eN4tmP0jEYzaRMieC21PFSXbcfLaOoqpXiqlaqG9vJLm6SBG9XWjn55S00t+lRKeXcfdVE\n5k4Kp7qh2817NLeOo7l10qAetZ87352oZMHUCEYFeVNS3Ya/txt6o5n3vs3jVLFtNPAVSeEcy6vj\nH59n4u/thqbdQGSIN1qdke8yKrn6itF4uivRG8xs7BoMdqKgHvtA9KQ4NfnlLZyp0ODjqWL57BjJ\nAj1d3kJFvZY5k8JY94/9FFR0575ubtPj7qbgwesn89GO06R2DZBS+3nw8M9mnNVRignr/ib0ldMj\nWTQ9kuiuJB5qPw/uvWYSVQ3tJE8MleLRzizA6eNCqKjv7gx5eyhxd1MQGezNvddO4qPtp5kaH9xD\nJOclhTMvKRxA6uTcvTIBbw+lJOh2nN0njsT3Gtl/PlybEse2I6VMH3f2oC6BYDC4vKiPFKmpK9i9\neyeTJiVx8OA+Xn/9Xc6cOc3zz/8Zs9lMVVUlM2fOuqRE3WK1IuP8XXB6o5mS6lYpWUZvtDpb3HTl\nnJizBrf03m7DnjNSEoj1uwsYHx1AiL8n8ZF+Uv027DnD7uMVeLgp+K/ViUyJD6a5Tc/+k1UYzRZm\nJ4Th5dP9+drCylapzNLaNmLDfPnmhxJSk6Mpq2sjxN+T7zIqpS8wZRQ0sHx2DI1d2aasVogK8Zbm\nwR7OqeVkYQN6o5lwtW26Tk2jJimfAAAgAElEQVRjB7+9ZSoarUFKnGEF3v4mF7PFyvLZMexOr+BI\nTi1WbHNwqxraeXNzDgHe7pKl/sBPk/jmh1JKa9s4Xd5CSIAHd62YyF8/zeD9b/O4adFYzBYrcyaF\ncUVSOE+8d4y0XNs86IXTRjE3MYwdx8rJKmoCYOb4EBRyGZv2F3Owa9rVF/uK0BtsbvIGTSeHsm37\nx4T5MmtiKGcqNFIHwM746ACpkxET6it9xSppjJq2diN3XzWRmDBfnvpFd+jJTu+Ol20aki3WPSch\nVBJ0O8kTBzYCf9nsaJQKGSmTI9hyuFQaEAYwd1I4cxLCBnRP2wcCjgTTxgVfdgO6BMODy4v69WOv\n7teq7s1QfXp14cJFfPDBu6SmLic6OgY/Pz+effYp/vrXlxg9Oo4XX7w0PuBitVqxWuGFDRnkl7UQ\nG+7Do3ck06Dp5EhOLYumR/awVBo0Og5l17JidgwqpZzCSg3+3m4EB3jy+d5CdqdX8PCt06WsU47s\nSitny6FSVEo516bE8Y//nESpkDNjQgg7j5Xz4PWT
Uft5cKqoEYvVyqp5sRzJqeX7jCpJaFfNi+WG\nhfEYTRYOZdXg7aHEaLLw6qYsfn3TVN78OhuN1ha32nKoFKVCxp0rJpIyOYL88mapLmW1beSVNrPj\nWDnpp+tp0HTi4aZAbzBL1nR2SRPLZ8fwXUalNMjqg+35GLrmv+r0JnR6EzctimfpzGje2ZLD0dw6\n3tycQ3ObHoPRwk1XxvOf7woprm5FLpMxe1IY9S069nelv7x50VjkMhnPfHSc97fmYTCZ8fd2Y+aE\nUIL9PfnT+8cAuubVqlkwJYL9mdW8sCEDsE0Tig71IdjfgwZNJzIgOtQHDzclSXFBVDa0czi7hqXJ\n0VisVr7+oZT1uwuwWm0ZyJbOjCLY34N/b8untcui93RXsnhGJFHB3kxwch3tTIgJoLS2DTelnF/f\nOLVfd7Ez3FUKRof70dzWydio87d2vT1UXNM1Fel2J8llLjROLBD8mHB5UR8pvLy8iY8fxwcfvEdq\n6goA2tu1hIWF09bWRnr6ceLjx513+WW1bTS16s+7d1/X3MFLn50kt7SJxNFqckubUchlFFe3sWlf\nEVsOlQI24Vo9Pw6FXIZSIefD7ac5VdSI0WRh2axonvs4nbGR/vz65qn80PVFoiO5tcRH+qNSyjmU\nXYMMmJsYLsV2j+fXMy8xnJNdMdDskiY6DWbe/TaX394yjYwC23ZzJ4WRPCGUwzk1BPt7sv1oGVsP\nlxEZ4o1Ob6ZDb2LFnBgmxgTw0n8y+efnmeiNZhZOG8X4qAByy5rJKGjg3S25uKsUnO6KBwMUV7VK\nruMGTScKuUzK9/zATyfz0c7TnC6zuZq/z6jC20NJyuQIdhwrB2wx5ZZ2A1ckhrFyTiwAv7h6Ei1a\ng9TOlMnhrJwby9G8Okpr2vjF1QmEBngyJT6Y/ZnVBPq6Ex/pj1wmY/nsGCmrlz2pSXSYD37ebrS2\nG6RO0pol4zhdoZEyfI2L9kcmkzE5Poi96ZWEqb16ZLCKDPbmhoXx0u97r5nE9qNlmC1W7r8uieAA\nT7Q6I59/V2jLkHWNLUOWQi6X5iD3xfjoAHYcKycyxHvQgm5n3Y1TMJst55WERiAQnI0Q9WEkNXUF\nf/7z4zz++FMAXH/9Tdx33z1ER8fw85/fwbvvvsm9994P2FzfBeUtjIsOOOc0krYOA0+8Z7PgXlo3\n/6wY4EB4/5scThU1olTIOFnYiEIu4/blE3h/a54k6ADHcus4klPL6HBf/vuGKTS12XJKH82pJTrU\nB7PFSmGVhiM5tej0to82fJ9RxcFT1VybEsfXP5Qgl8mIj/SXvkdcXqdlV1qFdIxOgxk3lZyckmb+\n32s/0NZhJCTAQxpFa0/wERboyYufneTNzTnSvnMnhRET5su0rgFhCrmM1fPjCPBxZ15SOK16M797\n9QBvfZODyWQhNtyXuuYO0vK70kQmhNJpMJMyOYIWrR6z2ZbbetLoQHaltfPk+2mYzBZ+njqeWRND\n2XuiEqPJwuyEUBbNiOpxTpUKOQ9eP5nvMyoZFeTN1LG2DtcD1yWh6TBISVaS4tTEd001sl/r6+bH\nceJ0PbXNOmnAnVwmY/bEUA6cqmZSrE1gPd2VPHr7TD7akY+7SiFd+8lxNlG356rui+SJoWe5tX08\nVTxz71w83ZWD+vDExBjb6O7JY87/C1R9ZYUTCATnh5jS1gtvb3eyCxt4/assEmIC+439novRo+O4\n4467USptfacZM5K57robWLx4KePGjefmm29lzJh45i9YQlpeHf/84hS+Xm6MGdX3XOKm1k7e/TZX\nSgs5OtxXyu7kiM3VWsJ73+aRMFpNTnETKqUtcUSjxlZGZLAPD14/heP5dSxNjmb5rBi+z6hCbzST\nmhyN2s+DwkoNnQYztU0dpEy2ffSh02C25XZu7KC1w4DFasslbTBamDQ6kPqWTixWyC1txmKxYrZY\n6TSYKavVEhfh
S4vWIM0NDld7oTeaefT2mZgsVsrrtLgp5Vx9xWjGjOrpkg0N9GJ8lD9hgZ64qRSM\njw5g4bRRyGQyIoK82J9ZzdzEMK5I6o6NRkX44+Ou4EhOLUF+Htx7TSJF1a3SALX7rktiaXI0kcHe\nxI/yl9zAPp4qjufX2RKELB7LoumRuLspaOswUFzdys2LxzpNjmGvV0RQ97QeLw8Vat/u2L5SIecn\nU0f1uM4KhW0OeWZhI8tnxxCmto21SBgdyOKZUT2O5aZSkDwxtMegquAAD9zclcxPCsffSVKYc3Gu\n0dbOUCkVLJ8dTUJs/xb9UHMpTVMcTkQ7XYuLNaVNZrVa+x/qfYkzFPFvR0JCfHnizR84nl8vfU3p\nfNDpTdIHBPqitcPAP/6TSWNrJ7FhvpwqaiQqxIc/rZ2FTCZjb3oFpys0zJkURmW9lmnjQnj2w+N0\n6E3SxxOunDaK6xaMobXDwKggmxt0T3oFG78voqPLch4b5c+ZCg1xEb784Y5k3tuax4HMau6+aiIL\npozqMSp525EyDmZV8/Ct08kpaeaNzdm2xB1AanI0O9PKkctkPb5kZGdiTAC3Lh3Pm19nU9+sw9CV\nAxq6J9/8+ZdzeOOrbMrqtAT7e/CHO5Jp7zQOOtGKMxpadPh5u/UYKW8fI1FZr0Xt54Gnu5LckibS\nTzewfHY0wQGegzqG2WKhUdNJaOClM8DRzlCNB7nUEe10LUQ7z6+svhCi3gtPHw9uf3wrJrOV0eG+\nPHbXrH63Tz9dT6OmE5VSjrtKwbykcPZnVvHBtnwWzYjkZ0vHO93PYrXy9Adp0hxdRx6/axbVje3S\n3GQ7bko5BpOF1fPjuGpuLL9+eT86vVlaf/1PxrBkZhS/eeUACrmMGeNDOHiqpkcZ9hSXoyP8+P3P\np6NS9v2BAr3BzBubs5k1MZQPduSj74o5L5sVjU5vYn9mdY+vKf3ymknMS7RNEzqaW8u/vsruMUUw\nITaQ/3frdPQGM9uOlpEQG+g0B/NQcrm8MODyaatop2sh2nl+ZfWFiKn34uDJSkxm29Sukpo2NFo9\nOoMZN6UctZ8HFquV6sYOIoO9adHqeXXTKRy7Re2dRinV5p7jlSyZESW5Uh1Jz6+nuLqNuAg/iqtt\nruhAX3ea2/R8tCNf+mThTxeMobZJR2WDlryyFqJCvLnmitHI5TJiw3zJK2vB38cNnd7EvpNVeHuq\nMBgt/HRBHNekxGGxwKHsGtxUcgxGC0dyagn0defxX8zFajT1ey7c3RSsu9GW1raoqpXd6bY4+Khg\nb65ICidc7cW0ccH84/NM2nVGZjp8OGFKfBD+Pm5MGRNEZmEjmnYDKZPDpXLP1wMiEAgEgr4RMfVe\nfLKrgPpmHanJ0RRVtRIa4Mmbm7M5VdjIldMj+c/eQt76OoeYMB/yy1rILm7iJ1NHER/pR3F1G/ll\nLZgtVpbMiKKoupWckiY83RVEBtvi3jVNHSjkMt78OoeOThMP3zpdGsm+Zsk4LF1fqbJYrNy3OomU\nyRFMiQ9i2rhgzGYrP10wRoqZ2hOj3H/dZFra9OSXt5BZ2IhMBvesmoSnuxKVUs7hnFpWzo0lKsSb\n2DBf7v/pZCLD/AZ17kYFebGza3DbqnmxBPt7Mi4qAF8vN5LGBJEyOULKiga2uPGSGZFMHxdCQ2sn\nre0GbkudMKiBWEPB5RKvg8unraKdroVo5/mV1RfCUnegoUVHdlEjE2MCWDIzkp1p5Xx5oJj2ThPt\nnVrS8urYftQ27ejbQ6UYTBYUchk3LBxji4GfqMRgsuDv48atS8ehN5o5mFXN29/ksu1IGWNG+bPv\nZJUUD180I5IwtRfXpIzmmx9KmTE+hOQJofxrcxbjogKY7mD5enuoWLOk5xS4sVHdA7tSJkdwMMvm\nal80PRK1n21g1pT4IB65bSajI3wvSFCDAzyZPzmCk4UNRIb0jH+HO/FEAJJr/7
bU8fxs6TgxbUkg\nEAiGGSHqDhzKsWXUmpcYTmigF7Fh3RmzAN78OgcrNvezPavY9HHB0sjksZH+FFRomDk+BLlcxtpV\nCVw7fzSbD5Rw4FQ1FfXt+Hm70a4zMjcxjJ8ttYl0UlwQSXHd04J+e/O0Qdd9fEwAN14ZT7C/B7N6\nTVm6kMQejtx11UQsDt96HigymQyFSAAiEAgEw44QdQfKa9vwdFdK3xdOnhhCaW0bSoUMk9mKyWxh\n5vgQVsyN4a/rT5AUF8TPU7sHws2ZFMaZSo00WAxsnwdcuyqBqWODOJpbx82LxuLjpcJNKR/STFdy\nmYyr5sYOWXl9HUOuEOIsEAgElypC1B24bdkEPH3cUXWNfJuVEMaX+4uZnRDG6fIWGjWdrF4QR1SI\nD6/9duFZSWIWTY9k5oRQpwk1Zk4IlToLAoFAIBAMB0LUHfDzdiMk2EeadhAa4MmT98wmwMed8jot\nLVq99NlIZ1nfZDKZyJAlEAgEghFDiPo5sCdFGe751AKBQCAQXChiOLJAIBAIBC6CEHWBQCAQCFwE\nIeoCgUAgELgIQtQFAoFAIHARhKgLBAKBQOAiCFEXCAQCgcBFEKIuEAgEAoGLIERdIBAIBAIXQYi6\nQCAQCAQughB1gUAgEAhcBCHqAoFAIBC4CELUBQKBQCBwEYb1gy7PPPMMJ0+eRCaT8cgjjzBlyhRp\n3ccff8zmzZuRy+UkJSXx6KOPUltbyyOPPILBYMBisfD73/+epKSk4ayiQCAQCAQuw7CJ+tGjRykt\nLWXDhg0UFhbyyCOPsGHDBgC0Wi3vvPMOO3bsQKlUsnbtWjIyMti+fTupqamsWbOG9PR0/v73v/PO\nO+8MVxUFAoFAIHAphs39fujQIZYuXQpAfHw8Go0GrVYLgEqlQqVS0dHRgclkQqfT4e/vT2BgIC0t\nLQC0trYSGBg4XNUTCAQCgcDlGDZLvaGhgcTEROm3Wq2mvr4eHx8f3N3deeCBB1i6dCnu7u6sWrWK\nuLg47rrrLm688Ua+/PJLtFot69evH67qCQQCgUDgcgxrTN0Rq9Uq/a/VannjjTfYtm0bPj4+3Hnn\nneTl5bFnzx5WrlzJfffdx969e3n++ed55ZVX+i03MNALpVIxpHUNCfEd0vIuVUQ7XY/Lpa2ina6F\naOfQMWyiHhoaSkNDg/S7rq6OkJAQAAoLC4mOjkatVgOQnJxMVlYW6enp/PrXvwYgJSWFP/3pT+c8\nTnNzx5DWOyTEl/r6tiEt81JEtNP1uFzaKtrpWoh2nl9ZfTFsMfWUlBS2b98OQHZ2NqGhofj4+AAQ\nGRlJYWEhnZ2dAGRlZTF69GhiY2M5efIkAJmZmcTGxg5X9QQCgUAgcDmGzVKfMWMGiYmJrFmzBplM\nxuOPP87GjRvx9fUlNTWVe+65hzvuuAOFQsH06dNJTk4mJiaGRx99lG3btgHw6KOPDlf1BAKBQCBw\nOWRWx2D3j5ChdtsIV5Brcbm0Ey6ftop2uhainedXVl+IjHICgUAgELgIQtQFAoFAIHARhKgLBAKB\nQOAiCFEXCAQCgcBFEKIuEAgEAoGLIERdIBAIBAIXQYi6QCAQCAQughB1gUAgEAhcBCHqAoFAIBC4\nCELUBQKBQCBwEYSoCwQCgUDgIghRFwgEAoHARRCiLhAIBAKBiyBEXSAQCAQCF0GIukAgEAgELoIQ\ndYFAIBAIXAQh6gKBQCAQuAhC1AUCgUAgcBGEqAsEAoFA4CIIURcIBAKBwEUQoi4QCAQCgYsgRF0g\nEAgEAhdBiLpAIBAIBC6CEHWBQCAQCFwEIeoCgUAgELgIQtQFAoFAIHARhKgLBAKBQOAiCFEXCAQC\ngcBFEKIuEAgEAoGLIERdIBAIBAIXQYi6QCAQCAQughB1gUAgEAhcBCHqAoFAIBC4CELUBQKBQCBw\nEYSoCwQCgUDgIghRFwgEAoHARRCiLhAIBO
fg1ZPv8GHuZyNdDYHgnAhRFwgEgnOQ05jP4eo09GbD\nSFdFIOgXIeoCgUDQDxarRfo/r6lgBGsiEJwbIeoCgQtjtVpHugojSptByzdF29GZdOddhslilv7P\nasgdimoJBMPGsIr6M888wy233MKaNWvIzMzsse7jjz/mlltu4dZbb+Xpp5+Wlr/zzjusXr2aG264\n4ax9BEOLzqRjfd4X1HU0jHRVLnta9Bq+Lto+pO7d/ZWHeHDv72jQNQ5ZmT82Ps3fxNaS3Wwu3Hbe\nZZgsJun/nKb8oaiWQDBsDJuoHz16lNLSUjZs2MDTTz/dQ7i1Wi3vvPMOH3/8MevXr6ewsJCMjAwK\nCgrYsmULX3zxBU8++STffffdcFVPAGwt3s2BqiO8n7N+pKty2ZNWm8G2kt0UNBcOWZmf5m8CIL3u\n8u0cN+tbAKi/gI6Nydot6lpj+wXX6VKm1dBGU2fzSFdDcAEMm6gfOnSIpUuXAhAfH49Go0Gr1QKg\nUqlQqVR0dHRgMpnQ6XT4+/uzd+9eVq5ciVKpJDExkXXr1g1X9QRAU9cLr9PUOcI1uXxo0DXxwJ6H\nOVqT3mO5octCNzpYhUOF2WI590YuilKmBHpa24PF7OB+N1lMLh3SePvUR/wj/Y2RrobgAhg2UW9o\naCAwMFD6rVarqa+vB8Dd3Z0HHniApUuXsmjRIqZOnUpcXByVlZVUV1dzzz33cOedd5KXlzdc1RMA\nerMeAHeF2wWVozN10mE8/5jlUKE1tF/Qy/ticLw2A4B/53zaY7ldzIej/mbrpX1OhhOV/MJFvXdH\ny2Q197Hljx+tsZ0mfYtLd1xcHeXFOpDjTaLVannjjTfYtm0bPj4+koBbrVbMZjNvv/02x48f59FH\nH+WLL77ot9zAQC+USsWQ1jUkxHdIyxsMVquVf5/4DxNDxjI3esawHssis72sfD29L6jNf9z9Bp0m\nPX9d/ujAjmu18Jf9r5McOYWl8QvO+7iONHQ08bs9f2JO1HT+N+XeHuv81R68cPBNUuMXkBw5ZUiO\n54wOg44yTRUTQ+L73CamPRyKbP87nnNVha1/7eWjuqBr4bivQibHbLXg7qkc0Xt6OBhoe7w9PaAZ\nkFvP+xx0atp6/A4I9MDLzfO8yhosF/u6yeW259M30A1PlcdFO66r3Z99cTHaOWyiHhoaSkND9wCs\nuro6QkJCACgsLCQ6Ohq1Wg1AcnIyWVlZBAcHM2bMGGQyGcnJyVRWVp7zOM3NHUNa75AQX+rr2869\n4TDRqGvm24K9fFuwl1cX/2XYjhMS4ktbp+3cyS2KC2pzg7aJTrN+wGVoje2kV2dhMJiZ6jftvI/r\nyMl62wCmIxUnetQjJMSXPbmHOVGdxYnqrGE9p5sLt7G9dA+Pz32YUK9gp9vo2rutPsd6trbbrkWT\nRnve16L3vauQKTBbLbRpdSN6Tw81g3lGLUbbX51h4Pdnb+rbWnv8rq5vxs9t+L0fI/EuMphs7Sqr\nqUPtEXiOrYeGkX7nXiyGsp39dQ6Gzf2ekpLC9u3bAcjOziY0NBQfHx8AIiMjKSwspLPTFsvNyspi\n9OjR/OQnP+HAgQOATfgjIiKGq3qXLFqj9qIdS2+yu9/dL6gck8U8KPemPcY7lK7mzq62OF1n7nvd\nUNKi1wDQ3NnS5zYWh/isY6zW2KU+Q3lOFHbXswu7i8+Fcgjc7733NZpdN5xhn5N/KYTTBOfHsFnq\nM2bMIDExkTVr1iCTyXj88cfZuHEjvr6+pKamcs8993DHHXegUCiYPn06ycnJAOzbt49bbrkFgMce\ne2y4qnfJ0ma4iKLeNTjLmajrzQa+Lz/IvFGz8HXz6bccs9XcYy7vubDHeM1DKDb9Cbe+H8EfSuzn\ns7+OmdkhkUlTZwshXkFAt3AM5TlRyORDUmZFWxVqjwC8VF5DUa2LimoIOja9Rd1kN/9dEEnUL2Be\nv2BkGd
aY+kMPPdTj98SJE6X/16xZw5o1a87aZ926dZf0qPeC5kLey/6E/5nxX4R5hQx5+a0XUdTt\nQihzsi6v6TRfFW3FTenGlVEp0nKr1YrOpOvxgjdbLZitZqxWKzKZs9J6MhyWuq6fEfwXK7WnfeCh\n1th3SMhRYOt1DZKoD8dAOYXMNtbEPIgOV290pk7+kvYys8NncFvCTUNVtYuG3VI3XoAQ9+4QuLLn\nw8LlLer2To1c9uPNy/bjrfkI8VHe52gMbXxdtH1Yym81tJ57oyHC/qJzTINpxy6EvUXmq8Kt/L/9\nT1DSWiYtswvVQF92dkt9KF+OduvY2cNo77woZUM7oLI33aLe91xmR1F3TApjGg5RlyvOOuZgaTe2\nY7aapdDCjw1Z1/0wpO73S8BSL20tp0pbc9by3MbTF3St7AOaL1f3++sn3+O1k++OdDUuCCHqgyTQ\n3R+Als6+HxyjxcRrJ989r5SSGr1tIIVimAXIEbMTUTeauwS/1xznnWXfAZDfdEZaZo8TD/TFaRrk\n9gPBHrbwcBJKsKcIdVde2NiBc2GP67f3J+oO59MxIUq3pT50HR3lEFjq9s6d/iKNS7hQGnXNPTwz\n9g7rhZxX+76eStto8Eshpv6XtJd5+uiLPZbVdTTwysm3ee7oP/rdN7fpNMdqTjhdZ5bc70M7AHmo\nMFvM/CvzfU7WZw1L+VXtNVS31w5L2RcLIeqDJMA9AOjOVOWM7MY8shvzeD3zvT63yag7RUVblfTb\narVS21EvWeo+Ku8hqrFzHF/0ziw54zlivHa3ptVqlSzugYq0vcwLERuwfVzjw9zPsFgtkqg7Gx+g\nNbT3uW4okWLqhoFZ6hp9t1fGHqc1DeGccrlkqTtPPmOxWijSlPQ7J9ku5j+Gr5N1mvQ8duhZ/nzk\nBWmZxX6vXYC3wtx1X3sobKJ+qeZCaNQ1AdB2jsG2r2S83WcWSWvXvaK7RC31ps4WTjXkcLI+e1jK\nN1vMmC1m6joa2Fayx6kX81Lnos1TdxWUXS/K/lxchnO8AM0WM29lfQggTbHKqM/i7a5l0G0VDBft\nDg+tM3G1uxj7EgS7qDve9IMV9QvNnvZyxlsApIyaLYm6lbMFyu4OV8lVgz5GTXsdwZ5qqb39MVj3\nu+P/w2Gpn2ug3LaS3Wwp3slN41ZzZXSK0230Jtu93N/sgksFe8fDMc1pX/fvYDB2nT8PpTvoL777\n/WhNOttKdvP/kh/EU9n3/Hid+cIzQ3Zb6pdmlsmh6KT1h8lqRo6Ml9JfR2NoI8RTzcywoZl2e7EQ\nlnofaPRtvJLxNqWt5T2WOwr21uJdlLedPZf+XC9mixPh6X0cZ+I0lLQbut1rznqj/cXbobtzY+4h\n6gOMqVuG9sGUy+SSqBstRraX7OHdrI+lwXN2kR3s8Upay3jqyN94N/uTAW0/IFHvkXL0bFE3X8SB\ncmld2e3OtBT1WUa3pT7yom6xWthavIuqNufuUYWT8RT9pcjVGttZn/cFTx7+W7859+2dVcn9fpEt\n9X/nfEptRz3ZDWdn2HT0sgxFumfrpe5+76rfUHTWnJdvxmw1ozHYwqCO3rQfC0LU++DjvP+Q23Sa\nLwu39lhusHSL+jfFO/gk74uz3JfnsliduTt7JysZrpvWjqOL+Lzc7105tR3XD9R1PFh3/blQyJSS\nkBotJjYXbeN43UlezXgbi9UirRvs8Uq6OloDid+ZLWbpnLUPcPR7j3PXNYbBOJQx9a6OV18DEu2D\nobxUfVt/duv3Ys31749KbTXfFO9gT9FBp+uddYQtDm3v/dyl1WRwoOoItR11FPTTsbF3ikba/W4P\npzji6DUYiKg7dvCcddjtBselOvrdXufhcoubLWanHrT+6nOppdQVot4H2Y22XnGwh7rHcr25p+ut\nrK1Cevnb6T2PtdPUSW17nfTb2Q3Z+8YY7lhOT1E/H0u9S9T7sDwBjtWc
cOrJsO/jTGysVitbindS\n0Nz3S7Y3HcYO6YVuNBsJ6BrMWNxaRkN7kyReg3WbWgYhsI4xZ62xvc8H3dxHuEJyvw9lTN3ufu+j\nHfYXd38uXbuFbrKYLngMxIUinaM+BqpZnJxzx/Pd+/o7WqP9tc1+TeyW+kiJujNPhGNnSzeAjpfj\n9r3bbLVapef9Uo2pd4v6wO7FYk0pL594i45+Otp2rFZrl6Xe9z1jR2ts561TH7Ju7+85Vut80OFI\nIUTdCY5TjRS9eseO7vdQT5t1vb/yUI9teovbxjPf8Myxl6SetFOLgsGJ+ncVB3kn6yPpQTxWc2JQ\nn4V0dL87jambu2PqeU0FfY4I7WFtOrzsOow63s9Zz3PHzh6Ja+7HUi/UlPBt8U5eOvGvAbYENA7T\nAK1Ye1gsLZ2t3YLfx8v4dPMZp1N4BuMtcXRPmyymPgeW9TVA0S4cQymc9o5FXyJkP35/oR5HERhp\na91+bpyFrwCsdF+vzPpsthTv7PEc9b4mju3pb3ql/fx5jJD73Y49nNLT5e4g6l2dNLd+PtDkeJ/3\n9sI53geXrKXedY0H+uXBzIYc8poLKNSUnLtsBy+ASspv4Pxap9dmklF/CivWAZV9MRGi7oTcpgLp\n/96D3gxmA55KT343a34shgkAACAASURBVB1/nPsQKrmK6vae80WNvaytZr0Gk8UkPSjOrDh7LEvW\nlQrmXPHfjLpTpNdlYraaKW0t5/2c9RyoPNJjG62hnd1l+85qg9liJr+h2xJ2bql3u99fznirx4hi\n6H4A+hT1fmJy9k6PxWo5q/PizLI/F73jXo4v62aHqYfOxK1B18g/TrzJjtK9fdZzIPSOOffVwep5\nvpwNlBs6wbCf23N5KPob2e64bqQHy53L9er4XL1x6t98W7yzx73f+xo5tq1fS93ufu+aEjlS89Ql\nUXcQX8c22cM+noq+B9nqHMS693l0fA8M5Tz1Km0NNQ6eygthsO53+z3bqj93znXHZ9Pu5epL1B3f\nb5pLLIeDEHUnOD4ovV94BrMBd4UbMb5RyGVy3BSqsz/N2PXb/hDaRxDbrV/Hh9K+rd36WJv0cyK8\nw85503aLqkUaqdo7pvbvnE/ZeOYbdpft77F8R+l3bD/zveROdObKknKR9+nqPLvH7Og67u+l0COu\n3OtlWqG1TfMbzJS+/gZxNeu6HziL1XLW8ewvQkcR1pk6yWnMH9TAut73SV9z1R1fnM46REOZkMd+\nLIMTEXJ8ufc3W6PnszD0ov510XbW5/X/JUY79vPlzM0Ozj0Ojm3vfY0c0wcPxFL3HOmYepfQOL4b\nHDta9nvZo5+ZM44WeO/OvNVR1E26IYsVP330RZ468rchKcv+vhnos2m/ZzUDSOrVU9S7xqP00YFz\nPI/95SwZCYSoO8Ex4UrvF57eYsBN0T01SiXvW9TtA5UMXTeW/QXj+LDYXzT2ZXJkyGXygYu6w8dU\ner9siruyvrUZ23otLwXg97N+g5tchdlqZn/loR7Jcuxt6ssqOZel3u9gMcc4fK+H026ph3gG9bl/\nb/qzqHs/cL2vlWM7a9prqdRW8135AV49+Q7lbRXAwFJG2l+udrddn5a65ezzZbaYHZKkDIOl7kS0\nHT0Y/VrqJgdLfRhEPb3uJMfrTg5oW3t7+hIbZ8uNPSz1vt3v/c06sHdWR9r9bu+0WKz9W+pKJwPq\n7DimU+4tjJZeHU5nncHB0l/HIKM+i/oOW6jTYDY4zZDXG0djZiDYz89A0m87vkfkXemuHa/1kerj\nHK+13as9QnyX2Ah5IepOcLxhnFnqbvLumJVSrnTywQeTtA66Xx4Ge5Y2HMu3rZPc7zK59B3s/uvY\n7cK2H6+3299edu+kKxp9Kx5Kd4I8A1HIbZ/n/DR/U49kOXavguNof0ek2FYf7uT2ftzvfXUE9GaD\nFLvvyxpztl9/vfZOY08hMllMVLfX
crQmvUc5BrORd7M/4c1TH0jJO+xufWfZ/SxWC98U7aChK+GH\n/VyHdI2z6OtLbc7Ol+OLw2Qxsb1kz3mFIZzVEcDgRISaHVyG/X3wZiCWen1HY59Tf97L/oStxbv7\nLN9oNg06v0Gf7ncnlrreMjD3e3+WujT6XRL1kXG/2wWyh6XeQ9Tbz1rfG0cPTW+vVe/9BjK47Fz0\n1TFo6mzmrVMf8MTh5wH4x4k3efroi1ICnb6wv3cG6n63X+NWwyDd75ztfv+qcCtfF20DujtH4V6h\ntBm1l1RCIiHqTnC8uI6iZrVaMZiNPQai9Cfq9mQn9hvL2I+lbne/y2Uy5DLFOW9as4Ol3Jelbi+j\nd+rUFr0GtWdA1/HkTi1dyYJ1cL871tvRU/D/2Xv3OEuq6l78W8/z6HfP9MwwvJ8yMIIgD1EjmqAR\nzdWY3yWBGPGnXr1G80lMrhGBGBPN5RruTW5I0BtjEmMS4iM/MOJVxIgYIeITeSogAzMMAzPTM9Pv\n7nNOvX5/VK291961d51zerqnBzPLjx96zqlTtatq773W+q7vWkuMqUdPnW+g/Fk/M79beqxZjNnO\nHHZbCHotjZgGmGu7tzSjLM5i/NF3/gSf+tFnsHdxnzSI0gjznXnMdeaEd0oL16TUf7j3Qdy2/Wu4\nrijVSe/x+OFjAcAaQ6T79V3fSBh8ZmE3bn3iK0aCYb9Cc8SkhKZZRUSb4Qaois+m/G/44cfxSUsu\n/717H8AD++wpgVEaIU4Tq0d3165v4+MPfCoPnXTx1E2GYCfpDX6vjqnr7Pe1yQIwKbS2AX6vMnJV\n+F1X6pn12OWKzTDQjcDtAlWs9qhNCGGVyJh6D/C74qkXSp3NnziNxb5Ie8NRAxsBHF757EeUukH4\nouHwe5zGyJChxpR64PqlTTPSlExbeOoF1M48io4Gvzt9wu/cU7dZi7zmeZTGmI8WMNbI0748xzPG\nVOmeuKXdYhWraANQcjrZ39X1z22euqqob/rxP+N//eCjxk2cw8K0yZrKwNKiJsXMjZT5aF4YGJ2k\ng04aoZNE4j6pQpcJzqT0Ij2Pm5S6NVtAjDVkcXTVUydZOshNNWVGA59PexYnldrf1US57uz3uc6c\n0YgjImSVEozSCBlkKlWr6Ap3394HAQCfefQWPLDvYcx25kSKYT+eOt+UdaOklbQFMbXKU6faATKm\nvjaeuljzKHvqaZYKRWNihneSCP/9O3+KLz3xVfGZjgbq/14JspzNuOdzbu/8PvF3lFQ/236JchJ+\nXx5RTkcEY6bUPcfD+iJMeDhB8EeUukFsaTAE5YUlpW6H39MsFd6COaYuFyUg4fduRQ1E/fQsEbC7\nbfPk4QKyWMlTtyl14cGy7ziD1BTb0lPauo1dHzP/fZzGmO3MYyluGSE8rmzofNx4odAHbeQm5vJi\ntCTh9zRCO+kgQyaq00lPvbxMdDISjWckHMJYbRS7F22eev68QjeU5XItZMQf7X9M/B2lMf5913f6\n2mhNJXx3zj2DP/n+R/GT6SeweWATQi88KKVOtf9NnlMvjXskpyH/7w/3PogdszvxiYf+QbwHIFcO\niVBqvcfUOQqhIxLtpI2BooVw1RgTEVOnObQ2UCsZ0kpKW/FOFqMlYdSY3sWexUk8s7BbWUulPHWo\nitJmVO5e2IObf/LFnp6DLQuGz7mH9j5q/Nwk/VaUk0S5OeW5PTD5MD732BeUz/heRM9SyZBg83wp\nbqHh1zFaLxp8VfQCOdTSValv22Yvn/jTKgr8zl4qWZEhqyHuu0GJVU2TnRQ+L4wC6EQX1XvP4fcy\ny1UXIvNVEeVI+GKdLil11xgr7QhPXZ6TW7smYhf/m/cU1zdbm6euFn2IhQI2pVLpXj2gGlsDRUEV\n8ugpBMGvN9eRsbBW3BL3RCUiZW/lsqeu3xM3HjYNbMB0e8a4KQoDxAtZTN3snTy0XxIXv7bjG/in\n
R2/Gpx8tM8X3L03hz3/4V3h65lnlcz7PyLD8p0f+GYvxEi5/3i/h6gveg9HacCWrvRv8Th6uCb7u\nxug3EQR5DJzn/853FqRS78dT1zgbXLhS7y2lbY2Jcob7bxs6A5qeT2BAm6qIcoAdfv+L+/4aX995\nF+555rtdx7xgMUL5vvrQnt6VuvDUewyB8BbSnCR49zPfwb89/e8K90fZf4r1ou4ziZgLS/ESGn5d\ndu18Lnnqv/mbv4krrrgCN998M5aWDs+CBCst9HIDN9DyXM2eOqBuXKQQfddXJoXw1FFelMJTR29K\nnVus3ZQ639wptUPE1F3XuOnyWLP8bR+eOlss+rhspVJ1Ah1tnm1DowrdggaghEUaxWYtlW15Q+ZK\nnRshevxNL0Ckj7UVt8R4al5NxNlMcXVSHqEnPXXbe+O/f6aohUApf1y2zTyJR6ceVzweQE1VpPc4\n25nHuvoYfuboF8F1XNS8WpeUtmr2u2DwG+Zqt/vjc4tCEHws26afFH/PRwvifvphv3PhjOUsy9CK\n22j6TeX6JinH1NcGfjcZNfROVOVUXs/m96N+RuelCoM2pU7GfS+KTI+p75p/FnftukcxePctSnJc\nr0q9V0+dz9nZziwOtKawFLfEXFCdCvk37dU8vJEhU+D3hl8X1SsPpof9SkvX1lNf+tKX8Nhjj+G2\n227Dm970JmzZsgWXXXYZzjrrrEMxvjURmjhNv46ZzhySNIHnSphaj6kD+QZFnytpNCz2S783euqU\n0lbA70A+cfW+Yt965ns4ZugoZXKbYrNceP4pTT4eUzeJYL9z+N2g1FMLlM5jaVEaIWBpgKaiK4Bq\nfcdpLMZgUia6BQ2oMfVmsTERUY48da5IZjtzwkBT8ra1TdvGfic50JoWY6x5ITYNbACQx9VPHDle\n+R03QCjmbPP8+IZD1zONpcO8EfVajBsiuu4lSkpm6IboJBHSLDWm7rWLuHOGzOjRizoLFYahzQvW\nWf/8XgDgiZkd4u+FaEH44fzZp1mKVtxCM2h2bYLElRAhaHW/Bs/xupSJLTx179B46lmW4XOP/Qsu\naJ+FE8KT2OcGpS5qVHDSn0GBG+7Pxn4fDJpYipesoZ6aV8NSvKRwbGzC94G7d30bn370FgDASWxd\nKPO0V0+9B6XOUUwgf/9//dA/4vSxU8R65XsRf260T5BTQM8qQ4YoiRClERp+AyO1YQDPQaLcaaed\nht/6rd/C+9//fmzbtg3vete78MY3vhHbt29f5eGtjcgUllwxUCxOeOoK/F546gaYL80yRSF10khh\neOfHFpYgOFHOK36vTtzdC3tw0yP/jD/+3p+z9B4JCakTVP7NjQhS6jymbhKa1IoSNMXUlZxzzn6X\ncKCuJG3FZ+JMVfb6wuLCNzF69tzYGiialLQ0T52/p9nOXE+11k0xdVWpTynpg5Rjz1uAkiRZCgcO\nmzeJFX5X4p/FszG9L1vbVj5GUfgoy5RwQs0P843KoKiyLEM76WA4HARgDoNUeeoiXm55xoqnLlAZ\niYbxmHruqZfZ77dtvwO/e9cf4KnZp61pkFTIiKcZ8vfluV7X4jMOHASuDwdOuR7EzFPYu7jP8uuy\nZFmGu3bdY017XIgX8c1d9+COJ+5WDZiKPHW+pkwGlun96PuLVOr587J56mQgV1UYTLMUX91xp4Is\n7WR/8+dl2g9twrN+uol+rn1L+7EUL2F/a0qM3eapk7QMz5cY+g2/LhyJqgySQy1dlfquXbtw4403\n4tWvfjX+7u/+Du985ztx11134aqrrsLv/u7vHooxHnKRMFSuCGijoU1Whd9zBc/JTnRcilSZWHfu\nvAv//bt/itt3fF18ZvLUbfD7Uyx32eipK8xpxlRncD9ZlONNu1LPMrnJR5oSLF/frKC5la8TwWyd\n3fjnuWdo7xDWDX4nWFXA72LxSUXC4fcqMcPvulKX8LtfzAkTcT
HJctRHNMTJ7HnayoaTUny/vGRJ\nYevMYUWpC089hVswvgGgVpAobWTJNEuFN2L21ImwuQz4nY2XGOYCDWNEQiAPj5jy1L/+1DcB5P0X\ndKIXSd2rYTAYwBQjM7XYvAgcv2tKm+/6cBynRIxN0gR//sOP4zOFB9qLPD3/LD7z6Ofxb09/y/i9\niOfGHcXwqWK/qz0F7O+i6jMyFgbDQqnbPPWCMFilgB/e/wi+sO02pbAQf99UnMnT0ne7e+rVGRBc\naE3SWiMjKg+XGZS6CeHI8u6L/JmSsVn36+LcttTftZCuSv1Nb3oTXNfFpz71Kdx444142cteBsdx\ncNZZZ/3UQvCJptRponUqYup88UXCU08V5UOK9kFWua3NjgUAx3EU+J0LT5PinrIo6arEtM01nmfa\ns3DgCNamSUkkWcKqV5UNAv65ib2eZqlyfd0TVX9jhr9oHIDZU+8VfhdEuWIj4t2nZjtzvSl1o+Gj\nwu88NCPfX3kjTdMEnuOKdMdYgwi5RGmEqdY0Hph8uBp+F+9fvZ6p21SaJQryQM/MtEHT3BwOc6Vu\n9NSpEU0F/G6q8Z+Puwy/UxfEsEj5I2NsIVowNnSh8MYTs09ZN1PP9TBWG8GB1jSyLMPOuV1C2dR8\n8tSr2O+JSGvU61LMdubQSSPsN6AyNmkVa8Pm3UmEqqOGpwzFZ2ht6LUf6FlMtabxxSduN7ZltRHl\nBgpPfcnCXO/FU6fWzFxMWSyhFxrDkTYxpdLahAyeoSBHmsiQWIyXZEzd4lRwacdtZW3ReZp+Q67j\n4jxffvJf8Rt3XtVXc62Vlq5K/dZbb8UJJ5yAjRtz8s+nP/1pLCzkA/7ABz6wuqNbIyFrUHrqany5\nphWfAXT4XUKdps2S/15UlFPgd/LU1Un2zHzObt40sFFJaTOlDvEYMd/s5qOFfDIWm5TnGjw/Cxys\neOoifYYpjkySSHh8Uz+fraKcbVFNd2bxb09/S/FG1JS2MvxOPcLbIqaev0u+2HJPvfvmYILfdU+d\nxu46LvPCDYouS+A5nvD+yRPQxXVcREmELz35r/j4g5/CdEFwNHrqBqMO0OstyFa6OvwOmDdT2hQH\ngiZcxzUeIxR3BfsdMMd0Vfid1pjkJiRZguHaEICc/S7hd3lf6+pjAPLQlA1+dx0XY/UxRGmEhXgR\nH/neDaIyWG6EdYmpp7FQUnpdCgpnzfWQB03SEciavOZSvITHph4HINGxVtJWnqExpm7w1PkxP9h7\nP76y/Q48UjSp2tBYjzPXnZ7/xgK/N/0GXMftCr9XeerGfcWQg17zQmXudIPf+yHK0bnEHGJKPRYo\nkjn8x6WVtJS9mHvqOnrzpSf/FQCwfeapruNbLemq1K+++mrs2yfjH61WC+973/tWdVBrLVZPPS3H\n1AX8zhYfHad76iScqFSG3+3s96cLpd706woTlqxEPiltnvpitIhGIPtnV8VodeFKnRsVJLQB6YVn\nOtpitrUfNcHoAPCNnf+Ozz32L7hv8kHxmVLiMy3/juB3EoIMOXFnIV7sqUmJ4zilz/RaBkQ0U5CW\nKvjd4TH18vMeDAYQpbEY70KngCsNoQDJf6hQ6sJTzxTDgGoYmOYpjzvXvVolUc60yXbbMGNFqeff\nk5IKi5S/htdA6IW5p26AXrki389aJnPxHA9j9TzcpMex614NvutVGncEvwN5Cit/zkS+ayedrl4m\nCSF5fH7c8dRduOGHf4XdC3utnrpc8+U8dZvXTfdF7+6izedLpa4bApBoYdNvdIXfq3oBmAwsk7NQ\n69tT750oR0gCcULmOuUyut0MTzpPYlDqhAb6blAyqPk6/dpT/ybKUh8K6arUp6enceWVV4p/v+Ut\nb8Hs7OHD9FsN0VM7SKmbUtrI4zU159Bj6iQ1t+yp8wXF4ffdC3vwxW1fwUx7VngFSZoyi9Wcp67E\n1NmiWYyXMMAUnlGpW6o6cS
83M8BgPN1DOV+vnrqlMhzFQp8tUrwen35SYUbHwlNn8DszXACgUXyn\nw2JTPRSNMG1QevOLJEvFe/OccpqjODZNVU89jYVyc1isezAYUHrDE9PYhBrQ++KKiVJw9GNy44PF\n1HuA32teiNALjfApvT9eFU58Z6mUR8KNPTqWG6NxGsNzPQwGA0VMvazU+DWpgZEuruNivFDqBzSl\nnsPvvhUlyseRiPcVaPA7T2Xq1VsX4RKFWJobb0txi8XUVU+d9giOVAh2tjZ+TqTl1/Qczxoe4iGe\nhl+3eurEyaiC383EyfL8oQJdtKd2Lz5TOBMVRtgnH/4n/OuOb4hzDQW5p26qctkLUpgjJmWiHJFv\nfdcr3RsPP9z25Ndw5061U+ZqSlelHkWRUoDmoYceQhStTZ7moRJ6uQJ+T6ti6uSplxugpFmGTmzy\n1LlS17u0qUS5j3zvz/GVHV/HHTu/KX7DJ1DCUqIUpc6sbNoMOkmEKI0VhWeGc7vHmQUL1RAfp82B\n4k0lpW6LqYvUoXK5VwDYuziJJE3wv+/9P6JWND+f6qmrSp0WoF62Ut/kTZIZNqhE8RZTpGkinqUv\nFHaCKIlKyITneNIYZPA7r1JHDGTKP6ZjTEq9Y4Df9fgykd4yqJ56rUCNzES5/LyB6xt7HOTHqBkL\n3BO2FSbix+t/k3dI5WN9x8Ng0LR66r2kQ3mOK4qE6EZczavBd3JP/bu771U6FYqxZ9JT1+F3zjPp\npRMY3RugrwPJP5Dwu0qUEw1dUEZgbHwKelZkQOVK3VOOIUnFHuSg6TetSp1+V4VymdYMGSsj4ZBy\nT2mWCqO7V/jdZETS99/fcx/un3zYCr9z6cVTbyeqpz5fePykHwI3KD1/Xlo6zhI4PXR6XCnpmqd+\n9dVX413vehfm5uaQJAnGx8dx/fXXH4qxrZnYPHVaGFUxdW5pZllqhKi4RynLxBYxdUdNaTOldemM\nWJOnvqjE1NPis1xBcIVXBedWCZ3T5HXToq95NcTxYsnz5x6sSlSh36nwO8mexUlMLpVTh3SinOu4\npTKuulL3HA9Jlihpejbp5qmnWd5shN6b68h4+Z/c+zEMh0N419lvFZ+FXsDgd1lkp+HXBReiWRTP\n0Y2QypS2pPws5b8TxRMjkZ56WSHSOTzXQ+gGiqFIwt/fjfd9Ak/M7MCHLroa6xpj1jCLHHc5pY3m\nLc13z/Uw4A6gk0bCM1QbC1WzvoH8fYwVsXcT/J53KozxqR99Jr+PV/yxEnKJ0xiBQ0Q5dQNflqdO\nyIphHfA1HyWRgmbYoOd8/uXnoviuHnumPcxzXbHmdR4EPUvXcdEMGqJWROAF2Ld0AHU/zyKg46rg\nd9O7IC7BcDikVG1MkMJzfQSuXwrV6cLXYpZlgBYZk9kYsdhbhwr43VSHXjE8bZ563EYSmuB3Uup+\nyQDi+2qSJtbU4dWQrkr97LPPxu23346pqSk4joPR0VHce++hiw+shegx9bYeUzey36l4Bl+EZqIc\nJ5GUy8S6RvY7nyT8GkqZ2IL16jiOEX4nL0iNqR+sp67C0IBU+DW/hoV4sdT6U+3sVv6b13Dnsndx\nEk/PlSuq6bF4Ij9x4SxqIF/o0+2ZyhaxJKkhVYp/lhTsbs/gqU8u7lM8yFSH37NEeMQ030IvFLwL\nfbMwISuy+Iw9VznOEumJ8Zh6Bewpy+Tm5D+TscfnCoVEnl3YjXWNsR48dTWmToVkAImO+a4v3uus\nVr5X/9sGnyqeuimm7qgx9WcWduPowaPkedMEnoipe8JAch1XUeor5qmz9W3ixuhGZpZlslKhGxYp\nWCrnhTx6dX8xp7S5jisM/8V4CYOOiw/e8xEM+E1c/7I/UFAAW9EiPneafgPz0YK47+HaMFDkrKco\nzoG8umF3T11DveBhx+xO/N8nv4o3b7mcNWJJWEw999SNZYQtFS25LEQLGE9Hxb/nS/C7AcXK
5HPI\nkBn32dWSrkp9fn4eX/jCFzA1ladsRFGEm2++GXffffeqD26tJEkTOHCEIqDSgpQexRuk6EQ53Yuu\n8oCAbmVi5STjSkqF3xPohKTA8RX2O52bNojlxtS5mDz13Qt78cUnbscpoycCkEq2X/a7qdsakG9M\nD+1/pPS57qnnBUXURaR76sOFUu+FcGOD+fjfXKnTf+MsyZudmOB3JaVNDTvU3FAYizqk3Cv7nXvl\nSTEG6YnxmDop9fJmyjdm0cegMBpJjJB8cZ1Iec8GTz3hSj/3xPVGGjn8nociCFUxtQAGqjx1V9R4\n17t1hV4Iz/WVDf/HBx4TSp0a1vgipk41CGKEXtjVU//K9juweWATzpo4k923wVNnoStuKC0YeihU\neeqhF2IhXpTpf7qn7ngMSTKz313HFYY/NyrIAOa/W4yXxPsxneuyU1+HXfPP4u5nviPGQJ5zflwO\no/tObrz1SpTj43ho/yP40f5HsX32KdElMckSca7BYEBURdSlCn6ntXOgNY3NzMijevbEjTIp9UzL\nDjqUnnpX8+E973kPHn30Udxyyy1YWFjAnXfeiT/4gz84BENbOyHrk7yYr+64E//73v8jJvUA83Ql\n/E4NAJhXxohyvNd3yrzbXtnvfDPXDQcTjKnktxZeJdVhroqp6+eziamy01R7Gl/Zfgd+sCfPAdZL\nsyZpgm3T29XyoAZL2RZTB/L+3ADwRy++Bldu+ZXid/n56H2FXmD31It3OMTiet3ElP+caggFh9/J\nq0vSXJnq95h76vIYej5keNS8UCgPXYzhkqT8znn/Arqu3LT7hN8dD4EbIENW8mZMylqSRvl77uap\nx4oCkXCxJ94dGapqlTX2tyUm6rn5+F3HLZXzzBWK+kwp/Ssfd35OntKWjz03cKbbs+IZk6feSSLc\nvevbmG7P4ItP3I6PP/gp5fyiZK8ByUihrj+lUYuhn3r+OfPUPfm++bGEMvZClHMdVxj+22d3lmLR\n3NmwMeT5uWi+RWkEz/FE3jiQOwd8v12IF/GV7XdYK/QlmbruANVI4qFI2nvrXk0pU82liihHbVWn\n2tPGdGHab7jBK++/UOrFezGt29WSrkq93W7jQx/6EI4++mhcddVV+Pu//3vcdttth2JsayY0yXhs\nd9f8s1iIFnMPnsVry/A7j6nLqmiDzDpVepCzxQwADszwu1odTJ2IJpYxP54mG22aSkxd29B4Nbkq\n0Setco7CSiWFQYvuqzu+gT+992MKyU2FaM0pbVySLMFwOISx+qgoKEO/811PNCnRN2rd++feQjcx\neeolohyDIen9dQqyl95q1nNdRpSTMXXhqfs1q1LnRhjBrlWeeuAxA4NttCQ2D5afw3VccR59bphK\naz67sAfX3P1hpciSSfl3NPjd1NXOd3zZRlcrqZxfvzdP3XEcNLy6yPdf31iHlx79Ipw8ekJpw902\n/aTM7ijulxefAXIFtRQvIUojHFN4ceSp3/HUN/HpR2/BJx78B+N4xPsyIFZplirPhXvqti51xOkA\nZLrtfXsfwp077xbrkww/1+ExdbOn7jgOXrjxbIReiM88cjN+fOAx5TgecrO1VuUGJK2HKI3hMdQk\nPy4rDGLZXOiLT9yOP/z29dizOFk6rwml4fOfl0wWKZl+TUlD5lLlqY/XR+E6Lg60ppW5RaFNMhQE\nesPep168y4SwrZb0xH5fXFxEmqaYmprC6Ogodu7ceSjGtmZC3lSoKZeFaEEU4iDpDr/nDTH4RFY7\nm8lCNYDuqVfDl0C+wExV3dRa0Cr8XkWUs3nql55wCd5wymvxS6f8gjI2UxyqURR6odg4bVI/md5W\nOtZEpjLF1M8YcgHoiQAAIABJREFUf56w8Dc184Yp9JwkH8HDzx//Clx8zEuU+8rfpbqoh/vw1I0x\ndY2gxSu1UalfoYRSuRnn8TXGQE4T4QmRoVHzQqE8SsJA
g3/88T/jN79xtUix0VPaALnJ85g6j+9t\naE7AgYNn52W1QnGOVBKn9CwPEtO83D7zFGY6c9g28yQ7zsScZ+z3LDZ6fbykbpul5ckxmg1f5RzF\ns677dWF0nzF+Gq543i/lfAHNAOykEWvZSQYjeeoSfiey1+bBTXAdV3jqU+08VLndkmJHRq6pbwIn\nvgI6/G5W6lkma1UExZ715e1fw79s+3IZfnc5+z1Hb771zHeV1sOe4+HowaPwK6f9IuIsKeVYq3Uv\n1BREvUKm6zhwXYk8eq6nODhkEDuOU9pvb9/+9dKzM+1rXKnz/Y+nZNocBVP2DUngBhgJhzHVmlbW\nO+03oYDfKZRWzlSo6tmwWtI1pv76178en/vc53DZZZfhNa95DcbHx3H88cd3+9lzWig+qntLU+0Z\njNVGlc90+F0v69hOOgi9QJlUau3zpJjYZfa76oWYvec0S4zerupJElGO4HceU9fgd2RGpX7s0NE4\ne+JMzHXmccvj/9dYUY7fE1COqeuLlh8LSA/AFFPfNLAB/++ZV+DOnXfh+evPAKDmdQP5BvILJ/08\nAJUQxRUDic1Tr3t1tJJWTp4SEGY39nsivA1xTcdjJTzVrIA8pU2y35+e24XBYADjBUO75tUEAlS6\nLjMwvr37+wAkGzdOYjw29Th2ze8W9dppk8838Px+eEw89AJMNNZh98KeUrxcjpcpda2Ov0lZk9fP\nyZpGolyiwu9GT11R6ib2e29EOUCWCgbUOWaCRheiBdT9mnh3oviMJ98bKdyhYBCDwYDw1AcMMWYu\nnQpPPSkR5aRSp3mox4bTLBNrJ2TGlwNHst8F/K4S5W7bfge+/OS/4vHpJ7F1/RYAcn4cM7gZgFqe\nOr8eh9/l+P7s3o/jydkduOHl14l5mhPzVAN7kHvqyPc+HRkF8v1WF+V9i34BEnIXhbiKdFJ6Jqa9\nBzBn38ixuhivj+GJme3GdMlAkCfLvR5kTJ2U+mFElLv88svFS77ooouwf/9+bNmyZdUHtpZCG/S6\n+hi2jJ8m4KdO0lEmJIASLMnjeilSdJIIgRso5Do99hensWhGwdmpauUji6euEWuExa+NAzB76uWY\nurnCGY2J5oJImamIqdY1+N1kLauQsT2mPhQMYiBoCqVtGruiVNlG7TtlpT4cmJX6cDiI1lILgwWR\nDughTz1NzUqdKodpzyqvKJePbzaax/7WFLaMnybGWPNCawywitjXTjr46P1/qzzTkMXuTSltAHDU\nwEbcv+9hzHbmMVKTCIbOfgfyRh1PzOzAm7ZcltdMN8wVU0Efk8LtaDF1U1yf5/RLSNMMv9ueDb2X\nBgubceTGVKd8PlrAusa4uD96ZoEj1zvFu5tBEwNBUxD59BoJukhPvZynn1UQ5awpbUhLvBIASv9v\nOqerZF6keKooZ7pncRJnZM8r7jV/XlSFT7+eSpSThtuTszvEtdSYOl8XLo4bOhYbmuuxd3GfyFPX\nlXrgBkbioYkoxysq8pS2iBlkfP/lUpWh4bkexuuj2DaTlepZBG4g9kJJauWeupoddFjF1Hk1uY0b\nN+KMM84wls38aRIq5em5Hn7jBf8FP3vsz4jvdCtch9/5hM8YLMs3ad0i5CQLBypcRWJr+pGUPHXz\nOAAJlTUrysSmWWZkv9Ok1A2OqkYe5A3d/cx38A8//pyysHjqifid5uFzoc5RXEpKnXnuindQKFF+\nvIkol+fn5kbbEHvPRk+9lNKmNkrxXU+kZUkmsoTi6HnumM1DWccMbhbKq9JTr1DqC9FiaZ4ExTOP\nM3NMHciVOpCnopmu5Tqu8AC/9cx38L0994p4p2le6hUFbcfpRDnTMb7rl5RuZlHkPK583sYXiM8F\n/O5JpW7z1GnuzRfKVPQOKLx83+VKvcgmCZrwC6Y0P9YmpoIxiqduianTnNP3j4x76poxqBvoHvOc\neegnzz8v3nehFpp+
wxiLVrJ3DFkTiZY+ydeF63gYqQ3hgy96HzYPbIIo3ARVqY/XR5XWu6Zr03hj\nA/wep4mo2+C7fum5kKhEOfW55j0DcsNGr49hMgr5s6Ad47CMqW/ZsgU33HADvvnNb+Kee+4R//9p\nFj33kivyAc1Tl6lJ1NqSx3zICvWUxVFW6pGAszlcxSEfW2EE3noVAO6ffAg/mdpWGgcgmd+8Lrop\nps7TX8Rxxd8O+lfqAPDtZ78PHhCmBazAX4YysXRdU9qMqxmXjuYpk/hO3jbz2KGjxWcm+D10Q4ES\n6HE/XXT4Pc1SuFocn54jVb/icDYphycLT+nYoaPF5pDH1G2eur2t40KnTFqizSdNE6EMS0p9cBOA\nMsxqGi8pO+EB9tCPPj/eVHxGNUbNCFEZZVHKxCrGVX6N1518KV574qvE53S/XNmaSj0DEEVqyAsn\nA4UMgoCF2+iYnGfjMSJbdetN8uh0AmX+WztRjgx/HTlKWUxd90h12FhNaTMrdYehcqTU+Bj4uPUY\nOqCiQpz9nl+fK3i32CMzuI4DemwDQRND4SAWojw1765d9+BD3/5faCcd5d5lDj5T5IR4IEMnjQRS\nYI+p8/1HN4A8UV54clHtK6CkNRdoLTdmD+uY+o9/nDNYv//974vPHMfBRRddtHqjWmNJs1TZSLjX\npisXv8JTp43cdzzFU9cLg0RJLCYrz1Pnk0SfcPK3kRJju237Hbht+x04bewUZRxAXjpWn+ClmHqW\nsrQvCUHTpCx56hWbNXUAI+HPtObVsBS3uhLlaAwmJexoNim/F4UoV/x9+tipwjM2EeVCL8DGgQ14\nev4ZjBbFSgCpOHbM7sTexX04f9M5JQ+R137Xrw8Q+1zC7/Q897cOAACOHdqMnXO7xLMJLZ66KRRA\nYqrGxYly3Tz1Z+Ztnrqcv1R4w1ae1CbGlDatn7q5NrhBqaPsrfG/HTiKoibkq8FgcXUNyGPH66PY\nvbBHKDsqhkMGAUfmRN2HoAnPdUtpZDapLj6TKYqGF0eyGQ3EIAdMnrr6TD1Xza6h+8xboEojjmSs\nNqqw0HlqJCC9U5Wlz+a65qnzdeE6DqjNs+u4mCkyE0bCYQyFQ8iQYT5awKMHHseexb2Yak0ZPXXZ\n+yBSnl0rbgmnK/BUlFB6+Unx37jkbOVFi8hTV5U6KXJA7mtKZ8xSTP0wUur/8A/mtIyfZknSRPEW\nOfSre+oBI84AqvWdFfmjYaDG1E2euon9rhKNzJunrVhDavDUF+MlNP2GEj5xS/B7WirQAsgqeHoO\nvbk7F3nqunVcLnoSpzEe3Pcj3PHUN0upXQBw4vBxeGTqJ5hori9dpzKmbvDaTx8/BbfvyBm1PASR\nl2dtIfRC/OdT/xNef/Kl+NITXxXfZ1mGbdPb8af3fgwAcPbEVkVBUkU5fn2dUU0FYGg8XOkEro/1\njXXCU6701A1M/CohZax7T1wmGvmzPVD0BF+MFvH0/DPgZUP11E1TaeIqMXvqakMXWYSnLhrY+K6v\nPCtA89QNxKm8KZLc2iT8zoly5VLPADBebOLUFY/nOgOSKKfE1P0mPMfLsxuyci6/Lh0DqifZ70mJ\nQEhiZb8jZXnq6porKXWlmZD0bKM0UlLaSEbrI8rvebwckM+H5/8naaqEE01rMb+ONIQcVkNgpDYs\nOqvNduaxQP3nk0gzphPlHqMsVhDNVtIWRhhvojUUDIjMhTiLsXthDz78nT+BLp7rCWdCj++bCpCp\npbk1pW5oRbta0lWp/+qv/qoxhn7TTTetyoAOB0k1r0uF3/WYup0oB0D0z+aWndh8iipHOfwuF5Rn\nUuoWmNNWVpHnkhIJbyFaLHUvK3vqmWT1Mm9WwO86Uc4Ev1N6jaaY+AYl4fcED+77MX4y/QRCL4QD\nR/E2fv6En8VbzvxVI9GkDL9zY8UVFjkphROHj1e+D70QnaSDht/AUtxCzQsFksGRlS
zL8NnHPq/c\nB6VSBa4vUBRuILm6p56livfC48RD4RBcx8XG5gY0/AaOGdpstex76SPNhTx1tfa7+s5DL0DoBliI\nFzEfLeCqu/4QAPDSo18kjtffpa2Hu01MSJMOv1MIq+7XWFc6E/xejqPzzx04yiZK96sS5ZhSd8rw\nu/TUC6Xu6/C7GlPnxq6xrkGa4EcHHsX/95MvYrZQXuY89czYDY/fn14tUKkop8Pv2nPneeN878iV\nejnlUc/2idMYSZoIw4uez4zSlll6vRzu18/twlXm5C+e8hrceN9f4z+d9PP40f5HAQDznXnBsO+k\nqlIvp7Sp/KJ20hbzXxSmcgM8b/xU7F+awraZJxGnMb6569swCRVdAsrhT1NMfVFpokV1Dszk1NWU\nrkr9Pe95j/g7iiJ8+9vfRrPZrPjFc1+SLFG8maGqmDrLW81/qy7oOI3hOi5Gw2Hl/EA+MdpJRyPK\nucxT5zmg/Xrq5cnfTtoibYrEFFNfiBYRuIHi2dCkdB0XDpye4HddefBNPHADOHCUyk+dpJMz1ZnC\nC9zAyhyt8tTp+rmBRhBcgNee+EphLNXcXKk3/QYOYErhPfC/U6SiMxOgKsjADViTGLunzsvBuq6n\n3BOFdDYNbMD1P/NBuI6Ln0yVc/oBNZ2Lw4g2CViFMZ5mpMtAMID5zgJu/skXxWd0z9xTJzF1BqwS\nm6dO98Bj6nze+a5nIMqZG7pwT5M/fxlTtxHlmKdexFCJO7CUUEy98NRN7He/wXK/zUo9SmP85QN/\np3yWpIm4Fx5Ttz1TUrr0Hj3XQ5IkRUqbWlFOXDfRPXXJ2dlfIDN0nB5TB4AxzVMnb7kZNNBKWmL/\nmeWeepYq/A0+31wNfidxHAdbxk/DR382bxZGoajZzpxYX+2ko6b5FgqzY4Xf22jWG8pzCb0Qbz7j\ncrTiNv7bNz8gUkpNYqpVQmKOqZebaK0FUa6rUr/ggguUf7/kJS/B29/+9lUb0OEgCVMEgEqaKsXU\ntfaiCfMWKKXEc1y87JgXY7Q+ir956B/Fiw7dvNZx3hazDL9zOEd6g+pGbvXUNfidKsXpi77Efkeu\n1In8I48rE1z4dV51/Cvw1R13ApAbvd5ukEOBXtHOM0kTJRas55TbKqvl59fy1Esxdg8RYgW+fc2J\nrxR/h14IRJLly5EJvph174srSJ/VDbel1OXnkDFtX4Pf+Zyic/SS0pbzEsxlOkkClkNbtcEMhgPY\nsziJXfPPis/offHiM+I7Q/3yKjER6qIkQsOrYyFeVJU6U74eqyhHkmYpvrf7h/nfhoZCLhxFUXeH\n3+W7GK2NwIEjFHZbeOoUU5dEucV4CQ2/Ds+VmRUJe89c4iwWyBAJbx8q0/USu6cuysTmx/qOhw6I\nu5MU4+sSU2fEtX0sTtxhaCGfH2VPPUKSJazZlcFTV6oX6qhJ2dgCymuX1uJcNC9y9SMLUc7mqWfI\nxNyRJV2pClyRhpbG2DlfbhJF47Ox5k0x9cXnSkxdrx737LPP4sknn7Qcrcp1112H+++/H47j4Jpr\nrsFZZ50lvrvppptw6623wnVdbN26Fddee634bt++fbj00ktx44034sILL+z1XlZM9Phow68LZap7\n6o7jiNq/ANji8kWZUIIQjx08WjmGlAr1jgbUGJQKv8vKVnxjIEs5cINSoxcyLHjuq77o9Q0+Kzz1\ndY2xSoILh99dx8XrT74Umwc24e9+9GnhdeieOq+45BZkppi1SMx/oyo826LKz6F76qqS91wPSOwL\nijb20Avw2+f+uui7DEDxTCmLgYTHp/lxep46F178Ja8oJ39nStezGTOqUg+7KnUJv6fCszF66n4T\nnaQj4uqAVNx6SibQf0zdjOhECL0QS0lLhd91T10zkLIswxefuB1ABlPOOg9h8fttKJ66mShX92ui\nqxgg24sS+933JFFuIVoUmSSi9KrFU4/TGE2/UW
Kj8/QvoCj+ZGmopLPf6Zq580A9BeyGNP2GjuE1\n3XlMnT+7zYOblL0lKghlvuMj9EKxdmc70lPPu0VKkqXNOXAM74iEYtnTrRnWJTMyE+UMKW0kvPYD\nIJU7IY475542FpahsdrWoeKpOwalfjiz39/85jeLvx3HweDgIH7jN36j64m/+93vYseOHfjsZz+L\nbdu24ZprrsFnP/tZAHnnt7/5m7/BV7/6Vfi+j7e+9a2477778IIX5Lml119/PY499tjl3tNBCRFd\nXG3CDfhNzEXzJaUOUI9ltea6Xyh1+j0gPUudqcrhd54CorLfmbHAJmGH5dFGHbVEre96ObGFNYnQ\n8071yRalMVpJCwN+06qkHMeVObOp5B/I3HOJKlxzwW/jM4/egidmdigQLDV30IuO8DgWPUebdIff\n1XrdutBC910fxw0fo3y3rjEu/s6YJwSo3hgfq5pGV4bfOfvd5qmL31vz1FX43STcI/RdP68sphU4\n0oUMCz7neMGSg4ffzTH1up/n5HNPnWc/2GLqGcrGNyddcU4FLxNLYktpq7khBsJmBfu98NSTXKlT\n5gCv0lal1Ke1Kmlxmqhs/jSH33lFQ/3+pKfui+eRFHUSSuu5BL97RgUTJZIAxys1jtZGcN1LrsVt\n2+/A13feJYhyxD1pCaJchadu2Ud4XQndICei3O7FveKzdtKxxNRZRTnNeKRnRPterdhzHSd3Kkw1\nFcRYXc+Ypw+oSJpkv7OUNhFTPwyJcl//+teRpqlIC4miCEFg955I7rnnHlxyySUAgJNPPhkzMzOY\nn5/H4OAggiBAEARYXFxEs9nE0tISRkZGxO8GBgZw2mmnHcx9LVtIuerw6WA4gPlowVgtKmC9prny\nJXGF0nOUY2hjUeNZnP0uLT+bp02LquHVMQdZrCFJE/huUCxCWfpV97h0b5riqHmaDvfOVWs7zdKC\nDxAJr1O0HGXGzNGDR+HYoaPxxMwOxWuI0qjon9wpwdZ8E7ctqvz8eplYs1K3lWgMmVLXZcv4afjI\nS38ff/Xg32P77FNqLI95Y74C81aktCnsd3XzHTJUt7Pdt4ntrQv3CKmBB6Xd6eMkMZU2rYTf+yTK\nmWD6KI0w5A7mvdpZJcNuMfUUGVAoT57OKYun5POCGOkmopxaC4HNNy/EYDCAfUsHkGap9NQ1otxi\n0cyFjHwBvzPjTb3XWEmpI+HpX/m95XnqA0FTQNpknMjWq4TaSXQgL5jllxS2/tz1sq1yfJwop37f\nDJqyOVMBv7uOi7pXE+2ouaeudgTUY+pm77zsqeeo2e4FqdQjTanLlsJSuethHnpGEn5Xsx6qmldR\npgARErmoMfV8bSxFJk/9MCTK3X777fj85z+Pv/zLvwQAvPGNb8Rb3/pWvPrVr6783b59+3DmmbKH\n8Pj4OCYnJzE4OIharYZ3v/vduOSSS1Cr1fDa174WJ554IjqdDj760Y/iYx/7GK677rqebmBsrAnf\nX7kHRpZtoxZgYkLCsZc+7+XYO78PmzaOln5TC/Icz4mJIdSe9sRnKBzQRq2GiYkhuIuyMAIADNYb\nwCxQH/DgB/mk3jAxjPE432DbqYSlUyefHPUgBFgYPcqi4lxN7GVIbOZmqPkBluIl+IGLwZF8Eg41\nm+K+JiaGMLakbeb1fIzrhkcVD2bjxDCGarny8VwPB9pT+J1/+738nOFAfq5Ofq6k8DzGRvPPB54u\nNlOXQaVOko95fh8Cj8Htvo9NE/kzdhwHGzeMWCsYRnW12MqGiWHFaAmDAGgDzXpdeZckQ80BYAoY\nbDSM309gCLVHfaRZqoxheKQOP8j/3azVgMJJaTRq4jyNulpVbHikDreTbwxDg01snJAEpKPWrStd\nv2ZxIDzfEcdmjpkkN1KXqYjDQ034rgfHzTA0XCuuX77fjXvHgKfV86ROPt7144PoJOr9hA03P4dX\nXWiFJAid0j
WjNEYzrGExCZA5KVw/P9fYoDxufHQIG0dVslaapXALsiY37GhdDQ/n9xd4ubE9NJT/\nOylaaQLA5o1jQpGMLcg1cNSGMYxvH8ETMzswMOojdfP1cMzG9aj5IaacfCwtJ59744PD+Rxv5HN8\ndLyJ2r58W33n+b+Gx/Y/ia8/8e8YHA4xOjAIaOXMR8bqinFWb/hIshjD9SGh1F90zDn41s4fYClb\nxP+8988xUs+fTy0IgSVgZLQBuBkCz8PoSHXd+Q0TI0rraJLESdBo5uMeGxsovavRyfy8jcF8rTZq\nIRKngfn5BUxMDGEhkWtxaLiG2mJ+rnXjQ4jn5WRu1uQaqdfkWm3Uw9I1635NKTkcNFx4gXzfg0M1\njIxLQ83xM4R11Tigtb++lROV+VoPvaDSUx8eyvfK0A+wFKlKfWRQ7qPrOvl/I0jEcWg4v+5gKyzO\nlT9z0z6z0tJVqX/yk5/EJz7xCfHvv/3bv8Xb3va2rkpdF85YnZ+fx8c//nF85StfweDgIN785jfj\nkUcewde+9jVcdtllGB4erjiTKlNT5tZ/y5Wh0YJYFGWYnJSQ0gtHXwiMQvmMxM1cLEZLmJycw/xi\nPkmcTCqqOE4xOTmHmbY6VifJJ+CBmXm0O3kDhn375jE3WxR0YBXCorjwmjLVgGlFRT4vtFSWKBIK\nrt2OsGdfvjiSTn4PExND+Xjn1HjSrv15oQkvDhTCztSBJbSKTdfJHCxFcjE4cDE5OSfGTcS3uZk2\nJr05tFv5xrjYkb9pddoYq+epZEpsPHMwfSC3TgI3wL595VKRJNOLajx5/74F1UNOC2REe5dC4vz7\nuJOavwcQR2WW/74Ds1hqF88tkWOP2ok4TxKpym7fgVkRc2svxpiZYlBdyytdv2XZbDpRJI6NErOn\nHjpSAS8tRPDgoR1FmJouqqQtRuX77bAYf5CjUkudAlqdbpU8lem5BUxOzqHVMccjdZlbXFKuGSU5\nU9nLAriZh3bUwUIrv+csks90Ya6DmUx9FlmWIdaKtAAQZUHn59qYnJwT86q1GGNycg5LHYmQ7d8n\n48mL83Kez011EGb589v+zG7MLi3khVEOtOA4bczN5c9kz2xeNMhLQkxOziFq5/Nkct8MFoo9YNyZ\ngJ/kzOrJA7OIo7IBtHffjAIZzy+00E4iZZ2fMXIGvrXzB3hyame+XsirLeb3gal5tKMILjwszFe/\nj+kDi2h5qjHouz5anTbmCuU7O9PCpKvOj85SPsY9B4p9JM7gZQFacRt7986iHcnr7p+aE89gZnoJ\n80usMiZbi1Ekx9Fpl9fguvq4Qtw8MDuHVpvtSdPzeNaVHJCldhuz8+oem8X5fteaL55x4orruGCp\njLXRUs8CWieBE2AJ6hyM21IXLM7lY5ptyb1qemYRk/U5TM0Ua24hn5u2faZfqTIOuir1LMswNCRP\nMDg42FPt9w0bNmDfPlkvd+/evZiYmAAAbNu2DcceeyzGx/O45XnnnYeHHnoId999N9I0xU033YSn\nnnoKDzzwAG644QaceuqpXa+3UrIcuMRXiHJlApXeDIWEIKG8oYvskEWLlseaOVGOCx3T0JqgdNKO\nyEnPS78S/K7+Xr9PDr/z2ssqa1UjpLF0N7offhx9z6GuThIJ8hGvmsWLY1RB74CB/a5BeIQ02FLi\nahXwu+2cQAEvUp66Z4bfy8VnZIEQ11XbfQ4aquUpvAJLxziuaPkxAxqb3nVdxJnaZEMX3qhorDaC\n+WhBgd/13/RNlNOMAmpTOhQOYao9g3bURlSkf4ZavNIYUy+qqDlFfJeX4aV5Qb+jsVOsvqblctM8\nySvR+aLD3UxnDq24jbpXY8078rFRoRQBv7uyShsvTMTLSJty9XXGNnVp4+//1LGTAEgUURDaBPye\np7Tp1dtM4jpeQSSUkPJQMIh20q6cH5S6S8Q4z3FR92uiHKvKSpddJ/Xa7yYC
Y/53WadsbE6o2Ril\n4jOpkjkTmWLqOlFO4evINbi+MV5S6rRvmfYhfY4CanObwzqmvnXrVrznPe/BBRdcgCzLcNddd2Hr\n1q1dT/ySl7wEf/EXf4HLL78cDz/8MDZs2IDBwXzzOvroo7Ft2za0Wi3U63U89NBDuPjii/GZz3xG\n/P79738/3vCGNxxShQ5AWZC9iqsQx8oxdaH0NLY2xWU6RTxLxgINisRwXi6cBATkSpOuW02UU69F\ninwgaCqlH/WmDKZz6NXmXC2mrfTPTiMlxinP5YnNsErZ5udnea5wSkqerqsrWBKKE/at1NNEFJfg\n8d6qlDZOHsoNF+4Zl8mXlCZFGRcExSpEIbaBNfyG6KuuduFz4Du+Rl6qjqmP1kexc/4ZMWd4GhRJ\np0tMncYeujniox9H7VmHw8E8C6Jgv/taXNhElKPOXmlhKPmOhw4jMzpQjUmRJlgYCHqlQ17HwHEc\nqdTbs2glbSX+TuuPlDoZzmKtZakw+DgXIWJ1CrgkWaIUlqJMmMD18eGfey9mZ1pijunxcZ9dM8kS\nBF6t9J504WtVKPVwAPML85VKne6bUvxclqXSitslY5NXI9SbK5GoRLnyNTc0J5R/m4hyelMgvUW1\nntJmUsZAToz9yfQTym/pWZnSS7vnqR/GMfXf+73fw6233ooHHngAjuPgda97XU/Q+7nnnoszzzxT\ntG794Ac/iFtuuQVDQ0N45Stfibe97W248sor4XkezjnnHJx33nkrckMHK5zM1Ku4cEvFBviE0dnv\nJIqnnmUixcO0MG15qCS6Us+QwXO9olAMI8rpSl1TPqQYBoJmz1Z1m5GyuLga8sAXYCeNSmPm48nb\nJVZ76lVEG4AR5SxKu4ooR2JCpagrGx9vPgZVGXGJs0R4aj5rvQoAg5Y2sJS+OMDKWtqqqTWCuvLu\n+Jg8x0XMyUuGPk6cgU+5yeQFuVpFREBC3TalPhwOYbo9g5pfQ6cTlRQald0cCgcRFNkjURojcH21\nba72rOgZ8DoJoRcCbG7ROyPviL+LTc0NpT4CYuMv5ttISJ76LFpxS+kDQMdSxbtGgTYJ9nuqVu6T\n/dcjc/URSwitAAAgAElEQVRFzbsk4lngBnje+pMxmc0Za/rn98fY75TS1tVTl88kQoS6V0PohUr5\nV9M5RCiPrXUiNHIvXz4DWZ3OlkXTbf1u1JR6J+0oxYaSLCkpdRv7fV1jPK/WWPSIB1Qv3kRWlZ56\nuQANXw+mPZnXHbDd32pJV6W+tLSEIAjwgQ98AADw6U9/GktLSxgYqCZkAMB73/te5d+nn366+Pvy\nyy/H5Zdfbv3tRz7yka7nXw0RBSz6sKzyvG01L9Gk1HVlWOPsd6Ql+N0k+uZKosPvgPR6swLSA8q1\noas8delpe6USrMpvCmViSzGj83D4Pc1Sq6cOAMcMbTY2XeHCG7qY4DthIHTJUzf10xbnNShAYveW\nvZDeis/QeyExPQdApi9yJS0LkKjM76FgEHuRh7vU1rouPNdHK2pXbtr8GlRFjBfVsbPfzXH9icY6\nTLdnUPdqmMN8SfnTPBsOh4p6BYmAnX3NOKIKcSIEAbVugJ7HTh6gr2VlAMB/e+G7reEjmg/kqU+3\nZ9BK2kp3N/05kGGqVJQDh99lBTqrp66VNgVU79A0t/P7K/LUizRc35KuRsKbRXmuCyT5+OmeqFWw\nyZANhDEj4fcaV+patzyZHlfupy7vqz+lnu+TaraDqtST0jyjuTEYDIhqjeK74t00/aYxq4nGaqqV\nwRW9aX9ZS0+9q/lw1VVXKbHxVquF973vfas6qLWUNLVvfDZxHKdUQciUv8zzPwG5cKmhCymQqmvb\nGn3UDD2c81hobnB0hKfua8donjopdV966vp4bEaH7Tj6nOfMvnTzhUrqkn6O3zrnv+KtZ77ReB15\nfl5m0u6pu5Z4lvTU7QvOdK9xAWXrqWmV
xWd4nrpmJNk4KjSHFKWeqbG644aOxpu2/DJOGT1JHDPg\nc089j2nyNLwq+N2Bo3imQK4Aykrd3Hr1wk0vxFvOuALPX38GABni0HsXSPh9SGyuraRdgt99htyQ\nJGmiGDS6USY9dRV+B/INWkdm6BqBptQprY33YdfXDyl8U546L69rgobzz3WlTp66OazDRcTUkSLO\nklL5Ydvx+XhllT1CKAhaNykg2ndIqbuOJ/YcHRZXurS5rrFjon5fJsNlg9bEqVQmthRTL4d5TM6V\n/C4fSzNooBHUS8e5Yl4Epe9UGL+8J6eaPqh6LystXTXX9PQ0rrzySvHvt7zlLZidna34xXNb4qx/\nYoPDGhOIUqDKAlJjziShiLfFStpUpadui6l7Jq/XFYVirPC7toBF/2i/LpShPiFt47N56nT/tBF/\n4ML34lee94ZKT9113K6EzKoyk/xcNk+cCFPVMXUT/C49dddSArNUfEZrR9mL0LtuMiWdiXmWn2s4\nHMKLjjpPmRfNQFXqvushSauJcqEXIPRCDARNY9VB3fCx5akPBE2ct+kchYToO14pp14S5QaV4h06\n/E455FXvSB+bIzz1aqKkuIZGzBwKBuHAwZ6i8Ak3mPVx0LqTfBIVkaHjrZ66oQkJoK5T3RnQx8K5\nBVVzy1QEpubXStC66Xoypp4f47myhXMrbilKPWY17R2oRDk+9xT0z7B+eV6/A6fgHlXB70nJyKws\nM13cZ80LlWvxEAWg8pDISDXF1LlkmvHdj5N4sNL1SlEUYds22VziwQcfRBSZYzw/DSJJLv3B7wAR\neMqeuoipwwK/F+QYPQZtEptS3zy4Ca8/6VIcPXiU+MxzPNEJKSpioKXiM5rx0klp0UooT1f8/Sp1\n/Vmub4znMTmDUq/ymsvXs1ekAuRmbdvUNw5MwIFT8gi4mBCANCWl7lk3LCNRjuZW8d1vnfMOvP/8\n98AmgRsgcH3FK0gtsB6/HocSCU3QWdkm2TJ2KraMn2ZEXDiTGyg61RXevw6XAyoK4hVEOC4UUx+u\nDYk5nWRJTmbr4qnronMmqjx1k5DRR+vRcz0Mh4PYu5gjlDy0pWcCNMhTp3amGiKieur53rCuPoYL\nNp0r7tmk1LkicRzHqGjpOSVFKEbviKYLX8dkYDc8Br8LL7yCKMfZ7x731FWiHC90ZI2pw6zguZw4\nfBxCN0DNC/PiM6zWfw6/s05/mYn9bn8eFG6oeTVFqesIJS9YQ3NEqShncBqoeuNhSZS7+uqr8a53\nvQtzc3NI0xRjY2O4/vrrD8XY1kSSZRAbHGGly/raSqUxy+YiKsoVVd8ka7d/+L3u1fCqE16BqfaM\nSANx3Rx+zxhRrluZWEH0Y3HfXuF3u1I3f17lqfciaky9ylM3n/OkkRPwpxd/2NqJCVAZuiSyf7qj\nXLcSfs9SAUHTeE4bO8V6XQA4d8PZONCa0prLJMX5ZMc3fk7P8VDz5fGyolx1QxcAeMdZeUnoh/b9\nWPmcs8PjmJpoyA009ELEBfOXNlHOV/Bdv8Tcnu3MwYGDwWBAmQe+GyidvIRhVjEv9Pers9+7eUme\nAWYdqQ0LcqJufAauL7zaUkw9VZnfPlPqcZpgtDaCD734aty58258d/e9OZeAPZs2K++rjLEgO5rG\nLWr0u9WeOn/vZEjkZXp1aN2k1HXF76kxdSUrg3dpc0rZDHI8jvFzLr/zwncBAK759z9CO+0oMfUv\nbLtNOZb3DyCpMgap6mLohWgqTYTUOUdGtQNHtnJl+6jJ0UqzFA/vf1Qw4g8rotzZZ5+N22+/Hc8+\n+yy+853v4POf/zx+/dd/HXffffehGN8hl+Wx34se48iUhi7ie/LUK/PUU9g8W+U3VvY7eQyqYqHU\noo4VfrfF63wWNtCJSPI3L5jYip899mXGc5li8pysYwoZ6H3Iq6Q7+90cPuBSpdD18/pFjXLK+da9\nNiVP
vVQmVirBbql6JJee+HMAgNue/Jr4TI+pi7Q9kboTlFAiWScgUn5jE/156degc5FiqHk1UVjH\nE16vTBf0HZOnPo/BYACu46LBiH0lT50Ms0pPXVPq2nvvhrrRNTikOlIbBoqWnDr3I3ADqdSL72ge\n6OV4JfweCTJb/h0ZAWoTEooRl0IKjgtYUtpkq+Nu8Ls8J+0Hda8u4GNbFkt+zwaiXLHntJK2wnHg\nnnrVGrFB8VxEDNsNrVkAXPRW1D0pdTdUDEuFTAi553qMs2BLjSP5/p778Pi0bHx2WHnq9913H265\n5RZ8+ctfRpqm+PCHP4xXvepVh2JsayLLgUtoElD+LGBJadOJctT+L4kUT12F9xpK/qO9OQkRdtRY\nvuM4eUxdFJ/R46Xm++S5yVUx9ZdsvhAnj55Q+hyQnrRN+RpT2voxpioY+YB8VlXs9u7XYCSrIvUq\nSVNQe97eiXKp8CJsIRSbnLvhLEx3ZvHwvkdEeMQGv4cFZM/HpHt03bwGW7iFG5RRGgnkwdTGlMPv\nuTFUjqlT73LOGchj6nL84h1WGGa6p17FfjfJUDiEdfUxnDhynPhsOJRVLcfqY+r1ijE5cMR9dstT\nj9O8TnktIGPHE5/r9Rvy8+mEVgP8zgwGOmevRDmaC3XfRJQzrSX1GNeRMXXi4VBBm5i1Ji4pdTaG\nqi5tuoRegNl2y0p6JdHLvlatfTIAan6o9QJQjUHaMz0WguKQfN4cxlfe47Pze5RrHRZEuU984hN4\nzWteg9/+7d/G+Pg4br75Zhx33HF47Wtf21NDl+eqVLWntAlZmVQEomydeuI4rtgp5kZ56rqFCAAD\nWqpFt+IzOtSVx9Tteerr6mM4Y/x52DIuG+jQ+KWnrW/y8h6q2KV0HzalboLf+1HAiqVvgMl78dS7\nCfcg6NmpKW09xtSzpG9PnWTjwAZc8bxfQsOvl1IndTJgHodXPXX6Tu8aaBObUqeN3XM8REmseOri\ntzr8XhDfOPu9k0RoJS2Rssg5AL4bGAld1US56ph6N6UeegH+8KL345LjLhafkQc+4DdxYRH/JqE1\nWPNqpTVrY7/nRlAiqxyKeLiq1Dup2VM3IlFkrAlDoDqljZ+DPGslpa3wwo0pbZo37zmeeEbkdAhu\nRJooho21S5tClKsmxYZemMPvDObnwkl7XCpj6qTU3VBkf4zWRkohH0JwKJSUj8fciljcj2Z8HEqi\nnHWl/Nmf/RlOOeUU/P7v/z5e9KIXAbBDJD9N0i9DGWBEOaSiFSmfpGoPYYcxQx3Rq9jGfm8GDfCy\nwyalnrcrLbwSLWXEdYo8dRFT12J1rod3v+Bt+NYz38WPDzwGgMdnu8fUq5Q6eeoqm5l76oaUtr6y\nDqo9dfL4Dgb64qEGWsi0cfuub/XOS2Vi04R5VMszij1HFjmS5SdJ6UkPWe8cJ1IKe1Xqrmos0fE0\nxwaDpgK/K1Bkcd+UhtfwG/Ad1VOXhWcKpa7D7+y9iZKvFcZeCX7X2O+9kF71ve1lx1yEOIvx6hN+\nrnR+UoJ8/gr4vUh3pOcmY+p58SFppHBPnbUkTswhEhM7XOTAFyTYvAphb0Q5krpXE/dXHVMvw+80\nj0UJajdAK2nn64PVOLCtf7cLJ4YLoWQ2fkzDb6CddMT4SKqMQbEneqEw7AaDJv7H927Ix1qsAzJo\nXJaepztHW8ZPw3xnHmeuOx1f2fF1g2F8GMDv3/jGN/D5z38eH/zgB5GmKd7whjf8VLPeSUR97n7g\nd3D4PSk2I/OEdeGIMg2O44i2rRkyeAa4mkOTgFkh1A3QEf0t8tQtxWdMYxSxSAt7XFXqZvg5v34X\nT92YhtcHUa5APnLmrz2m3g+jXhfX4KmnaY7I1JwQekta+bfJU6f2ucsbj+O4AnbX26iSARN4Ovwu\nIdnlwO/8WJk3P4ADrSmhjEye+mhtBL9+1ltw7NDR+MsHPqlUU5sqOs
hRkRvuqQcsT53Pu6qQhTVP\nnaVH9ivrG+vwy6f9ovl6xbjqBnJVKkiUKg8h1j11xvjn5C7yoG2Mfi6eMAy4p14VUy9/V2N17avL\nxJYZ8rQ2aF777J6ILMiNQkDlzHTjxHChfauVtAVPiAv1qtc/r5o3p42dgsemHscxQ3mVufWNvBdJ\niSjHPPXNA5sw3ZopeervPvttcBwH908+pPyW5LDw1CcmJvCOd7wD73jHO/C9730PN998M3bt2oV3\nvvOduOKKK3DxxRfbfvqclqqqWzaRiyJvMsEnfH4uLY5E7Fjk1m6UxsiyFI7wUBj8rtUF5zFxUmjc\nY9DhYMdxkaaRFX7nx+rjtcPvTKk7ful38rgym183HgipkMq5P4XnOnkN66riMwflqfOYOhEbGVHu\nUMDv/PyyHLEOv/OYupko1+mVKGdR6i875iKcMn8iHp9+smC/G2LqbD5sXb+lOIen1Knfv5R3OBsv\nYtXcU/fdQHhIptQ2k9igapnSuLIbKhnWeqoboIZmAMabKdA4/X3Fhh7gQPkdGUu3smp1ABG5+lPq\ngReUoG9bzQenaHcrrlWcT0dsklQ+A0fLEFH3wmqkjUuo8DZ8QXIj0YluJm6TLm/f+mv4yfQTOGv9\nmcrnOkIZspj6L5/2i8r71e9FZF7o8PvhEFPncv755+MjH/kI7rrrLrz85S/HRz/60dUe15oJbbzd\nCBlcREobcg9OLwNqY3k6joPAK+B3pUubnABNXamzScpjeyR69Sa3IMoJ9ruh5CGgxrT07la9wu+2\nrmk29isgFyPVHu938svc/ooysQcVU2dKXcTU89rjZaWuoiRcEhYCWb5Sd6yVqnzmqetwp6956iYD\niIutq9YFm87FL57yGgSujyRLxJziSt30/qiiHcmBVt4ucx0pdV9nv5dDSZXsd91TL+YyndeUZXEw\nQuvO5KlTXwA9Y4A8XBFaEHnt5kYvZU+9KqbO2e8VMXXDuwld30CeLV+LyGD8GFdT6tJTl+uDxiXG\n3MO+aBJO0jRlAPF3wffDqnnTDJo4e2KrtREUreeQ1S9wHKdyj9JRInnOwwB+N8ng4GDXmu3PdZGe\nej/wuyw+k2Rprkxh3hiVggsg+D1G6AbGMrFVRLnAC0qNUfQFlDebyST73TLJldxgLRZZ5ambWszq\nx7mK56eeq+HXMduZw0A4gLlovu/J7xgMB/0+Ds5TZ/C74okQIdICv2sLP0+FOzhPXalcqKW02ZpP\nLMtTrwipAPI5EClJ2UQNz9p1XGRFvXbXcbFfU+oNhSgnK8pxr7+vinLFO7vkuItxyuiJlcWFliNC\nqRuMaWoBq3vqLa0Eq/TUy/XK8+PshrR+TWEsas5E+ZzldxO4QdcWxvJYXyHliZ4OBdJAyjZm60O/\nF7X4zPI9dV14k566VxPkveVkvoiwo2C6987NMWUw5b89DOD3/6iyvOIzjP2eJgJ2IuEKs+SpFwSQ\nwPEtRLn+Yuq+o27IBEVFBcnEdl+qp67B75UxdTV+azrOZp3nY88NkqFgALvR/+QnQ8jEnh0MB5T/\nLkc4Gc/EfrchMqZ+6sKjWWaKHT0b3jtcJ17psb6DTWmrIk1Rbnp3T116sVypj1s8dVNMfTlEucFw\nQIQAVlJ8QZTjxrQZfpdd3chTN8XUy0pdV1ym+S3z1HnxmSqinNlA0zNHbA1kSqmSwlNXq2imWVK0\nkq5G6vqKqVuKvVx26utx+vgp+MGe+8VnNb8GFHy55RjQcv4V8LsrPfVuYkPBDgui3H9UWVbxGbHZ\n5kS50A0rJrJa2jRw/VzpZrERrq6C3yn9jluxusftOLL1qg16B9TJWI6pV8HvdgVgttTVY9Y3xrFv\naf9Bw++mxfSSzRfinONOx3Ay3tc51fPzmLpU6rKhS+8x9UgQ5ZYfUwfMSt3GyuVNZyT7vfoZV4VL\nAKaojJ56+d4olEUow4GlAxgJh2
T+rwazi9KwPcbUgx5yuldSJPzOY+qF4VLEkzmC4sARz8oUUzd7\n6nZDWhzD6sqLaxUxbFPql0nh673lbdcC9CZV0nERxqrHiXLyGdiIcgqZuEskmO9xfBzj9VFsGtio\nfKY4OcsIvZXQr2Ke2pj3XGjm6b0OVprXUSWH7krPEUmWUfudLN0MmShKYo2pM6vYgcxj7SQdY661\n3hKQT14iS3HvTFegruOK4jNVzQ1M0LEVfudePdtQdW9CQlHmhQwAv7blMlx74X8zFs/pRWxxfyDf\nfE9Zd8JBpWIqSt0tE4FMJU3z8WhKvYBZHVTH5KqEEzJFKdJis5horMNRQxtwyuiJ2m9kwQyZp179\nPFRP3RSHzZ8DFfoIe/XUi+d2oD2N8brZ0ArcgKEPvcHvNk99tcQX8HvZU5fwez4mikUT/G6MqRv6\nrOvz2WS06p66jrDpwnlC73j+m3HuhrNwwvBxxgY+JlHeM4vfC6KchmRx5FEv3Zt/Xt27Qb22uYIb\nOT1cedd7jKnbRA/b0fPpJzVSN6oO25j6fwTRU4V6Ed2D0uF3G5zpOo5CiDFN7BL7nR1PE8fU5hXI\nreI8pp4T5So7FhmgY1E8pgS/mxWZbsiYwgn6c81zREOj19aLOBWe+koIN1RojDFLe7ShEDweFxXN\nPOI0XnY8Pb8em2epyn5v+A3c8Jo/xOTknPIbjxkethxoXbhXYdrgfU2pB54vshdMnhFP96K0o3WN\nsdJxQAX8XpnSZo6pr5aYPHUb/A7kY5f53arCsMXUTbXfdSnVfmdKKHADER6R55DP6eyJM3H2RM76\nDrVsCZvoDHOdKBcoMfW0tDZiRiDUr9UNfm9oIRoScnr48+LvZTmoWM3PiwrRb8M+9iYyXhLNUDuU\nSv2Ip67J8uB3IsoV7HeNKGf31B3lOo7hdVQp9UQodUZW05jPbo/wu5KCJzx1G/xuJrjw9JVe4mim\n++rbUxdxu9XZyFX2u+yqB0gkRIzFEJYgBII89eVC7/n5WZEjLaXN/huTp957TN2GgABQyEi29Ed+\njiRLsH9JjaeXxut6Zvi9itXNPEHgUHrqZaKczn4HVIKZ7Don0YuO6KFuRtwAs6Hia54yjeGNWy7D\nZae9vnS8bU/jtfd7Vep5Zo2u1CX7PclSFV431LxQ947qOcn3Qf6cKB2SK/XQC2UBomWst9effCn+\n6/PfLPZLUfK4h72J1mhS8tSPEOXWTPTuV70Ih99TAb+bK8rpStAGR5Ho8DufpFSowrYZUBGcvPVq\nZG0Go/9O1sw2w+9VE5TieUpFvS5saoA1Tehz8gsjYpXsU1PxGVKOeg6uKR+35oWYjxZWyFMnjzeT\niFKXWJ1ClOsxpt4rUY48dd/18/WSJMb7c5mnPhflfdRHWG11LnEawXM8+I6nENGqe957aqXGVffU\nTUQ5CacnBk9dHKcxq+MsRieJRCU9SXrTeQL2mHonVT31F0xsxUx7rny85b3zPaaqXGtd89RlpUIy\nKnLERucV5NcuG322DKFuY+T9y6k4F3/GAZUmXuZ629icwMbmhPi36KFe4RSR2Dz1fojXBytHlLom\n3dpTmoQT5ZI0KXlwtoILuadu/o5EWXCOa4SAbL19yVPPkCFK42r4nS8wbeMxdoyyCG0KrkUx2Bav\nhN/7m5K99KA/GDER5TiMbYXfWdlWQBLlDspTh4zX6fC7TXhBn07SW0y9GyxK72opkUq9V09d1Nu2\nVDaMkgie6+GdZ78F47VR8Xl1nrqrVmpcZU9dKHVD8Zk0TUvwOzemaX3ynO520kbNqymdzspNagxK\nXfOUlb72BmPPZsz5ro+aF6KddCoNvoa2F9E+QHX9SdHLMsplWF8PP8r7q35neoEiEkFiY/Pj/I3n\n4Ad77keM+KCaOZE0/Dqu3PIrovJclfA23CR6OHa15YhS1yReBvzusE0rQ6ZMeECbyFpuplpn2xS/
\nVIlo5lxT7gnw68owQIas0tI0pWNtaK7HK455KV648QXKsdWeugcgskL03Zi1/XrqVXnqKyGm2u8d\n5vHaSGWiwhsp9TRPaeObU99jUbgbvSl1TmjqtfY7Z1BXsd+XIq7UzQYgIOdkkqWIRA9r81zsFAqK\nNxji1zSJ6xabZqETV5v9fu7GszDdnsYpoyeJz9TiM+pzq2uwtXJ8mhs6NS8U84ofR6LfE49pC2PN\nQtSsKqVM0vSbhVKv8NS1ctR6SpvrOHBdr6j9nhrXfam6puFv2/hITDD4ySMn4gUTW3HxMS/GqWMn\n588vWX5NCF0uPOqFPR3nMMOb5FDG04EjSr0k0lPv/UWQoo5ZaomtsIKep94Nfuf1zR1HpifRZ4AK\nv7uatc6vXeWpK0qYxdT/82mvM4zJvgBNaSy9xNTXFXWXx5h31otUVZRbCTF1aePK0Qq/azH1OEsQ\nZwcHv6vdAGW5TpNce8HvYNf8sxgKB0uKtpdNxnO8wuOsYr9Tdy6v0lN3NQUGlHsQvPH0y3DTI/+M\nczecZRyPz+akziw+1DH1jc0JXHH6/1MaA8DhdzMbW+9HEKUROkkHw7UhpbKc/hz1NeeymupGT52v\ney9AJ+lUhmoGgiam2tO9x9TZM6fru8j5Gzw7RL8f1enovfjMADOGTXNspDaEtz//SvFvv8LIXE1x\nDDH1I0p9jWU5RDk9X5OTSOjfJDppzma5vuGU1yq/yVu6OszSd0UqTGhjv2uwTxX0W1U4RZduMXX9\nGFsogssLJrbif7z0A6IdZ69CnvSqsd8V+L0gyiVmopyJ/c7T4KIVY79nXbsJbh7chM2Dm5SxmO7J\nJp7jIUJkPJaeA7Gr8251ZSKUPJf01IkUple+e/Hm83HRUedZYUoOW+t1v3Xjai26SfKYuq7QuDLk\nLXJdx0U7aRfw+3qR9sbPJ/+tKXVH9pfgxWdMvw/dQqlXrGtKDeMhAF0aGoeAEB2h1AunI6HiM33E\n1LsZ5Tx90HUcbGisF/PbJNRB8VDGsgHmqSsG2qEdwxGlrsnyWq/qMJTe0EVV5OJvx1FyR7l3z3s7\nu46DJFPJKa7jwslSZMgUZii3hPV8+Wr2u9kIMUnVpmkqBtONeEXSr0Ln5zsUMXXeG5u+s9V+J8VX\n92tio8tbRx5MTJ1CKSnrJtiDgu4C5Zp/4wKJ+fxCqUeLAFT2u61MLJDXVSCI2dQtsGpeHT98LDYP\nbMJQOIhHpx5Xx6oZr6Yw1mqLxwrs6ErdBL87joO6V8NCtIg4S1DzQpWs2oXHkqMT+We8+Az/niT0\nQiBaqJwrVI66Hbetx+hEufyarigTS5yfRCsTqx5vHmMvISF+7O+/6Hcr50veLOrQqzcT+72fPiIr\nMoZDerXngBxM8RkijBDrnMRGGuuFKJd/XiguSPjdcz1cdf5v4meP/RmcNSG7DCnFYDTjohp+78dT\nr0ovkkiC6dwrrXxFLvwqQa7qZpLHp0VMHfba7wNBE2854wq85oRL4LkeS1s6eE89ycoV5aqkRLrq\nKTWnjLiQiBrfrJa97IhWvj8eb+50IcrZZPPgJlx74e/guKFjymMt/keyFp46h8L1+LWSz86LpPh1\nwVIPPa0KpW6I6YWd2No2OSI8M0NmlnT31E2FcEj0PHX+3/xvB9S8J8+AMcDvlv2uP2Ky1/Udj4TD\n1gyL1RSeCUVyBH5fY0kOgv0uLWY7LKtP5F6UOq+hTIvVdVwcO3Q0jh06WjlWh4D5wuoVfu/GQKcN\nxhS7pPM4lvtfaShq1T11Le3Gc1xECfdM7AbLeZvOKX7nlbp0LWssWj0EOnc3qardb/2NI2PYuugk\nN5/VHDcZhLzZSbsLUa6bmNaIHmZa7Zi6SarIiA2Pw+9qrJ061tV0pd4lZOJq6ZSAqWBNzovopbCT\nXg/DJJz9rteyoL8910MUt0vwuzASLeull5RU3/URp3FPSNPb
n3+lsfvdaoux7/0Rpb62ImLqfbVe\n1QgjFUQ5VyP02Dq4cZGQuyP+bduY9eIzSqW4KqWulH6tnoTU/tM8gatj6iutfE1NI1b0/FoYJfe6\niW1sJ8px8RyXNfRYmZh6KuZpb6Q3fTy9/sYEHerQOcUv+RhN407Savi9FzEhMjpRbrXZ7yYRSp0M\nPvbcVPhdvv+G1i60Wqnb2e/yN+V/R5CeeiX83pNSL5fFLSl1hShXDkHa4Pde0JW6V8M863ZYJb3c\nz2qIyaA8lHXfgSPwe0n68YBISuz30mZvg5x6hd/VODVPU9JFqQbmqkVwqhSK4ll3URSyPW15+tB5\neojMdgkAACAASURBVAk/rIQcyjx1gt853Nmt+hqQP/d2rHbpWo7wHNh+yhmXmNQ9eLK0EZnh97JS\nD70AoaGNJ79+WkGU61VshiSf52sJv0eG9rYqUU6F30lKnnq3mDrc0nvUETbZkCQsjUkXnjJmE5Ws\nZvDUi/BgnMUitVd8Z/Ls+2i9CsjnRc1xDkcxI0lHPPU1lTRdCfjds8LPjmadul3qbPPPXUWp245V\n86ZNRC+TmJjbNiGlbmKcmzx1G5lsJUQaEatfJlYPl+jeUpWnTt5FFa+hm5DiSvsoEwuUsy96eVYS\nfjektOnwu+PjdSddipn2jOVcxAWQJVGXD78bkABXXW9rQ5TT4Xez8lZi6izVrQy/V3vhvXjqtLcE\nPcTUB3qon2Dy1NVeE/n6MBH3TMfbannYhJ5Xq4LMt9Zi9NRXyeGwyRGlrsnO2WfhwBGtQHuRUkqb\nY2e/l+F3tUiESYRSZ/WMbd60yn53lQ2uZ6LcQXjq+lj1c698TJ2IcqsVU+flbt2Sguwla4C/4xVr\n6JItD37v1VitjqmXPfVTx06qOBcp9VSURF0usnL4wu+qYc/HqXQNY+/CBr+bFHaZ/e6U7tMUUweA\nowY24rGpxytTwHry1P1yBT3d8/Zcjxn95T1AXT/9euqFUk8OX0/dNPeOxNTXUGbas3hk8nGcNHI8\nhsLBnn9HCkUtPmPe7Ks9Pwv8TqVXC4X9+pMvtaan6QUoeoffuRHSm1I3KVK52NWwguu4pVSflZDV\nb+iiPhcd0ehFYfJKXwcDv9O9UjnifAzdnydnmver1KvY7yTd7kkUn8kStNPOsqF3wOKpQ1Vwa0GU\nI8MiSgxEOUtMnXvwnP1uLOCj3VNvMfX8PJsHNuJ/vuwPDzqmbor5K06KYy+bHLp5kxXfsl56Wb8E\n/1el3a21mD31I0p9zeSHkw8iQ4ZzN5zd1+9o4xdWuutardBSlzbX/B0XQUopNvWqkoV62dLe4ffe\nvcmq+vhSqZc3oTRLVzxn02RErMb56W+1eIZj3cS48I3s4OD3cky9l/vmpWl7RTRE213DPQVaTe9u\nhoLI4U4TREln2SS5/HrdPfW1iKkDBTGtC/yust/1mDohcfZcf/5vPczgaTUQeOvkbu+o3/LFYt25\n9vXBDbBfOOlVOH/TOaqnzovP9DAvyVNfSg5jpW6Zn4dSDn3w6TCWx6a2wYGDF2zY2tfvyvC7nqdu\ntkj1MrHWlDYD1GUThf2uGRdVSn1ZnnrFBDZtQvm5V3bKyZ7tq7OR66GDKvjdpmAVg+kgjI/lprRx\naLXX518Fv3uux8pwdvcLeJ56+yCVus0TshnOh1Jcx1XSWklMLVoBE/xezuXm5+bC89TlufU1Z3+H\nuvQCv5uupZNgfS37huT44WNxwaZztfFxflH3MZ4wfBwA4DgtjfdwEhOfo5cQ2UrKEU+dySuPuxiX\nnPZijNZG+vodTU5ZfKaqS5u6+fQC3/ZDBiOvxdRYxq+KqSvhguoFlhYtLk1KzJTqkn/e+wbTj/Rj\n8Czr/EqsttzApSeinLsynrpkv2d9pV6GXlD09I5XBH7PzxkijpcQ9FAhT6S0Fez3sXp/9f1N59I/\n03sqrIV4riSJ2eB3rvR4
jDr0QjHXTEpdvycXTumzkqfehyEdegFOHD6+khuhXN9w7nI2T/V1bbwj\nm7zs6IswGAxg6/rTexrjWkhVmu+hkiNKncmJI8djYmIIk5PlXsRVQgqRsz57IsqxWu5AFVGuv7Qt\nYlsvt/hMt1KmSYWnbuuapocQVkpkWdrVZ7+bUth6SWnj8cqDial7Qqn3V1EOyFv4znTmep5DrsET\n4xJ6IRbjpb6IeklRJvbgYuq24jNr76l7joelNCdxcViaIxNc8ZZS2orfGDvdGZCv0hpz9TVnz2Aw\nyXvPe3dPxwHmEJwLV+sUVz3XHM0g6Cae6+H8oqDT4SqHQ0z9CPy+AmKC39VmBW7pWPq8lw5m/Xqj\nvL1jr/B7Lyxukgz2mLrNU18tj1oQ5VaL/a6XieUxRK0egU2ZjDHk56DY70SU6zOlDZBlQPsxDKuO\nJ7JcL/dD56CqestNZwPM79lzXQ1RWTv4ncS2rn1rSlvNmPZF4mjfuY5behZ6aKcb2rIc+S9b34SX\nbL5QlGDV9zm1z0U3T70/+P25IEb2+xH4/bknrqbU8/7OZvhdVwL9pbT1Hw9VN5TeWq92i/tWldI9\n9DH1VYbfKzx1volR1yqTjCpK/WDg9yJPvc+UNiD31Pk5ukk3hUDZF70gDzTGpaJoSL9137kcrmVi\ngd5SB/kxDd1TL9a36Z3Sd7UCIXFNMXVdqVeQHZcr52x4Ps7Z8Hw5Ls144Shft7WuGMw/Jf7lEaLc\nT4kYU9p6Yb873fup8/P36oHw5gnLab3a1VNfBvu9m+e3XFn1inIa18BGlKu6vqrUD4YoJ2PqVCSp\nVyOJPHW9balNunEgSDH30nWOxkhKfcXZ71A99bVkv5PYnpsaUzdXlDMT5fJ7omfnOk7Ju9XXramJ\nykqLTgKu6oVQ+q0FzXwui8mgPNQla386nuQai7n4jHnCqjEoLaXNCr/3GVMXHbNUiK6y9rsFWTDJ\nJce9HIDa810/jx7H6ze+16vYjIiVElOXNvFvuGwjtr+bsfoKwe/FNfplvwPSU2/1mA7UjTlNcfHe\n4Hfy1JeK3x4EWmErPnMYeOqu230N8c/LKW0VnrpDnnoO2Tsoh34OVciLi14tshcnhaTfmPpzQXSD\n8sotv4LXnfTqQzqGI/D7CghNTqWfOqQitjFz86IsPRSfKc7fa9yJQ8J8QXdr++k6eS55N2/yxJHj\ncOMr/rgSajIVy+BjWylxsLobl26Q6cQ43jXPJtxTX7nWq0lpblUJeQtkeHaTqtrvAIffD7WnbkaH\n9PLLayG9wO+8JWejj+IzdE816rjmqOiEmVy38jF1XcqeupodUv3b/irKPRdENyhPGDnukHvqR5T6\nCggtLtnMQeapl1iryyoTS6VQ+4Pf9bhbt3guKXU9NcYkto2zK1FuxYvPrHKZ2FJMvYy6VHXNA3T4\nfYVar6ZpX5t10++vuEjP8HsfSr21EjF1S21tfV2thVSlN775jMvx0L4fKz2+qVwuGd/0G2P72uIz\n8tR1Q9JkCJjao6606PfMyX/dvO9e0kGfa6Lvi2sxF1dVqV933XW4//774TgOrrnmGpx11lniu5tu\nugm33norXNfF1q1bce211yKOY1x77bV46qmnkCQJ3ve+9+G8885bzSGuiEj4XZKXbNCX6LhW5Jn2\nxn4vFmePClGy31UPpqunLmrLH3zctwy/r1ZMnVCM1So+I9+X67iKEuObZpWC5Z5pLzFo61gE+z1D\nWqQs9irNPr2FbuESyX7vnSi3KOD3lfbUvdK6WgupSm+8YNO5peIrjuOg4f3/7d17cFTl3Qfw79lb\nsptsLpvsBkWQWyAxXGPECt4LvBV8tbWjb+hL8QIqgzBtGVREkfZ9RwjephenIzhxxkGKtIwzL9Mq\nOB36zlgbcZSRNqmONfOq2DKScEnIDXI57x+bPXt295y9nrPP7tnv5x9Ispfn2bPnPOf3PL
/neYpV\nCyjFS5SLHFOXpMh6aj3HrHMuolxRQwAlqj0zEo+pi8+DMJpeD2U2mdaof/DBB/jyyy9x4MABdHZ2\nYuvWrThw4AAAoK+vD62trXjnnXfgcDjwwAMP4OOPP0ZnZyfcbjf279+Pf/zjH3jiiSdw8OBBs4po\nmNCBCy0+o46QY/dFjmyEkpqnjtSiUfUFOdl56uqyZZJYo5fUl8rqVim9n/I5m3PyRHfvq99natmV\n43+zJ50r4LSnf8qpt14dHhtJaapMypF6wu73YOOSyuIzQwZ0v+tmv8Pcm7tkpLNxTpW7MuY58aa0\nhRPlkojUQ93vJk6pit4LoTSFRl1vJ8t8ZulIva2tDUuWLAEATJ8+HT09Pejr60NpaSmcTiecTicG\nBgbg8XgwODiI8vJy3HHHHbj99tsBAD6fD+fPnzereIYKHTh197tupA4p4l9T5qmrxtKS3dAFCJ9k\nRkTqMZtLJGgk0pVqvkGqom8aQtP5JEjwe6qUvyVbr1BvTiZlGZVHcWboHCZ4/Ek/N/1IXS9RLpUx\n9cgpbZnNU1fP5nBieGwY6h3LRHW9A+l1J2+c/6Ayym6Lc/6F6lekatRD/waHzGKfUzS+Yl0mwx2J\nRETqkpRSox4Z5VukUY+J1C3UqHd3d6OhoUH52efzoaurC6WlpSgqKsIjjzyCJUuWoKioCCtWrMDU\nqVMjnv/aa68pDXw8lZUeOBzG3on6/d6UHl8xFLxghhZl8VWUwO8LvobT7oh4Pbc7PCXF7/ditHhQ\n+VtJSZHmexcXBS+CxcXOpMpWXOSCdEFCTaAc3rPhCC0QKIt4XPRrOex2YATwV5XB703tMwgp+TJ4\nIfG4I+tS5ArWu9zrSfnzjft+XwXfr8zr1n3dTN6vzxF8rt1uh9/vRWfv/wEAFlzWoLxuudsLt6M4\n7vvU+2fgk67PMXXChLQ/24r+4AVzQOrD8NgwJvsuj3lPvTJMtFUlfIya91Twe6P3uVaeDv6uxBO/\n3gAw6Az+fXB8y8zqyrK0j0n5YPjmxOVwYvjSMALVZShyBc8Rm81m6PcrFe6icOOZ/Pc8/JiSk+Pn\nTnH43An96+0KHo+KklLlvfx+r9Kou5yx14YfeP4d11w5G7VXXJF2nRJxu8Nj6L7K0ojkv9KS+N8N\nqX9Y+X+Nvwwuh3k3H9kyNBJZh+pqL3zu8GeQje9m1hLlZDmc9dnX14fdu3fj8OHDKC0txb333otP\nP/0UdXXBNX337duHjo4OvPzyywlf99y5AUPLmc4ysX0XgtOELo4Mj/98CWcxXi5Zini9S0OhSC34\n+/OD4b2BhwaGNd97eDh4szB8aSypsl0/4TrM8E5Dd3cfBgbC85LVz9Ws5/gh6jk/BPtQap9ByKWL\no8q/6tcfHQm+eH+/dh3TdXEoOOQx0H9J83XTOZ5qPX3Bmy6bbENX1wXM9tXjfwfew6Kabymvu272\n/bBJtrjv89BV9+HcxR7Yh9zoSvOz7esLfs86u04CACrslYmP6bih/nAPQTKfx9Bg8HMd1Dlew0PB\n7+ToJTnh6/X0B7/joWvAxf7kvsdaLlwIT8lzjF++zp0dxMj4OSLJydXPDKHvOAAMpPE9D10bRoaD\nn6n6eA6ON4C2kWBynW3Uga6uC+GevzFJ4/2cmFE809TPY/hi+HvV2zMER3G4UR8aHIn73ucvhq/d\nZ870w2HL3d3XknVpdDji53NnBzDqCgadmV6L1OLdHJjWqAcCAXR3dys/nz59Gn5/sLuws7MTkyZN\ngs/nAwA0NTWhvb0ddXV1+N3vfoejR4/i17/+NZzO9Lvpsinc/a4eU9fJfo/aVSxynnr8FeWS7eJt\nDIQTElMZw1ZWtDIgmStmYQyTprSFp9CZNaY+fryUveyX44aJ38KEkhrlMb7iSs3nqrnsLtSk0F2u\nJVTXU/3fAEBEGRJRL3SSjMTd76nMU498DSO2nw2+jk
P5nZIoJ3RMPbNs7lDZtbLfQ9+/ElcJfrxg\nnTL0Y9PIzcmm6CEHdfd7wrXfLbn4TPTP2f8+mvZJLl68GEeOHAEAdHR0IBAIoLQ02HU0ceJEdHZ2\nYmgoeAff3t6OKVOm4OTJk3jjjTfw0ksvoaioSPe1c42SKKexolx0xno4Szc2KUZ/8RlbxHNSKlsK\nz4k3ppfqa2htQKH+1yjhi7m5U9pCx8lld6bUmBpalvELRPfgGQBI6Sah3FWG+f7ZaJ71vaQer7W1\nplpoXDy5FeUiX8OoKW3qpLHo80qEZDb3Seb5tjjZ7w7JjukVU1DmCkZqRiS3ZiJ6lzZ1EmSia0/E\nNtQCcyGMFLObnpWy3xsbG9HQ0IDm5mZIkoTt27fjzTffhNfrxdKlS7FmzRqsXr0adrsdCxYsQFNT\nE1588UWcP38eDz30kPI6ra2tcLlye6xF0mjUtRpt9WOV7PeIFcoSZL+nEYWk8hxDst91ppiZtvUq\ntG8ijCJFJcqJFL0TWcBdncJzJTw4Z3XSj09m61UgyUS5qBtbo5aJnVo+OTiebHcq54jISN2pSgBM\n53uubrijRW/oEn6O2EY9XnJgoim46il3VpnSFn1zYqnsdwDYvHlzxM+hMXMAaG5uRnNzc8TfN23a\nhE2bNplZJFOETsbQalHq7veYL7oyPU1jSptB2e9qqUSwoQumEduD6k3lM6v73bR56iavWJcK9WdX\n5fZFNCLGv1ey2e/Jb70afq4xkfqd05fD43CPz9kODcOIaxyqi33K/9Nq1OPcVOvNYZcMOGczEbEi\nJrSvdXrM3oxJhNhI3ULd74UkJiq1qeepR3e/R0fqqjvdRLu0mR6pZ74BhN7FNW83dFFeX8xFU019\ng+Z3V8V5ZOYqiysARK6GpzatfAoWBOZibnWD5t/Vos8Bo5aJVUd4uRCp+z3hnpN0bl7jLT4zsfQy\nlDpLcFnJBO3nCIvU9Zd6DU3/1H9u+te1XBUTqVup+72QRH+Z1fPUY7vLIi8+kWPqemu/p9+wpDLG\naBtfxzyTC2Pi/dSNvfhka+vVXOh+V1/8KosqTH2vef4GtFz/NLyuUs2/e5xurJ29KqnXij42mcxT\nl3QaEfWKcqIEVI16Ot/zeMvEzqiYipbrn9Zd3CQ3EuXCx0CGrKxLoPtcmHtDLoIkSUr9ATE9R9b5\nNAWKvpDYJVtwuUpIMSd3vMgi8XrqqX9B5AR3y2o2yZZwL/VkXkP9b/TvDe9+D3WPm3Ty5NKYurp7\ns6JYO4I2kl6DniojE+X0tuuM7gETwe9WN+rGRuqA/mp6gMhEudhV9DzO4Jz60K58enLphtlIyVzT\nzcRI3QCxDVhwLervzlgek6EcHgOO/SLrNUzhLu3Uv/yhLTqTMbuqHmdKzqb8Hmp6jbppiXJZyn7P\nhe73yEjd/EbdKOpj7hjfxCRdEZE6Yv8vMlJPZTqXlnQa6Fyb0gYAbocb/cMDylr/eqLXvLcK9XfQ\nUtnvhSQ6gg6dlEsm3xTz2OhlYiP+ppcol8G61onGtdTumJ75vr+6kbpN+/eZCo3PZtKlG4+SXWzw\n7nLpUN9YmN39biT1Mc9kL/Xo19LsfhcYqWf63okide33FBvtai31WlVcie7BMwmH/sya5iqaJEnK\nQl6Wy34vFFqJcnriZWvrLz6T/h1tKpG6EfQjdXNO4IUTGlFkd6G2YpqhrxtidiJeKtQ3j9nofjdK\ncDdCO0bl0YyS5ADVYkBRuR/Khi45Mt+591LqK4fFG1PXf05oGpyYS3nEhi7j171V9Xfjfzrfxp1J\nBAnRG9NYgTpwE3GTaa1PU5DobvF43WfRY+rxXkf5fQYN4mgGG4ikI1GinNERRYnTg0WXL8xon/J4\nwol4udD9rhpTz6PudyBc9kx7VGw650/0So2iNNXMBxA5vp6sdCJ10T1Jkkak7iuuxP0NP1BmUMRj\nG9/S2EpE9xoxUj
dA7EIrcSL1OFNvEi0Tm8+Rel1lLb7o+QqBDJdKzTazlrdNh/r74U5x2VfR7JId\nwxhGUQZz1AH9cdhcyH4HgNX1/4Glk2/GFd7LU35uaNnbVJbRDd8si4/U07mhkiRbwkVq8k30WiTZ\nxkbdAFqJcnri3cXpnRTh9c3NHVM3gl6jPru6HrOr67NaFiO47C4su/IWTCu/UnRRTFvfPhvsSqSe\nWaMeXgwo/lRRUew2e1oNOgDMrW7Ad6cvx5zqq5J+jjI7Q1DDqD7P00sOZKRuNDbqBtCa0qYn3jru\neoklmcznDHW/Z2vKS6LlRfPRndNvE10EAMCIPCK6CGkLRWMZj6nrTO/MhSltmfI43Vh65c0pPSc8\nDi/mUq41rTDV54seMjGaMqYuKnlRyLtaTEz2e5wxMSlORKH35Q5fsFI/XHW+WgDAzZMWp/zcdNT5\narHosmtQ75uZlfcrJKFuWfXUqXwR3hAnw0g9Yfd7YV3SzN77IJFMd6ZzSA7T8mFEiZc3lQ3W+jQF\nid5oI96XO15Xut54YCZJQHW+Wjyz+EmUu8pSfm46yovK8J/1d2flvQrNxNLLsH7eA5jsvUJ0UVKm\ndL9nOKVN0um1ymTTo3wmep66+tqXTiP2vRkrUFOVeOvifCI6UmejbgC9Va60xB9T18t+z2zhlnzL\nlCZ9DVV1iR+Ug2xGjanrRuq5NaUtW/R2b8uWTCP1ay+7Gn6/F11dqU8BzFWiI/XC6qsyibqBTtQN\nFi8z0ox56kS5INToZLJELKC/2EquJMplm+hIPd7Wq4VK9HeRR8EA6i6/RCdXvIQe/e73UJc9Dxfl\np9B5YVT3e3TXpujoSJRc2U9d1EIruUh00iZbCQOkcrcaP6FH1n79Ah0vJOswrvtdO1IXPY1IFCX7\nXVCkbtZKkflMyfsQ1LzySBhAHWEnumOO10CPydqNeibZ70S5wKjsd71IXdkfocAuaVpbOGf3/bWn\nGBYy0TeYhXUGmCRyAYb0u99lnUg9l1Y1I0qH0cvEMlIPEr/1KgOOaHozNLKFR8IA6STKaY2fj+ms\n/sZInfJd6Lwwa5nYXNh6VYRcSZRjwBEmesliHgkDROzrnGC5xniRul73u9teFPEvUb4JRZJOg8bU\no8crRScniRIabkhlZzcjKdNt2ZQoROdAcZ66AaSUut/17+JkaEfq8wJz8LDdhauqZmVQSiJxQje7\nGU9p0+naFB0diSI6UrcX6M1UPKKHgtioGyC9RLnYO1tZJ1J32hyY62/IoIREYhm3TKx2trXo6EgU\n0YvPcA2NWMx+t4D0prQlnyhHlO+MWiZWb8nkQl1RLnQzI2pKW6arXVqR6EidR8IAEYvPJIrU0xhT\nJ8p3Rk9piz7PRK/iJUq450Jsohwb9TBmv1tARPd7okS5OBcfve53onznd1fB7ShGmcub0esoW/tG\nRaaFO6aeI4vPFNjnHo/o7yLH1A0Q2f2eXKSudRKM6STKEeW726f9G5ZNuTXjRDmn3Yn7FtyNClRF\n/F50l6coohefUa5nCYKZQiI6v4NHwgCSJKm6BZPdpS38uAcafoBylxdXB+abV0gigSRJyrhBD1k+\n81bUVk6L+F3Bdr/nyNrvnNIWFn8pcPMxUjeIJEmQZTn57HdVpH51zXxcXcMGnShdUoF2A08um4iO\nM2WoLK4Q8v4cU48Vasw5Tz3P2SBhDKlE6oV18SEyk1YPWCG4YeJ1uP7yb4nLtGajHmv8UDD7Pc8p\nkUKChBVbgU69ITJTqPu30CJ1QGyAEN6lrfA+dz3hJYuZ/Z7Xwgkr6We/E1F62AMmRniKLpuSEEln\nLYVs4ZEwSLIJK+yuIjJeoW7oIhoT5WLpbQ+cLTwSBtHbEjKaxIsPkeEKdUMX0RikxFIide7Slt+U\nA5lwTJ3dhERGE73gR6GycUw9hughVjbqBkm2+71Q16gmMlOhzlMXzc5IPYYk+DPh
kTBIsolyjNSJ\njCd6wY9CxQ1dYonO7+A8dYOEvtQJI3XB2/IRWZHopTkLlcvmxHWXXYPaimmJH1wgwtnvXFEuryW7\nTGy8XdqIKD0c1hJDkiSsqr9bdDFyCsfULcKW5N2Z6ANOZEWcp065QnT2u6mR+o4dO3DixAlIkoSt\nW7di7ty5yt/27duHQ4cOwWazYfbs2XjyyScxPDyMLVu24F//+hfsdjt27tyJSZMmmVlEw4Qihegt\nIfUeV4grXxGZhSs1Uq4QHbiZFql/8MEH+PLLL3HgwAE888wzeOaZZ5S/9fX1obW1Ffv27cP+/fvR\n2dmJjz/+GL///e9RVlaG/fv3Y926dXjhhRfMKp7hwvPU4zfqbkcxAKB4/F8iypzoCylRiOgxddPe\nta2tDUuWLAEATJ8+HT09Pejr6wMAOJ1OOJ1ODAwMYGRkBIODgygvL0dbWxuWLl0KAFi0aBGOHz9u\nVvEMF96ZJ/5HWu324ccLHsayK2/OQqmICgNnlVCuEJ39blqj3t3djcrKSuVnn8+Hrq4uAEBRUREe\neeQRLFmyBLfccgvmzZuHqVOnoru7Gz6fL1gwmw2SJOHSpUtmFdFQyUbqAFBbOR1uh9vsIhEVjPCs\nEjbqJJboSD1r2e+yLCv/7+vrw+7du3H48GGUlpbi3nvvxaeffhr3OXoqKz1wOBI3pKnw+70pP8fl\nCH6U5WWetJ4vQr6UM1OFUk+gcOoaXc/KkRIAgMddZKnPwEp1icdK9SwucgEA3MWumHplo56mNeqB\nQADd3d3Kz6dPn4bf7wcAdHZ2YtKkSUpU3tTUhPb2dgQCAXR1daGurg7Dw8OQZRkulyvu+5w7N2Bo\nuf1+L7q6LqT8vNHR4A3IYP9wWs/PtnTrmW8KpZ5A4dRVq569vUMAgKGh/Dj/klHIxzOfXbo0Evz3\n4khEvYysZ7ybA9P6BxYvXowjR44AADo6OhAIBFBaWgoAmDhxIjo7OzE0FDwR29vbMWXKFCxevBiH\nDx8GAPzpT3/Ctddea1bxDJfslDYiMp7S/c7zjwQTnbRpWqTe2NiIhoYGNDc3Q5IkbN++HW+++Sa8\nXi+WLl2KNWvWYPXq1bDb7ViwYAGampowOjqKv/zlL1i5ciVcLhdaWlrMKp7hpCRXlCMi4ymJchxT\nJ8HCayZYcEx98+bNET/X1dUp/29ubkZzc3PE30Nz0/NRKEEn0Tx1IjJeaPYJs99JNGUmlNWy3wuN\n6J15iAoZt16lXGETHKmzBTJIsmu/E5HxylzBxCGvq1RwSajQiZ5eyQ1dDJLKPHUiMlbAU43/uu4J\nVBaXiy4KFTjR+xCwUTeIjd3vREJVuSsTP4jIZKKz39kCGSTc/c5InYioUIV3aeOYel6zJblLGxER\nWZfo7nc26gYJj6nzIyUiKlQ2wQshsQUySHgRf0bqRESFSvT0SjbqBgmNnzBSJyIqXOEli9mo5zWJ\nY+pERAUvHKmz+z2vSRxTJyIqeIzULcLGKW1ERAVPdPY7F58xyPzAHIyMjaC8qEx0UYiISBDRlS0R\nRgAACLRJREFU2wCzUTdIY2AuGgNzRReDiIgEUlaUY/Y7ERFRfgtPb2aiHBERUV6zMVInIiKyhnCk\nzkadiIgor3GXNiIiIovgLm1EREQWEVpJjpE6ERFRngsvPsNInYiIKK8pW68y+52IiCi/iV4mlo06\nERGRQcKJcmzUiYiI8lp4ShvH1ImIiPIau9+JiIgsosbjh9PmgN9dJeT9uUsbERGRQRqq6vDCjf8N\nu80u5P0ZqRMRERlIVIMOsFEnIiKyDDbqREREFsFGnYiIyCLYqBMREVkEG3UiIiKLYKNORERkEWzU\niYiILIKNOhERkUWwUSciIrIINupEREQWwUadiIjIIiRZlmXRhSAiIqLMMVInIiKyCDbqREREFsFG\nnYiIyCLYqBMREVkEG3UiIiKLYKNORERkEQ7R
BcglO3bswIkTJyBJErZu3Yq5c+eKLpIhjh07hh/9\n6Eeora0FAMycORNr167FY489htHRUfj9fjz33HNwuVyCS5q+zz77DOvXr8d9992HVatW4dSpU5r1\nO3ToEF577TXYbDbcc889uPvuu0UXPSXR9dyyZQs6OjpQUVEBAFizZg1uvvnmvK/ns88+i48++ggj\nIyN4+OGHMWfOHEsez+h6Hj161HLHc3BwEFu2bMGZM2dw8eJFrF+/HnV1dZY7nlr1PHLkSPaPp0yy\nLMvysWPH5IceekiWZVn+/PPP5XvuuUdwiYzz/vvvyxs3boz43ZYtW+S33npLlmVZfuGFF+R9+/aJ\nKJoh+vv75VWrVslPPfWUvHfvXlmWtevX398vL1u2TO7t7ZUHBwflFStWyOfOnRNZ9JRo1fPxxx+X\njx49GvO4fK5nW1ubvHbtWlmWZfns2bPyTTfdZMnjqVVPKx7PP/zhD/KePXtkWZblr7/+Wl62bJkl\nj6dWPUUcT3a/j2tra8OSJUsAANOnT0dPTw/6+voEl8o8x44dw7e//W0AwC233IK2tjbBJUqfy+XC\nK6+8gkAgoPxOq34nTpzAnDlz4PV6UVxcjMbGRhw/flxUsVOmVU8t+V7Pa665Br/4xS8AAGVlZRgc\nHLTk8dSq5+joaMzj8r2ey5cvx4MPPggAOHXqFGpqaix5PLXqqcXserJRH9fd3Y3KykrlZ5/Ph66u\nLoElMtbnn3+OdevWYeXKlXjvvfcwODiodLdXVVXldV0dDgeKi4sjfqdVv+7ubvh8PuUx+XaMteoJ\nAK+//jpWr16Nn/zkJzh79mze19Nut8Pj8QAADh48iBtvvNGSx1Ornna73XLHM6S5uRmbN2/G1q1b\nLXk8Q9T1BLJ/fnJMXYdsodVzp0yZgg0bNuC2227DyZMnsXr16oiIwEp11aJXPyvU+84770RFRQXq\n6+uxZ88evPTSS1iwYEHEY/K1nn/84x9x8OBBvPrqq1i2bJnye6sdT3U929vbLXs833jjDXzyySd4\n9NFHI+pgteOprufWrVuzfjwZqY8LBALo7u5Wfj59+jT8fr/AEhmnpqYGy5cvhyRJmDx5Mqqrq9HT\n04OhoSEAwDfffJOwSzffeDyemPppHeN8r/d1112H+vp6AMCtt96Kzz77zBL1fPfdd/Hyyy/jlVde\ngdfrtezxjK6nFY9ne3s7Tp06BQCor6/H6OgoSkpKLHc8teo5c+bMrB9PNurjFi9ejCNHjgAAOjo6\nEAgEUFpaKrhUxjh06BBaW1sBAF1dXThz5gzuuusupb7vvPMObrjhBpFFNNyiRYti6jdv3jz87W9/\nQ29vL/r7+3H8+HE0NTUJLmlmNm7ciJMnTwII5hHU1tbmfT0vXLiAZ599Frt371ayhq14PLXqacXj\n+eGHH+LVV18FEBzmHBgYsOTx1Krn008/nfXjyV3aVJ5//nl8+OGHkCQJ27dvR11dnegiGaKvrw+b\nN29Gb28vhoeHsWHDBtTX1+Pxxx/HxYsXcfnll2Pnzp1wOp2ii5qW9vZ27Nq1C//85z/hcDhQU1OD\n559/Hlu2bImp3+HDh9Ha2gpJkrBq1SrccccdooufNK16rlq1Cnv27IHb7YbH48HOnTtRVVWV1/U8\ncOAAfvWrX2Hq1KnK71paWvDUU09Z6nhq1fOuu+7C66+/bqnjOTQ0hCeffBKnTp3C0NAQNmzYgNmz\nZ2tef6xWT4/Hg+eeey6rx5ONOhERkUWw+52IiMgi2KgTERFZBBt1IiIii2CjTkREZBFs1ImIiCyC\nK8oRFbCvv/4a3/nOd2JWubrpppuwdu3ajF//2LFj+PnPf479+/dn/FpElBgbdaIC5/P5sHfvXtHF\nICIDsFEnIk1XXXUV1q9fj2PHjqG/vx8tLS2YOXMmTpw4gZaWFjgcDkiShKeffhozZszAF198gW3b\ntmFsbAxF
RUXYuXMnAGBsbAzbt2/HJ598ApfLhd27d6OkpERw7YisiWPqRKRpdHQUtbW12Lt3L1au\nXIlf/vKXAIDHHnsMTzzxBPbu3Yv7778fP/vZzwAA27dvx5o1a7Bv3z58//vfx9tvvw0A6OzsxMaN\nG/Hb3/4WDocDf/7zn4XVicjqGKkTFbizZ8/ihz/8YcTvHn30UQDA9ddfDwBobGxEa2srent7cebM\nGcydOxcAsHDhQmzatAkA8Ne//hULFy4EAKxYsQJAcEx92rRpqK6uBgBMmDABvb295leKqECxUScq\ncPHG1NWrSEuSBEmSdP8OBLvao9ntdgNKSUTJYPc7Eel6//33AQAfffQRZs2aBa/XC7/fjxMnTgAA\n2traMH/+fADBaP7dd98FALz11lt48cUXxRSaqIAxUicqcFrd71dccQUA4O9//zv279+Pnp4e7Nq1\nCwCwa9cutLS0wG63w2az4ac//SkAYNu2bdi2bRt+85vfwOFwYMeOHfjqq6+yWheiQsdd2ohI06xZ\ns9DR0QGHg/f+RPmC3e9EREQWwUidiIjIIhipExERWQQbdSIiIotgo05ERGQRbNSJiIgsgo06ERGR\nRbBRJyIisoj/B8O3Gy4ab+tfAAAAAElFTkSuQmCC\n",
            "text/plain": [
              "<matplotlib.figure.Figure at 0x7f4d16572550>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfUAAAFnCAYAAAC/5tBZAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzsvXm4HkWZNn5397ud/ZwkJyFsgqwS\nYRRG1MFPEIkERn/j+KnEccRlZtBRLhGHuWYmDoIfmHEDFdBRVFA2DWLGYU3Yl4RAEggJZCEr2ZOz\n7+/S2++P7qquqq5+t/Nu56Tu6+Iip99eqrur66nnfu7nKc11XRcKCgoKCgoKUx56vRugoKCgoKCg\nUBkoo66goKCgoDBNoIy6goKCgoLCNIEy6goKCgoKCtMEyqgrKCgoKChMEyijrqCgoKCgME2gjLqC\nwhTHaaedhq9//euh7d/61rdw2mmnlXy+b33rW7j11lvz7rN06VJ84QtfCG3ft28fzjjjjJKvqaCg\nUBkoo66gMA3w5ptvYmxsjP6dy+Xw+uuv17FFCgoK9YAy6goK0wDvfe978cQTT9C/V6xYgTPPPJPb\n57HHHsNHP/pRLFiwAJdffjn27NkDABgcHMSXvvQlXHjhhbjiiiswOjpKj9m+fTv+/u//HhdffDE+\n9rGPTWqiMDQ0hKuuugoXX3wxLr30Utx+++30tx//+Me4+OKLcfHFF+Pyyy/H4cOH825XUFCQQxl1\nBYVpgEsuuQQPP/ww/fuRRx7BggUL6N8HDhzAtddei5/97GdYtmwZLrjgAnz7298GAPzqV79CV1cX\nnn76aXz729/GihUrAACO4+BrX/sa/uZv/gbLly/H9ddfj69+9auwLKusNt58883o6OjA8uXLcd99\n9+H3v/891q5di23btmHZsmV4+OGHsXz5csyfPx+rVq2K3K6goBANZdQVFKYBzj33XGzbtg39/f1I\np9NYt24d3v/+99PfV65cife+971429veBgD41Kc+hZdffhmWZWHt2rW45JJLAADHHnsszj33XADA\nzp070d/fj09+8pMAgHPOOQczZszAunXrymrjc889h7/7u78DAHR2dmL+/PlYuXIl2tvbMTAwgIce\negjDw8P43Oc+h49//OOR2xUUFKKhjLqCwjSAYRj4yEc+gsceewzPPPMMPvCBDyAWi9HfBwcH0d7e\nTv9ua2uD67oYHBzE8PAw2tra6G9kv5GREWQyGVxyySVYsGABFixYgP7+fgwNDZXVxoGBAa4N7e3t\n6O/vx5w5c3DrrbdSBuGKK67AwYMHI7crKChEQxl1BYVpgksvvRTLly/HsmXLcOmll3K/zZw5kzPG\nw8PD0HUdXV1daG9v5+LoAwMDAIDZs2ejpaUFy5Yto/+tWLEC8+fPL6t9s2bN4towNDSEWbNmAQDe\n97734fbbb8fKlSsxd+5c/OhHP8q7XUFBQQ5l1BUUpgne/e53o6enB9u2baMUOsF5552HtWvXYu/e\nvQCAP/zhDzjvvPMQi8Xwrne9C08++SQAYM+ePXjllVcAAMcccwyOOuooLFu2DIBn7L/5zW9iYmKi\nrPZdcMEFWLJkCT3XE088gQsuuAArVqzAd77zHTiOg+bmZpx++unQNC1yu4KCQjRihXdRUFCYCtA0\nDfPnz0c6nYau8/P1o446CjfeeCO++tWvwjRNHHvssbjhhhsAAF/+8pdx9dVX48ILL8RJJ52Ej3zk\nI/R8N998M66//nr85Cc/ga7r+OIXv4jm5ua87bBtmxPpAZ4Y7xvf+Aauv/56LFiwALqu44orrsBZ\nZ52FbDaLRx55BBdffDESiQRmzJiBxYsXY/bs2dLtCgoK0dDUeuoKCgoKCgrTA4p+V1BQUFBQmCZQ\nRl1BQUFBQWGaQBl1BQUFBQWF
aQJl1BUUFBQUFKYJlFFXUFBQUFCYJpjyKW29vaOFdyoBXV3NGBws\nLw93KkHd5/TDkXKv6j6nF9R9lo7u7rbI35SnLiAWM+rdhJpA3ef0w5Fyr+o+pxfUfVb4OtU8+eLF\ni7F+/XpomoZFixbhrLPOor9deOGFOOqoo2AY3o3+6Ec/wpw5c/Ieo6CgoKCgoBCNqhn11atXY/fu\n3ViyZAl27NiBRYsW0RKRBL/61a/Q0tJS0jEKCgoKCgoKclSNfl+1ahUuuugiAMBJJ52E4eFhjI2N\nVfwYBQUFBQUFBQ9V89T7+vowb948+veMGTPQ29uL1tZWuu26667D/v37cc455+Bf/uVfijpGRFdX\nc8VjFflECNMJ6j6nH46Ue1X3Ob2g7rNyqJn6XSwx//Wvfx3/5//8H3R0dOBrX/sali9fXvAYGSqt\nmuzubqu4or4Roe5z+uFIuVd1n9ML6j7LO1cUqmbUZ8+ejb6+Pvp3T08Puru76d8f//jH6b8/+MEP\nYuvWrQWPUVBQUFBQUIhG1WLq5513HvW+N27ciNmzZ1MafXR0FP/wD/+AXC4HAFizZg1OOeWUvMco\nKCgoKCgo5EfVPPWzzz4b8+bNw8KFC6FpGq677josXboUbW1tmD9/Pj74wQ/isssuQzKZxBlnnIEF\nCxZA07TQMQoKCgoKCo2GW2/9Md58czMGBvqRyWRw9NHHoL29A4sX/zDvcY8++hBaWlpx/vkfqkq7\npvx66pWOxaj4zvTCkXKfwJFzr+o+pxem+n0++uhD2LlzB6688ht595vyMXUFBQUFBYUjCa++uhZ/\n+MM9mJiYwJVXXo11617Bs88+BcdxcNFFF+Kyyz6P3/zml+js7MSJJ56EpUvvh6bp2L17Fy644MP4\n0peumHQblFFXqChytol1PRvwrtlnImkk6t0cBQWFIwD3P70da7b0VPSc7zl9Nj594cklH7djx3b8\n/vdLkUgksG7dK/j5z38NXdexcOHH8dGP/l9u302bNuK++/4Ex3HwqU99TBl1hcbDG/2bcdfmJdA0\nDecedXa9m6OgoKBQU5x88ilIJDyHJpVK4corr4BhGBgcHMTIyAi372mnnY5UKlXR6yujrlBRZG0v\no8G0zTq3REFB4UjBpy88uSyvuhqIx+MAgEOHDmLJkntxxx33orm5GV/84mdC+5K1TyoJtUqbQkVB\ndJcOprT+UkFBQWFSGBoaQldXF5qbm/Hmm1uwf/9+mGb1nR1l1BUqCheO93/XqXNLFBQUFOqHU045\nFU1Nzfjnf/4SnnrqcSxcuBA33fT9ql9X0e8KFYXy1BUUFI4kXHrpx+i/zz77L3H22X8JwKPWb775\nNvqbLKWN7AsAjzzyVEXaozx1hYrC9Y35FC9/oKCgoDAloYy6QkXhuMSoK/pdQUFBodZQRl2holD0\nu4KCgkL9oIy6QkWh6HcFBQWF+kEZdYWKgtDurvLUFRQUFGoOZdQVKgpCuzvKU1dQUFCoOZRRV6go\nXFfR7woKCtMfX/7yF7Fly2Zu2y9+cRt+//t7Qvu++upafP3rX69Ju5RRV6goaEwdSv2uoKAwfTF/\n/sV4+uknuG3PPvs0LrroI3VqkQdl1BUqCqp+V566goLCNMaHP/wRPP/8M/TvLVs2o7u7G2+9tQtf\n/vIXceWVV+A//uNfalIaloWqKKdQUdA8dSWUU1BQqBGWbn8Y63per+g53z37THzi5I9G/t7VNQNH\nH30MNm16A2ec8U48/fQTmD9/AUZHR3HddTfi6KOPwQ03fBsvv7wKzc3NFW1bPihPXaGiCGq/K6Ou\noKAwvTF//gI89ZRHwa9c+TwuuODD6OzsxPe/fyOuvPIKrFv3CkZGhmvaJuWpK1QUrvLUFRQUaoxP\nnPzRvF51tXD++R/CXXfdgfnzL8Zxxx2P9vZ2/Nd/3YAf/vAnOOGEE3HzzdVfwEWE8tQVKgqXpr
Qp\noZyCgsL0RnNzC0466RTcddedmD9/AQBgfHwMc+YchdHRUbz66is1j6kro65QUaiUNgUFhSMJ8+cv\nwJo1L+MDH/ggAOATn/gU/vmf/wE/+MF38dnPXo577vkt+vv7atYeRb8rVBQOFP2uoKBw5OD88z+E\n88//EP37H//xK/jHf/wK/fuSS7ywwN/93adCS69WA8pTV6gogpQ2Rb8rKCgo1BrKqCtUFK7y1BUU\nFBTqBmXUFSoK4qGrmLqCgoJC7aGMukJFQdXvylNXUFBQqDmUUVeoKAL1u4qpKygoKNQayqgrVBQ0\npq7odwUFBYWaQxl1hYqCqt8V/a6goKBQcyijrlBROMpTV1BQUKgblFFXqCjU0qsKCgoK9YMy6goV\nRbCgixLKKSgoKNQayqgrVBSOWnpVQYLBzBDe6Ntc72YoKEx7KKOuUFEooZyCDI/uehL/veFOjObG\n6t0UBYVpDWXUFSoKldKmIEPWzgIAcnauzi1RUJjeUEZdoaIIYurKqCsEIOWDlYBSQaG6UEZdoaKg\nZWJVRTkFBg7tF3adW6KgML2hjLpCRRGUiVUemUIApbVQUKgNlFFXqCgctfSqggQB/a4YHAWFakIZ\ndYWKwlWDt4IEJNXRVvS7gkJVoYy6QkWh6HcFGVS/UFCoDZRRV6goFP2uIANhbmzF4CgoVBWxap58\n8eLFWL9+PTRNw6JFi3DWWWeF9rnpppvw2muv4e6778bLL7+Mq666CqeccgoA4NRTT8W1115bzSYq\nVBjKI1OQQcXUFRRqg6oZ9dWrV2P37t1YsmQJduzYgUWLFmHJkiXcPtu3b8eaNWsQj8fptnPPPRe3\n3HJLtZo1LfD4W88gbsTxoeM+UO+mhEBT2lTtdwUGKtVRQaE2qBr9vmrVKlx00UUAgJNOOgnDw8MY\nG+NLRH7ve9/D1VdfXa0mTFs8s28Fntu3st7NkEJ56goyOK4y6goKtUDVjHpfXx+6urro3zNmzEBv\nby/9e+nSpTj33HNxzDHHcMdt374dX/nKV/CZz3wGK1c2puGqNxzXaVij6aqYuoIEKitCQaE2qGpM\nnQVrhIaGhrB06VLceeedOHz4MN1+wgkn4Morr8Qll1yCvXv34vLLL8fjjz+ORCIRed6urmbEYkZF\n29rd3VbR81UcGqAZ2qTbWY37jMW9eaJRgfZVCo3SjlqgUe9Vj2kAgLb2ZEXa2Kj3WWmo+5xeqMV9\nVs2oz549G319ffTvnp4edHd3AwBeeuklDAwM4LOf/SxyuRz27NmDxYsXY9GiRbj00ksBAMcffzxm\nzZqFw4cP47jjjou8zuDgREXb3d3dht7e0Yqes9KwHQeWZU+qndW6z2zWBADkzMm1r1KYCu+zUmjk\ne82ZFgBgYGgcvfHJtbGR77OSUPc5vVDJ+8w3Oaga/X7eeedh+fLlAICNGzdi9uzZaG1tBQAsWLAA\njz76KO6//37cdtttmDdvHhYtWoQHH3wQv/nNbwAAvb296O/vx5w5c6rVxCkL13Ualt4O6HdFsyoE\nIEyd5ZhYe/g1ZKxM2ef608ZHsan/zUo1TUFhWqFqnvrZZ5+NefPmYeHChdA0Dddddx2WLl2KtrY2\nzJ8/X3rMhRdeiGuuuQZPPfUUTNPE9ddfn5d6P1LhuE7DxiYdJZRTkID01039W/HSobX4zGmfwAeO\neV/J58nZJpa88RDOmHkazph5WqWbqaAw5VHVmPo111zD/X366aeH9jn22GNx9913AwBaW1vxi1/8\noppNmhZw4Tas0QxS2hqzfQr1AekPE1YaAJAtc111ssqb4zTmpFZBod5QFeWmIBzXbVz6XXnqChIQ\n9bvpeJqLcpkm0u9VDXkFBTmUUZ+CcOE2LP2uPHUFGUh/tRyL+7v087jc/xUUFHgooz4F4TSyUI56\n6o056VCoD4gRNqlRL6//0v6lhJgKClIooz7F4DZ4ZS4y2C
r6XYEFmYRS+r1MoxzQ743Z/xUU6g1l\n1KcYiDFvVKNJ6dEGZRIU6gORfi+XyQn6vzLqCgoyKKM+xdDoZVhp+xp00qFQHwRCuUnS71AxdQWF\nfFBGfYqh0YVCjR4eUKgPaEzdnqT6XfUvBYW8UEZ9ioHSjw3qqTsNziQo1Ackhm5OWv2uFoZRUMgH\nZdSnGBqd3lZ56goyEE/dcn2jPkmhXLnHKyjUAvWcdCqjXiFs7t+K+7b8qerGzGU89UY0nET9rgZd\nHo34rmqJoPb75GLqNPykKsopNCg29b+Jf3nuWuwbPVCX6yujXiG8dGgtVh54GUPZ4apeh1WVNyLF\nrTz1MH678ff4j5U31LsZdYU4yStXvU7XZW/Avq+gAACHxg8j55joSfcV3rkKqNl66tMdJG+22vmz\nrLF0XAe61ljzsqA4iBp0CdYcXgegMd9XrSAa8cmWiVUxdYVGBbUFTn1KGR+ZI0wV4FKjXt0XydKW\njegNOw0e868nCPV8JEKk2ydfJnZ6GPVGXnFRoTyQsa9e6xMoo14hkMGm2rMztjxmI3rDboOn3NUT\n5hFt1EVPfbJ56tPDEP58/R34+fo76t0MhQrCrpGDFwVFv1cIZEnIWtPvjYagOE7jta3eOFI9dVey\nqmC5Qsrp5qkfGDtU7yYoVBikb9erjyqjXiEEg0216fdG99Qbu4xtPXGkeuqyflp+8RkyYE6P/mW5\nFjRo9W6GQgVBQ7F1ytBQ9HuF4NRKKAfWU2+8gc2hecSN17Z640j21IvZVtS58uSp10uYNBnYjq0W\np5lmqDf9rox6heDUSPHIeeoNaNRVSls0jlRPXeaVV1ooN5gZwr++cB1eOri2rPPWC5Zj1W3wV6gO\n6i2UU/R7hVCr2Rmnfm9Ab7jRF5ypJ45UT13G2pSf0iaPV/ZnBpG1czg03lPWeesB13VhuTZiylGf\nViA2QNHvUxxRg03lr9PYQjlHeeqRsPy1xI80SD31SQrlxP5FtCyN+E1EwaqRuFahtnBrpK+KgjLq\nFULtPPXCQrnHdz+DX79+d1XbEQW2+Iwy7DysKRjzrQRk/aDslDamTDL7LRCvaCqVJ6Zrywv3ojC1\nUatCZFFQ9HuF4NZKKFdEStvrfZuwc3g3XNeFptVWWSvm0StlbwDzSPXUJYZ2shXlAP9b8LuXTT31\nqTORZPU3tmNDN5SPNR1A+rsSyk1x1Ko0IDtARnnCjlu/uDbbJuWp87COUEFUJdXvTsSklnx/U6nP\nkRXrgPoZAIXKgyw2pIz6FIcbocqt1nWAaKNdq0I4MjR6zL+eMO0j1FOvoPqd7f9s/6brrE9B+h1Q\ncfXpBOqpK6Hc1EY91O9RA2M9q24VM+k4UsF6ZkcSZP2wXCPGr1LIGnXvuyt39bd6gDfqylOfLqAT\nTOWpT23UakGXUExRgnp2Kofz1JVRZ3Hk5qlL6PcyPWpXQrmz/55Kfc4UYuoK0wO1KkQWBWXUK4Sg\n+Ex1X2Qx6nenjgOc8tSjcaTmqcvLxE6uohwgp+KnUsjHdhX9Ph1R7+IzyqhXCLUaVFwujljIqNd2\noBAX7phKVGgtcKQa9WpUlBPP4ThTT/3Opjgq+n36QK2nPk1AjFlN6fcCnnqtB4rwSlxTZ4CtBY5U\noy6b3JU74YsSYlL1+1QVyin6fdqgVunNUVBGvUKgpQEbQChXLypSjPFPpfSiWuCIjanL6PcyJ3yc\nd84ZdSKUmzp9TgnlpifqHQpSRr1CqEtKW8QARleyqvEAF/LUp9AAWwscqZ56JdXvfPGl8AR3KsXU\nlVGfnlDFZ6YJ6lJ8JsLbCapr1Zh+Fz31KUSFVguskTliPXVp8ZnJq9/Z/h3kqU+diSRbjKhcge3+\nsYN4bNeTU4qhmO6gxWfqFFJRZWIrhEYqE1sr1iB0XSj6XQT7DI5UT102uStbKMfG1Jl/21NxQZcK\neOqLV/8YAHBix9tw+o
xTKtIuhcnBqZG+KgrKU68QGmlBl3rl7IrXU/Q7/76OWKNe0QVdCpWJPbKM\nOsG4OT7Z5ihUCPWs6Akoo14xuDWK6fHq34iYep3UlyFPfQpRodUC600eqUa9oiltEer3KZ/SNkmq\n9khdAbARUc+KnoAy6hVDPcrERnklgade65i6I/w9dQbYaoF9Byqmzm6rhFBOltI2dfqcVULxGdux\nsXN4d+RzO1InjI2IWlUXjYIy6hUAW3Sl6kK5kirK1XamKIqUlFCON2hH6sAr9dTLLRPL1XuXLOgy\npej34ovPrOvZgJte+Rk2D2yVn0up5xsGqvjMNIDLCXbqT7/Xq0ys6JlPJSq0Wpiu6nfXdbFndF9R\nExXZ5LMyS69OH6GcU8AAjFkT3v9z8ti5Xce+tXtkLwYzQ3W7fqNBFZ+ZBpAtAVktcJ56xDrVQZ56\nfSvKTSUqtFrgPPVptErbntF9+P6aW7DywOqC+1Zr6VW5p964fU78XktZerUQE1EvT33CTOMHa2/F\n/3vph3W5fiOiXuFPAmXUKwB+5agql4llB7UCqUI1F8qpinIhsHTxdPLUR3Kj3v+zIwX3raj6PbJM\nrF9RrkFDPsveegrfXX0zR8nyMfX840ah0s/1onr7MwMAgJxj1uX6jYggpU156lMW3BKQVS8+k7+i\nXFQZzVogXFGuMQfYUjGZycl0TWkjE5RiBnPp5LNM48v1b4QnsI3qqe8c3o2D44eRtjN0WykxdVrQ\nRPimNGj+uerTtwYyg3W5biODprSpmPrURdQaz9W5Vv4FXaJSfmqBcEW5xhxgS4HjOvivNT/BwzuX\nAwBe7dmA77z0A4wVmRfcSEK5oewwcnauIuci91IM+1BR+j1CU0IqsjXqRJI8L45d4BZ0KUC/R5Qe\njekGACDrVOa9lor+9IB0e9pKV/3aD+1Yhl9u+F3DLYZD+qVSv09h8DH1atPv+T1xWenMWkH0kqYD\n/Z6zTewfO4hdw3sAANsGd6Jnog+HxnuKOr5RPHXTNvGtld/FjS/fXJHzES/TtIvw1CuY0uZEfGsO\npd8bs8+Rd88aILMETz1KUR3TvaKgWas+Rr0vEzbqqw6uxTXPX4dXDq+v6rWX7X4aG/o24tl9K6t6\nnVIRhEqmIf2+ePFiXHbZZVi4cCE2bNgg3eemm27C5z73uZKOaTTwKtwqC+Xy0O+DmSFkGU+s9vS7\nkKfeoANsKRBjmcTTLdYTYSdhZgXijpsHtmI0N1byceO+erpfMgiXg8BTL3xPlVS/T9UFXSwnrM7n\nYuoFvM0oQ2Fonqeeq7On3hJrptue3bsCALD60KtVvXbCSAAAHt71ODJWtqrXKgVBX5xmnvrq1aux\ne/duLFmyBN/97nfx3e9+N7TP9u3bsWbNmpKOaUQ43MIMNfTUmcEya+fwnZd+gAe2Phi0pc6eeqMO\nsKVANOqE5kxbmchjuOO5inKl9Y0V+1/CIzsfp3/vHzuI2177NX70ys9KOo937cqyBMQgEWPyzN4V\nWPLmn6X7yvqBC7csw+5yk1pZmdjGnEiS52VHMDflCuWop16hsEqp6PONOmkHEEx8k77RrRbi/jVz\ndq6hyuROW0991apVuOiiiwAAJ510EoaHhzE2xnsY3/ve93D11VeXdEwjQpYvW2kM+yrjqIpyaSsN\n07EwlB2W/l4LTMeUNhLLJAY58NSLNOqCQruUic7z+1fhyb3P07+J4rwv3V/0OQhyRdDkpYB66rb3\n/9WHXsWKAy9J943qh+VM+qJYMafB89SDmLod2gYUb9TFfHbqqdfBqLuuS5kflnXI1sios/fcSMV3\nqFGfbkK5vr4+dHV10b9nzJiB3t5e+vfSpUtx7rnn4phjjin6mEZFtRXnb/RtxqKVN2LzwFbBUwn/\nm1Uj1zulrVGVyKVgsvS7+AxK8dZtx+YGhskMkpX25CxB/U4mLKWkr5WzTKobMaml6vcG
TWmjMfWI\nTBmyfcKU96so70/z/18Poz6SG6VCSVYwSfpaooj+escb9+KOV5eUfG3bsblrikzUruHd2DKwreTz\nVgKkj7pw6zLJrNnSq+zHODQ0hKVLl+LOO+/E4cOHizomCl1dzYjFjIq0kaC7u62k/c3RCfpvzSj9\n+ELIDXkfuh3PorklTre3tCXptdwxL6bkaMFA0dKayNsW8bdVe1/BcGYUC065wDun6+J/Ni/DXx59\nFo7vPEZyBh7ZBM+qdHSk6DXeGtyHp3etxOXv+iRV7NYKk3of4/5gqbvo7m6jz1dLOEWdd9RIcX93\nzEiiNdFS3LV1F7ZrY+asFuiajiG9lf4Ude2o7Ycco+A+pSBx0Dufq9vo7m6D5rsHM2Y2I2bww0rr\neFJ6jhkzm5GKyX+LQnOPvP8bMc+8abpW8e+vEiCTjfaOFLpneO3TjGB8S6QMHLD34rsv3IpvX3AV\n3jnndADBu0rs9Z53MmVw9+fqvtJas2t+36MDgT7Ddix6fRKS6WxrLdimLYNbcSjdgS+dfVlJ157I\n8ZOf9o4kfa4A8KNXH8RAZhi/+pvvl3TeSsDRAkM+Y2Yz4kbQZ2vxjqpm1GfPno2+vj76d09PD7q7\nuwEAL730EgYGBvDZz34WuVwOe/bsweLFi/MeE4XBwYm8v5eK7u429PaOlnRM/3iwf9Y0Sz6+EIZG\nvXjR4Mg4snYgCBkZSdNr9U54/8/kghn78OhEZFtk9/njF38NADin8xwAwOGJXvzh9Qexb6AHnznt\nEwXb2S+ESgaHxtGreddYtu15PL33BZzVcSbe1n5cwXNVCuW8TxZ9ae/YnP9ex7PeYNI/MlzUeftH\n+GdyqGcYHcniZu85y6LHxPUY+oaC68mune9eD/cHZTwr0T9HxrzvLp3Nord3FFnT63cHe4ZChnpo\nRP6N9vSOoCmWkv4WhdGxIOwxNBL073TWu75lWRX//iqBnOUxGv2Do2iz/Tbngm95bDyNHYf2AwB2\nHNqPOfox3Pscn/D2HZ3IcPdn+n1kIpuu+X33DgeFh2zXweGeYeiaTr1TK1O4r1mODdu1S247G2YE\ngN6B4LkCwHg2jXSu9s8E4NMTD/eOUIZtsmMRi3yTg6rR7+eddx6WL/dyezdu3IjZs2ejtdXzNBYs\nWIBHH30U999/P2677TbMmzcPixYtyntMI8OW0ICVBJsOwxffCKt/WTXyZNOGZGk4+RAuPhMWidUr\nd7Nc2C4fUyfU4kTRMXWRfi8+tk2euy2Jx5YKdjJYCQRCOUK/R1c8i2LcytF8RAnlGqVMrOM60oIs\nJhHKMQO+JdDv5H3LQjTBGt38b1TAWQf6XRwXRAq8mFAIe9+lQLxfsfa95dp1i7M7ESGWWqFqnvrZ\nZ5+NefPmYeHChdA0Dddddx1O4kRGAAAgAElEQVSWLl2KtrY2zJ8/v+hjpgK41JoqvETysViuFRlT\nFA0xUP4EI2vn0BRLlZwmFMpTR1hA2GiFIgpBTE8h1GKmDKEcUNrgS56ZRZ9d+RPGSsdcA6Gcb9Tz\nGKRooVzpBjiqDDLZXm9x5h+3Pojn97+Ia875Gk7seBvdXlAo53usgHxiRNfoFp4v6RP1KD4jji+m\nY3Fx9GLGQu++S+/XYn8m/e61ntdxQsfx1AGyHRtGjcN9svLFtURVY+rXXHMN9/fpp58e2ufYY4/F\n3XffHXnMVACX0laFl0i9XMcW1O9hTz1XAU89a2fRFEvlHWRkEAdvWVZAvdI8ygWdLAke0UTRQjn+\nfkdyozgaRxV1rFhwhFUYm7bJxeoKofJCOb/4DCOU8/4fTp2L8tjKEbVFC+UaQ/3+/P4XAQBvjeyl\nRt1xHanQzRLGjWBiJHmGEUI5MvaYtgnXdaFpWujYakFsp1izoJhFaly4ZY0JpD/H9Bgsx4LlWtgx\n9BZ+9cbdmJnq4ibEBo4so64qylUA1S4+E3jqNr+e
tOTflVDikw+mVE89lNLG0VDR9GwjI0hP8QZm\n8i5K9dS7kp0AgEEhFpgPdGCSKKfZGuLFoOL0e0j9zocpWJDvQ4MmbJ8c/d7IxWfaEkHYUKZyB7xn\nmNDj/nabTtpkz5CdYPelB6inSvPz4VakuFEpEMc6S8jWKDdNrxiQ/twca/Ku5dg4OH4IANCfGQx9\nO7WC2P8mw66VC2XUK4Bqz8wst3hPPapdpYB8MKXW0xYHb34Antr0u+1anLdbrKdO3tGMlJeqOZQp\n3qg7TuBtAPyzS0ekPkWhWilthOLMTx17z1DMeigvTz2cm85et1GKz8T1gEVhGRaRfk8anqjQdh1G\nlxDtqU9Yadz48o/wp20PwXVd7nnXOq4uvmvLMbnJZrGlb8uJfZN7bY4303Ow6zFYeViPakLs0/WY\nZCqjXgFwA02BmZnt2Hhg24PYM7qPblt7+DX8x4obMJyVKyO5mDprKCO8Flm7SgGpI11qZSTSNsPP\nb5oO9LvNPAM2jle0p+4zKNSoZ4fy7S6/tkRkWKxQj6DSMXWTMTw5xwwmILKYut8v2KpjQHkx9aj+\n32h56jZnvKM9daKMdhy7gFDOu9ex3DhMx8KoOR76vmudq07eOXmvpmMjbTJGvWDpWz9kUoY3S4x6\ni++pW45FjXrKSNbRU+f7tKLfpyhK8dQPTfTgmb0r8NLBtXTbnRvvw0huFK/2yBdAoF6R43DXKuSp\nly+U8z31EuOUpD26b9SlQjnh+fSnB7Bz+K2y2gkA+0YP4L/X31FWPfRiwNK6GYbCNh2rpBXKZqY8\n+n2oiPXHyXHk+clKjBY7qSCoVkwd8GKpxXiZpPqZuL0URPX5RqDf2cVtuHXTBUEc3e7aVFhmuwHb\nYeV5hpR2lwjMau2pEw+bTEwsx8SEFaQvFhp/7IjlZItBTvDUbcfGWM4z6i3xZqlwuBYI0e/KqE9N\n8Os653+J1OuWzGLFQS90jGsXVL+zKLdM7GRj6rp/H2xb6Qcs3Pf9W/8Xt6y7vWxaftPAm3ijfwt2\nDe8u6/hCYO9drCJXjGElM/fmeDOSRiKUXxsF3igQj4b11Eul3ysdUw8MWM7OBRMQqfpd7qmX0z/F\nOLpYkrOe9Ds7YWPHAXbyx35Tjutw9DsriBVBvFr22xRTHOtFv6f8AkuWY3HlkwvT7+WnuZL+TBaS\nYen3FFP7oNZpbcqoTxNEpdnIIFutiSCq0loQU7UEcVy1PHVehFM0/U48Mr14T33cHC/a65VhMrP9\nYsA+V7GEZzGGlTwTXdPRmews3qhziwRZoW2lrldN3ilhUSYL1gNKMytkyXLpaUzdn+yRNpRVJpY5\n5qGdy3HDSz/ijHs556wU2HfL0+9ho07GAeLlep63Fdo/OB/vqTtMDJ6g1vQ7+fZIsSHTsbhvotiV\n51y39HKqQUydCOUC+j2mBZPHmnvqQvhHCeWmKEQaMJ+3kG92WrqnXjmhHLsvmQWXuoSgI9DvsrCE\neN9EPV2ucjeqKEelwCnOBUNajGElRkbTNHQlOzBmjhe1Brks9YlvS3n0u47KpDyxHnmGeQ754sGG\n76kTj708+p0/pifdh6yda4iUNs6os0Vm2CVWhdS/uBGHBs1PaYsWjpFvnQ2Nhb+l+njqZGJiOhYn\n4CxIvzPtL/W9kQlMCxHKOTZGfE0Sm7NfrrNQLpRQbpqglBdp5/HUjQgvymZouWoZdXZWHXjqJcbU\nqVAuH/3On4sYuHJn1NVeEYk1IsQLIZRfMYaVtE+Hjs5kB4Di4uq8py4TyuWfUKw5tA53brwvZAwq\n5cmy74vVGkjV7+DV73GtsFF/es/z2DywNXwuyYSZXf2uvvR7lKceFsqRbYZmwNB0vwJadAVHsRAR\nyTpgUXNPnRp1z1O3RE+9EP3ulG/Ug5Q271vMOTm6iiG3epuKqSuUg1IUj/kqq+kR9LvJeOqy3HSv\nDZM06lxqjOip
FzdQkgGVGvUi6HfiqefK9NSDtJjqfLycd+wrezuS7d7fpRh1TUNnihj1wgp4UVAl\nbisUz//tpt9j7eHXqKFhY7HFGL6BzCDu3fwAxk153XaTo9+DtuSNqWu8p/7SwbXYPrQrtL/t2PjT\n9ofx+O5nw+eSTErYlLpy12mvBIYjYuqyJVbJ/2O6AV03BPV7tFAuOE+Yfq+1V0ram2KMOtsXCo0/\nxZTXfnz3M7h70/2h7SL9Ppgdpn2D1Y+QZ+m6Lp7e8zx6Jkpf9dNxHfzstd/ghf2ritqXhTLqUxSy\nDy4KVhk0YTCDF8vEVtBT5+h3PqZeNP0eSmkrgn73rzV5T70GMXXfCyGFRYoRJrEZAa1xb3W2KEPJ\nQkx98raVLpQjoRB2oCumX3xv9U/x4sHVeJnJ0mDBTqLYCYZYgxsI2A7iqROj/uy+lXho57LQ/nQS\nK5noybUjvBK8XhQ88RSBaPW7GFP3PHWDM9Iy+j3MBgYTmQSlv2tdfMb31GOBUWf7WSH2rJh39srh\n9Xjp0NrQ70FKm+ep96eDFeNknvqhiR78afvDeH5fYcMsoj89iE0Db+IPb/5PwX1DDl4d6nIoo14B\nlDI7k1Gp4m8i2IpiXPw+Imc3aMckPXXa1tKEcjSlTVJpTzS+k4+pE6FcdT4eR2JIgypWxae0aZpO\nJzvFPE85/V5+TJ0d6Apdf8wcx7ifmpSMWBqVj6kznpGUfudj6nFGBW/a4WeYr7KazFO38ix0VEtk\nuWdsy/8tLNIT0z36na0oJ6ffw44DuecmIxCq1RKkTUnm+ux3XLiinPwZsSChHZGZEj31gUzAfsnW\nWSf9vxzdQSljk+gAqZj6FEUppQHzFWFhz/PwzsexdPvDAPjqSJGrVJWwkIaI3SN7aY4nIPPUS4yp\n62H63ZFMZtiyq8WIx2TItzpYJcDOvKlR9weSYtJlSIhEhyYVEEaBp9/Dg32xeflEGZ6z2TUB8rd7\n9cFX6L+jDAWnfrfzFxwRK8qxRj3f5LbYFd/EQbde9Dtn0Lg8/rCnTvqtocUQ02NetUgqlItmO9jz\nkGuQiVe531C5CNHvLp/FUlgoV9hTz/oTRrHYUs7OIaYZlKWIDBMJDGExjN7z+1bhFxt+S/tRKSxi\nOBRbe6Ne1QVdjhTIqLEoyLwu+huz7bG3ngQAnDP7L7iUpsgysWV66qO5Mfxw7W04c9YZdFs4pl6s\npy4Un5FWlJMPdmWntNHKUdUy6mxMnXjqQcGLwscHz4Tk7xdl1NnQhcTI9ab7ilrAw3EdmMJksND1\nD6f76L+jDAUnlGNj6nmoY1I6Nc6s5EX2Pzzeg2f3rcQnTvlY3loOsrbnhDbWi36PMmiymDox3IGn\n7hSo/R5mA4M8ceIpV9eobx3cAcuxcMbM02gbuOvbFl+AJ2IcdF0X24Z2CpMg+TsjnrqYaZK1s0ga\nSarTyESshSD2pWK0Nxv6NmLzwFZk/IWtSqnxQJk5aP5iNYp+n5Kg3lgR9GoQU49WuLJY/tbT9Jh8\nNGMhzz8KE1YaLlxOuRuUiS2Rfgdv1PmiPGGVOksJT5p+rxL1yN47qZbFLiJRCIFQrjT6nasRLryH\nWakZSFsZjJqFvXXbdUKDUqHrswOz7L3Yjs1NEtg8dXlM3dv3PXPejfnHX4CTO08M7f/SoVfw/P5V\n2DG0i+nvknNJJq+mQKm6FSoVO5wdwe2v34UDY4e47T0Tfbh/659Dmooo6pk10mK2RhBTt0PUPIuQ\n4+AEMXhS/KXa9PtP1/0SP1v/G/o+wzF1k2clhPsmz2vv2H78dN0v8ezeldzvImzHps9UrBGRtXNI\nGInI2h4ENHSZp7BP6LpU9+Av3lSGUSe6ERVTn6IIPBH/RRalfs9vhIkAZEPfJjpLtV1boN/lVLx4\nvj0j+yI7F9mHHaByNBe2NE+d5iPLPHWJt5njjMckhXJV8s74lDbPGwjo9+Jj6p
6nTuj3YgaWsNCK\nbJvb6i3deni8sJLXdZ1QqlOh92kVYFCI0SX3w3pJ+WLq3U0z8fGTL6UxWCB4b+N+4ZC0lZEKA4P7\nCRv1sKdeGfr9V6/fjfW9b2DZW08J2+/Cc/texFN7nuO2R3mplsSDJ4aeqN/5MrGFGQpbRr/XSChH\nqG6RfjfdIKaeNBLcN3nflj/hO6t+AMd1MJ7zjueEhZJ7Zsck0VPP2TkkjUTBtdLZdTO8/xfx7QnL\nCrOakSjcv/XPeHjncurIBKvvqZj6lAQZRAi9mG92Jhq3qFxNdknFLFWIi556/pQQx3WwuX8rvr/2\nFty75YGIthOjHnTcScfUCc0sTWkLzmVWoEhEvnrZOdvEpp5tk4qx8iltJKZePP0eiAfLj6nT2gb+\n/49u8Yx6Mek5NuMhERS6PhcWkdDvZKAkjAWvfo/WdpD715mQAfFKiSeWtjL5U7sknrqYDlkJ+j1n\n57BrxCs93J5s434jxkisN8ALtMKaCCAchjK0mEe/Ow7yrSwmq4VBtlFPXSI6rAZ6/fBMIJQjtd+9\nmLoGDQk9wRnqvnQ/hnMj/j5+GivTL2XvjB2TxJg69dQjCnYRiPR7cewaMerEUy8sSn354Kt4+dCr\nIU+92MyhSkIZdQavHH4N97/xUMnHhV+k9/cL+1dhy8A2bl+xoEsugrKT0vN5UtqiPPVDEz0AgJcP\nvRL63Tundx124A9i6tGCJRlomVhZ8RnJJIa993JFPjTuL2E+Vh54Gdc/czPeGtlT1rnZ8wNh9Xtx\nQrkgJFGa+p2JyQoToqNb5gAADhdh1NlJoezcMrAen8z7y2fU83mZxJizpWrJ/uP+s83YGWYBm2I9\ndXHSMnlP/bXeN4I2CoaAiLNEhsB0TCYEJ/fUqRiOU78TT5334lmI78wrE+vt1xThqTuuwwlgK4Xe\ndD/XTjalzXJMxI04DN2Qrq1uOhZ959kCGRks7S166pZjIabHQusJJBi9BtkPCCaPxYxl4gpvxXjq\nOSeHsdxY4OAZylNvCKw4sBoPbHy05Jk+2T+gXLzKb0ve/DMe3fUkt6/oqbMfImuYooR0TgT9HrU/\nqWIWBXKczFOfvFBOElOPoN9lOcnFgOb9Sj5WQhNOZgU3dnJFmIhyYupaqer3PBXlKP1elKduhyZM\nhbwH3qhL6Hd/W5MfhkgXyE0mA51GPXU9tP+E/65Y+r0Yj1VsL1B6TN1xHdy7+QFs7H+TbuuZCMSC\nYjoV+c7FWL7Jro/OlomV0O/UU2eNeh7DI0ubFSu6ic/h4Z2P41svfrdgXQTHdfDn7Y/ijb7Nkfuw\n77XXfzakH6WYlLacYyGux6j4Lzg+mMyQ/pjlPHUZ/S436o7rrWBoaHqotDb5NglMgXYvRlBL2k3p\n9wIxddtnUHOOSdX6xYRiqwVl1Bkk/dlVqStahcQR/izahRv68EOeOheHy5/iYbmWoHgP9ony1KNi\n8OJ12Otl7Rxc142k39NWBo/tejLkBQT5yOEFO2QxdbMCMfVAgCdbBCM8cSoVIt0b12N0Fl6Up86p\n38tMaRPEPu2JNrTEmyPpdzGkI+bmlkK/yyr95ffUZZR5MLEBvJK5tK3+MyQsSMbKMDS0xFOX0e/C\npKXUcMtQdhgvHlyNVQfX0G3sEqLioJ7w37/4bEzHpAaOEzpKPFYaU9cMGLrupXfmEQjK6HcqlIvx\neeoTZho5O4e+dD8sx+Iq3clwaLwHT+x5Fv+94U5OvCbeGwHx1GVlYi3bRFyP04kKfQZ+v/A8de/f\nXKEamaduyen3IB3QgK7ptF8BYaMu0u5Feeokpk4mHwU8dfbZkNAMMepP7n4W+8cOFrxmJaGMOgPS\nOYuhW1jIFI/BbI//QMVOlpMUrBCNMf3dsTnjXchTF+tDD+fCH7dshuzCRc4xpXFwwFPkP7zrcdyz\nhS/fGHjqPP3O3g97rpwzefV7vrx/mgM/CV
WwSOsnjAT1DorJX2XjyeS4ydLvhubVkR81+UnVaHaM\nprDR9rtu6P6LUb+TCYg0pk5pXy+WmyngqYsMDhtTtwRPPcN46i7Cq3cVJ5QrzVMnz4f9FlnvVvTU\n47pfwY3L/fdqLpBnYrnhSRnbNtFTZ89XslDO4PPUf/jKrbj99buYEsz5C66w1Qlf6XlNuk+uCKNO\nis/E9RgV/7HtBXiFfKE0yyhPnS3co2kaJ5ZrYpZd9a7He+rFZMmwoQKAr8MgQ05i1GM+mzOcG8UT\nu5+THlctKKPOgMyyS/fUeaEca0zDAyo/QHP0ewElt1f7XZ6bHuWFsx+WzLOLulbWztL2iCwASaXa\nO3qA2y6mtAWpLywNV5yn/ty+F7H60KvStrEQY5QsKuGpi88noQdpNMVQeWw8uRT1O1dtS6Bldc1A\nTItx99w70Y9/+PO/4p7NfwwZEWKsiKCqkLdiORb1ePLF1JuItkCSh83fC+kXnjHXGPrdY7MsJh85\nIxhE/nzSPPUQE1Gapy5WHQP4FCpxPJB56qSdxGuW1e5n2x946jFqlOgKbLIQhhBSYIVyHt1t0G9o\nMDOEgcwg/b7ESY8I3mDKxwP2HEQoJ94zEcp5nrrOi3qZ8VDORITvmfPUJau/kckQu9RqU8hTF2Pq\nxU+oqVCukKduh4368W3H4Kjm2QCAcavyuoZ8UEadARF8lJKXCAQdkqhk+zODIbEFQeCNhz11MYdV\nRNhTLxyDZwfZQ5IUqKhObtpm5FrlZMAX64+7QkobmXTIBDOAIJQTjMcjOx/H8reelraNRb6JkCXQ\naOVAHGySRgKGRsIsRaS0kaVXUSL9LvPUSW6zbiCmG16FQf+Z7x3bD8ATRPITRZsOyMSLKYZ+TxpJ\n6JouNepksBOpTiCKMo9WvwPAGKN5YGPq3vn4ZyzNUxc99RJj6uQabIx33JpATI+hLdEaGtTFtc2B\noP/KJk7yBV28bYZuIOF7/gEjY0s9c/7vIKZuaAbieoy2wXa9CoLUUy+wRgFrMKMmnCKLkbEyEep3\nhn6Xhh2sooSAAD+ZGswO4cUDq7GhdyNti07XEijeUy8mDTXIviCx//z2QOapdyY78O/nfgNA7Ssc\nKqPOgNBIpPhKsSAD9ymdbwcA7BjaFeklihXlZOr3qEFXXLgiKr5Ot7kON/M+7Cvh+X3kH7HlWPQ3\nx+VX9iIfTkh1DLn6nfc65UI5MR3HdK2iFkwJhEcST52EOCZDvwtGhC14UXLxGb0E9bskpS0YxHXE\n9BhHT3P11EP0u587HCPx3sLq97ge8wxFnpS2pJHgRG9sG1kEYkFi1HlxE5uvnLGzkZNAIIJ+F4vP\nlEm/ZzlPfQItsSY0GanQJJ88E7b+PjXqxFNnJ2X58tQ1g3r+LGRq96h2G7qBuB6H6ZjUg8/ZOZj+\n/ci+I8d1aMiDXy41YpIvjGM5x+QK6MQ0AzmfWo/5zIFsFTbTV8jL2iOCfe77xw7i3i0P4Jev/w4r\nDrwMADSdjYw3CT0eUsOXV3yGZ1nZ8Iu0TDHnqXsTVF3TEdMMaNAKMiWVhjLqDAj9Xrqn7nXIY1rn\noimWwvahnZH0uxgflQnlxPKL3PHCgC22gWsXeE9dto63eBz5UCzBW2A9pKSkXWsPv4b7tvwJQLii\nHP9xs556dJ46W00qH0Q6k0UgzpmMp84/nyQbUy+CRncZ6rlc9TtbjIUI7sjglbVzODzew1GQohGh\nnrpEmS2D6aclxfW4dEIUpGPFuMmE91th+l0H76mzRt2j36M9ddnkdbLFZ6Lo9+Z4M1KxZCimTvoT\nG3cnk9KEkYAGjYvdsvfQn+7Hr16/i6rrDd0IpWEBAV28b/QA1vduzKv61zUdcSMO07GYiWyOOgzj\n5gSWv/U0hrPBc35wxzJ868XFSFuZooy6OIEnk35D8+LaMT1GGY2EEYeh6zQLyLsfln6XeOqSbVEC\nNSL8o/
S7P8lOSIrRUPW7kK8uwnVd/OHN/8HaQ+vosw7y1FlBX/h4maeuazo0TfMZlNquc69qvzMo\nP6ZOhHIG3t5xAjb2b0F/ZhCAhH53iKI87MmLSvSmWFNogsGer5DQRIypy9bgFj/ipJGEZU3AcqzQ\nTDufUbpz433032JFOZlgBhArygX/9pT3dlFrrOej36l3MImZsvgRJ4xEkK5SoqdO6rSXT7879NkS\no/7M3hfw6FtP4v97+wK6v0i/B1W+ivXULd9Tj+eNqcf8ffh8Y9aADaI92UY9Z02Spw4InjqjfvfO\nV4ynPjmhnGjUHdfBhJXGUS1zoGsacr4HTMWDzMqCOdtEwgieU8KPJ8veHwDsGd2PPaP7MSPVBcAz\nTCRFjm+Td8x/rflJZLtJvyb0+4SZptcynYDper1/Ezb1vwlDN3DR8ecD8MRuOTuH0dwYH1OPot+F\ncYzkm7P9kZwnpse4zBlDMxhlvyntU4U8da4t/n0R5ot8C3E9HipGE9R8z69+PzTRgxf2r8IL+1dx\n4QRAWIXQsUNsADu+jGYDow54+eqTYQrLgfLUGdCYepnqdx06rWu9dXA7/Y2nE3nBBqd+F2hWUo6U\nhUitim0Q28Wqt2VrcIuruwVrM1vcB8Cev5BATFylLYpOZT9uSzK5MW2zYDwqKGoTTb9PxlMXr59g\nKOeiysQiMGilqd/l9HsgDvL+35fx1pHuY9aTFvsIYURSRcTUSc5tTI9zxooFWzglLhgk0jfGzHFc\n/9L38dDOZUEBHshj6mwdgbRdOKYuTgpEL5L0uwNjh7zSnQWedxBTD5b5dOGiJd5Mnxk7JrCTUZL6\nRp5TXI+HlN8yYRih7mO6QY0I16Yi+hal3zXdn4BZ3Lc25mdHDGWG/Xaz64wHExNOhBbxbZsST912\nbGY53Tg16mRiA4TZRytCKCePqfPXnNPc7W/P+vcd8//vfQuysrEh+j3CqB9g0s7y5anLwpXspJIs\nWUz6eFyP13z1vKKM+htvvIFnnnkGAPDjH/8Yn//857F27dqqNqwemKynrms67Xh96UH6u2z1JllF\nOdFTlwmROI+WoSJlA5colCvKU2eUrJx61Q1PTAD5IBBWvzOTg6iYOjuQ+/u7cAtS3HmFckxubLkI\nPR894dGNghAoun2BQZMV5Ym8rqTMKGfU/cGUGAd2whZWvxMRV5KeJwrkWSX8al2s1mE0N4bDE710\nOcuYFgtRx0PZYTy55zkMZAbhuA6GsyNcqVyAV78DvKees3NSVXlwPzKjLvfUVxx4CY+99RQOjh+O\nvF/vGoGYynZsjJvBEruyMYH9BgkFT55b3AiLxGRGjHyLhhajQjkWnig2/4SWtMOLqXtCOVnogjxf\nvv4AYSdM2oda4s2REyDyTmjKns/kEeOdjCXp+UlMHWB0Qgz9Lvsei/HUj2s7BkBg7ANPndDvcS4M\nxT6DQsVn9rFGnRHYuq7LjZvbhnZi1UHe9okTHiCYwEZNjKuJooz6jTfeiBNPPBFr167F66+/jmuv\nvRa33HJLtdtWcyQnGVNnY51svJiLcfr/JuIzPqbOizlIjXEW7AfhFvDUXdfhjKLUU5fEjEmb2d9k\nohcgmJmyEGn6qGPZZzSSG8Vju55Ezja5AVH2wbDIW3ymAp66+HyIETN0o+CE442+zej3c3rZ4jOl\n5qlPmGms733Di6kLlGNGYtRFzyKfiEtE4IXHkRDo93s2/xE/XHsbR78nBCry4Phh/M/2R7DJr87m\n9SMxT1006p6nTgzoOJN/L6PfxeVmxZgluV6wZkLQN7YN7sTukb3C8ayxy1HvuyUWeOoyURzbVuKN\nxfWYN+ETPHVN0BEQNiEWEVO3HKtgJbgcR7/H4VUPDH8HxGPnGRDiqXv3q0FDS7w5mn73n2UTTXW0\nuElmM6M6J2ViAVbtzwrlotNPWZCY+uldpwAAzpz5Dm87MepUKEfYgnye
Op8WKmLvqJc9MjM1g74b\nknfPhjn/d8cy3Lv5j9IJEgtKv0eEsKqJomLqyWQSJ5xwApYsWYJPf/rTOPnkk+ngMp1ABr1CFYRE\ncEbd72DsOfglGXnv2pSltOXz1O2wZ+/9W76eOmsgM1YmNCiKnTzFVIeKqnDHfpSjuTG0J/gFL6j6\nXUK/s54Eey8Hxw/j4V2PI2vn8OHjP0i35xwT4alNAMIgyAxstVLaAM9LZcU/OvgiGOPmBH6x4bdM\n7r4WpPqVSL8fGD+E21+/CwBo2V/RU08zBmCEEUQ6zMSxGPo9oJFj1FCQePJQdhhpK83FTuMS5TYQ\nGGqTmRzSMrEQ6XfPk5yR6sKB8UNcpcKQpw4n0lOP6V7uPmGwyHsnz9J1Xfxk3S8AAD+78AfBNZg+\nmbVz1Jg2x5uh+8+X99SD/cfMMP0u1j334rCeGlx89kaE+t1ybQymh0PbWVBP3RfKAfkXH+EW6hE8\n9VTMW5u8kKfeEmvCAAYZ+t3r82x+OCkTC3g6ELb4VBT9HuWpa9DwT2d+DkPZEdpryAQjJgjlkpIF\nXkRPnWQHsH3IdV1q1Nn0OMuxuGWFAW8S58Lz3uOJVv/ZSDx1//wJPd6Y6vd0Oo3HHnsMTz75JD7w\ngQ9gaGgIIyP5Sw9ORQGSgVQAACAASURBVJABu3z6XWMKScirpYk5qzmJwWfzP8UBjPPUOaGcPJWI\nDGgt8WZYrh2ivsKeOmvU5TF1drtswQjRePHxYdZTDzwNgnFzgntGhT4I2TXEdhYjuIs+vxhT9wZP\nw88TB4CfvvpL/Hz9Hdx+4+YE935KLRMrah0IRMWvjH4fZuhsr8IcT7/nK37DGidiKMg7IKzAiC8G\nShqJUEydYIKhpYOYen6h3MymGQD4GLvFGdBxuK4bmhSwExHv/lzuWGJgB7ND0rayE4esnaNtb4k3\nhapMEs0BwXjIqIfrnlsuSfMKD7cxXS6Usx0bA2l5e8X71n2hHMAzClH7A8GEJ+fH1JtjTX67ozx1\nn3732UMyPhAjyuaHkzx1wOtrYijClKWfSmPqWaRiSaRiKRzVMpvG78nYGuSpe9sTDEMQ3DPfB2TX\nGsoOUzZDHLfFSVJaNsmTjFGspy6rO1BNFGXUv/nNb+Khhx7C1VdfjdbWVtx99934whe+UOWm1R5J\nI1z2shgE9KJBB9uodC2RipbnqQf5n2RQoeIsQSUutoFvV1AfujXuzSrF1Y7C6vdAKMctMMMtUMGK\nccKLpZCBnpbEjZgQkI+BFQQ2x5u4/QtRVwH9LkuTqXxKG6XfGUXvoYnDdDU8AnFiyNPvxeTKygcB\nEkeMa9H0+0iWN+q0olwR9HsQG47R9ygKhob8csOpWDLSqJOwjOWYkqVX+WGHGMYun4UYY+h38oy3\nDGzDv73wHewbOxAS2mWp58avkki+LXK/B8fDdRq8NrKTyBxdMa451kyfGbl3ck5iSEjhHELBSuue\nO5ZXOU6yTKgh0SWQ+y7oqRP6Xdfpe5DpZmT3Sd6zaZtIW2k0xZp8gZ+8bxBGsYWpIuiFgwj9HvBp\ncSGmzrJopmPCkoQIpJ66leXSZ+nYGqLfDf+6iXBMXbKOuhiqY2uz8wWFrBBrS95rWsLEksJDACOU\nEybGtUBR9Pv73vc+vPOd70Rrayv6+vrw/ve/H2effXa121ZziB9wsbAZT11Gv/MxdV4JLqPTbWYQ\nTBoJpK00UkYSE1aa65xOHqFcTDM4T7013oLD8GaaHcn2yONoTN3lY+qsKI+9B1J/XINGPdMEjcub\n0nsmyDk56JqOlJHEKLzBsSmW4j66QtWw8gvlCP1eWCj3wLYH8ba24/Ceo97NbRcNMKXfdQM5f+Gb\njJUNiZrEDAoNOuO9lEa/sxCFcsSYsx7aSI6l371a8DG/tGyh65uM0Yoy6iRPOGkkpdQxwAvINH8i\nEpXSRtrenvD65SgzUSR9gdQb984j
0O+CoQ1qF/Bx1EMRgjl20sd76s20/xFjSfadlZqBQxM9lBUh\nfZ0uO2ryWpameAp2LvxO88XUB3L5PXWW6SLvKr+nzhp1v4COnUHGzqI51hSqTSG7FpmAe/R7IJRr\nFj11UmjJsfmJvGNJlf3SmLqdRWu8hf5N+n7Wd5hiOs9aiZ56XI8HjoVEiEvACin5DAFLqkMibQuO\n8Z7NjFQnDowfAhAUWBK/oVqgKE/9hhtuwGOPPYahoSEsXLgQ99xzD66//voqN632SOhxaJpWckyd\nLYMZk9Lv4TKRAO9Fkb+BwAgaukEpU3bGSkQ3+SrKxfSYn9LmG/WE93GEPXXRaBH6nf8Yo7xtQr+n\nmI+a9fZD98zS77aJhB7nvD2WXSD75ENUOV72t6gPqj89iFvX/Qp7R/fjmb0rsNKvVMVCNNbk+Rh6\nDJYfPiFrlrP7hj11efGZPaP7sLl/a2TbRYhGXVY2laXfbb+PxZlQTl6jToqo6HEkjJi/zeQETiRm\nnzJSRdHvrhAHFz1tx3WgQaN9lBWIBWtvB88zJJRjRGpA8ExywqSSNepizJsga+coy9AcbwpS2vzr\nk2vN8kMF5FmwbfBYHAuP7HoC+8cOYiw3jrZ4q9xTjyw+Uzz9bmgG4v67yrf4iGxJXcLqNMWbaL32\nfLUAiEcuCuWa4mxMPQ42fVNchraYmLqXhTCBFsaoB2sukFQ+3lNnC0MBXrhJVnRGZPUCQ6wLrIJF\nWSOxpnxGIpzsSnXSbaSPk0lvwxn1TZs24VOf+hQee+wx/O3f/i1+8pOfYPfu3dVuW82haZpXQaoC\n6vdsxApkPA1kS1e4cjhP3TMihEUAgjxwbnEX0VP3C0AE9Lv3cUwIM/mQpx6Z0iYvpjEqLDUIsOtN\n8zn53r95Ki5uxDmxVc7m03IKfQx5Y+oF1O/bhnZgy+A2bOjbxLWXO0cE/R7305aIR04MO4HYh6LU\n799fcwtuW//r0EBK2i56taJRl4ETysEL8chyh2Vg1e/EYOcEGnKYpd8LeOpE/c7GwTXJsBPXY9La\n9LK67OIzERkiEooKhHJh+j0qOyXHCOVaYs1BlUn//klf6kh2wNAMOoES6fesncOju57A4tU/hgsX\nbYm2ULsBv0wsMzEiE3bLtYqg3/k8dbadMrC0N7kPwrp4MXVesc5fS/DUXVH9zhh1I8YZdX4Z2oiU\nNqHK4WB2CC5czGzqotsMgVoXCzF5ZWIDo540EnR9BD7VT/DUxw4hrsfpAiz0nh2TGnWW3QR4o04c\nDxI+AviYOrtPLVCUUScDzrPPPosLL7wQAJDL1bb0Xa3QFEuVr35nKFaRxqH7CoKNcXMiqFAmGChD\nM6iRZUvGks58aLwHj7/1DFV0sojpMbgM/d7mG/VM0TF1M1L9LlvOlR0I4gL9zue483nqCT3OGTTT\n4VPaConc8sXUqfo9Ik+dUJWi2IlFaNKjk5Q2z1PPMp4R602KfchbepX3lNl+QY4dyY1ya9mLEHNz\nZQgJ5Wxv8kTin/k8dfK84wL9zgqGyPNMGUnOILETDfJsyeRQ4zz18LCTMBKhxTiA4L2y35MolCMg\nE19XeL6kXOmhicBTj2LSSJU1AGhLtAYZMUJMPWHE0Z5oo0aR0u8M9cyiLdEaEnEBXj9iPfUgpbQE\nT10PhHL5Yup0yVPXpc+GfL9NsVReJoc8f7p6n236RYpk6vc4zY5yXAn9XsTkud+v8zEzNYNuE/u8\nIdDvccFTT8aSdH2EqBCg4zo4ONGDuS2zJYWULIz7TGRnQjDqkroFXalgAsKq39l9aoGijPqJJ56I\nSy+9FOPj43jHO96BP//5z+jo6Ch84BREUyy8gEMhsHm4Mg9KVljFO872Fo6It8DwY+Dedu//hh9T\nB3j6nXTcgcwg/nfnY1jD1CsmoJ6635lbEkV66gz9HqV4Z2e9ZFBjP5qAcsqvPDUdEzGmEhXZxtPv\n
xcXUyXPdNriTxsgo/R4xSybXpbnGUqPOTxZYoZzItET9G/BYIHHQPMwshTthpbFnZB/+Y8UNWHN4\nHSOaDKdAAQgJgqLguIGnXoxQjzVOxFBYjhnyAGN6jC4kQsBOzoJcX08ox1LuMqMe1+Oc0Ii2h9Lv\nQT/QNA0fPfFifOi4D3D7kuNDQjnHxpg5zsWbsxGT7qydw0huFEkjgYSRoN8DobWJdxzX42hPtmHU\nn4TRyZAhF8S1JVqjPXWG7SD9y3YsDKXzZxiRa+qMp56Pfifvlr3foazHBjTHmpkFh8L9I4ipe/R7\nlsmRB6LV77Zrc56x6ZgRjBh/TVJme6ZgKNl8fzFPPckstuT9HazzbkU4Fr3pfliOhbktR4UmXTnH\npCmLIU+djak7QUydbSsQCOUazqjfeOONuOmmm3DHHV7azsknn4wf/OAHBY6amkjFk0WtDsaCDPy6\npkk9KNaoiAZu3PLSSXQmnYRdjYsa9VjYqBPsGH4rpH4nMXWRfhdn8qKHyxafiUxp84/pSnYGngqz\nrwbNX7ZTElNnWAXb9fJ3WTFKzuZn8mTlKdM28cC2B7lyqGy7SCzwl6//Fn94c6l/D/lT2kKeulSV\nGy4TC3jegQuXX3fbijbqbEU58k5Y1e24maYD2UBmMLJaXTH0O99+h4Y5ismTD2jkGKfcFe+HMEes\nQZLSto4F0w3XyxaRMOJ5PXWWBdE1HZec+GH89Ykf4dtEVqFjJhReu5xQIReOSXN5xmQsN4Y2v/YC\n+R7I/myN945EOyzXxrgVpGGyBo2FF1MPD7e6pnMV5QKhqs31LRm42u9GYfU7Vbwz3wP5fpviqbxl\njIPiM947Iu+DeOTNoTz1YCVDMRMgKqY+YU7gmb0rYDkW+v3yxyz9DvDeemhBF+HZ05obrhWpoTgw\n5sXTj249SprjPl4E/W5S+p0x6iD0u1+MrNHU75lMBk8//TR++tOfQtM0vOtd78LJJ59c7bbVBalY\nklLAMrpMBjalTQbZespke9pK49jWubSsprePT+czQjmWfhdn/AfGDqJFqD4Xp0ad0O9eSpuo5oz0\n1N08MXX/o5iR6sTO4d3ehysUuRHXd2ZBFoexHBsxLcYNXqaT457RsreexoM7luFvT/5rPLN3BVJG\nCh99uzeYu67Lt8u1vVWn/PPZzCptskpkE9RTL4F+Z/LUvWOD9CvW8EUJ5TRodBLIGvW0NUGvbwqF\nf7jzCBXlCoEMop6n7tPveVZpCzx1ln63AMFYkP4YJZRjr2/aOc5wyYx/XJcbdWJwRU8dQGgQFovr\nsJNKUSAaRb9n7RxGzXGc4AvhyHMW9RlxI04H+pHsKCOUC+dKA0B7IiyUi/krnLG131OMdymyI943\nFY6LkwVdgGKNenCOCZq+1xRMOiX9I+eYfgVBr62kr9OYej6hXJFG/eFdT+C5fStxcPwQcv4EewZD\nv3vXi8GEryUQ8tTjBr/0KrtENDtxY8tc7xs7AMBbYXPLwDbuWmxMXSywJaffw0K5hlW/X3vttRgb\nG8PChQvx6U9/Gn19ffjP//zParetLghmosV762zxGamnHmHUiXK8Od7M0+9O4KmT1Zy6BGqHpaH2\njR0MGYGY5q23HdDvntEXU15EoxVVUY790B2/6ERnsgMuXAznRjgV9oSVpgtMeMfygzj523IsGL7H\nS8Cu0wx49OCElcaGvo3eM2OMqNh24k3kKM3IDyYiMkXE1GWrtAEB/c16gIVi6uT/jsRTnzDTtI1e\nCKK44jOFQPox66kXl6ce5+KBorEgzFEho+64DjJWlosby95FwohzYtBgX99Tt3hPHUDIeDb59Lvr\nszZsRTkSdgpqucvp98HMEBzXoZ46uQZhonKM8e7w9xnOjXBaBJmn3iqh34nGwdCNIAbrP6cJoXgR\n4IkXWQQxdYZ+zyeUYxZxEcEK5WTFiUxf/xJk93jXCYrPiEK5gM
pnx458ZWLJ9hcPrMFAZgAaNE58\nBsg9dbqeuhBTJ8r5tJXhGVKmPaSS3HGtx4S+KdP21O8pIxVablpc4Ceux7j0O51pE9mnVihqut/X\n14ebb76Z/v2hD30In/vc56rWqHoiFQ/oJTL7HDPHYWiG1JMAeLU6+Y81OHxFOTbH2xPktITodxJT\n95ZKPGfOu7BjaBc9Ttc0aJpGY5imY3J5vABTqMG/dms8KqVNVL8zq7QxbRU9YkM3qKfSzyxeAwBz\nmmd7JTuFMp3BuWxaOlL0tkzblJZ83e7f/xhTbUxsO6VIfREPOyiajhVSak+EjLrcg9D8ErCWY1Gv\nKvDUA6OeyUO/a4xRJ+0m1B/gFWsh1xdDHyyCQhthY6ppGrqbZtK1utl2JHQ2pS06pk4MYYypKGfa\nZsjAUE89Qv3OYsJKczSq1KjridDACQT5xVmJUI5MbknbAqGcy71/27FpGd3OZAcOTfRECln7Mt53\n1OaXAI1RGpmnrklMHfA8dYuZDJXiqRN49Sgy9BmQfpUyUlSkmNBjYL/eYJW2QNsgVkBjn4/MUydo\njjcVpN8TTAVBMskKysTyMXU9Ik89F/F9O66Ddv+Zu3BxcPwwOpMdoWfJPkMxR7413hIo4pnxOh1a\nzpfx1Ef3oyvZidZES0hdT+rvt8abQ6ET0VP3+m+CsikN76mn02mk00F3mpiYQDZbmphsqqBJKEDj\nui7+7YXv4MaXb4o8hi0WA4Rj3lH0OymP2Rxv9gpWiPS7psPQDcxqmsHNIjVoIQVwX8io+znGjgkN\nGs0vFT316Dz1aPrd9mlzYtQH/FjwO2acin9/zzdwUucJHFUoo9/JRxbTY/jm2V/Fe486x2+vJS2R\nStrNlnNki6x424K0I5EdkH1UZIJDBkJWJWs7Njb2vwnTMTnlOqEfyYA8Viz97r8vg5nwsdQ956nb\nViRFLqbxsEjoccxtOYrbRgbfhBGHKNQTMZwdoe+Spd9zTjimTjx1cUEXGVy4nKc+u3lWaJ+4EaOF\niFgUylM3JMKoUXOcqroB3lMntfPZ83FG3ddskHBVjIoFeU89ocfQ4SuiWU+dTR1kIRPKsW0nrAiZ\nNBKjTnL3gWhWxBPKycvEsmE5toqciCaWfpdqI3hPXaTfvXeX8tsZndIWVYJbrK45YaVpLQAWbL8n\nz++vjj4XXz7z83h7x9voNl0PjHrGykhT2oazIxjOjeLYtqP9c4slZj36vSXREvqNT2nLIW54NU4I\nOxCo34NxuFYoylO/7LLLcMkll+Cd73wnAGDjxo246qqrqtqweoGmdPkdnwwORCUqg1hcI8bEkwHv\nY9o1vBsP73ycG1AJ/d4Sa+a8e7ZMLIHBdGZd072BLWJlRg3B2t2mT3GTOvKipx69Slt+9buu6zTN\ng4hakkYSx/kfSFyPM7Ftm24j1DKZLRu6gZM6T8BJnSdgQ99GP/4V7UkSI3rHG/dg69BO7rcsQ7+L\n55AadYkQiTyvDX2b8Os37qbtjmkx2Fqgs5B56uyAJcZDRfrddrzymTG/3OyElaYTBpZ+v/wdl+Gl\nQ69g59AuypAAcvo9bsQxt2UO1ve+EbTDb1O8QJ665Vj4wdpbaT9n1e+mY4ayEAKhXLhwigxsTH12\ncze+/b5/Rc9EL36x4bfc7ykh+6RQnnpMM2DBWwWN9N0Htj2IZ/euoPvYjoO0673rzpRn1KM8dXL/\nxGsk35HlCp66wXvqAcMRpt9jegwpIxWm0xnPMCFkuZB+3hZvoRP2KFaEFcqJRr013sKt0ua6rtxT\njzUFK6tJJtU5x0RropVhBIhRD96FlzmUyat+j6p457g2TJt3VMhSqyzYfk8m1qlYCmd1z/O3xehv\ntEKoQL+nrQxydi6g3v3riO+N3GNrvIUbf71z8p46eW9t8RYMZYfD6vdGo98/+clP4rzzzsPGjRuh\naRquvfZa3H333dVuW12Q8O
lnMmtkKdIoEOEXQYhSdkys792ILYO8ECPw1L1ZslgsI+qcmqaHSmWy\nMBjP0rRNGP4koMlIFYypl+OpE9U298GxQjn/gyJrC9uODUuzQ/dFjX6edcrJRKg3PRAyNMSoysQ4\nspiWLP3HdEykkKSqYMB7nrquI4HAMJF2R9HvokfClki13WCRlc5kB/oyA5gwJ6DFNb8Nfl1tTcd7\n556D9849B//2wndoGMi7fvjTjRsxzBUKaBCPopCnvqFvEzdx5YRydjilLfDK4vS+8qnqReM/p7mb\nK5JDztMUS3HtsAt56iQcwdwfAPRlgiwJ27WREQqEiDF1sf0kpq5pGrc+Oku/k5DWmL8IUUwzuKqS\n9FzxVmiaFvKAOU+dZrkQT93r57ynHn7nhi+2owI20agnWgC/i7pwYbu2dILLeuryPHWPYqbFteia\n5kGbmuNNGMwOIZanTKzoVBB4Hr3XrmvOuRIt8Wapp847OvIKfeT/RGORtjPcJP+OjffihPbjceas\nMwAAx7XKPXWC1nhLaEznysQ6Flr9SaDoqbMFnGqF4iS0AObOnYu5c+fSvzds2FDwmMWLF2P9+vXQ\nNA2LFi3CWWedRX+7//778cADD0DXdZx++um47rrrsHr1alx11VU45RRv/dxTTz0V1157bSn3M2mI\nKSxsXWBxyT4C13U4OlykRk3Hgq2FPxQSUydCuaybpdcB+E7L/lvTwvQ7C9bom44ZlFGMJUODc/6l\nV9lYWA4Zy1s1yXYdxPQYOvwBcsCPqRshA23hlcOvYc3hdQA8b2wcE7h/659xgZ9jzD6rhH9Mvjzq\ncWsCjutIBwd2oBZ/Nx0TLx5Yg65UB94x41S4riv1GmhNbOZ4L+0oDldSaIUz6swkIarWgUe/B6vz\ndaZ8o26luZr5bF1tIBgciI4ikn5vFeh3xlPXGe+Jheu6eGHfKm6bl9LmU8+uHZoApQShnKHpeHf3\nmXilZ710DemkxKPnjZp3HjJZILFgKpSLKD5DzpHUwysaErDq9w6JUTddG02xFCbMNPWmSUwd8Ab7\nYB2BwKiTNK60NeGrw8mzEIy6fy5xssoaCkK/k4nCoD+xIQsxee2I033ZIjje/73nK7IBrHgL8FMM\nhXcT02Ne3fQI+v3wRC9s10ZzvCkQyllyT91rC0+/s8xQ1HfBprp2Jts5YTDf1rBQjvudEc+lmJi6\n+Oz3ju7HMa2ePZvVNNM/Rm4OW/zwKIs0l9KWo98BmYSJRt0sQXg9WRRt1EXI6gOzWL16NXbv3o0l\nS5Zgx44dWLRoEZYsWQLAi9E/8sgjuPfeexGPx3H55Zdj3Tpv4D/33HNxyy23lNusSSMQinkdnzXq\nWTsnz6V1nUgDTM6lSYwwqVzVEmvmYq1snjoBOwDo0ELpWSw4T90xqVeXMpJ0sGDbTkAEYSTHnP3t\nzk2/BwDc9qHvw3IspIxkXk89rnvq+z9ufZBOXsgA9Eb/FrzRvyX0rOJGHOlsJi/97hn0jNQgswpp\n8fecbeLeLX8EAPzog//PTy0LT7QINcker2s6/u8pH+OGS9JuNqbOXj+qKqGm6XCcoOY/ifFOmGk6\nyxfragPBs2UXXBERN+KY3dzNbWOFcrI8dcd1cPvrv8PWoR2cDiKmB+lBpmPS+yHGNkmFct4+hmbg\nC/M+g4WnfQI3v/pz7rsB5DS9OAkEAsOQNJLI2Bm61ClriFjjzSqfoya6bEydeOoc/W6biOtxtMQ1\nJoWJMaZajDI/rDFNGknomo4JK00nvOJ9ndp5Ek7uejttB3f/Ek+dpE4Rpog1ysTwJ4wEF8Mn7ZFB\nNOqyMq3kmYsLDtmOjbWHX8O6Xs+B+6u576ETi6wQUweAU7tORtZfWS1q6dUokHAUkF98yRreqFr6\nAKHfvfsSl0EGvHfR4xd/ImGUcjx1x3VguTZ9D6d1nYLdI3tpPyPvZTLLP5eKooRyMuQzKgCw
atUq\nXHTRRQCAk046CcPDwxgb81fiamrC7373O8TjcaTTaYyNjaG7uzvf6WoG+hLssFGPoo5CSwRKhHKy\nymjEqDfHm3n1O637zQ7qTH1oTaPFDWTQmDrjpm3Sjp6KJZG1+RXF2AGeFWGJeeoEA5lBz+D460Br\n0DDhL4DBDdL+YM8aPdnALtLvOSdXcBAYyg5LKUTeU+eNOpuf/9y+FyPfJfHERE/9L7rfiXd1v5Np\nt8xTj1a/E5C1tkn/ao41efqDUJ66LTV6QW12WUw9FqJoWaGcLKb+1sgevN63GSe0H48rzvw83Z4w\n4sw1g8p5xBNJUaFc4J3qmo7meBMXP6fnk4i8OJaGeOoxsoBRsFqgmF4qE8oljURkSMpxHKqf6GCE\ncuQ7INoGtkId66kbOku/B0VmNE3zPHwr4y8+4onSdKZM7FVnfxl/feJ8ADJPPXz/xKgTI8TS7+TZ\ncEpzGouXaxu6fVHiLD/f23TMUHyXiGjF/vHiwTW4a/MSvN63Gce0zsVZ3fPo90pj6kw//OsT5+Pf\nz/0GFfiSc8mYN+LkkP+TAlOAvK8QxPI4T0DwTA1GKMeOQSz2jR30+qzPuMgmCQDx1IWYut9/aIqj\n//7+6uj34Pr3/xudUHC1HmqEvJ76+eefLzXeruticHBQckSAvr4+zJs3j/49Y8YM9Pb2orU1+Fhu\nv/123HXXXbj88stx3HHH4cCBA9i+fTu+8pWvYHh4GFdeeSXOO++8vNfp6mpGLFZczm4xSIz7H0mz\njlmzWrl60clWHd1dbaFjcm4O7clWdHd7v6USCRrHAgA9BlgSA0k82OPndCO5Iw4n7aC7uw2pHq8j\nzOhsoed0m+YEbYzHoOd8NTUz4BCDETMMNDeRIjI24rEYurvb0N7UAmfYQccMr8N1d7chFmcGSMPb\nL2HEAS1cTx4A+tzDsF0bqXgCs2e3IxlLUEq/taWJtrelyftQ2BlySzIFjPLn445JpWCOWkg28e+z\nPdmKkewYuptnoHdiALk4Xx2MINYU3ItQiwdGKriXFw+9jAtOfY/0HK0dCXTPbIO9NfgI4/5z4dp0\n2Ls/lnJ3DRvd3W1eaVZh4CTHx2MxZO0cWtq9d9zZ1oq2gRZknSyog6I7gO4iFguu25RIAuOAHvfO\nlcqGv8uEHkd3dxt++4mbcWi0F//+xH8h4y9cMqOjFbNmecxKImnQ8+63vGf9vre9Cx849d342Xrv\nXEfN7sRo1h8gE4CV8YqPdKTaMJobw6yODu8czf4ymExbm1Ph99zV3hp6htlkkJ7Y2eb93tXaDvQA\nLckmDOdGYMQ1tHbyg7yu6fRcyXgcSHt9q7NDeOk+4ikd5ngOCSOO44/ynIcVB17GigMv49cf/yFs\n2GhNNGPh2X+Nh3YuR8KI47ijuunYl4zF4cL7Ng1fmzmnuxPdrW1oTbZgwhxHxs6gs7nN+84OBu1g\n79nVhLTAZIL+3tbsGe85MzvRFAu0L3NnzKT7n3XMaZg39xRoGnDP+v/xnnUihe7uNjTn5GPgJ/7i\nIzjvpHfj0W3P4OmdK9HWmUQqxw/77U3eONPm9+m2dq9dGzd6tSHOPvpMfPKMSzFnpjchiukBc9He\n2hR6rwDQlfXup6k5hpZEeMKRiieRNjNIxBLIWlnEEjqyOe8bnTunKzKU0pQMnKfume3obuOv3WL6\nJWPjCRw725vQ5DT5BDttpTGjqRNzZnv31X5I3n/OOeGMkFF2XG8czfib25qapc/BafLeI/m2ZftU\nGnmN+n333VexC8no+iuuuAKXX345/umf/gnnnHMOTjjhBFx55ZW45JJLsHfvXlx++f/P3nvHyVXX\n+/+vU6bvbO81GJfCVQAAIABJREFUm0022WyyIT2kUgJCkBYEQSHqpYgYRK9cjFGJ5QqoXH/W74/y\nxasCV1DkXsNFDNKUkgIJpJNkUzfJZnvf6XO+f5z5nPmc
mXNmZjdbksn7+XjwILs7c87nc8rn/Xn3\n1Xj11VdhNXgoGF1dxgv8cGHm946ePhw6eUrngz7V1g5XML7mvSfgRa41B21t6kqmhPULbr/XE9cZ\nCIju3D29YYSDapRuW1sf+vrVOfX3+tAmqcfkI0NDQUUraeqUHZrGbxEtCIV8gAL4fVzZVkVAW1sf\nxLB6u5tOt2NSeSna2vrg8fG+ShFtbX2QIGHQ7zNs6/lR034EwyEoYaCtrQ+yIMOLSCqZN6xdAyNr\nkxDmXAgRd0PAF/0OQuqL3NGrT1W7rOIi9AcGYZWseOnw39B4uin+4AA6e6Lfa+nsjlwT1aTcwm1C\nuzw9ONHaHvd9AGjt6EF2uA89A1GBoyiIjjGCzxO9vnZJLS3cOziAtrY+tb86FF3wFf9sBIMhtHSo\n4wt6FdhFO7p8PegfjPRG9/vgDwW1+wYAiDxTAx6vdo5YLJJF+7zPo947pql7B0Lo7lSPP+jxaZ9r\n6+qJjAPo7Ii+Sz2dXvQH1O/2D3rQ5x1Ua2sjor17FPVZjTR20o01FL/hCHrjr2HvIBdB7FWPh0DE\nWhQ5z6DXi1Ot+tLAghA9VyikRM4vo7/PePEeGPSi19sPh2RHf7f+wdx9rBH+oB+CRUB3pxc/Wroe\nvqAf7e3R+y8oIrwh9Zr1DarXqK/bB8nTB5tgQ4tPNeNaFRva2vrg80QFAD/n2I2eEuSei4B6zfp6\n/XDKDk2oK97oO+MbDGFZ+SJsb43GM4mKhLa2vrjAUKtoQVAJoafTC4eQiZBfvU5ff+UHcZqzBVb1\nmYo8051dAzgcbMae1oOYkFmJ26feBoSjY5UFGcFIVTfvYCjuvgLAQJ/6XPT0DcIvxysHNtEGD7yw\nCDJ88MHj9WMg4IUsyuhoN9asASAcjK5JPV1eyF79uZm2r4SBwV51jJ396jNuFMzpkl3cOxO/Rjtk\nBxyBTHRyhaJYTMOJlnbtPokh2fA69EXWV/bcGH1mOCTaHCQU6mVl8SkFqVJYWIj29ujC2draqpnY\nu7u7cfDgQcybNw92ux3Lli3D9u3bMWfOHKxcuRIAUFlZifz8fLS0tKCiomLY4xgqfEpbIhMuIxRW\nKyHxtdnjzO+hoM6nIgmSvhNbJN2MdZOKRr/rfW6sEAVfV91lcWlCXRZlIOSDJEg6fzwzK0WraUXn\nxedD8+Z3s3zSwz1HEVbCOl8mIuYtnX/cwITGm9Uckh0DwUG9CVKriqW/7sWuIkzLm4ItzdsAAG0e\nY4Gsa30auVcOix0BX7/W3xtQd9k9JimK7LoOxpjfY+HnGttzm107t8Wl65gGRPPUmTvGKlnhtDjQ\nPNCijT+R+T1q/jX2qfPn4VEbsMQHQmnjiBz/gbn3ot3TAVmUuRxttfWqWllL34uAjUOKeVZjMXS9\nxMRgAFHTcjRgM2Sa8w9Ei8LYJIth3Ir6mTAGgx64re64cQisXHHk/BkWV5wfWhIlhAKsj4A/cj71\nOHzNc9bsxKy8dGztASOfuiRIcFmcWpwKb36P1sHgAygjKVyirBNal1QuA7jSyFq525geDQoUzvwc\nfT52te9DWAljVuGMuHnIogSE2ByMNWqzQDmGXbYDvh7tuQ4pIbWbYJK6B7wZ3OjcvE9dSw+MrI92\nyRa3hvPlX41cWpdWLNXNB1AzIzq8nfByJan5Mrk80ej3sQuUG7ZPPRmLFy/Gxo0bAah57YWFhZrp\nPRgMYu3atRgYUIXBrl27UF1djQ0bNuCpp54CALS1taGjowNFRUXGJxgltLJ+YT/XxEC9YbEdzoDo\nAs4XzTCKfud96nxQi1N2qGkzkQeKpZ0A8Q9tpk29fiKXHsMvKuy8AgTd4qUJda0YA9fDnXvJ+WYh\nZt3RWK1kvodx9Pvxiw0PL3S0qnkxgXLq+AzScgAtbYSvmMbDL/5sY+BgVQGDeotObBU8hlH0u1Ex\nEX4zYpNssEm2qFYc
GUdsvWggmtIW7eplgVN2QoGiuWNY5gF//zUBG8mXji0VrB4rOqZYtxlb9AF9\nHEUgxidYlVmBOUUXqL/jNhLekA82yRrXi0CLfhcT33sjP2nsphWIPqPMYqZuKPTPIu+zZs+RNWn0\nuxdO2W4Y8BRK0nCGxZgAfPEZdXwObjFnPnWz1D52DHbfeCEyLbcW1ZlVKHYVagGTgP79ZteLnyf/\nnvOxDEtKF+Dqmiu0n4022UwQxQv1MFoG1d7zE7MmxH3PonvnjTcw/AaSrTH88+qICbTUGg8lKTvM\n3z+jc4uCiBxbNnLtOVoxI+ZTN6pYGBs7wbiuZiVWTfokLq+6WD0v9zcWIDwQGNQ2CXyZXJ5onvpZ\n4lM/E2bPno36+nrcfPPNEAQB69evx4svvgi3243LLrsMX/7yl7F69WrIsowpU6bg0ksvxcDAAO6/\n/368/vrrCAQC+O53v5vQ9D4asMXEHwpoQiLHlgVP0KOVmuRhC7iuNWrMji8YU7zDKlrggSp02IvF\nv1DR1qsxvmWrG62D7eA9GS5uUVGj3iVddzf+OGwh5v3AukA5rrBJsmhNPj+YwQu6ZJp6bO1o/jux\n6VOsuhfrCR9bEpdpKLpAucgx2PVhfZEZHV69SZcR1ALl9NHvseg0dckGQNGC5phwd9vcQL/+eyJL\nadM0ZKu2qLKI50A4ACix15NFokcEQ6TPAO/rsyZYbGWuvjh/z6PR3OZBjCwNyipZNQ2d/V8Spcgz\nF5+exZNcU2eBctEAI2a9isv5N9DUrQkC5VgapCOygeZhmlZCoS5IuopyfCCYkwtaY0Kd3ZPY54Zt\nQOyyHZ6gRxfJPT2/DtPz63THYZ9liFqtA+NrbZMs2rsduwYZbbRcshMDgUFNIPEpj+y6uAyEVbK0\nMv5YYa7Zk02yautlbCCZKtSDSYU6Py+zaPVvL/i6Nl87V/fA6BnUaerc/Sh05mMmFxjLzzPfkYvD\nPUfR44+6PZwmQl0WJAgQxjT6fdSEOgDcf//9up+nTp2q/XvVqlVYtWqV7u8ZGRl47LHHRnNISdHy\n1MMBTUhk27NwauC0oabOtF6+GYUlxqQcCAfh48wvFu7hYg93tENStAFC7KLACmIMcloni1wFVFOi\nFIl812nqkV2zTauwFF0kzaLfGWZFRdjLpRMkMcVnYuFT1Zi/3igCOk5TtzBNXf1/bHU/q2iBN+TT\na+qBiFCPNLKJbb3JzJsui1P3txcbX8ZLhzfqImbN+mAz7LINChStxKqmqVviNfWo+V19yW1cy1G+\nBr0IQWchmJg9AR+27cIkTnOSI/ECLG6A32DFjlmtdJbc/K4bK0tvDPkRjJynLKMEdsmOPK57VpYt\nU3s2Af3zzTDMU9dp6vqUNlmUNWGaKPqdCUrVEmFsfmfmV6OFdyDyLiUS6moJ51Ckb7pfNxf+/XNF\n/h00cZEw4eaICHUzocRr6kbdGSUTTZ2/7rEWCaP5uSxOwMMpFpF1IhwORbu3xUacxhzLzNXAnidf\n2A+LEq2W542xbLL1ihVksluN+2sYzctsQ8GvxXwxI6Nn0Mz8blQVkMHy2nt8vdoG08z8LggC7LIt\nYWGmkWZUhfq5iFZRLuTXFhOWc3iw+xBePfomLqu6SFtYfJqmzgvR6ANhk21x5nc+aDA2nSLM1UqO\nK2IR0Vj7uKYm/K5eimgQoiDqLQesk5Kk9/0CxuZ3fjGyiLJhxzpZ+6yxUDfSDGIFa/x3IsFREaF+\nedXFqp8tsuBnWTMNNxkWiQl1A009ItSjZTcz0BfoR1vEhJ9jy9aNqyemnjxgItR1lbScWvnNEOcD\nZvmvscdSze9Rn7rWiCSy0QmGg5EqdtFrs7xsEQoceZiSE215zDQLp+xAj79PZ36PFXCyIGvaU6+/\nHy2DbShyFiTU1Nk82f2wSBasqFyOi8oX6+b/r7O/pHsO2ILONxNJpqmz77DFXhYkSK
Ic0dRjzO/c\n3LQeAoJkmubZF7HSGJlI2b1P5Mtl1zkUsbDoYkMSaOqxWqfWeEaKz2fnYccRBCHG1G3gU5d4TZ1f\ng/TzMTa/O3Vz4P3gLAbFaCPEX6vYWv3ROagbE94SYJdt6IncSjt3TrZpUjvBDcGnbnL9eFgxI8BM\nqHPm9wQbBv7nAk6oWw1iK2K5ufZ63UZttBk1n/q5CuuXrdPUI0K9sfsI/nL4FZyO+JsAxO08AX2B\nBJtki9TO5mvBR/8dq6mHucCSWGHCNFU+2INfVARBxITMSlRlVpj41KO1kBkhJRzNNdaCTKLjN9Ng\nNE3d4DyAcQEJI6FuZH5n41tathCfqr1Gd84cW3ylKeZL1An1iPkww8I0dXVhjy2Yk2dQijKWZOb3\nioxSXSwGs4SY+dQVKNGWqKJFZ2IFoqbIWJfOjPxpuuvN7o0jMkde2IgGmgYTBsd6m/DI+z/XamDH\nfpfHIsiaZYh9JvaZyLFnG5Yz5Z9No+IoRs9LvKYejDO/h7lNcTT+RDLX1CP33inHa4HsmUzsU1fH\nqdabCOg0YqfOpx4pHsRVnTMiOkfzYifsc7xVIupTNza/8/+O1dSNNi3MysD+H23oogYW2iSrcS44\ndyzWGjoWtjHpDwxoFooazsrE1lTWLCkcDiEQDsa1l407N1+EK0GpbAb/DBr71E00dTHW0sWb36NC\nnb0bZj51AJhbPAt1ebVJxzpSkFCPIRr97ueqfukFCa8pssWZj363sPrDkcj22N7IvFB3ajvWqGnU\nzKfuNtjt2SQbVxpRxJdn3o7b6m7S7UpZYJFN86nrze+WSBEZI/M7vzDxUcGygVavF9Dxi4hRJC2/\n87bGCHWjso1GQpgJBF1DlThNXX35mPbM7keeyaLEY6QB8nOtyqzQrrcv5Neur8vAdMnuabQmu1Wz\noMRi1upXG4Pm11UXlGTmd/53/pAfJ/pO6aLwjc8haxukZP5OhpH2YlSQRhAErpuV+veyjBIsKJ6D\nOUUXRNrdxmvq+jaaUauWmU89EGON4K8D00iNaukz2DMajJT31Wvq8YFy0frw+mOysqTFriJtzEa4\nYjRoRtSnbhIox7/zBvc/lmKXmo0ULZPK+dSDHp1rQXcsgRfqxuVcbZJaJ34gMKjdo0WlC/Dvi9bh\nSw1fQEOk7jqLAfKH1fa+iQrPANHNNKt5nwy9UI9eH+Yq4Tfe+qZZKWjq/l7NzWdmfh8PSKjHwDe1\nZ0IiK8aMyi8yLChKp6lraT4iXBZXXNlTXmuPaupR87tZ9HsGZyqKjtei7W5Z+VhBEIwD5Qyi38NK\nGKIoIt+Ri7zIw6oX6tF/My2Xn6PVxO1gtONeUroQ6xc+oNO2LQbaGhOKRppMvoEQ1vx3BmVimVBn\n15R1lgPUxTOZ4ATMNPXodanKLOeeG79hRkTssdg9sHHm91jMTJsMdp8MhXpsZDwXKMdo6j8ZV0M8\nFosoa8+v2Wfiv6N+jl/ozCqexbpxZFHG6mmfxqTsaq0pEPOJs3vF52TzfRLMNHUGu0ffv3AtPjv1\nUwCiPnXedWE2xlA4hEBI71M3Euos4O3CEn2Boy/PvB23TFmFC0vmanM1gh3HbtHff82nrss04DT1\nyP2RDQSe0YZsWdki/HDxt7TOiuz9DSthDAY8poJK5q6VkTUKUDdsLtmJAf+Abj3LsWdjen6d9qxK\nogRRlKKWqwT3AYheM6OMFCPsJkJ9QlYlrJIV2dyalshfz787bN1QNfVI7EECTX2sIZ96DKIoQhYk\ntd4108JjFlfeP24U/c6EkSTKhtoa78c2yhENDUFTZy0yvaFYs1xq0e+hsJoP/c35X9MeaqOoZED1\naZ9Es268/N/NNgOfmnwNbJINdln9j/+bkaYeHXf8i8s2HjxsMeODEZlQz7Dqrz+/Mcm0ulPSPo2E\nBf87h+zQBIYvFK2TbpdtaMiv1/nWNaEeuQeq+d
14QUiuqUeEemTxtep86sk1taa+k3EpWmbnYGNN\nBXY/eE3PzBIgiRIQDhhuGIqdhdjdsQ/vt3wIi2hBpbsc+7saDXvNS4IEIYmOwp6vHHs2JmRWAuDM\n7wk0db4zWVAJ6c3vfJ565N/zimahJmtCnGk6y5aJJWULEQwHsbTsQswvnm14PnNNPaqlanOSePN7\nJNfdqISwSYopL7i10tLhALwhr6mgsnC9JMwC5QDVXdjp7dICf3VuPSEqnGVBgicQbTyUCM0qmcT3\nzuA3zPwafWvdjfAEPDqhr+vfEfP+CIKgnVsURGRZM9Hj69UULRLqZzmWSMMEv4lQ583nvmB8oBz/\nwPJCnfU2Xlw6H++e2grAIPpdCSOs1X6PyVM32BVbpWg7RF7QsNQ8Ng7+XPo89XBcCpyZoOaFU9Sn\nnjxPfUrOJJRy3cP4Y+o2EDELu9GLa6yps7aT8Zp6bPRuplUv1JmwYX3NjTCqBhhb0U1z24T9uo3e\nFxs+p/ucFKOpWyULHCaaelKhLsRo6jH9AfhANSOhfrzvpGY+TaSpR/89NE1d71M3EeoGAZeM2YUN\n2N2xD/2BATTk12vap9F9klPQ1Pnni40nFZ86GyOrXcBvPpn7zCHbtXdCEISEsRqyKOPmKdeb/t0l\nR3zqlhTM72L8nIw2KEbzi/W7s2eTtTc2inwHotkHbgPLIY9LduJksFkLCtU1qBKj8QGiKOq6CSYi\nan5PTVNnmzdA/wxmWt1x66lOUzdp68qufZYtE6cHW9EbCZZLtLkZa8j8boBVtOjMqLaYBU9nfo/J\nuwT4fG9Z54eeXzwbjy77HhaXLtB+p2nqke+0ezo0k2jsg1voLMDF5Utwd8PndWNlJiv+ZbeJ8WZx\nLVAuxqeeSKvTmd+tvKlKjvzdODda1vkd9YuTLiXGIFAu+jcjTT1+sWTH5zdbIc1kLOutDZym7rZm\naIui2QIGRIuG8MzIr8Pi0vlYN/9r6nm4YL1UzO9s02EVrboIXZ7YALpY2MJYm1ODKncFphZM0v1d\n3+UvflFvGWjVFnAzocbfx0Tds3jY53gt0Cy6XCtiZHDsGfnTtOejIX9a1JplsMliDWUSoc/pjhXq\n5osy+5t2z/jiURZ9sNlIkGlzI9uWheocfSVNFiCYzKdurKnHX99YEz2zBEQDC421T3bN3Camd4Yr\nEjzZG6mqqK/CqHa4c8h2SJHgUSD5MyYbWCsSMbuwAV+o/wyq3BW6zBEj9JH1xgWn2HVk68jpwdaz\nSksHSKgbYpUsEZ96VFP/6qwvYlreFAB6n7ih+d1EU7dKVjhkh+6BdMRo6v9nx2+wr/NA5Hf6B1cQ\nBHyq9hrMiASZAOrLys6nE+pyfEqbZn4P6ovPxEdKGwvaZJq6vje6PgOARxdcZ5CnzuZitEgbCvUE\nQSqSKOmsEG5rhpZbmml1cwLIfFE2EiKyKOMzUz+lBT9pWROhaPS7zUADjzW/J/Kpp2p+L3EV44F5\n98YJAdFAM2LkO/KgQMHJ/mat0IvxORIXlTFiQmYFJmZNQH3eVO17ZseXIpXxjBZpp8WBhvxpsIoW\nTM+viwZycfdD5KxQ/Dn4KHqGUfqXR/OJmt9/dgzmP7XqfOrqeYzcbMPFIsr4waJv4rMNem0+mupq\noqmb9HNnx2R8duqNuHriFXGfYcftiwhhM596byTGITOZph65Jn2+vrhxOWQHvnLBnbi25sqYjX1q\nKW2JNmGxzC26AA/MuxdVmYnLjesj642j/tm95xWcs02ok/ndAKtkRX9gkBPqVkzOqUFfYAB7O/br\n6vgapbRpPnVB0qIsAe6lE/UPt/pZg2CsFExMVsmiHU9vlos3v7N63rFlYuPqhPOVzCQzTd0gT91E\nU48VWnrzfuI4gFhYrj6PWfS4enwZNsnKlYq0RtplelTze2QsRpWzHLIDnqAnrlmGERYpVU2dRb/7\ntMpkZsI7UZ
oMmxtgvhDqhbr+M9WZVWj3dCCkhBJuHizD0NSzbVn4+px7tMhyM9M7oL4LVsliGsl8\na92NGAh44LZmaM8Eb37/tzlr8M6pLZhXdIGufHCOLRveYItaJpRVnePnEsn40FqcJhDKmvk9wMzv\n+rTCyyovQrGr0PT7w0EUxLhrYpQVw/v32UbFSODx2TeLSo07FLLjsrx+s40OE/rJNHVmpWT9D2IF\n5eScGvW8SXpG8AxVU9d9N8lGQO9Tj//stTVXap9hKXlA8vd0rCGhboBVtCDALc5sUWKLgt8o+p1v\n6MIiNEUpTlMHYKhRmNUxTjpWKVrzmi+faVYMx85VdQIQac5ibn6XdabrJD51g+Iz/PiixzR+efhc\nZ7OXVhAEfGv+v8IT9OKn2/8PgMQ7ZVmU9FW3RBlO2REV6iz9itNKbp16I5wWJ3a07caW09tS6oWs\nVSLkUtqMhJnEaepMOBjlzwKJNysAUOjIh0N2mBa24K977PWszCzD+y3bASReSC0660tqQp3Bnp1E\nQr3IWQCHZH7/7LI9WqhEq3gWDZSrzCzHZzLLAehjSlZN+iS6fN3429HXtQ0dPw5BEGCRLNq7bJRZ\nEp2Heu2imrr+Olw3aaXpd0cSo/oVVgNLhFEsSirxEOz69gfMK/ABUbN9ji2+YyUPW/ui5ndzaw0j\n1ZQ2cQiaOkMWZayoXI4Kt3GjMr1PPX6sC0rmaP+uya7W/n02pbMBJNQNsUhWBJUQvEGvzjQZXbiN\nzO/xWqbacSm64LLP8Ass86fGCj6jnbrhWEVLtBodtxs323XaJRu8QS/eO74Nr3z8FrwhX9wOmk/z\n4F8y3vdrFOBklKdupK3KorHJntfCE+2qSzOKdYVsEvmeeZMZG6/T4gC8qjtBS7/itJJ8Rx4m50zE\nno59AIx96rHoUtqCPsPNDKD3qbNAHTESqBibj53Mp37dpJVYWb3CNMhN5Cw0sc9SLpdWmEjoDif6\nPfpdtvEzP/6d01cbtvg1QjTQ1Hn4PPUpuZMgCiL+fuwt7Xex47dJVk2oJ6r4FWd+H+J1GCkUI6Fu\npKkbbIhLM4rx+Wm3oDqrMu5vjKimHhHqJsJqzcw78EbT27i4YknC8cZ1uzOrE8+n6KXoUzeaYypc\nP+kq078NpVpdecTtBozf82AGCXUD2E3qCwzohDW/cDN8IT8ECIYmZUmUdA92tL1i9CF2WoyFeqrR\nnVbJkjCASD0WH6BiR3v/Kfxs0//Vfhd7buYLVefC+8Z5s2P8gm2Up27kLzYrWOO0ODSTaLL588dI\nZD6WRVkXNGiRLJq5LNPq1oKP+Lkxi4HWrtLkuvKwc/jDAXhDPtMcc34Biy1i4gv5dVH4ZlHx2rEE\nMaHg14Q65065b9YXAeitOonyz8180qkgCiKsokXryGXEUKKGkz3nfJEgNndeo4udp020gjXGdVvN\nhTp7f4x86mNBWUYJTvY3ayly+iwTg5Q2E4E0r3hWwvOwTRML1HWZmN8rM8vx+fpbko47Ns7ATBAn\nCpaN+6yWpz7y0eapNKphCIKAydkTcbD7sNa58myBAuUMYC9/v7/fWKjzKW0hH+yyTacJydyDxz/Y\nNs38LnG/M64DnepDq9PUTZoG8Avn4tIFKHUV6/4eK9RzuCpR/AKqLwkbH/3Ov7TsGhpFdpvlqYuC\nqAnUZHmosQF2ZgtGrPndKlqQ78iFRbQgx5aNUlcRLipfrCsUwu4ZE4ap+NSjeer+hELdrBkH+zwf\n9JdMU09GVFOPXpvanBrU5tTo3UIpmt+HqqkDwGfrbsS1NSNjnjYKlOMxSmnTC0C9MOavfyqauuZT\nH2Oh/tVZX8SaC+5AdVYVAOOWtfy/h5teFbuRPlOzslFfesPzJukZwRNVmEZedPFrbiquzysmXApA\nbXN7NkGaugFsxxtUQjp/p5FP3Rv0xflEo+Z3NWWDNSHRek8L8RpF7AuVykMF
qC9BNMfdTFOPHmtZ\n+YVYVn4hjvgO4dF3H1f/bvCyVbrLcbzvhK7NqaGmnqRLm7H53ThPHYg0XPH3JzWviTHX0CpZEQxG\ny5my9LbY6HdZlHFdzUpcUrFMW7RurL1Wd2ymoUR7mCfX1PUV5fy6QBqzceuEekSAO2WnZv480wAc\no7K/DD5+IVXz+3DMjHMjvdlHAvZ8md0Pw8p/OgGovw7subBLtoTCJM6nPsbmVqfFibrcaO1w/cbQ\nKE99ZIT6mT5//MZRgGC6pvEFopJq6mcQKJcMfe335MefmjsZjyx5cEQzH0YC0tQNMOt8ZGx+j9fK\n2ELIfJks4ETzqRvsMkVxaEL9rhmrceWEFZHCB5EAIjNN3SRdKPr3+HPdNWM1puZMxicnXh43Lx59\n8Rm9774utxYzCqbFfccsTx2IFrQIp+hnBVTNhd9Y8Y07WPQ7+7coiHBanAmjlSXOfQKYX1celtLm\nDfngD/nNze8mQUEOTahH3TFnKjyEBELdlUK1N8C4DOl4kcz8bhSDou/zrp8ne2ZiNcr4Y8SmtI3v\ndTAtPmOQXTO04+q/d+aaOt8BzXw9m5AdTTVL6lMXR1GoC/y6lJpodFszUlbAxgrS1A0wqtLE/9sb\n8mF3+z64LE4MBj0odObrvh998NSbzSrJWQ3M74zYByOZyXdmwXTMLJiuO56Zpm700BmVh+TJsWfj\n3ll3amlJsZ+LNq0wDnoTBAFrLrjDcDxm3wGiQt3DdaJLhiTo+8ezPNoMiwvZtmiEezIt4MbJ1+oK\n2FgSlA6NhZ2D1Sk3i2g3K/EZa353SPaUAiUTkUhTV1Pp1JS9RJuHMwmUG2mS+jkNdBS24WXpgzzs\nnrkS+NMBvvjM+JjfY+HfQ/6eJEppS4VYZeNM86/tsg159lx0eDsTRqvz9RWSa+rRzKKRRqsImMCq\ncC5AQt0Aq05T57uvqZdrX+cBrUAMAFxcsVT3fc1EFPk8M89Eo9/jHxi+IAyAuEjoRLDjmWmURsLe\naU0s1JP9jaV5mWnqiTALlAOiEfCemOuRCL7MrSiIWFQ6H0d6juH2+s/CKlvjUhLNuKhise7noSyO\nTAtk6TtfoW6/AAAgAElEQVRmBWWSm98dup/PBHYui8l9ybA4VaGeaqDcOAv1ZAutsU893k3EYPM2\n6qnAE/WpR6sAjif8Zk8yCAQ06m6YCrFFsUZCsFW6y9Dh7dRZN2PhhXqyfuqjqamzglfnskAHSKgb\nwr+0sVXOLKKsy1teXr4YswsbdN+P7STEijSwoDFZlHFV9WW6fEmWS8tINc1HPU/iACIjrT+Z+Z1h\nZFUA+J7RxtHvidCZ32O+w/KFUzF582Pk03lumbJK93f2t6FGb5s1WjGCnaM3iabOLxi8id4RI9RT\n6R6XDDGBpg6owWFtno6EQkoXKJekg9Zok+z5Mtq8aqmXBvc+an5PXBktPvr97EphYrCN4HA3X/yz\nOVJV0ird5fiwbVfCz+Q7o1Uik72jUYVp5IU6O36yHgJnOyTUDeAbFcQuzlbJqgn1uxs+ryvZyojd\nTV5VfRlm5NfpgpNWVl+m+w6rwT0con2Q9YKQpUcZNSTRC3XzFyRW4NdkTcChnqNcIxBzrduMaAOa\n+F3xcIJORE5TN4qaT1VTj2Ve0QVo7D6MpWUXJv2sJEqQBCmqqacQ/c4XPNHM76Mg1M0WQFZFLdFC\nqi8+M74aajLtLMPiggBBl8kgGQR0Mmya+T3xM8fe52g/gfG9DmZk27Jww+SrMTm7Zljf56/vSAl1\ns0IvPLzlIWTQgY9nqK1Xh4osSrp0z3MREuoGzCyYjmc+/hOA+MXAKloxANXPzJdN5ZFj/D6lGcW6\nLmVGVGaWY1vrDkiCZOobN0PUzO/670mihGAoZFg8hTctJzI3xfp172r4HPZ1HMCsiHWCX+iHErEP\nGG8ChrOYiIKQMPLXlqJPPRaLZMHqaZ9O
+fNWyar5XY3qvqtjjV4jfgNTmlECSZBQ6ioCYG6+Hwos\nb9usrShL40okrM8kT32kSbaQy6KMX178iN48rWnq8XNkz4xR6WHdcWOu33i7IRJxSYwrcCjwm79E\nvRCGQipCHVAzUP554j2tl4IZ7F02s4SdKWoLXxLqaYfT4kChMx+tg+04Pdiq+xtvejNrPcg+MxTN\n8JKKpShzlaDT14X/+vjPQxqvJJpo6qIMX8iftHhKMp+625KBSTkTAajaEF/EgrUjFCGkHNilpfYZ\naNXDjbhlL7mRqZkVhhlt87GNE+qpRL/zUdezCmdget730ePvBTAyXb+Smd/Z+VPxqQsQhp0qNVLM\nKmzA346+jttm3WD6mdhnkG0EjHy17LlIlKMOxFs6bGeBpi4JkmEr5jNhNMzvzDqZw1UwNOKi8sW4\nqHxxws8AagDv56bdjIlZE0ZieHHIogxFSd31eTZCQt2EO6bfhl9/9FRcKUTe9GZWL7rAkY8bJ1+L\nurxaw78bIQoi6vJq8VHb7iGPle/FzsMWYbNIepY/n0wDenjJdxIKbKtoGVIKGrMSGAkJsypWiQgp\n4YSFN1KNfj9TjKLZY+FNnLGuBotkQZ49FysnrDB06wyV1IV68pQ2i0Gp2bHGbc3AQ0u+g4ICN9ra\n+pJ/AdHnIbbwDACUuUshQEBVpHa8GbEBk2eDT/2ny38w4gFd+kqXI1fP/KfL/x3iCGq/84tnj9ix\nYil0FmjleM9VSKibUJZRgoeWfDvu90z7tkt204IVgiDERVKninMYvlTJxPw+s2A6/nlyk2nLQato\nhTfkTbo4JFvMLaJlSC4DOUE+bbm7FDMLpuOCSLpeKoTCoYR1r4frUx8qfDnaVKLfjfKjBUHAVVxt\ngDOBBfyYRfGzuIhELTTZd8fb9D5ctOh3g/HX503Bzy96KGnQVaz5fbxjCwDzjdqZIAqiVqZ5JPvD\nnw2WjVRZM/P28R7CGUNCfYgwATESgUxGDKeKk1mg3KrJV2N2YYOuoxCPTbLAG/IOKdLcCLX/fOqf\n13zqBguTKIi4a8bqIZ3fLtt0BWZiGW70+1DhNV4z36Dep57Y7HumsOfCTADMLpoJu2zHtLwppsdg\nmvrZIMiGQ9T8bnzvU4mijo0/Ga3I67MBSRARVEJnXeexsSId7i0J9SGiLXKjtPscTgCIWZ66RZS1\nnsVG8PXKz4QlpQt19fCTkShQbig8uOB+HOg+jAp3GRq7jwAwfinZNR198zsz88sodBYYfkZKoqmP\nJMnM76IgYnp+XcJjaD3bxzmdbbhII2Bp4PO+R2szf7YgihIQCo2YT50Ye87NN3UcYRXHRsukNJxF\ng0XWT8mZNKTvRRvUnJlQv3zCxUP6PN9v/kwochWiKFLulZlXjTYKRc4CLCyZiwWj6IsDorELJa4i\nU5cG//vRFhDM/D6UynixsA3Y2RzxnYhExWdShdfUi5zm5YXTAbbpHKnod2LsIaE+RJhWO1pC3W3N\nwNUTr8AEEz+4EbMLZ8IiWjA5EqGeKsyk6gulrmWPBMynbpZqNRwSRb9LooTb6m4asXOZcWrgNACg\nxGWevsgL9dEOPBOTmN9TgX33bOsZnSqJUtpShb9+JZGUw3SFXS/S1M9dSKgPEb8m1EcnTxIArphw\nyZA+LwgCGgrqh3wetjFJVMJxNIj61EfOf2VLEP0+VrCiRJVu82jqsSxBadR6dahomvo5FOzEk6j4\nTMrH4Kw/6S7URU1TJ6F+rnJuF7kdB3whH4BzK6LTjPEW6sl6pg+FaPT7+O1Tvzzzdswrmo1FpfNN\nPzOWSWHR6PczMb+zQLlzc//PzMln4lM/PzV1Mr+fq5ybb+o4wkymZ1sP3eHAFrqxFuryCAXK8STK\nUx8rJmVXY5JJpgHDG9kUjgXJAuVSwR5p7JGsPvrZykj71NNfqEc0dTK/n7OQUB8id05fjQ2HX8En\nJ35i
vIdyxpRGfL9FCXqLjwY2yQoBwohaOzSf+jhq6qkwlO5zZ4qW0nYG18Qh2/Gvs7+EfEfeSA1r\nTNHM72fwrPEuk5Gu4na2IYpqrvpIlCkmxoezewU8CynNKMbdDV8Y72GMCJdULoNVsmJu0QVjel67\nbMcX6m9BaZI6z0Mhx5aFDIsLZUlq7I833qCqqY9FfWkhSfGZVKnOqhqJ4YwLWqDcCAT65dtzx72q\n3mjjkB3ItLrP+faj5zMk1M9jLKIcVwZ3rJgzwhsJu2zHQ4u/fdYXj1hathB7O/bj+kkrR/1c0giY\n3891bCNULOr/W/7DtBfoALC67tNnnOJKjC/n79tOpB1nu0AHgCxbJh6Yd++YnGskUtrOdWYVzkBI\nCaM+b+oZHedsqPc+FhSPsSuOGHnO37edINKckYh+P9dxyA4sLVs43sMgiDGDHCcEkaZo0e/j3DKV\nIIixg4Q6QaQpIsinThDnGyTUCSJNifYSPz/8wQRBkE+dINKWuUUXYDDoQaW7bLyHQhDEGEFCnSDS\nlLKMEtwyZdV4D4MgiDGEzO8EQRAEkSaQUCcIgiCINGFUze8PPfQQduzYAUEQsG7dOjQ0NGh/++Mf\n/4gXXngBoihi6tSpWL9+PQRBSPgdgiAIgiDMGTWhvnXrVhw7dgzPP/88Dh06hHXr1uH5558HAHg8\nHrz88st49tlnYbFYsHr1anz44YcIBoOm3yEIgiAIIjGjZn7ftGkTVqxYAQCoqalBT08P+vv7AQAO\nhwO/+93vYLFY4PF40N/fj4KCgoTfIQiCIAgiMaMm1Nvb25GTk6P9nJubi7a2Nt1nnnjiCVx22WW4\n4oorUFFRkdJ3CIIgCIIwZsxS2hRFifvdXXfdhdWrV+POO+/EnDlzUvpOLDk5TsjyyJbBLChI757J\nDJpn+nG+zJXmmV7QPEeOURPqhYWFaG9v135ubW1FQUEBAKC7uxsHDx7EvHnzYLfbsWzZMmzfvj3h\nd8zo6hoc0XEXFLjR1tY3osc8G6F5ph/ny1xpnukFzXN4xzJj1MzvixcvxsaNGwEAe/bsQWFhITIy\nMgAAwWAQa9euxcDAAABg165dqK6uTvgdgiAIgiASM2qa+uzZs1FfX4+bb74ZgiBg/fr1ePHFF+F2\nu3HZZZfhy1/+MlavXg1ZljFlyhRceumlEAQh7jsEQRAEQaSGoKTiuD6LGWmzDZmC0ovzZZ7A+TNX\nmmd6QfMc3rHMoIpyBEEQBJEmkFAnCIIgiDSBhDpBEARBpAkk1AmCIAgiTSChThAEQRBpAgl1giAI\ngkgTSKgTBEEQRJpAQp0gCIIg0gQS6gRBEASRJpBQJwiCIIg0gYQ6QRAEQaQJJNQJgiAIIk0goU4Q\nBEEQaQIJdYIgCIJIE0ioEwRBEESaQEKdIAiCINIEEuoEQRAEkSaQUCcIgiCINIGEOkEQBEGkCSTU\nCYIgCCJNIKFOEARBEGkCCXWCIAiCSBNIqBMEQRBEmkBCnSAIgiDSBBLqBEEQBJEmkFAnCIIgiDSB\nhDpBEARBpAkk1AmCIAgiTSChThAEQRBpAgl1giAIgkgTSKgTBEEQRJpAQp0gCIIg0gQS6gRBEASR\nJpBQJwiCIIg0gYQ6QRAEQaQJJNQJgiAIIk0goU4QBEEQaQIJdYIgCIJIE0ioEwRBEESaQEKdIAiC\nINIEEuoEQRAEkSaQUCcIgiCINIGEOkEQBEGkCfJoHvyhhx7Cjh07IAgC1q1bh4aGBu1vmzdvxk9/\n+lOIoojq6mr88Ic/xPvvv4/77rsPkydPBgDU1tbiO9/5zmgOkSAIgiDShlET6lu3bsWxY8fw/PPP\n49ChQ1i3bh2ef/557e8PPvggfv/736O4uBhf+cpX8Pbbb8Nut2P+/Pn4xS9+MVrDIgiCIIi0ZdTM\n75s2bcKKFSsAADU1Nejp6UF/f7/29xdffBHFxcUAgNzcXHR1dY3WUA
iCIAjivGDUNPX29nbU19dr\nP+fm5qKtrQ0ZGRkAoP2/tbUV7777Lu677z4cOHAAjY2NuPvuu9HT04M1a9Zg8eLFCc+Tk+OELEsj\nOvaCAveIHu9sheaZfpwvc6V5phc0z5FjVH3qPIqixP2uo6MDd999N9avX4+cnBxMmDABa9aswZVX\nXommpiasXr0ar776KqxWq+lxu7oGR3ScBQVutLX1Deu74bCCU+0DKC/MGNExjQZnMs9zifNlnsD5\nM1eaZ3pB8xzescwYNaFeWFiI9vZ27efW1lYUFBRoP/f39+POO+/EV7/6VSxZsgQAUFRUhJUrVwIA\nKisrkZ+fj5aWFlRUVIzWMHVs2duCfU37MaU8C/OmFuK1D07gZHs/Fk0vgcsuo6m1H/XVucjOsBl+\n/8//OIRXthzH12++APUTcsdkzARBEATBGDWhvnjxYvzyl7/EzTffjD179qCwsFAzuQPAI488gs99\n7nNYtmyZ9rsNGzagra0Nt99+O9ra2tDR0YGioqLRGmIcR5p78c8PT+KfH57E3iOdeHf3aQDA/uPd\n8AfD6B3wwyKLuPuaegRCYUwuz0aOWxXwHT1e/P2DEwCAd3c2Iz/LjrZuD060DsBmEbGwvhgOm4wT\nrf3Y39SNaRNyUJLnihuDzx/Cs68dQF1lDi6cXqz9XlFUK0BRrhOylHoohC8Qwsatx7FkRgkyHBYI\nggCLTJmMBEEQ6YigGNnFR4hHH30UH3zwAQRBwPr167F371643W4sWbIE8+bNw6xZs7TPfvKTn8RV\nV12F+++/H729vQgEAlizZg2WL1+e8BwjabZRFAXH2gfx/ae2aL9rqMnDzkMdAIDpE3Ox72gXQmH1\nklktIhZNL0GGQ8aWvS1o6/ZCEgUAqimev7Auu4ypVTnYtr8NAGCzSvjcFVOwoK4I/9hxCv/73lF8\n/dMX4JlXD2DfMTVo8P9+42IEgmG8t6sZ7+xqxpHmPkyfmIuv3NAAWRKxec9pvPTeUayYW4Edje1o\n6/ZgQV0Rrl48AYKgjuO//3kYL713FItnFONgUw8yM6xY+9nZKCrMJJNXmnG+zJXmmV7QPId3LDNG\nVaiPBSP9MOTlZeCOH76Ktm4vplZm419W1mHt45vhsEn40d2LsOdoJza8cwTTJuTi/Y9b0N3vBwDI\nkojlM0vhtMt46b2jcNllXDa3AoW5DrR1e/HXTcfgC4RQXuDCgmlFeOm9o/AHwqgtz8LR033wB8PI\ncdvQ1efTxvLNW2fjL+8cwd6jqpDPz7KjvceLJQ0l+JeVdfjWk5vR3BGNKbDIIgLBMBZPL4bVIqHx\nZA9augbhD4QhANom48aLazCztgilOXYA6maGbQJ4BrwB/PrFXVjSUIJF00vw0rvqWP710zNhiQQn\nKoqCxzfsgc8fwr03NEAUo8dpau1HpssKSRTQN+g3tEyMNufLggGcP3OleaYXNM/hHcuMMQuUO1cQ\nRQFLG0rx4j8PY2lDKfKzHbjvxgY4bDKcdhnzphZi3tRCAMBNl9Sg8UQPgmEFVUVuZDgs6B3wo73H\ni8vnVaCqOHrhL5xWhL3HurBwWhGsFglzpxbi+dcb8VGjGndgs0ro6vPBYZNwy6W1+M1f9+HhZ7YD\nAGZMzMPnrpgCl8OCR57Zjnd2NqOqyI3mjkHYrRIaavJw8awyFOc68ehzH2luAybI8zLt6Oj1qr8T\ngD+9eQh/evMQ7vzkNEytysH3f/s+Vswtx1UXTkB3vw/v7GyGoijo9wTx8fFuHD3dh/ZuL/7nnSMA\ngB2NHch0WfGXd46gINuBrftaAQD/+OgkLp5dDkB1Zfzw99swqSwToijg4IkefPcL81BWEB9EGAiG\n0N3vR0G2A4FgGC+8dQj11bloqMkb+RtMEASRxpCmHkNBgRvNp3twsKkbU6tyDDXYkWTv0U509fng\nD4bx9Mb9uPGiGqyYW4Gv/vIdeH
xBVBZm4IHPzILTbgEAnGjrx/d/+z6CIfW2rf7EFFw0q0w7XjAU\nxgcft8IfDGN2bQFOdwwirCh45NntmFDsxqLpxThwogd7jnQiHFZQW5GNXYc7kJtpwyNfvBDf/+0H\nONEWrScgCAB7QuxWCV5/CPlZdnT0eDXNXwBgtUoQBWDNqgac7hzEax806awIAFBbkY0HbpmFV99v\nwj92nMKCukLUVeXgqZf3ob3Hi89fORX7j3dh054WFOc68cM7F8Rd/12HO/DPHafgD4QxfWIuVswp\nhyAI8PqDeOwve1BTmomrFk2AGPleQYEbra29Kd3H1q5BhBWgONeJcFjBsZY+VBW5ddaHsxnSeNIL\nmmd6Qeb3FBkNoT4eD5iiKDjc3IuJJZkQBAEHT3SjdyCAWZPz44TKlr0teHzDHgDAT9csNo3G54/9\n9w9OYEpFtmY9ONjch0d+/z74u7+wvgib97Rgfl0hnDYZb310CjdeXIPTHYOQJBErF1biFy/swom2\nfjhsMj53xRRs3NqEqZXZqCjKwJMb9uriCKqK3Th2Wr2WzFpQX52LPUc6AQCSKMBmkeALhCDLInz+\nkG7c3149FxNLM3HwRDc27WlBMBTGOzubdZ9ZNL0YF88uQ1NrP37/t/0A1DiI1Z+YgtxMO7Yf6sB/\nvrQHi2eUoK3bg+qSTHxy0QQ0dwzg3V2nceH0YuxobEdtRTZ++eedCIYUPHTXQvzxjUZs2nMayy8o\nxepPTNE2BYdO9mD7gTZcs6QaNkt8fYTOXi9e2XIcBdkOdPf5MLu2ANluK3oG/KgpzdI+d+x0Hwqy\nHXDaExvLwooCnz8Eh03/ud4BPwQBcDuj6Z60OKYXNM/0goR6iqSLUB8qx1v60O8JYNowU+cKCtzY\n+O5h/PGNRtRX5+LND08CABw2CQ/ddSEynRa093iRn2XXablvfXgSf3j9INasmoEZE/Xm8Xd2NuP1\nbSewsL4ItRXZqCjMwLonNgMA7r9lFh58agv8gTAcNgnXLZ2IP7x2EADw2ctqUVXkxov/PIRJ5Vko\nyXPhyZf2orzABZtVwuFTvdrmozDHgbuvrUeWy4af/2kHjreqVgW7Vd0c1JZnY39TN0RBQI7bprkd\neG67vBYvbz6Gzl5f3N8AIMulCmFmpVg0vVirs/BRYwc8viBuungSrlhQCUCNHbDKIgpzHPjpH3do\nmxYAKMlzIqwALZ2D+MKVU7F0Zin2HO3Efzz3EcoKXPjWbXPQ0unBhneP4OjpPtz2iSm4YFI+ACAU\nDuPXL+7G/qYuPHTXhchyWREKh/HU/+7Dln0tyHRZ8dCdC+GwyQiFw9h+qBMVeU4U5zq18weCIQx6\ng8iKbPwe+8tuHGvpx7ypBbh+6UTt3iqKgh2HOuB2WFBTFt18nI2cK+/omULzTC9IqKfI+SrUzxR+\nnoFgGGsf34SwomDNqhk6jdKIYCicclpdT78qOLMybHhlyzH86c1D+MyKybh0Tjl++eddkEQBX7p+\numYuB1Rhtvaxzejo9UIUBJQXunDt4mr0ewO4YFK+pp36AiHsaGzHn95sREevDxdMyse9N8zAu7tO\n480PT6Ct24v6iXn4xLxyvP9xK3IybPjjm4cQDIUBALNrC3C8pQ8TSzOxdV8r8jLtsMgiTncOoq4q\nB5++ZBIe37Anzo0gSwJcdgt+cMcC/PaVj7H9QBsssog5tQXYvLcFdVU5WNpQgk17WrDrcIf2PUEA\nvnXbXDy+YTfautXNRnlBBlq7BuEPhiEIgAABd10zDW9sO4GT7QMY8AYBADdfMgnz6oqwcetxvPp+\nEzIcFvR7Arh60QRcv2wiXtl8DH966xBy3DY8+Lm5yMqw4W9bjuOFtw4hrCi4/ao6FOU68dDT27Tx\n/OD2+SgryEAgGMYv/rwTe450wmGT8IPbF+BAUzcgAHNqC7SgSKPn4M//OISGiXmoG+bmUlHULBFx
\nCG6u2Hc0EAxh675WzJtaCKuB9eRc5Xxci9IZEuopQkJ9eMTOc8AbgFUWTRfwkYDl2pfmu5L6uPs9\nAQx6A8jLskMSE28gmjsG8PwbjbhuaTUmFGfq/hY7zwNN3fiosR3FuU4sbSjRxrH/eBfyMtVsgBPt\nA5hZkwdBEBAOK9hztBNOm4xQWIE/EMK+4114ZfNxuOwyBrxBVJdkorljAF5/CJkuK9bdNgeF2Q40\nnujBQ8+oQvSG5RPx538chsMmw+ML4rK5Fejq82Lb/jZYLRLuumYa3A4rHn3uQ/iD6qYjw2HBhBI3\n9h3tgs0iweMLQoGaBfHNW+fg+799Hx5fEKuW1+DP/zgERVEFrcsuo6YsCzsPdSArwwqPLwibRUJV\nkRu7j3RiaUMJ3t7ZjOuXTcTViybgf94+jA3vHtWyL6wWEf6AOoYpFdn4yqfUQNFNu0+jvcejxSz8\n6c1GvLLlOIpznfj3OxdAAIYcg/LTP36E3n4/Hvz8PMPYhVA4DJ8/pMWUGN3TjVuP4/k3GrFkRgn+\n5aq6IZ3/bOZ8XYvSlbES6tJ3v/vd747IWcaJwUH/iB7P5bKN+DHPRmLnaZWlpMLzTBEEAZkua0oL\nv9UiweWwpKTBuZ1WLKwvNowtiJ1nXpYd9dW5qCp268aRn+WA026B025Bca5T+5sgCCjKcSI30468\nLDsKc5yoKc3E6c5BHGtRKwzef/MFWDCtCNMn5uEzKyYjM2JJyHHbcLpzELUV2Vi1vAbb9rehs8+H\ngmw77rl+Bi6sL8alc8qxcmElKgrdyM20IzvDhg8PtqO8IAM/vHMBlswoxdHmPjS19SM7w4qLZpXh\nMysmIz/LgeI8J7bsVa0BiqJg7eq5yM+04+jpPhxv6Uem04J/u3kWslxWfNTYgdZuD6qK3Ljzmml4\ndWsT9h7twp4jndi8twU5bhu+vXou3tnZDK8/hIXTilCQ7cDuI53o6PXiVPsA/uu1g/j4eDe6+nw4\ndLIXr2w5DkDdgG3b34a/bjkGAQL2HO3Eb1/5GCX5LhRmO/DBx63Yf7wL1SX6DdeJ1n48/0Yjegb8\n6B0M4M0PT2JiWRZcEQGuKAp+9eJu/H7jAWS7ragqcuvu6Ym2fjSe7MG7O5vR0evD8dZ++IMhTCzJ\nhEUW0dnrxd8/aMLbO5sxqSwLuw53xMUheHxBvLatCblue9L4htHko4Pt+M9X9uGCSflarMb5uhal\nKyM5T5fLPI6KNPUYaNeYXozWPBVFQVNrP0rzXSm7IrYfaMNTL+/DvatmYGpVjunnPj7WhfLCDGQ4\nVOF2vKUPL286huuXTdT5ywFgz9FOvL3jFK5YUIl5M8rQ1taHcFjBgDcAu1XWahe8vOkoLLKIJQ2l\nyHJZ8aNnt2N/UzcAYEKxG59ZUYtJ5VnYeagdR0/34aoLqyBAwA+f3oYjzb0AVAuBRRY1d0Reph0r\nL6zC0xv3G85jamU27ry6Hmsf34RAMIy7r62HRRaRnWFDVbEbf3jtIF7fdkL3nYrCDKz97Gy8suU4\nunq9WnomAFw8uwwrF1ShPxDG6dY+/H7jx/D41ODKolwnvL4gegb8mD4xF8saSvGbv+6DNxJ8OW9q\nId7/uBX5WXZ8//b5sFtlhBUFv/rzLnzU2I6KwgzcfW09sjNscUGJZrT3ePDaBycwZ0oBJpdnp/Qd\nM378X9vx8fFufPqSSfjEfDVWY6jPbjAUxl/eOYIZE/NQW3Fm4xlLaC0a3rHMIKEeAz1g6cX5Mk9g\naHPdf7wLr2w5jmuXVMdp0DxNrf34we/eR0G2A/ffPAsWWcSHB9oQVhTMryuC3Srhd3/bD6ss4vJ5\nFdh7rAthRcFbH57E8Yglgw8cZJTkOdHW7YHLbsGUymxsP9COKZXZEb++6qIA1PiFr9zQgD++2YgT\nbQNxx2HBjJ+6qAaXzi7Hz/60A/ubuiFLAmRJxCcXTcALbx3S
fUcVelnYuq8VTa39WqomoFZ+XLmw\nCpkuK9p7vHhnZzOcdhlLG0pwyZxyiIKAA03dePEfh3DoVC9CYQV2q4S5UwqR4bTgUxfVQBQE+Pwh\nvLOrGfXVuXEbMUVR4PWHYLdKEAQBA94A7vv5OwgrCioKM7D+C/Ow+3AH3JkOFLqtmuUiGf/19wN4\nbdsJ1JRl4lu3zU36+abWfhxp7sWSGSVxro9jp/vw6vvHccuKWm1zmQoeXxDHW/owpdJ80xrL+fKO\nklBPERLqw4PmmX6M1ly7+nzIcMhDirfYtOc0nnxpLwCgLF+tovje7tOYO7UALZ0evP9xK3IzbVj9\niQZ+SxMAABELSURBVKmor86B1x+CzSLh2b8fwD8+OoWa0kxcOrccWS4b6qpy4POH8OaHJ3GgqRs1\nFdnw+4IoyXfiVPsgNm49jh/cPh+FOU4caOrGI8+qRZu+eE09Fkwrws/+tAM7D3XAahFRmufC0Uia\npSAA8+uKcONFNXjhLTWAcufhDi2eAFCzQYIhBYFgGJPKsjClMhsbtzYhFA6jstCNqVXqz4y5Uwrg\ntFuw71gn2rq9yHBYcPXiCdh3tAt9Hj8umVWO5944iL7BAK66sAqLphfjb1uO4+2dzZBEAaGwgpI8\np2YNschqpcobLqqBzSIhGAojEAzjD68dhNMu46aLJ0EUBWzeexpPbFCvtwDgp/cuQZZLdTMMeFX3\nyPGWPiyaXoKPGtvwzx3N6B1QTcH/srIOi2cU69xRzHLwifkV+PQlkxPe62AoDFEUIAoCfr9xP976\n8CTWfnZ2ytaC8+UdJaGeIiTUhwfNM/04m+YaCIbw21f2IzvDisvnV2oChtHvCcBulQxdF02t/SjK\ncZhGsvPzVBRV4PKf/a/XDkAUBHz6kkkQBAH/3HEKv33lY8yvK8RdV9fj4Ilu9A0GMLk8S0v1Y3T2\netF4sketmSAAc6cUwh8M45lX92t9Gxw2CfdcNwP11WrE/76jnQiEwnju9Uac7lSFsSgImDkpDx8d\nbEfsAitLIpw2CX2DAYgRQQ4AN186GS+81YhQSFFN+lW5+PuWY2jv8UbSGRX0ewKa8AeA+XWFWDKj\nBL/6710QBAEXTivCWx+dwsL6IlQWunGqfQBb97VowZfVJZk42dYPQRSQ67ahuWMQ5QUueP0hzKsr\nRE6GDafaB/DWR6cAqJuKh+5ciPd2N6O8MAOD3iB2NLbjEwsqUVOaBa8/iHVPbEZFoRtfuq4eX//1\ne/D4grhkdhkml2cjy2XFpPIs3X0+dKoHG945ipULKzGlMgcFBW58uKcZOw93YNbkfPzyz7tww/KJ\nmDOlUPuOoijoHfDH3a9EHG/pw/6mblw6u3zMC0gpioL9x7tRU5apbYZJqKcICfXhQfNMP86XuQ51\nnr5ACH9+6xAunl12Rv0Hjp7uRXefH1Mqsw397h5fEIdO9SAnw4b8LAdsVglHT/fidMcginKdeOvD\nk3hv92l88RrVd//ws9vgtMlYOK0YDruE65dORFhRoCiq4C8ocONUcw+ee+MgNu9pQZbLiuwMK/oG\nA5gxMQ+NJ3vQeLIHgKqdf+m66agsysDaxzfrxlWY7cDyC0qx9eNWrRjUlQsrceNFk/DIM9tw4ESP\n4Xzn1xVi675WzT3BBDNLCb14tlqamtWb4PtL8JUoC3McuPWyWgRDCja8ewTHW/oRVhTkZdrx73cu\nQH5+Bu750eto6/Zq/S0yHBZ84zOz8Nq2E/AFQijKceIv7xzBFfMr8amLVRfHoZM96B30Y+akfIiC\ngK4+H558aQ+yM2xYMbcCT7y0B61dHlw6pxy3XDoZggC89sEJBEJhzJlSgKIcJ0LhMI6e7kN1SSZE\nQcDOQ+0IhRXMmlyAM2Hb/lb8+r93Y86UAtxz3XQIgkBCPVVIqA8Pmmf6cb7M9Vye56A3qEXZHzvd\nh0yXVWvfHEuyeQaCYfzv
e0fx8fEu3LC8RjN3//5vH6PfE8CCaUVwO1VNWRQELfUPANbdOgeTyrM0\nK8b0ibloau1HlsuKZTNL4fEFceWCKjz3+kG8tu0EcjNtWrGm65ZW4/19rTjZrsY4iIKA8gKXVgiq\nuiQTR5p7kZVhxcyaPLyz8zTCigJRECAIajBkXqYd2w60YcXcclisMl5576g2L94SYURhtgOVxW5s\n298KRVGPN3dKAbYfaMexlqhrRVGi/8/KsKKuKgeb97QAUGM1VsytwNHmXnx8vBtXL5oAiyzixX8e\nBgDUVeWgLN+Fa5ZUI8NhQVefD3arpG3mwmEFvkAIL7x1CDVlmVg4rVhnDWDuCwBa8CMJ9RQhoT48\naJ7px/kyV5rn8Gjv9uCBxzYhw2HBz+5dAlFU6zB8sL8VM2vyIYqAJIm6NFJFUfDx8W5UFWXg7Z3N\n8AfDuHrRBASCIfz//7MHHzW2Y9bkfHzpuunYuq8FvkAYVUVuPLFhDz535VTUVeXg2Ok+/Pq/d6Fv\nMICv3TQTtRXZ8PqD+N5/vo+WLg8AoDTfhcnlWfjHR6dw86WT0dblQfeAD5PKsvDm9pNo7fbgs5fV\n4nhLHzbvbUEg0tWypjQT2w60aVaBxTOKUT8hF0++tBeCIOAbn52FTXtasGnPafj8IWQ6Lbh26URs\neOcIeiIxBaIgIBw5QF6mDZkum5bxUZLnxK2X1eLnL+yEJAlYUFeEk+0DONE2gAvri/DGdrUSZ0G2\nHfOmFmHGxFxkOCz4zlNbUVXsRlevF72DAdz2iSm46fKpJNRTgYT68KB5ph/ny1xpnsPnr5uPIddt\nw8L64jM+ViAYxju7mjFrcn7S/hOBYAhef0hXI6Cz14ufPPcRXA4L7l01A06bhB2NHZhdW6DTejt7\nvTjS3IfZtfkQBAGBYAgdvT7kum2wWiT0Dvhx6FQPnDYZkyuyIQoC9hzphD8Y0szoXX0+vPZBE+bV\nFWJCcSYGvUE0nlTLSfsCYfz6v3ehvCADX7tppubi+OvmY3j1/WgQpNMmYzCSlcEza3J+5HzRolH9\nngDWrJqBkjwnfvanHagpy8K3/mUhCfVUIKE+PGie6cf5MleaZ/oQCodRWJCJjo7+5B8eRU609qMg\nx6Fr0qQoCp57vRF//6AJF0zKxz3XT8eJtn5IoojH/rIbzR2DmFNbgC+vmgGPL4iDJ3rw7N/3o63b\ni2uXVOOaxRMgCAJC4bBaxKowk/qpEwRBEOmLJIpnRWvj8sKMuN8JgoBPXzoJDZPyMKlUjeBnZahX\nLZuIx/6yByvmlgMAHDYZDTV5mFw+H6c7B3W1H0a7UmcsJNQJgiAIwgBREFBv0KxozpRCPHZ/fpzA\ndtjkhMWcxoKx3UIQBEEQRBow1hp4qpydoyIIgiAIYsiQUCcIgiCINIGEOkEQBEGkCSTUCYIgCCJN\nIKFOEARBEGkCCXWCIAiCSBNIqBMEQRBEmkBCnSAIgiDSBBLqBEEQBJEmkFAnCIIgiDSBhDpBEARB\npAnnfOtVgiAIgiBUSFMnCIIgiDSBhDpBEARBpAkk1AmCIAgiTSChThAEQRBpAgl1giAIgkgTSKgT\nBEEQRJogj/cAziYeeugh7NixA4IgYN26dWhoaBjvIY0IW7ZswX333YfJkycDAGpra3HHHXfggQce\nQCgUQkFBAX7yk5/AarWO80iHz4EDB3DPPffg85//PG699VY0Nzcbzm/Dhg343e9+B1EUcdNNN+HG\nG28c76EPidh5rl27Fnv27EF2djYA4Pbbb8dFF110zs/zxz/+MbZt24ZgMIgvfvGLmDFjRlrez9h5\nvvHGG2l3Pz0eD9auXYuOjg74fD7cc889mDp1atrdT6N5bty4cezvp0IoiqIoW7ZsUe666y5FURSl\nsbFRuemmm8Z5RCPH5s2blXvvvVf3u7Vr1yp//etfFUVRlP/4j/9Qnn322fEY2ogwMDCg3H
rrrcq3\nv/1t5emnn1YUxXh+AwMDyuWXX6709vYqHo9Hueqqq5Surq7xHPqQMJrnN77xDeWNN96I+9y5PM9N\nmzYpd9xxh6IoitLZ2aksX748Le+n0TzT8X6+/PLLyhNPPKEoiqKcOHFCufzyy9PyfhrNczzuJ5nf\nI2zatAkrVqwAANTU1KCnpwf9/f3jPKrRY8uWLbj00ksBABdffDE2bdo0ziMaPlarFU8++SQKCwu1\n3xnNb8eOHZgxYwbcbjfsdjtmz56N7du3j9ewh4zRPI041+c5b948/PznPwcAZGZmwuPxpOX9NJpn\nKBSK+9y5Ps+VK1fizjvvBAA0NzejqKgoLe+n0TyNGO15klCP0N7ejpycHO3n3NxctLW1jeOIRpbG\nxkbcfffduOWWW/Duu+/C4/Fo5va8vLxzeq6yLMNut+t+ZzS/9vZ25Obmap851+6x0TwB4JlnnsHq\n1avxta99DZ2dnef8PCVJgtPpBAC88MILWLZsWVreT6N5SpKUdveTcfPNN+P+++/HunXr0vJ+Mvh5\nAmP/fpJP3QQljarnTpgwAWvWrMGVV16JpqYmrF69WqcRpNNcjTCbXzrM+9prr0V2djbq6urwxBNP\n4Fe/+hVmzZql+8y5Os/XXnsNL7zwAn7zm9/g8ssv136fbveTn+fu3bvT9n4+99xz2LdvH/7t3/5N\nN4d0u5/8PNetWzfm95M09QiFhYVob2/Xfm5tbUVBQcE4jmjkKCoqwsqVKyEIAiorK5Gfn4+enh54\nvV4AQEtLS1KT7rmG0+mMm5/RPT7X533hhReirq4OAHDJJZfgwIEDaTHPt99+G4899hiefPJJuN3u\ntL2fsfNMx/u5e/duNDc3AwDq6uoQCoXgcrnS7n4azbO2tnbM7ycJ9QiLFy/Gxo0bAQB79uxBYWEh\nMjIyxnlUI8OGDRvw1FNPAQDa2trQ0dGBVatWafN99dVXsXTp0vEc4oizaNGiuPnNnDkTu3btQm9v\nLwYGBrB9+3bMnTt3nEd6Ztx7771oamoCoMYRTJ48+ZyfZ19fH3784x/j8ccf16KG0/F+Gs0zHe/n\nBx98gN/85jcAVDfn4OBgWt5Po3k++OCDY34/qUsbx6OPPooPPvgAgiBg/fr1mDp16ngPaUTo7+/H\n/fffj97eXgQCAaxZswZ1dXX4xje+AZ/Ph9LSUjz88MOwWCzjPdRhsXv3bvzoRz/CyZMnIcsyioqK\n8Oijj2Lt2rVx8/vb3/6Gp556CoIg4NZbb8U111wz3sNPGaN53nrrrXjiiSfgcDjgdDrx8MMPIy8v\n75ye5/PPP49f/vKXqK6u1n73yCOP4Nvf/nZa3U+jea5atQrPPPNMWt1Pr9eLb33rW2hubobX68Wa\nNWswffp0w/Un3ebpdDrxk5/8ZEzvJwl1giAIgkgTyPxOEARBEGkCCXWCIAiCSBNIqBMEQRBEmkBC\nnSAIgiDSBBLqBEEQBJEmUEU5gjiPOXHiBK644oq4KlfLly/HHXfcccbH37JlC372s5/hD3/4wxkf\niyCI5JBQJ4jznNzcXDz99NPjPQyCIEYAEuoEQRgybdo03HPPPdiyZQsGBgbwyCOPoLa2Fjt27MAj\njzwCWZYhCAIefPBBTJo0CUePHsV3vvMdhMNh2Gw2PPzwwwCAcDiM9evXY9++fbBarXj88cfhcrnG\neXYEkZ6QT50gCENCoRAmT56Mp59+Grfccgt+8YtfAAAeeOABfPOb38TTTz+NL3zhC/je974HAFi/\nfj1uv/12PPvss7jhhhvwyiuvAAD+X3t3iKs4FMVh/CutxUGCQGHqCEFUsQPWwBIwJMgmKEINO8BC\nwgLYACRgEJCAw+AhoSsYgZmZvDcZMTMv034/eWvuUf+ec5N7b7cbw+GQ9XpNFEXsdrsvq0kqOjt1\nqeSezyeDweCHtfF4DECv1wOg2+2yWCzI85zH40G73Q
YgSRJGoxEA5/OZJEkA6Pf7wPtMvdVqUavV\nAGg0GuR5/veLkkrKUJdK7ldn6t/fIh0EAUEQfPod3qP2n4Vh+Ad2Kel3OH6X9KnD4QDA8XgkjmOq\n1Sr1ep3T6QTAfr+n0+kA725+u90CsNlsmM/nX7NpqcTs1KWS+2j83mw2Abher6xWK16vF1mWAZBl\nGbPZjDAMqVQqTCYTANI0JU1TlsslURQxnU653+//tBap7HylTdKH4jjmcrkQRf77S/8Lx++SJBWE\nnbokSQVhpy5JUkEY6pIkFYShLklSQRjqkiQVhKEuSVJBGOqSJBXEN7bfnkLLX6fVAAAAAElFTkSu\nQmCC\n",
            "text/plain": [
              "<matplotlib.figure.Figure at 0x7f4d100e52b0>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "metadata": {
        "id": "UyMkJPJGP_cW",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Evaluate the model on the test set"
      ]
    },
    {
      "metadata": {
        "id": "fwamKizxeu62",
        "colab_type": "code",
        "outputId": "5746ce90-472e-4e01-94cb-754a7ffc60d2",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 68
        }
      },
      "cell_type": "code",
      "source": [
        "# evaluate the model on the test set\n",
        "\n",
        "x_test = df_ADL_Falls_test[x_columns]\n",
        "y = df_ADL_Falls_test.loc[:,['Fall_ADL_BIN']]\n",
        "y_test = np.array(y)\n",
        "\n",
        "# Vamos a evaluar el \"accuracy\" del modelo\n",
        "test_loss, test_acc = my_model_NN.evaluate(x_test, y_test)\n",
        "print('El \"accuracy\" del modelo (en el conjunto de Test) es:', test_acc)\n",
        "print('El \"loss\" del modelo (en el conjunto de Test) es:', test_loss)"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "14883/14883 [==============================] - 1s 91us/step\n",
            "El \"accuracy\" del modelo (en el conjunto de Test) es: 0.8523147214943224\n",
            "El \"loss\" del modelo (en el conjunto de Test) es: 0.32740583687757124\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "270xdeakeu7Y",
        "colab_type": "code",
        "outputId": "5ca2a73c-26b4-4457-fcd5-19048da2fcdc",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 68
        }
      },
      "cell_type": "code",
      "source": [
        "# HACEMOS PREDICCIONES EN TEST Y VEMOS EL ACCURACY:\n",
        "\n",
        "y_pred = my_model_NN.predict(x_test)\n",
        "Y_pred = np.argmax(y_pred, axis=1)\n",
        "\n",
        "#cm = tf.confusion_matrix(y_test, Y_pred,2)\n",
        "cm = tf.confusion_matrix(y_test, Y_pred,2)\n",
        "\n",
        "import tensorflow as tf\n",
        "\n",
        "#initialize the variable\n",
        "init_op = tf.initialize_all_variables()\n",
        "\n",
        "print(\"Confusion matrix (en Test):\")\n",
        "#run the graph\n",
        "with tf.Session() as sess:\n",
        "    sess.run(init_op) #execute init_op\n",
        "    #print the confussion matrix\n",
        "    #print (sess.run(cm))\n",
        "    array = cm.eval(session=sess)\n",
        "    print(array)\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Confusion matrix (en Test):\n",
            "[[6193 1269]\n",
            " [ 929 6492]]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "IrKSf0jYeu7u",
        "colab_type": "code",
        "outputId": "8841e8f7-749a-4992-c1c8-eac3e290be3a",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 119
        }
      },
      "cell_type": "code",
      "source": [
        "# calculations of measurements of performance\n",
        "\n",
        "n_TP = array[0,0]  \n",
        "n_FP = array[1,0] \n",
        "n_TN = array[1,1]\n",
        "n_FN = array[0,1]\n",
        "\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "NN_Sensitivity = n_TP / (n_TP + n_FN)\n",
        "print(\"NN_Sensitivity = \"+ str(NN_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "NN_Specificity = n_TN / (n_FP + n_TN)\n",
        "print(\"NN_Specificity = \"+ str(NN_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "NN_Precision = n_TP / (n_TP + n_FP)\n",
        "print(\"NN_Precision = \"+ str(NN_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "NN_Accuracy = (n_TP + n_TN) / (n_TP + n_FP + n_TN + n_FN)\n",
        "print(\"NN_Accuracy = \"+ str(NN_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "-----------------------------\n",
            "-----------------------------\n",
            "NN_Sensitivity = 0.8299383543285982\n",
            "NN_Specificity = 0.8748147149979787\n",
            "NN_Precision = 0.8695591126088178\n",
            "NN_Accuracy = 0.8523147214943224\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "JZU5gNBXQKhR",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Evaluate the model on the TRAINING set"
      ]
    },
    {
      "metadata": {
        "id": "O5XxqGFweu9K",
        "colab_type": "code",
        "outputId": "6e49dee0-2f79-44ec-b08a-8e349d1d9929",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 68
        }
      },
      "cell_type": "code",
      "source": [
        "# Evaluate the model on the TRAINING set:\n",
        "\n",
        "y_pred = my_model_NN.predict(x_train)\n",
        "Y_pred = np.argmax(y_pred, axis=1)\n",
        "\n",
        "#cm = tf.confusion_matrix(y_test, Y_pred,2)\n",
        "cm = tf.confusion_matrix(y_train, Y_pred,2)\n",
        "\n",
        "import tensorflow as tf\n",
        "\n",
        "#initialize the variable\n",
        "init_op = tf.initialize_all_variables()\n",
        "\n",
        "print(\"Confusion matrix (en Training):\")\n",
        "#run the graph\n",
        "with tf.Session() as sess:\n",
        "    sess.run(init_op) #execute init_op\n",
        "    #print the confussion matrix\n",
        "    # print (sess.run(cm))\n",
        "    array = cm.eval(session=sess)\n",
        "    print(array)\n"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Confusion matrix (en Training):\n",
            "[[44771  6930]\n",
            " [ 3457 48121]]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "e9j652D9eu9i",
        "colab_type": "code",
        "outputId": "b7412787-9f8b-40e4-9bbf-3e96fe032d07",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 68
        }
      },
      "cell_type": "code",
      "source": [
        "total= array[0,0]+array[0,1]\n",
        "\n",
        "cm = cm/total\n",
        "#initialize the variable\n",
        "init_op = tf.initialize_all_variables()\n",
        "\n",
        "print(\"Confusion matrix (normalized):\")\n",
        "#run the graph\n",
        "with tf.Session() as sess:\n",
        "    sess.run(init_op) #execute init_op\n",
        "    #print the confussion matrix\n",
        "    print (sess.run(cm))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Confusion matrix (normalized):\n",
            "[[0.86596004 0.13403996]\n",
            " [0.06686524 0.93075569]]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "DAeclN67eu-N",
        "colab_type": "code",
        "outputId": "50a24b56-f267-4c93-c92b-6f2724f6085b",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 119
        }
      },
      "cell_type": "code",
      "source": [
        "# calculations of measurements of performance\n",
        "\n",
        "n_TP = array[0,0]  \n",
        "n_FP = array[1,0] \n",
        "n_TN = array[1,1]\n",
        "n_FN = array[0,1]\n",
        "\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "NN_Sensitivity = n_TP / (n_TP + n_FN)\n",
        "print(\"NN_Sensitivity = \"+ str(NN_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "NN_Specificity = n_TN / (n_FP + n_TN)\n",
        "print(\"NN_Specificity = \"+ str(NN_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "NN_Precision = n_TP / (n_TP + n_FP)\n",
        "print(\"NN_Precision = \"+ str(NN_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "NN_Accuracy = (n_TP + n_TN) / (n_TP + n_FP + n_TN + n_FN)\n",
        "print(\"NN_Accuracy = \"+ str(NN_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "-----------------------------\n",
            "-----------------------------\n",
            "NN_Sensitivity = 0.8659600394576508\n",
            "NN_Specificity = 0.9329752995463182\n",
            "NN_Precision = 0.9283196483370656\n",
            "NN_Accuracy = 0.8994277636305541\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "0PYtaB3bQQ1u",
        "colab_type": "text"
      },
      "cell_type": "markdown",
      "source": [
        "### Evaluate the model on the VALIDATION set"
      ]
    },
    {
      "metadata": {
        "id": "blGi7Gyqeu_C",
        "colab_type": "code",
        "outputId": "36c89809-edf5-4ae5-e12e-959e8e54300d",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 68
        }
      },
      "cell_type": "code",
      "source": [
        "# HACEMOS PREDICCIONES EN VALIDATION Y VEMOS EL ACCURACY:\n",
        "\n",
        "y_pred = my_model_NN.predict(x_val)\n",
        "Y_pred = np.argmax(y_pred, axis=1)\n",
        "\n",
        "#cm = tf.confusion_matrix(y_test, Y_pred,2)\n",
        "cm = tf.confusion_matrix(y_val, Y_pred,2)\n",
        "\n",
        "import tensorflow as tf\n",
        "\n",
        "#initialize the variable\n",
        "init_op = tf.initialize_all_variables()\n",
        "\n",
        "print(\"Confusion matrix (en Validation):\")\n",
        "#run the graph\n",
        "with tf.Session() as sess:\n",
        "    sess.run(init_op) #execute init_op\n",
        "    #print the confussion matrix\n",
        "    #print (sess.run(cm))\n",
        "    array = cm.eval(session=sess)\n",
        "    print(array)"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Confusion matrix (en Validation):\n",
            "[[11732  2987]\n",
            " [ 1479 13240]]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "Q8hC74PYeu_d",
        "colab_type": "code",
        "outputId": "3445ac70-1424-4f07-acf4-7bc6f7db5852",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 68
        }
      },
      "cell_type": "code",
      "source": [
        "total= array[0,0]+array[0,1]\n",
        "\n",
        "cm = cm/total\n",
        "#initialize the variable\n",
        "init_op = tf.initialize_all_variables()\n",
        "\n",
        "print(\"Confusion matrix (normalized):\")\n",
        "#run the graph\n",
        "with tf.Session() as sess:\n",
        "    sess.run(init_op) #execute init_op\n",
        "    #print the confussion matrix\n",
        "    print (sess.run(cm))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Confusion matrix (normalized):\n",
            "[[0.79706502 0.20293498]\n",
            " [0.10048237 0.89951763]]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "metadata": {
        "id": "_zdhNcxHevAy",
        "colab_type": "code",
        "outputId": "c2d361f2-23e7-49cc-9765-becc400ed1d6",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 119
        }
      },
      "cell_type": "code",
      "source": [
        "# calculations of measurements of performance\n",
        "\n",
        "n_TP = array[0,0]  \n",
        "n_FP = array[1,0] \n",
        "n_TN = array[1,1]\n",
        "n_FN = array[0,1]\n",
        "\n",
        "print(\"-----------------------------\")\n",
        "print(\"-----------------------------\")\n",
        "\n",
        "# SENSITIVITY = TP / (TP + FN)\n",
        "NN_Sensitivity = n_TP / (n_TP + n_FN)\n",
        "print(\"NN_Sensitivity = \"+ str(NN_Sensitivity))\n",
        "\n",
        "# SPECIFICITY = TN / (FP + TN)\n",
        "NN_Specificity = n_TN / (n_FP + n_TN)\n",
        "print(\"NN_Specificity = \"+ str(NN_Specificity))\n",
        "\n",
        "# Precision = TP / (TP + FP)\n",
        "NN_Precision = n_TP / (n_TP + n_FP)\n",
        "print(\"NN_Precision = \"+ str(NN_Precision))\n",
        "\n",
        "# Accuracy = (TP + TN) / (TP + FP + TN + FN)\n",
        "NN_Accuracy = (n_TP + n_TN) / (n_TP + n_FP + n_TN + n_FN)\n",
        "print(\"NN_Accuracy = \"+ str(NN_Accuracy))"
      ],
      "execution_count": 0,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "-----------------------------\n",
            "-----------------------------\n",
            "NN_Sensitivity = 0.7970650180039405\n",
            "NN_Specificity = 0.8995176302737957\n",
            "NN_Precision = 0.8880478389221104\n",
            "NN_Accuracy = 0.8482913241388681\n"
          ],
          "name": "stdout"
        }
      ]
    }
  ]
}