{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "Diabetes.ipynb",
      "version": "0.3.2",
      "provenance": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    }
  },
  "cells": [
    {
      "cell_type": "code",
      "metadata": {
        "id": "yAYN26w6JCHD",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 54
        },
        "outputId": "8bec5fb8-d19d-4734-d984-f666077a2ad6"
      },
      "source": [
        "#Description: This program detects/predicts if a person has diabetes (1) or not (0)\n",
        "\n",
        "#Data: https://www.kaggle.com/uciml/pima-indians-diabetes-database\n",
        "\n",
        "'''\n",
        "The pima-indians-diabetes data set comes from the Pima people.\n",
        "The Pima are a group of Native Americans living in an area consisting of what is now central and southern Arizona. \n",
        "They have the highest reported prevalence of diabetes of any population in the world, \n",
        "and have contributed to numerous scientific gains through their willingness to participate in some research. \n",
        "Their involvement has led to significant findings with regard to the epidemiology, physiology,\n",
        "clinical assessment, and genetics of both type 2 diabetes and obesity. - National Center for Biotechnology Information\n",
        "'''"
      ],
      "execution_count": 1,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "'\\nThe data set comes from the Pima people.\\nThe Pima are a group of Native Americans living in an area consisting of what is now central and southern Arizona. \\nThe Pima have the highest reported prevalence of diabetes of any population in the world, \\nand have contributed to numerous scientific gains through their willingness to participate in the research process. \\nTheir involvement has led to significant findings with regard to the epidemiology, physiology,\\nclinical assessment, and genetics of both type 2 diabetes and obesity.\\u200a-\\u200aNational Center for Biotechnology Information\\n'"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 1
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "MMktO83TSCS_",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        },
        "outputId": "9f84d875-dd6f-48f6-f10c-f78c0f703100"
      },
      "source": [
        "#Load libraries\n",
        "from keras.models import Sequential\n",
        "from keras.layers import Dense\n",
        "import pandas as pd\n",
        "from sklearn.model_selection import train_test_split\n",
        "import matplotlib.pyplot as plt\n",
        "plt.style.use('fivethirtyeight')"
      ],
      "execution_count": 2,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Using TensorFlow backend.\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "ExcRVJZMUREM",
        "colab_type": "code",
        "colab": {
          "resources": {
            "http://localhost:8080/nbextensions/google.colab/files.js": {
              "data": "Ly8gQ29weXJpZ2h0IDIwMTcgR29vZ2xlIExMQwovLwovLyBMaWNlbnNlZCB1bmRlciB0aGUgQXBhY2hlIExpY2Vuc2UsIFZlcnNpb24gMi4wICh0aGUgIkxpY2Vuc2UiKTsKLy8geW91IG1heSBub3QgdXNlIHRoaXMgZmlsZSBleGNlcHQgaW4gY29tcGxpYW5jZSB3aXRoIHRoZSBMaWNlbnNlLgovLyBZb3UgbWF5IG9idGFpbiBhIGNvcHkgb2YgdGhlIExpY2Vuc2UgYXQKLy8KLy8gICAgICBodHRwOi8vd3d3LmFwYWNoZS5vcmcvbGljZW5zZXMvTElDRU5TRS0yLjAKLy8KLy8gVW5sZXNzIHJlcXVpcmVkIGJ5IGFwcGxpY2FibGUgbGF3IG9yIGFncmVlZCB0byBpbiB3cml0aW5nLCBzb2Z0d2FyZQovLyBkaXN0cmlidXRlZCB1bmRlciB0aGUgTGljZW5zZSBpcyBkaXN0cmlidXRlZCBvbiBhbiAiQVMgSVMiIEJBU0lTLAovLyBXSVRIT1VUIFdBUlJBTlRJRVMgT1IgQ09ORElUSU9OUyBPRiBBTlkgS0lORCwgZWl0aGVyIGV4cHJlc3Mgb3IgaW1wbGllZC4KLy8gU2VlIHRoZSBMaWNlbnNlIGZvciB0aGUgc3BlY2lmaWMgbGFuZ3VhZ2UgZ292ZXJuaW5nIHBlcm1pc3Npb25zIGFuZAovLyBsaW1pdGF0aW9ucyB1bmRlciB0aGUgTGljZW5zZS4KCi8qKgogKiBAZmlsZW92ZXJ2aWV3IEhlbHBlcnMgZm9yIGdvb2dsZS5jb2xhYiBQeXRob24gbW9kdWxlLgogKi8KKGZ1bmN0aW9uKHNjb3BlKSB7CmZ1bmN0aW9uIHNwYW4odGV4dCwgc3R5bGVBdHRyaWJ1dGVzID0ge30pIHsKICBjb25zdCBlbGVtZW50ID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnc3BhbicpOwogIGVsZW1lbnQudGV4dENvbnRlbnQgPSB0ZXh0OwogIGZvciAoY29uc3Qga2V5IG9mIE9iamVjdC5rZXlzKHN0eWxlQXR0cmlidXRlcykpIHsKICAgIGVsZW1lbnQuc3R5bGVba2V5XSA9IHN0eWxlQXR0cmlidXRlc1trZXldOwogIH0KICByZXR1cm4gZWxlbWVudDsKfQoKLy8gTWF4IG51bWJlciBvZiBieXRlcyB3aGljaCB3aWxsIGJlIHVwbG9hZGVkIGF0IGEgdGltZS4KY29uc3QgTUFYX1BBWUxPQURfU0laRSA9IDEwMCAqIDEwMjQ7Ci8vIE1heCBhbW91bnQgb2YgdGltZSB0byBibG9jayB3YWl0aW5nIGZvciB0aGUgdXNlci4KY29uc3QgRklMRV9DSEFOR0VfVElNRU9VVF9NUyA9IDMwICogMTAwMDsKCmZ1bmN0aW9uIF91cGxvYWRGaWxlcyhpbnB1dElkLCBvdXRwdXRJZCkgewogIGNvbnN0IHN0ZXBzID0gdXBsb2FkRmlsZXNTdGVwKGlucHV0SWQsIG91dHB1dElkKTsKICBjb25zdCBvdXRwdXRFbGVtZW50ID0gZG9jdW1lbnQuZ2V0RWxlbWVudEJ5SWQob3V0cHV0SWQpOwogIC8vIENhY2hlIHN0ZXBzIG9uIHRoZSBvdXRwdXRFbGVtZW50IHRvIG1ha2UgaXQgYXZhaWxhYmxlIGZvciB0aGUgbmV4dCBjYWxsCiAgLy8gdG8gdXBsb2FkRmlsZXNDb250aW51ZSBmcm9tIFB5dGhvbi4KICBvdXRwdXRFbGVtZW50LnN0ZXBzID0gc3RlcHM7CgogIHJldHVybiBfdXBsb2FkRmlsZXNDb250aW51ZShvdXRwdXRJZCk7Cn0KCi8vIFRoaXMgaXMgcm91Z2hseSBhbiBhc
3luYyBnZW5lcmF0b3IgKG5vdCBzdXBwb3J0ZWQgaW4gdGhlIGJyb3dzZXIgeWV0KSwKLy8gd2hlcmUgdGhlcmUgYXJlIG11bHRpcGxlIGFzeW5jaHJvbm91cyBzdGVwcyBhbmQgdGhlIFB5dGhvbiBzaWRlIGlzIGdvaW5nCi8vIHRvIHBvbGwgZm9yIGNvbXBsZXRpb24gb2YgZWFjaCBzdGVwLgovLyBUaGlzIHVzZXMgYSBQcm9taXNlIHRvIGJsb2NrIHRoZSBweXRob24gc2lkZSBvbiBjb21wbGV0aW9uIG9mIGVhY2ggc3RlcCwKLy8gdGhlbiBwYXNzZXMgdGhlIHJlc3VsdCBvZiB0aGUgcHJldmlvdXMgc3RlcCBhcyB0aGUgaW5wdXQgdG8gdGhlIG5leHQgc3RlcC4KZnVuY3Rpb24gX3VwbG9hZEZpbGVzQ29udGludWUob3V0cHV0SWQpIHsKICBjb25zdCBvdXRwdXRFbGVtZW50ID0gZG9jdW1lbnQuZ2V0RWxlbWVudEJ5SWQob3V0cHV0SWQpOwogIGNvbnN0IHN0ZXBzID0gb3V0cHV0RWxlbWVudC5zdGVwczsKCiAgY29uc3QgbmV4dCA9IHN0ZXBzLm5leHQob3V0cHV0RWxlbWVudC5sYXN0UHJvbWlzZVZhbHVlKTsKICByZXR1cm4gUHJvbWlzZS5yZXNvbHZlKG5leHQudmFsdWUucHJvbWlzZSkudGhlbigodmFsdWUpID0+IHsKICAgIC8vIENhY2hlIHRoZSBsYXN0IHByb21pc2UgdmFsdWUgdG8gbWFrZSBpdCBhdmFpbGFibGUgdG8gdGhlIG5leHQKICAgIC8vIHN0ZXAgb2YgdGhlIGdlbmVyYXRvci4KICAgIG91dHB1dEVsZW1lbnQubGFzdFByb21pc2VWYWx1ZSA9IHZhbHVlOwogICAgcmV0dXJuIG5leHQudmFsdWUucmVzcG9uc2U7CiAgfSk7Cn0KCi8qKgogKiBHZW5lcmF0b3IgZnVuY3Rpb24gd2hpY2ggaXMgY2FsbGVkIGJldHdlZW4gZWFjaCBhc3luYyBzdGVwIG9mIHRoZSB1cGxvYWQKICogcHJvY2Vzcy4KICogQHBhcmFtIHtzdHJpbmd9IGlucHV0SWQgRWxlbWVudCBJRCBvZiB0aGUgaW5wdXQgZmlsZSBwaWNrZXIgZWxlbWVudC4KICogQHBhcmFtIHtzdHJpbmd9IG91dHB1dElkIEVsZW1lbnQgSUQgb2YgdGhlIG91dHB1dCBkaXNwbGF5LgogKiBAcmV0dXJuIHshSXRlcmFibGU8IU9iamVjdD59IEl0ZXJhYmxlIG9mIG5leHQgc3RlcHMuCiAqLwpmdW5jdGlvbiogdXBsb2FkRmlsZXNTdGVwKGlucHV0SWQsIG91dHB1dElkKSB7CiAgY29uc3QgaW5wdXRFbGVtZW50ID0gZG9jdW1lbnQuZ2V0RWxlbWVudEJ5SWQoaW5wdXRJZCk7CiAgaW5wdXRFbGVtZW50LmRpc2FibGVkID0gZmFsc2U7CgogIGNvbnN0IG91dHB1dEVsZW1lbnQgPSBkb2N1bWVudC5nZXRFbGVtZW50QnlJZChvdXRwdXRJZCk7CiAgb3V0cHV0RWxlbWVudC5pbm5lckhUTUwgPSAnJzsKCiAgY29uc3QgcGlja2VkUHJvbWlzZSA9IG5ldyBQcm9taXNlKChyZXNvbHZlKSA9PiB7CiAgICBpbnB1dEVsZW1lbnQuYWRkRXZlbnRMaXN0ZW5lcignY2hhbmdlJywgKGUpID0+IHsKICAgICAgcmVzb2x2ZShlLnRhcmdldC5maWxlcyk7CiAgICB9KTsKICB9KTsKCiAgY29uc3QgY2FuY2VsID0gZG9jdW1lbnQuY3JlYXRlRWxlbWVudCgnYnV0dG9uJyk7CiAgaW5wdXRFbGVtZW50LnBhc
mVudEVsZW1lbnQuYXBwZW5kQ2hpbGQoY2FuY2VsKTsKICBjYW5jZWwudGV4dENvbnRlbnQgPSAnQ2FuY2VsIHVwbG9hZCc7CiAgY29uc3QgY2FuY2VsUHJvbWlzZSA9IG5ldyBQcm9taXNlKChyZXNvbHZlKSA9PiB7CiAgICBjYW5jZWwub25jbGljayA9ICgpID0+IHsKICAgICAgcmVzb2x2ZShudWxsKTsKICAgIH07CiAgfSk7CgogIC8vIENhbmNlbCB1cGxvYWQgaWYgdXNlciBoYXNuJ3QgcGlja2VkIGFueXRoaW5nIGluIHRpbWVvdXQuCiAgY29uc3QgdGltZW91dFByb21pc2UgPSBuZXcgUHJvbWlzZSgocmVzb2x2ZSkgPT4gewogICAgc2V0VGltZW91dCgoKSA9PiB7CiAgICAgIHJlc29sdmUobnVsbCk7CiAgICB9LCBGSUxFX0NIQU5HRV9USU1FT1VUX01TKTsKICB9KTsKCiAgLy8gV2FpdCBmb3IgdGhlIHVzZXIgdG8gcGljayB0aGUgZmlsZXMuCiAgY29uc3QgZmlsZXMgPSB5aWVsZCB7CiAgICBwcm9taXNlOiBQcm9taXNlLnJhY2UoW3BpY2tlZFByb21pc2UsIHRpbWVvdXRQcm9taXNlLCBjYW5jZWxQcm9taXNlXSksCiAgICByZXNwb25zZTogewogICAgICBhY3Rpb246ICdzdGFydGluZycsCiAgICB9CiAgfTsKCiAgaWYgKCFmaWxlcykgewogICAgcmV0dXJuIHsKICAgICAgcmVzcG9uc2U6IHsKICAgICAgICBhY3Rpb246ICdjb21wbGV0ZScsCiAgICAgIH0KICAgIH07CiAgfQoKICBjYW5jZWwucmVtb3ZlKCk7CgogIC8vIERpc2FibGUgdGhlIGlucHV0IGVsZW1lbnQgc2luY2UgZnVydGhlciBwaWNrcyBhcmUgbm90IGFsbG93ZWQuCiAgaW5wdXRFbGVtZW50LmRpc2FibGVkID0gdHJ1ZTsKCiAgZm9yIChjb25zdCBmaWxlIG9mIGZpbGVzKSB7CiAgICBjb25zdCBsaSA9IGRvY3VtZW50LmNyZWF0ZUVsZW1lbnQoJ2xpJyk7CiAgICBsaS5hcHBlbmQoc3BhbihmaWxlLm5hbWUsIHtmb250V2VpZ2h0OiAnYm9sZCd9KSk7CiAgICBsaS5hcHBlbmQoc3BhbigKICAgICAgICBgKCR7ZmlsZS50eXBlIHx8ICduL2EnfSkgLSAke2ZpbGUuc2l6ZX0gYnl0ZXMsIGAgKwogICAgICAgIGBsYXN0IG1vZGlmaWVkOiAkewogICAgICAgICAgICBmaWxlLmxhc3RNb2RpZmllZERhdGUgPyBmaWxlLmxhc3RNb2RpZmllZERhdGUudG9Mb2NhbGVEYXRlU3RyaW5nKCkgOgogICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAnbi9hJ30gLSBgKSk7CiAgICBjb25zdCBwZXJjZW50ID0gc3BhbignMCUgZG9uZScpOwogICAgbGkuYXBwZW5kQ2hpbGQocGVyY2VudCk7CgogICAgb3V0cHV0RWxlbWVudC5hcHBlbmRDaGlsZChsaSk7CgogICAgY29uc3QgZmlsZURhdGFQcm9taXNlID0gbmV3IFByb21pc2UoKHJlc29sdmUpID0+IHsKICAgICAgY29uc3QgcmVhZGVyID0gbmV3IEZpbGVSZWFkZXIoKTsKICAgICAgcmVhZGVyLm9ubG9hZCA9IChlKSA9PiB7CiAgICAgICAgcmVzb2x2ZShlLnRhcmdldC5yZXN1bHQpOwogICAgICB9OwogICAgICByZWFkZXIucmVhZEFzQXJyYXlCdWZmZXIoZmlsZSk7CiAgICB9KTsKICAgIC8vIFdhaXQgZm9yIHRoZSBkY
XRhIHRvIGJlIHJlYWR5LgogICAgbGV0IGZpbGVEYXRhID0geWllbGQgewogICAgICBwcm9taXNlOiBmaWxlRGF0YVByb21pc2UsCiAgICAgIHJlc3BvbnNlOiB7CiAgICAgICAgYWN0aW9uOiAnY29udGludWUnLAogICAgICB9CiAgICB9OwoKICAgIC8vIFVzZSBhIGNodW5rZWQgc2VuZGluZyB0byBhdm9pZCBtZXNzYWdlIHNpemUgbGltaXRzLiBTZWUgYi82MjExNTY2MC4KICAgIGxldCBwb3NpdGlvbiA9IDA7CiAgICB3aGlsZSAocG9zaXRpb24gPCBmaWxlRGF0YS5ieXRlTGVuZ3RoKSB7CiAgICAgIGNvbnN0IGxlbmd0aCA9IE1hdGgubWluKGZpbGVEYXRhLmJ5dGVMZW5ndGggLSBwb3NpdGlvbiwgTUFYX1BBWUxPQURfU0laRSk7CiAgICAgIGNvbnN0IGNodW5rID0gbmV3IFVpbnQ4QXJyYXkoZmlsZURhdGEsIHBvc2l0aW9uLCBsZW5ndGgpOwogICAgICBwb3NpdGlvbiArPSBsZW5ndGg7CgogICAgICBjb25zdCBiYXNlNjQgPSBidG9hKFN0cmluZy5mcm9tQ2hhckNvZGUuYXBwbHkobnVsbCwgY2h1bmspKTsKICAgICAgeWllbGQgewogICAgICAgIHJlc3BvbnNlOiB7CiAgICAgICAgICBhY3Rpb246ICdhcHBlbmQnLAogICAgICAgICAgZmlsZTogZmlsZS5uYW1lLAogICAgICAgICAgZGF0YTogYmFzZTY0LAogICAgICAgIH0sCiAgICAgIH07CiAgICAgIHBlcmNlbnQudGV4dENvbnRlbnQgPQogICAgICAgICAgYCR7TWF0aC5yb3VuZCgocG9zaXRpb24gLyBmaWxlRGF0YS5ieXRlTGVuZ3RoKSAqIDEwMCl9JSBkb25lYDsKICAgIH0KICB9CgogIC8vIEFsbCBkb25lLgogIHlpZWxkIHsKICAgIHJlc3BvbnNlOiB7CiAgICAgIGFjdGlvbjogJ2NvbXBsZXRlJywKICAgIH0KICB9Owp9CgpzY29wZS5nb29nbGUgPSBzY29wZS5nb29nbGUgfHwge307CnNjb3BlLmdvb2dsZS5jb2xhYiA9IHNjb3BlLmdvb2dsZS5jb2xhYiB8fCB7fTsKc2NvcGUuZ29vZ2xlLmNvbGFiLl9maWxlcyA9IHsKICBfdXBsb2FkRmlsZXMsCiAgX3VwbG9hZEZpbGVzQ29udGludWUsCn07Cn0pKHNlbGYpOwo=",
              "ok": true,
              "headers": [
                [
                  "content-type",
                  "application/javascript"
                ]
              ],
              "status": 200,
              "status_text": ""
            }
          },
          "base_uri": "https://localhost:8080/",
          "height": 74
        },
        "outputId": "adfe5254-8182-480e-bbd2-a8106486779f"
      },
      "source": [
        "#Load the data\n",
        "from google.colab import files # Use to load data on Google Colab\n",
        "uploaded = files.upload() # Use to load data on Google Colab"
      ],
      "execution_count": 3,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "text/html": [
              "\n",
              "     <input type=\"file\" id=\"files-a1d620a0-b4b2-40c2-9316-a16f9f1c0466\" name=\"files[]\" multiple disabled />\n",
              "     <output id=\"result-a1d620a0-b4b2-40c2-9316-a16f9f1c0466\">\n",
              "      Upload widget is only available when the cell has been executed in the\n",
              "      current browser session. Please rerun this cell to enable.\n",
              "      </output>\n",
              "      <script src=\"/nbextensions/google.colab/files.js\"></script> "
            ],
            "text/plain": [
              "<IPython.core.display.HTML object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        },
        {
          "output_type": "stream",
          "text": [
            "Saving diabetes.csv to diabetes.csv\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "q3W_v8LTSkPb",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 286
        },
        "outputId": "a6782153-5550-47c9-9f2b-2b8693a2821c"
      },
      "source": [
        "#Store the data set\n",
        "df = pd.read_csv('diabetes.csv')\n",
        "\n",
        "#Look at first 7 rows of data\n",
        "df.head(7)"
      ],
      "execution_count": 4,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>Pregnancies</th>\n",
              "      <th>Glucose</th>\n",
              "      <th>BloodPressure</th>\n",
              "      <th>SkinThickness</th>\n",
              "      <th>Insulin</th>\n",
              "      <th>BMI</th>\n",
              "      <th>DiabetesPedigreeFunction</th>\n",
              "      <th>Age</th>\n",
              "      <th>Outcome</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>6</td>\n",
              "      <td>148</td>\n",
              "      <td>72</td>\n",
              "      <td>35</td>\n",
              "      <td>0</td>\n",
              "      <td>33.6</td>\n",
              "      <td>0.627</td>\n",
              "      <td>50</td>\n",
              "      <td>1</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>1</td>\n",
              "      <td>85</td>\n",
              "      <td>66</td>\n",
              "      <td>29</td>\n",
              "      <td>0</td>\n",
              "      <td>26.6</td>\n",
              "      <td>0.351</td>\n",
              "      <td>31</td>\n",
              "      <td>0</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>8</td>\n",
              "      <td>183</td>\n",
              "      <td>64</td>\n",
              "      <td>0</td>\n",
              "      <td>0</td>\n",
              "      <td>23.3</td>\n",
              "      <td>0.672</td>\n",
              "      <td>32</td>\n",
              "      <td>1</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3</th>\n",
              "      <td>1</td>\n",
              "      <td>89</td>\n",
              "      <td>66</td>\n",
              "      <td>23</td>\n",
              "      <td>94</td>\n",
              "      <td>28.1</td>\n",
              "      <td>0.167</td>\n",
              "      <td>21</td>\n",
              "      <td>0</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>4</th>\n",
              "      <td>0</td>\n",
              "      <td>137</td>\n",
              "      <td>40</td>\n",
              "      <td>35</td>\n",
              "      <td>168</td>\n",
              "      <td>43.1</td>\n",
              "      <td>2.288</td>\n",
              "      <td>33</td>\n",
              "      <td>1</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>5</th>\n",
              "      <td>5</td>\n",
              "      <td>116</td>\n",
              "      <td>74</td>\n",
              "      <td>0</td>\n",
              "      <td>0</td>\n",
              "      <td>25.6</td>\n",
              "      <td>0.201</td>\n",
              "      <td>30</td>\n",
              "      <td>0</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>6</th>\n",
              "      <td>3</td>\n",
              "      <td>78</td>\n",
              "      <td>50</td>\n",
              "      <td>32</td>\n",
              "      <td>88</td>\n",
              "      <td>31.0</td>\n",
              "      <td>0.248</td>\n",
              "      <td>26</td>\n",
              "      <td>1</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "</div>"
            ],
            "text/plain": [
              "   Pregnancies  Glucose  BloodPressure  ...  DiabetesPedigreeFunction  Age  Outcome\n",
              "0            6      148             72  ...                     0.627   50        1\n",
              "1            1       85             66  ...                     0.351   31        0\n",
              "2            8      183             64  ...                     0.672   32        1\n",
              "3            1       89             66  ...                     0.167   21        0\n",
              "4            0      137             40  ...                     2.288   33        1\n",
              "5            5      116             74  ...                     0.201   30        0\n",
              "6            3       78             50  ...                     0.248   26        1\n",
              "\n",
              "[7 rows x 9 columns]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 4
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "suyx1vqSWJ8C",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        },
        "outputId": "09b16bc0-5420-42e7-83bd-d4674c7ab439"
      },
      "source": [
        "#Show the shape (number of rows & columns)\n",
        "df.shape"
      ],
      "execution_count": 5,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "(768, 9)"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 5
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "UN-puWagVxz7",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "#Checking for duplicates and removing them\n",
        "df.drop_duplicates(inplace = True)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "oYuvIPwHV1Oz",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 34
        },
        "outputId": "7f2760af-910f-466d-a6e8-416015d2e63e"
      },
      "source": [
        "#Show the shape to see if any rows were dropped (number of rows & columns)\n",
        "df.shape"
      ],
      "execution_count": 7,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "(768, 9)"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 7
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "TCSHE0k5V4oT",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 187
        },
        "outputId": "3889e182-d4c1-4bcd-cfd0-5d129e130a8b"
      },
      "source": [
        "#Show the number of missing (NAN, NaN, na) data for each column\n",
        "df.isnull().sum()"
      ],
      "execution_count": 8,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "Pregnancies                 0\n",
              "Glucose                     0\n",
              "BloodPressure               0\n",
              "SkinThickness               0\n",
              "Insulin                     0\n",
              "BMI                         0\n",
              "DiabetesPedigreeFunction    0\n",
              "Age                         0\n",
              "Outcome                     0\n",
              "dtype: int64"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 8
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "MuDDQ0lAcim-",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 136
        },
        "outputId": "4b961631-7627-4fd3-b23d-a8b47f625f32"
      },
      "source": [
        "#Convert the data into an array\n",
        "dataset = df.values\n",
        "dataset"
      ],
      "execution_count": 9,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "array([[  6.   , 148.   ,  72.   , ...,   0.627,  50.   ,   1.   ],\n",
              "       [  1.   ,  85.   ,  66.   , ...,   0.351,  31.   ,   0.   ],\n",
              "       [  8.   , 183.   ,  64.   , ...,   0.672,  32.   ,   1.   ],\n",
              "       ...,\n",
              "       [  5.   , 121.   ,  72.   , ...,   0.245,  30.   ,   0.   ],\n",
              "       [  1.   , 126.   ,  60.   , ...,   0.349,  47.   ,   1.   ],\n",
              "       [  1.   ,  93.   ,  70.   , ...,   0.315,  23.   ,   0.   ]])"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 9
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "HNH8JpXNVDfa",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "# Get all of the rows from the first eight columns of the dataset\n",
        "X = dataset[:,0:8] #X = dataset[:,0:8]   #X = df.iloc[:, 0:8] \n",
        "# Get all of the rows from the last column\n",
        "y = dataset[:,8] #y = dataset[:,8]     #y = df.iloc[:, 8] \n"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "2XusCN4qAzsz",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 238
        },
        "outputId": "f3c43b3d-5420-47db-c72b-d20a62ece05e"
      },
      "source": [
        "#Process the data\n",
        "#the min-max scaler method scales the dataset so that all the input features lie between 0 and 1 inclusive\n",
        "from sklearn import preprocessing\n",
        "min_max_scaler = preprocessing.MinMaxScaler()\n",
        "X_scale = min_max_scaler.fit_transform(X)\n",
        "X_scale"
      ],
      "execution_count": 11,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "array([[0.35294118, 0.74371859, 0.59016393, ..., 0.50074516, 0.23441503,\n",
              "        0.48333333],\n",
              "       [0.05882353, 0.42713568, 0.54098361, ..., 0.39642325, 0.11656704,\n",
              "        0.16666667],\n",
              "       [0.47058824, 0.91959799, 0.52459016, ..., 0.34724292, 0.25362938,\n",
              "        0.18333333],\n",
              "       ...,\n",
              "       [0.29411765, 0.6080402 , 0.59016393, ..., 0.390462  , 0.07130658,\n",
              "        0.15      ],\n",
              "       [0.05882353, 0.63316583, 0.49180328, ..., 0.4485842 , 0.11571307,\n",
              "        0.43333333],\n",
              "       [0.05882353, 0.46733668, 0.57377049, ..., 0.45305514, 0.10119556,\n",
              "        0.03333333]])"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 11
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "UfSCkvtcUxZC",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "#Split the data into 80% training and 20% testing\n",
        "\n",
        "#train_test_split splits arrays or matrices into random train and test subsets. \n",
        "#That means that everytime you run it without specifying random_state, you will get a different result, this is expected behavior.\n",
        "X_train, X_test, y_train, y_test = train_test_split(X_scale, y, test_size=0.2, random_state = 4)"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "2nYoUoX3TNWZ",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 156
        },
        "outputId": "45b3899e-01fe-4fef-e1be-8a8f0ad4bcc5"
      },
      "source": [
        "\n",
        "#Build the model and architecture of the neural network\n",
        "\n",
        "# The model's architecture: 3 layers,\n",
        "# 1st layer with 12 neurons and activation function 'relu'\n",
        "# 2nd layer with 15 neurons and activation function 'relu'\n",
        "# the last layer has 1 neuron with an activation function = sigmoid function which returns a value between 0 and 1\n",
        "# The input shape/ input_dim = 8, the number of features in the data set\n",
        "model = Sequential([\n",
        "    Dense(12, activation='relu', input_shape=( 8 ,)),\n",
        "    Dense(15, activation='relu'),\n",
        "    Dense(1, activation='sigmoid')\n",
        "])\n",
        "\n",
        "\n"
      ],
      "execution_count": 14,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "WARNING: Logging before flag parsing goes to stderr.\n",
            "W0809 22:31:06.408399 139911896921984 deprecation_wrapper.py:119] From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:74: The name tf.get_default_graph is deprecated. Please use tf.compat.v1.get_default_graph instead.\n",
            "\n",
            "W0809 22:31:06.460541 139911896921984 deprecation_wrapper.py:119] From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:517: The name tf.placeholder is deprecated. Please use tf.compat.v1.placeholder instead.\n",
            "\n",
            "W0809 22:31:06.471525 139911896921984 deprecation_wrapper.py:119] From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:4138: The name tf.random_uniform is deprecated. Please use tf.random.uniform instead.\n",
            "\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "7HTT4DNRTTTJ",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 156
        },
        "outputId": "38bd439c-1032-4d97-eb22-885340da8033"
      },
      "source": [
        "# Loss measures how well the model did on training, and then tries to improve on it using the optimizer\n",
        "model.compile(optimizer='sgd', #Stochastic gradient descent optimizer.\n",
        "              loss='binary_crossentropy', #Used for binary classification\n",
        "              metrics=['accuracy'])"
      ],
      "execution_count": 15,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "W0809 22:31:09.226096 139911896921984 deprecation_wrapper.py:119] From /usr/local/lib/python3.6/dist-packages/keras/optimizers.py:790: The name tf.train.Optimizer is deprecated. Please use tf.compat.v1.train.Optimizer instead.\n",
            "\n",
            "W0809 22:31:09.256962 139911896921984 deprecation_wrapper.py:119] From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:3376: The name tf.log is deprecated. Please use tf.math.log instead.\n",
            "\n",
            "W0809 22:31:09.266288 139911896921984 deprecation.py:323] From /usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/nn_impl.py:180: add_dispatch_support.<locals>.wrapper (from tensorflow.python.ops.array_ops) is deprecated and will be removed in a future version.\n",
            "Instructions for updating:\n",
            "Use tf.where in 2.0, which has the same broadcast rule as np.where\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "RjbHMHfMTX_5",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1000
        },
        "outputId": "89bc2fdb-25f0-434b-d439-38ba408caef5"
      },
      "source": [
        "#Train the model\n",
        "\n",
        "# Split the data into 20% validation data\n",
        "hist = model.fit(X_train, y_train,\n",
        "          batch_size=57, epochs=1000, validation_split=0.2)"
      ],
      "execution_count": 16,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "W0809 22:31:11.499556 139911896921984 deprecation_wrapper.py:119] From /usr/local/lib/python3.6/dist-packages/keras/backend/tensorflow_backend.py:986: The name tf.assign_add is deprecated. Please use tf.compat.v1.assign_add instead.\n",
            "\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "Train on 491 samples, validate on 123 samples\n",
            "Epoch 1/1000\n",
            "491/491 [==============================] - 1s 1ms/step - loss: 0.6683 - acc: 0.6477 - val_loss: 0.6709 - val_acc: 0.6504\n",
            "Epoch 2/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6667 - acc: 0.6477 - val_loss: 0.6696 - val_acc: 0.6504\n",
            "Epoch 3/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.6655 - acc: 0.6477 - val_loss: 0.6683 - val_acc: 0.6504\n",
            "Epoch 4/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.6644 - acc: 0.6477 - val_loss: 0.6671 - val_acc: 0.6504\n",
            "Epoch 5/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6633 - acc: 0.6477 - val_loss: 0.6660 - val_acc: 0.6504\n",
            "Epoch 6/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6622 - acc: 0.6477 - val_loss: 0.6650 - val_acc: 0.6504\n",
            "Epoch 7/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6614 - acc: 0.6477 - val_loss: 0.6643 - val_acc: 0.6504\n",
            "Epoch 8/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6607 - acc: 0.6477 - val_loss: 0.6635 - val_acc: 0.6504\n",
            "Epoch 9/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6599 - acc: 0.6477 - val_loss: 0.6629 - val_acc: 0.6504\n",
            "Epoch 10/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6594 - acc: 0.6477 - val_loss: 0.6622 - val_acc: 0.6504\n",
            "Epoch 11/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6588 - acc: 0.6477 - val_loss: 0.6616 - val_acc: 0.6504\n",
            "Epoch 12/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6582 - acc: 0.6477 - val_loss: 0.6611 - val_acc: 0.6504\n",
            "Epoch 13/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6578 - acc: 0.6477 - val_loss: 0.6605 - val_acc: 0.6504\n",
            "Epoch 14/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6572 - acc: 0.6477 - val_loss: 0.6602 - val_acc: 0.6504\n",
            "Epoch 15/1000\n",
            "491/491 [==============================] - 0s 50us/step - loss: 0.6568 - acc: 0.6477 - val_loss: 0.6597 - val_acc: 0.6504\n",
            "Epoch 16/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.6566 - acc: 0.6477 - val_loss: 0.6594 - val_acc: 0.6504\n",
            "Epoch 17/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6561 - acc: 0.6477 - val_loss: 0.6590 - val_acc: 0.6504\n",
            "Epoch 18/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6557 - acc: 0.6477 - val_loss: 0.6586 - val_acc: 0.6504\n",
            "Epoch 19/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.6552 - acc: 0.6477 - val_loss: 0.6582 - val_acc: 0.6504\n",
            "Epoch 20/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.6549 - acc: 0.6477 - val_loss: 0.6578 - val_acc: 0.6504\n",
            "Epoch 21/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6545 - acc: 0.6477 - val_loss: 0.6574 - val_acc: 0.6504\n",
            "Epoch 22/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6542 - acc: 0.6477 - val_loss: 0.6571 - val_acc: 0.6504\n",
            "Epoch 23/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6538 - acc: 0.6477 - val_loss: 0.6567 - val_acc: 0.6504\n",
            "Epoch 24/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6535 - acc: 0.6477 - val_loss: 0.6563 - val_acc: 0.6504\n",
            "Epoch 25/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6531 - acc: 0.6477 - val_loss: 0.6560 - val_acc: 0.6504\n",
            "Epoch 26/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.6527 - acc: 0.6477 - val_loss: 0.6556 - val_acc: 0.6504\n",
            "Epoch 27/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6524 - acc: 0.6477 - val_loss: 0.6553 - val_acc: 0.6504\n",
            "Epoch 28/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6521 - acc: 0.6477 - val_loss: 0.6549 - val_acc: 0.6504\n",
            "Epoch 29/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6518 - acc: 0.6477 - val_loss: 0.6546 - val_acc: 0.6504\n",
            "Epoch 30/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6513 - acc: 0.6477 - val_loss: 0.6541 - val_acc: 0.6504\n",
            "Epoch 31/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6510 - acc: 0.6477 - val_loss: 0.6537 - val_acc: 0.6504\n",
            "Epoch 32/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6505 - acc: 0.6477 - val_loss: 0.6533 - val_acc: 0.6504\n",
            "Epoch 33/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6499 - acc: 0.6477 - val_loss: 0.6529 - val_acc: 0.6504\n",
            "Epoch 34/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6495 - acc: 0.6477 - val_loss: 0.6526 - val_acc: 0.6504\n",
            "Epoch 35/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6491 - acc: 0.6477 - val_loss: 0.6523 - val_acc: 0.6504\n",
            "Epoch 36/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6486 - acc: 0.6477 - val_loss: 0.6520 - val_acc: 0.6504\n",
            "Epoch 37/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6482 - acc: 0.6477 - val_loss: 0.6516 - val_acc: 0.6504\n",
            "Epoch 38/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6479 - acc: 0.6477 - val_loss: 0.6513 - val_acc: 0.6504\n",
            "Epoch 39/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6475 - acc: 0.6477 - val_loss: 0.6510 - val_acc: 0.6504\n",
            "Epoch 40/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.6472 - acc: 0.6477 - val_loss: 0.6507 - val_acc: 0.6504\n",
            "Epoch 41/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6468 - acc: 0.6477 - val_loss: 0.6504 - val_acc: 0.6504\n",
            "Epoch 42/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6465 - acc: 0.6477 - val_loss: 0.6501 - val_acc: 0.6504\n",
            "Epoch 43/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6461 - acc: 0.6477 - val_loss: 0.6497 - val_acc: 0.6504\n",
            "Epoch 44/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6459 - acc: 0.6477 - val_loss: 0.6494 - val_acc: 0.6504\n",
            "Epoch 45/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6455 - acc: 0.6477 - val_loss: 0.6491 - val_acc: 0.6504\n",
            "Epoch 46/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6451 - acc: 0.6477 - val_loss: 0.6488 - val_acc: 0.6504\n",
            "Epoch 47/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6447 - acc: 0.6477 - val_loss: 0.6485 - val_acc: 0.6504\n",
            "Epoch 48/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.6444 - acc: 0.6477 - val_loss: 0.6481 - val_acc: 0.6504\n",
            "Epoch 49/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.6441 - acc: 0.6477 - val_loss: 0.6478 - val_acc: 0.6504\n",
            "Epoch 50/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6438 - acc: 0.6477 - val_loss: 0.6475 - val_acc: 0.6504\n",
            "Epoch 51/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6435 - acc: 0.6477 - val_loss: 0.6472 - val_acc: 0.6504\n",
            "Epoch 52/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6431 - acc: 0.6477 - val_loss: 0.6469 - val_acc: 0.6504\n",
            "Epoch 53/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.6428 - acc: 0.6477 - val_loss: 0.6466 - val_acc: 0.6504\n",
            "Epoch 54/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6425 - acc: 0.6477 - val_loss: 0.6463 - val_acc: 0.6504\n",
            "Epoch 55/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6421 - acc: 0.6477 - val_loss: 0.6460 - val_acc: 0.6504\n",
            "Epoch 56/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6417 - acc: 0.6477 - val_loss: 0.6457 - val_acc: 0.6504\n",
            "Epoch 57/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.6415 - acc: 0.6477 - val_loss: 0.6453 - val_acc: 0.6504\n",
            "Epoch 58/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6411 - acc: 0.6477 - val_loss: 0.6450 - val_acc: 0.6504\n",
            "Epoch 59/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6408 - acc: 0.6477 - val_loss: 0.6447 - val_acc: 0.6504\n",
            "Epoch 60/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6405 - acc: 0.6477 - val_loss: 0.6444 - val_acc: 0.6504\n",
            "Epoch 61/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6401 - acc: 0.6477 - val_loss: 0.6441 - val_acc: 0.6504\n",
            "Epoch 62/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6399 - acc: 0.6477 - val_loss: 0.6438 - val_acc: 0.6504\n",
            "Epoch 63/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6395 - acc: 0.6477 - val_loss: 0.6435 - val_acc: 0.6504\n",
            "Epoch 64/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6391 - acc: 0.6477 - val_loss: 0.6432 - val_acc: 0.6504\n",
            "Epoch 65/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6388 - acc: 0.6477 - val_loss: 0.6428 - val_acc: 0.6504\n",
            "Epoch 66/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.6384 - acc: 0.6477 - val_loss: 0.6425 - val_acc: 0.6504\n",
            "Epoch 67/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6382 - acc: 0.6477 - val_loss: 0.6422 - val_acc: 0.6504\n",
            "Epoch 68/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6378 - acc: 0.6477 - val_loss: 0.6419 - val_acc: 0.6504\n",
            "Epoch 69/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.6375 - acc: 0.6477 - val_loss: 0.6416 - val_acc: 0.6504\n",
            "Epoch 70/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.6371 - acc: 0.6477 - val_loss: 0.6413 - val_acc: 0.6504\n",
            "Epoch 71/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6369 - acc: 0.6477 - val_loss: 0.6410 - val_acc: 0.6504\n",
            "Epoch 72/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6365 - acc: 0.6477 - val_loss: 0.6407 - val_acc: 0.6504\n",
            "Epoch 73/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6361 - acc: 0.6477 - val_loss: 0.6404 - val_acc: 0.6504\n",
            "Epoch 74/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6357 - acc: 0.6477 - val_loss: 0.6401 - val_acc: 0.6504\n",
            "Epoch 75/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6355 - acc: 0.6477 - val_loss: 0.6398 - val_acc: 0.6504\n",
            "Epoch 76/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6351 - acc: 0.6477 - val_loss: 0.6395 - val_acc: 0.6504\n",
            "Epoch 77/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.6348 - acc: 0.6477 - val_loss: 0.6392 - val_acc: 0.6504\n",
            "Epoch 78/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6345 - acc: 0.6477 - val_loss: 0.6388 - val_acc: 0.6504\n",
            "Epoch 79/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.6341 - acc: 0.6477 - val_loss: 0.6385 - val_acc: 0.6504\n",
            "Epoch 80/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6337 - acc: 0.6477 - val_loss: 0.6382 - val_acc: 0.6504\n",
            "Epoch 81/1000\n",
            "491/491 [==============================] - 0s 51us/step - loss: 0.6334 - acc: 0.6477 - val_loss: 0.6379 - val_acc: 0.6504\n",
            "Epoch 82/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.6330 - acc: 0.6477 - val_loss: 0.6375 - val_acc: 0.6504\n",
            "Epoch 83/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6327 - acc: 0.6477 - val_loss: 0.6372 - val_acc: 0.6504\n",
            "Epoch 84/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6324 - acc: 0.6477 - val_loss: 0.6369 - val_acc: 0.6504\n",
            "Epoch 85/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.6320 - acc: 0.6477 - val_loss: 0.6366 - val_acc: 0.6504\n",
            "Epoch 86/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6316 - acc: 0.6477 - val_loss: 0.6363 - val_acc: 0.6504\n",
            "Epoch 87/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6312 - acc: 0.6477 - val_loss: 0.6360 - val_acc: 0.6504\n",
            "Epoch 88/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6310 - acc: 0.6477 - val_loss: 0.6356 - val_acc: 0.6504\n",
            "Epoch 89/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6306 - acc: 0.6477 - val_loss: 0.6353 - val_acc: 0.6504\n",
            "Epoch 90/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6302 - acc: 0.6477 - val_loss: 0.6350 - val_acc: 0.6504\n",
            "Epoch 91/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6299 - acc: 0.6477 - val_loss: 0.6346 - val_acc: 0.6504\n",
            "Epoch 92/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6295 - acc: 0.6477 - val_loss: 0.6343 - val_acc: 0.6504\n",
            "Epoch 93/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6292 - acc: 0.6477 - val_loss: 0.6340 - val_acc: 0.6504\n",
            "Epoch 94/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6289 - acc: 0.6477 - val_loss: 0.6336 - val_acc: 0.6504\n",
            "Epoch 95/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6285 - acc: 0.6477 - val_loss: 0.6333 - val_acc: 0.6504\n",
            "Epoch 96/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6282 - acc: 0.6477 - val_loss: 0.6330 - val_acc: 0.6504\n",
            "Epoch 97/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.6277 - acc: 0.6477 - val_loss: 0.6327 - val_acc: 0.6504\n",
            "Epoch 98/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6273 - acc: 0.6477 - val_loss: 0.6323 - val_acc: 0.6504\n",
            "Epoch 99/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.6270 - acc: 0.6477 - val_loss: 0.6320 - val_acc: 0.6504\n",
            "Epoch 100/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6266 - acc: 0.6477 - val_loss: 0.6317 - val_acc: 0.6504\n",
            "Epoch 101/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.6264 - acc: 0.6477 - val_loss: 0.6314 - val_acc: 0.6504\n",
            "Epoch 102/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6259 - acc: 0.6477 - val_loss: 0.6311 - val_acc: 0.6504\n",
            "Epoch 103/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6258 - acc: 0.6477 - val_loss: 0.6308 - val_acc: 0.6504\n",
            "Epoch 104/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6254 - acc: 0.6477 - val_loss: 0.6305 - val_acc: 0.6504\n",
            "Epoch 105/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6249 - acc: 0.6477 - val_loss: 0.6301 - val_acc: 0.6504\n",
            "Epoch 106/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6246 - acc: 0.6477 - val_loss: 0.6298 - val_acc: 0.6504\n",
            "Epoch 107/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6243 - acc: 0.6477 - val_loss: 0.6295 - val_acc: 0.6504\n",
            "Epoch 108/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6240 - acc: 0.6477 - val_loss: 0.6292 - val_acc: 0.6504\n",
            "Epoch 109/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6235 - acc: 0.6477 - val_loss: 0.6288 - val_acc: 0.6504\n",
            "Epoch 110/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.6233 - acc: 0.6477 - val_loss: 0.6285 - val_acc: 0.6504\n",
            "Epoch 111/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6230 - acc: 0.6477 - val_loss: 0.6282 - val_acc: 0.6504\n",
            "Epoch 112/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.6226 - acc: 0.6477 - val_loss: 0.6278 - val_acc: 0.6504\n",
            "Epoch 113/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6222 - acc: 0.6477 - val_loss: 0.6275 - val_acc: 0.6504\n",
            "Epoch 114/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6219 - acc: 0.6477 - val_loss: 0.6272 - val_acc: 0.6504\n",
            "Epoch 115/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6216 - acc: 0.6477 - val_loss: 0.6268 - val_acc: 0.6504\n",
            "Epoch 116/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6212 - acc: 0.6477 - val_loss: 0.6265 - val_acc: 0.6504\n",
            "Epoch 117/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6209 - acc: 0.6477 - val_loss: 0.6261 - val_acc: 0.6504\n",
            "Epoch 118/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6204 - acc: 0.6477 - val_loss: 0.6258 - val_acc: 0.6504\n",
            "Epoch 119/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6202 - acc: 0.6477 - val_loss: 0.6255 - val_acc: 0.6504\n",
            "Epoch 120/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.6198 - acc: 0.6477 - val_loss: 0.6251 - val_acc: 0.6504\n",
            "Epoch 121/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.6195 - acc: 0.6477 - val_loss: 0.6248 - val_acc: 0.6504\n",
            "Epoch 122/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6190 - acc: 0.6477 - val_loss: 0.6245 - val_acc: 0.6504\n",
            "Epoch 123/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6188 - acc: 0.6477 - val_loss: 0.6242 - val_acc: 0.6504\n",
            "Epoch 124/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.6183 - acc: 0.6477 - val_loss: 0.6238 - val_acc: 0.6504\n",
            "Epoch 125/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6180 - acc: 0.6477 - val_loss: 0.6235 - val_acc: 0.6504\n",
            "Epoch 126/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.6177 - acc: 0.6477 - val_loss: 0.6232 - val_acc: 0.6504\n",
            "Epoch 127/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6172 - acc: 0.6477 - val_loss: 0.6228 - val_acc: 0.6504\n",
            "Epoch 128/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.6169 - acc: 0.6477 - val_loss: 0.6225 - val_acc: 0.6504\n",
            "Epoch 129/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6165 - acc: 0.6477 - val_loss: 0.6221 - val_acc: 0.6504\n",
            "Epoch 130/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6162 - acc: 0.6477 - val_loss: 0.6218 - val_acc: 0.6504\n",
            "Epoch 131/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6158 - acc: 0.6477 - val_loss: 0.6214 - val_acc: 0.6504\n",
            "Epoch 132/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6153 - acc: 0.6477 - val_loss: 0.6211 - val_acc: 0.6504\n",
            "Epoch 133/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6151 - acc: 0.6477 - val_loss: 0.6207 - val_acc: 0.6504\n",
            "Epoch 134/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.6147 - acc: 0.6477 - val_loss: 0.6204 - val_acc: 0.6504\n",
            "Epoch 135/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.6144 - acc: 0.6477 - val_loss: 0.6200 - val_acc: 0.6504\n",
            "Epoch 136/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6140 - acc: 0.6477 - val_loss: 0.6197 - val_acc: 0.6504\n",
            "Epoch 137/1000\n",
            "491/491 [==============================] - 0s 30us/step - loss: 0.6134 - acc: 0.6477 - val_loss: 0.6193 - val_acc: 0.6504\n",
            "Epoch 138/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6131 - acc: 0.6477 - val_loss: 0.6189 - val_acc: 0.6504\n",
            "Epoch 139/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6127 - acc: 0.6477 - val_loss: 0.6186 - val_acc: 0.6504\n",
            "Epoch 140/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6124 - acc: 0.6477 - val_loss: 0.6182 - val_acc: 0.6504\n",
            "Epoch 141/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.6119 - acc: 0.6477 - val_loss: 0.6179 - val_acc: 0.6504\n",
            "Epoch 142/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6115 - acc: 0.6477 - val_loss: 0.6175 - val_acc: 0.6504\n",
            "Epoch 143/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.6111 - acc: 0.6477 - val_loss: 0.6172 - val_acc: 0.6504\n",
            "Epoch 144/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6106 - acc: 0.6477 - val_loss: 0.6168 - val_acc: 0.6504\n",
            "Epoch 145/1000\n",
            "491/491 [==============================] - 0s 53us/step - loss: 0.6103 - acc: 0.6477 - val_loss: 0.6164 - val_acc: 0.6504\n",
            "Epoch 146/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6099 - acc: 0.6477 - val_loss: 0.6161 - val_acc: 0.6504\n",
            "Epoch 147/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6094 - acc: 0.6477 - val_loss: 0.6157 - val_acc: 0.6504\n",
            "Epoch 148/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6093 - acc: 0.6477 - val_loss: 0.6153 - val_acc: 0.6504\n",
            "Epoch 149/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6086 - acc: 0.6477 - val_loss: 0.6149 - val_acc: 0.6504\n",
            "Epoch 150/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6082 - acc: 0.6477 - val_loss: 0.6145 - val_acc: 0.6504\n",
            "Epoch 151/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.6078 - acc: 0.6477 - val_loss: 0.6142 - val_acc: 0.6504\n",
            "Epoch 152/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6075 - acc: 0.6477 - val_loss: 0.6138 - val_acc: 0.6504\n",
            "Epoch 153/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.6069 - acc: 0.6477 - val_loss: 0.6134 - val_acc: 0.6504\n",
            "Epoch 154/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6066 - acc: 0.6477 - val_loss: 0.6130 - val_acc: 0.6504\n",
            "Epoch 155/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6060 - acc: 0.6477 - val_loss: 0.6126 - val_acc: 0.6504\n",
            "Epoch 156/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.6056 - acc: 0.6477 - val_loss: 0.6123 - val_acc: 0.6504\n",
            "Epoch 157/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6051 - acc: 0.6477 - val_loss: 0.6119 - val_acc: 0.6504\n",
            "Epoch 158/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6048 - acc: 0.6477 - val_loss: 0.6115 - val_acc: 0.6504\n",
            "Epoch 159/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6042 - acc: 0.6497 - val_loss: 0.6111 - val_acc: 0.6504\n",
            "Epoch 160/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6037 - acc: 0.6517 - val_loss: 0.6107 - val_acc: 0.6504\n",
            "Epoch 161/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.6033 - acc: 0.6517 - val_loss: 0.6103 - val_acc: 0.6504\n",
            "Epoch 162/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.6028 - acc: 0.6538 - val_loss: 0.6098 - val_acc: 0.6504\n",
            "Epoch 163/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.6024 - acc: 0.6538 - val_loss: 0.6094 - val_acc: 0.6504\n",
            "Epoch 164/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.6020 - acc: 0.6538 - val_loss: 0.6090 - val_acc: 0.6504\n",
            "Epoch 165/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.6016 - acc: 0.6538 - val_loss: 0.6086 - val_acc: 0.6504\n",
            "Epoch 166/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.6011 - acc: 0.6538 - val_loss: 0.6082 - val_acc: 0.6504\n",
            "Epoch 167/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.6005 - acc: 0.6538 - val_loss: 0.6077 - val_acc: 0.6504\n",
            "Epoch 168/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.6001 - acc: 0.6538 - val_loss: 0.6073 - val_acc: 0.6504\n",
            "Epoch 169/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.5995 - acc: 0.6538 - val_loss: 0.6069 - val_acc: 0.6504\n",
            "Epoch 170/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5990 - acc: 0.6538 - val_loss: 0.6065 - val_acc: 0.6504\n",
            "Epoch 171/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5986 - acc: 0.6538 - val_loss: 0.6060 - val_acc: 0.6504\n",
            "Epoch 172/1000\n",
            "491/491 [==============================] - 0s 29us/step - loss: 0.5981 - acc: 0.6578 - val_loss: 0.6055 - val_acc: 0.6504\n",
            "Epoch 173/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5976 - acc: 0.6558 - val_loss: 0.6051 - val_acc: 0.6504\n",
            "Epoch 174/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5971 - acc: 0.6599 - val_loss: 0.6047 - val_acc: 0.6504\n",
            "Epoch 175/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5966 - acc: 0.6599 - val_loss: 0.6043 - val_acc: 0.6504\n",
            "Epoch 176/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5961 - acc: 0.6578 - val_loss: 0.6038 - val_acc: 0.6504\n",
            "Epoch 177/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5955 - acc: 0.6599 - val_loss: 0.6034 - val_acc: 0.6504\n",
            "Epoch 178/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5952 - acc: 0.6578 - val_loss: 0.6030 - val_acc: 0.6504\n",
            "Epoch 179/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5946 - acc: 0.6599 - val_loss: 0.6025 - val_acc: 0.6504\n",
            "Epoch 180/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5944 - acc: 0.6599 - val_loss: 0.6020 - val_acc: 0.6504\n",
            "Epoch 181/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5935 - acc: 0.6578 - val_loss: 0.6016 - val_acc: 0.6504\n",
            "Epoch 182/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5930 - acc: 0.6599 - val_loss: 0.6011 - val_acc: 0.6504\n",
            "Epoch 183/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.5925 - acc: 0.6599 - val_loss: 0.6007 - val_acc: 0.6504\n",
            "Epoch 184/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5921 - acc: 0.6619 - val_loss: 0.6002 - val_acc: 0.6585\n",
            "Epoch 185/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5915 - acc: 0.6599 - val_loss: 0.5998 - val_acc: 0.6585\n",
            "Epoch 186/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5910 - acc: 0.6619 - val_loss: 0.5993 - val_acc: 0.6585\n",
            "Epoch 187/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5904 - acc: 0.6619 - val_loss: 0.5989 - val_acc: 0.6667\n",
            "Epoch 188/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5899 - acc: 0.6640 - val_loss: 0.5985 - val_acc: 0.6667\n",
            "Epoch 189/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5894 - acc: 0.6680 - val_loss: 0.5980 - val_acc: 0.6667\n",
            "Epoch 190/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5888 - acc: 0.6680 - val_loss: 0.5975 - val_acc: 0.6667\n",
            "Epoch 191/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5882 - acc: 0.6701 - val_loss: 0.5970 - val_acc: 0.6667\n",
            "Epoch 192/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5876 - acc: 0.6680 - val_loss: 0.5965 - val_acc: 0.6667\n",
            "Epoch 193/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5871 - acc: 0.6721 - val_loss: 0.5961 - val_acc: 0.6585\n",
            "Epoch 194/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5866 - acc: 0.6741 - val_loss: 0.5956 - val_acc: 0.6585\n",
            "Epoch 195/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5860 - acc: 0.6741 - val_loss: 0.5951 - val_acc: 0.6504\n",
            "Epoch 196/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5854 - acc: 0.6782 - val_loss: 0.5946 - val_acc: 0.6504\n",
            "Epoch 197/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5849 - acc: 0.6762 - val_loss: 0.5942 - val_acc: 0.6504\n",
            "Epoch 198/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5843 - acc: 0.6802 - val_loss: 0.5936 - val_acc: 0.6504\n",
            "Epoch 199/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5838 - acc: 0.6843 - val_loss: 0.5931 - val_acc: 0.6504\n",
            "Epoch 200/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.5832 - acc: 0.6782 - val_loss: 0.5926 - val_acc: 0.6504\n",
            "Epoch 201/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.5825 - acc: 0.6843 - val_loss: 0.5921 - val_acc: 0.6504\n",
            "Epoch 202/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5821 - acc: 0.6864 - val_loss: 0.5916 - val_acc: 0.6585\n",
            "Epoch 203/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.5817 - acc: 0.6843 - val_loss: 0.5911 - val_acc: 0.6667\n",
            "Epoch 204/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5811 - acc: 0.6965 - val_loss: 0.5906 - val_acc: 0.6585\n",
            "Epoch 205/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5804 - acc: 0.6945 - val_loss: 0.5901 - val_acc: 0.6667\n",
            "Epoch 206/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5798 - acc: 0.6986 - val_loss: 0.5895 - val_acc: 0.6667\n",
            "Epoch 207/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5793 - acc: 0.7006 - val_loss: 0.5890 - val_acc: 0.6585\n",
            "Epoch 208/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5787 - acc: 0.7006 - val_loss: 0.5885 - val_acc: 0.6667\n",
            "Epoch 209/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5780 - acc: 0.6965 - val_loss: 0.5880 - val_acc: 0.6585\n",
            "Epoch 210/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5776 - acc: 0.7026 - val_loss: 0.5875 - val_acc: 0.6585\n",
            "Epoch 211/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5770 - acc: 0.7067 - val_loss: 0.5870 - val_acc: 0.6585\n",
            "Epoch 212/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5762 - acc: 0.7108 - val_loss: 0.5865 - val_acc: 0.6585\n",
            "Epoch 213/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5761 - acc: 0.7067 - val_loss: 0.5861 - val_acc: 0.6585\n",
            "Epoch 214/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.5751 - acc: 0.7149 - val_loss: 0.5856 - val_acc: 0.6585\n",
            "Epoch 215/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.5745 - acc: 0.7128 - val_loss: 0.5851 - val_acc: 0.6585\n",
            "Epoch 216/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5739 - acc: 0.7149 - val_loss: 0.5846 - val_acc: 0.6667\n",
            "Epoch 217/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5732 - acc: 0.7210 - val_loss: 0.5841 - val_acc: 0.6667\n",
            "Epoch 218/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5728 - acc: 0.7169 - val_loss: 0.5835 - val_acc: 0.6667\n",
            "Epoch 219/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5722 - acc: 0.7189 - val_loss: 0.5831 - val_acc: 0.6748\n",
            "Epoch 220/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5716 - acc: 0.7189 - val_loss: 0.5825 - val_acc: 0.6748\n",
            "Epoch 221/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5712 - acc: 0.7210 - val_loss: 0.5821 - val_acc: 0.6829\n",
            "Epoch 222/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5705 - acc: 0.7210 - val_loss: 0.5816 - val_acc: 0.6829\n",
            "Epoch 223/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.5697 - acc: 0.7251 - val_loss: 0.5810 - val_acc: 0.6829\n",
            "Epoch 224/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5691 - acc: 0.7210 - val_loss: 0.5805 - val_acc: 0.6829\n",
            "Epoch 225/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.5685 - acc: 0.7230 - val_loss: 0.5800 - val_acc: 0.6829\n",
            "Epoch 226/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.5679 - acc: 0.7251 - val_loss: 0.5795 - val_acc: 0.6829\n",
            "Epoch 227/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5675 - acc: 0.7291 - val_loss: 0.5789 - val_acc: 0.6911\n",
            "Epoch 228/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5665 - acc: 0.7251 - val_loss: 0.5784 - val_acc: 0.6911\n",
            "Epoch 229/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5660 - acc: 0.7271 - val_loss: 0.5779 - val_acc: 0.6911\n",
            "Epoch 230/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5654 - acc: 0.7271 - val_loss: 0.5774 - val_acc: 0.6911\n",
            "Epoch 231/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.5649 - acc: 0.7291 - val_loss: 0.5770 - val_acc: 0.6911\n",
            "Epoch 232/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.5644 - acc: 0.7230 - val_loss: 0.5764 - val_acc: 0.6911\n",
            "Epoch 233/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5637 - acc: 0.7312 - val_loss: 0.5758 - val_acc: 0.6911\n",
            "Epoch 234/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5631 - acc: 0.7251 - val_loss: 0.5753 - val_acc: 0.6911\n",
            "Epoch 235/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5626 - acc: 0.7251 - val_loss: 0.5748 - val_acc: 0.6911\n",
            "Epoch 236/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5620 - acc: 0.7332 - val_loss: 0.5744 - val_acc: 0.7073\n",
            "Epoch 237/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5613 - acc: 0.7251 - val_loss: 0.5738 - val_acc: 0.6911\n",
            "Epoch 238/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5608 - acc: 0.7251 - val_loss: 0.5733 - val_acc: 0.7073\n",
            "Epoch 239/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5603 - acc: 0.7210 - val_loss: 0.5728 - val_acc: 0.7073\n",
            "Epoch 240/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5596 - acc: 0.7230 - val_loss: 0.5723 - val_acc: 0.7073\n",
            "Epoch 241/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5589 - acc: 0.7210 - val_loss: 0.5717 - val_acc: 0.7073\n",
            "Epoch 242/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5582 - acc: 0.7210 - val_loss: 0.5712 - val_acc: 0.7073\n",
            "Epoch 243/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5575 - acc: 0.7251 - val_loss: 0.5708 - val_acc: 0.7073\n",
            "Epoch 244/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5570 - acc: 0.7230 - val_loss: 0.5703 - val_acc: 0.7073\n",
            "Epoch 245/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.5564 - acc: 0.7230 - val_loss: 0.5699 - val_acc: 0.7154\n",
            "Epoch 246/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5558 - acc: 0.7210 - val_loss: 0.5693 - val_acc: 0.7073\n",
            "Epoch 247/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5551 - acc: 0.7230 - val_loss: 0.5687 - val_acc: 0.7154\n",
            "Epoch 248/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5545 - acc: 0.7230 - val_loss: 0.5682 - val_acc: 0.7154\n",
            "Epoch 249/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5539 - acc: 0.7251 - val_loss: 0.5677 - val_acc: 0.7154\n",
            "Epoch 250/1000\n",
            "491/491 [==============================] - 0s 29us/step - loss: 0.5533 - acc: 0.7210 - val_loss: 0.5672 - val_acc: 0.7154\n",
            "Epoch 251/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5529 - acc: 0.7251 - val_loss: 0.5668 - val_acc: 0.7154\n",
            "Epoch 252/1000\n",
            "491/491 [==============================] - 0s 50us/step - loss: 0.5522 - acc: 0.7251 - val_loss: 0.5662 - val_acc: 0.7154\n",
            "Epoch 253/1000\n",
            "491/491 [==============================] - 0s 58us/step - loss: 0.5514 - acc: 0.7230 - val_loss: 0.5658 - val_acc: 0.7154\n",
            "Epoch 254/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5508 - acc: 0.7271 - val_loss: 0.5651 - val_acc: 0.7154\n",
            "Epoch 255/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5502 - acc: 0.7210 - val_loss: 0.5648 - val_acc: 0.7154\n",
            "Epoch 256/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5497 - acc: 0.7271 - val_loss: 0.5643 - val_acc: 0.7154\n",
            "Epoch 257/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5489 - acc: 0.7251 - val_loss: 0.5638 - val_acc: 0.7154\n",
            "Epoch 258/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5482 - acc: 0.7251 - val_loss: 0.5633 - val_acc: 0.7154\n",
            "Epoch 259/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5477 - acc: 0.7271 - val_loss: 0.5628 - val_acc: 0.7154\n",
            "Epoch 260/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5470 - acc: 0.7271 - val_loss: 0.5624 - val_acc: 0.7154\n",
            "Epoch 261/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5465 - acc: 0.7251 - val_loss: 0.5621 - val_acc: 0.7236\n",
            "Epoch 262/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5458 - acc: 0.7312 - val_loss: 0.5616 - val_acc: 0.7236\n",
            "Epoch 263/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5454 - acc: 0.7312 - val_loss: 0.5611 - val_acc: 0.7154\n",
            "Epoch 264/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5446 - acc: 0.7373 - val_loss: 0.5605 - val_acc: 0.7236\n",
            "Epoch 265/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5441 - acc: 0.7352 - val_loss: 0.5601 - val_acc: 0.7154\n",
            "Epoch 266/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5434 - acc: 0.7312 - val_loss: 0.5597 - val_acc: 0.7073\n",
            "Epoch 267/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5431 - acc: 0.7291 - val_loss: 0.5593 - val_acc: 0.6911\n",
            "Epoch 268/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5422 - acc: 0.7332 - val_loss: 0.5588 - val_acc: 0.6911\n",
            "Epoch 269/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5416 - acc: 0.7332 - val_loss: 0.5582 - val_acc: 0.6911\n",
            "Epoch 270/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5412 - acc: 0.7312 - val_loss: 0.5577 - val_acc: 0.6911\n",
            "Epoch 271/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5404 - acc: 0.7312 - val_loss: 0.5573 - val_acc: 0.6911\n",
            "Epoch 272/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5398 - acc: 0.7312 - val_loss: 0.5567 - val_acc: 0.6911\n",
            "Epoch 273/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5392 - acc: 0.7352 - val_loss: 0.5562 - val_acc: 0.6911\n",
            "Epoch 274/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5387 - acc: 0.7352 - val_loss: 0.5557 - val_acc: 0.6911\n",
            "Epoch 275/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.5379 - acc: 0.7352 - val_loss: 0.5552 - val_acc: 0.6911\n",
            "Epoch 276/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5377 - acc: 0.7352 - val_loss: 0.5547 - val_acc: 0.6911\n",
            "Epoch 277/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5369 - acc: 0.7373 - val_loss: 0.5544 - val_acc: 0.6911\n",
            "Epoch 278/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5362 - acc: 0.7393 - val_loss: 0.5537 - val_acc: 0.6911\n",
            "Epoch 279/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.5356 - acc: 0.7352 - val_loss: 0.5534 - val_acc: 0.6911\n",
            "Epoch 280/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5350 - acc: 0.7373 - val_loss: 0.5530 - val_acc: 0.6911\n",
            "Epoch 281/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.5345 - acc: 0.7352 - val_loss: 0.5523 - val_acc: 0.6911\n",
            "Epoch 282/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5336 - acc: 0.7393 - val_loss: 0.5520 - val_acc: 0.6992\n",
            "Epoch 283/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5331 - acc: 0.7373 - val_loss: 0.5515 - val_acc: 0.6992\n",
            "Epoch 284/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.5325 - acc: 0.7393 - val_loss: 0.5510 - val_acc: 0.6992\n",
            "Epoch 285/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5323 - acc: 0.7352 - val_loss: 0.5505 - val_acc: 0.6911\n",
            "Epoch 286/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5313 - acc: 0.7393 - val_loss: 0.5500 - val_acc: 0.6992\n",
            "Epoch 287/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5306 - acc: 0.7413 - val_loss: 0.5496 - val_acc: 0.7073\n",
            "Epoch 288/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5304 - acc: 0.7373 - val_loss: 0.5493 - val_acc: 0.7073\n",
            "Epoch 289/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5294 - acc: 0.7413 - val_loss: 0.5487 - val_acc: 0.7073\n",
            "Epoch 290/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5291 - acc: 0.7413 - val_loss: 0.5483 - val_acc: 0.7073\n",
            "Epoch 291/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5283 - acc: 0.7373 - val_loss: 0.5478 - val_acc: 0.7073\n",
            "Epoch 292/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5277 - acc: 0.7352 - val_loss: 0.5473 - val_acc: 0.7073\n",
            "Epoch 293/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5272 - acc: 0.7393 - val_loss: 0.5469 - val_acc: 0.7073\n",
            "Epoch 294/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5267 - acc: 0.7373 - val_loss: 0.5464 - val_acc: 0.7073\n",
            "Epoch 295/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5259 - acc: 0.7413 - val_loss: 0.5459 - val_acc: 0.7073\n",
            "Epoch 296/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5255 - acc: 0.7434 - val_loss: 0.5457 - val_acc: 0.6992\n",
            "Epoch 297/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5250 - acc: 0.7291 - val_loss: 0.5451 - val_acc: 0.7073\n",
            "Epoch 298/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5244 - acc: 0.7373 - val_loss: 0.5448 - val_acc: 0.6992\n",
            "Epoch 299/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5240 - acc: 0.7332 - val_loss: 0.5442 - val_acc: 0.6992\n",
            "Epoch 300/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.5230 - acc: 0.7352 - val_loss: 0.5438 - val_acc: 0.7073\n",
            "Epoch 301/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5224 - acc: 0.7393 - val_loss: 0.5433 - val_acc: 0.7073\n",
            "Epoch 302/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5220 - acc: 0.7373 - val_loss: 0.5430 - val_acc: 0.6992\n",
            "Epoch 303/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5217 - acc: 0.7413 - val_loss: 0.5426 - val_acc: 0.6992\n",
            "Epoch 304/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5209 - acc: 0.7393 - val_loss: 0.5422 - val_acc: 0.6992\n",
            "Epoch 305/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5203 - acc: 0.7373 - val_loss: 0.5417 - val_acc: 0.7073\n",
            "Epoch 306/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5200 - acc: 0.7393 - val_loss: 0.5414 - val_acc: 0.6992\n",
            "Epoch 307/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5190 - acc: 0.7352 - val_loss: 0.5409 - val_acc: 0.6992\n",
            "Epoch 308/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5189 - acc: 0.7434 - val_loss: 0.5406 - val_acc: 0.6992\n",
            "Epoch 309/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5182 - acc: 0.7413 - val_loss: 0.5401 - val_acc: 0.7073\n",
            "Epoch 310/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5174 - acc: 0.7434 - val_loss: 0.5396 - val_acc: 0.7073\n",
            "Epoch 311/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5172 - acc: 0.7393 - val_loss: 0.5394 - val_acc: 0.6992\n",
            "Epoch 312/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5165 - acc: 0.7413 - val_loss: 0.5390 - val_acc: 0.6992\n",
            "Epoch 313/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5163 - acc: 0.7434 - val_loss: 0.5387 - val_acc: 0.7073\n",
            "Epoch 314/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5155 - acc: 0.7454 - val_loss: 0.5381 - val_acc: 0.7073\n",
            "Epoch 315/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5151 - acc: 0.7393 - val_loss: 0.5376 - val_acc: 0.7073\n",
            "Epoch 316/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5145 - acc: 0.7434 - val_loss: 0.5372 - val_acc: 0.7073\n",
            "Epoch 317/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.5140 - acc: 0.7454 - val_loss: 0.5369 - val_acc: 0.7073\n",
            "Epoch 318/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.5133 - acc: 0.7434 - val_loss: 0.5367 - val_acc: 0.7073\n",
            "Epoch 319/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.5127 - acc: 0.7475 - val_loss: 0.5362 - val_acc: 0.7073\n",
            "Epoch 320/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5123 - acc: 0.7475 - val_loss: 0.5359 - val_acc: 0.7073\n",
            "Epoch 321/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5118 - acc: 0.7454 - val_loss: 0.5355 - val_acc: 0.7073\n",
            "Epoch 322/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5111 - acc: 0.7536 - val_loss: 0.5351 - val_acc: 0.7073\n",
            "Epoch 323/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5107 - acc: 0.7454 - val_loss: 0.5346 - val_acc: 0.7073\n",
            "Epoch 324/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.5100 - acc: 0.7515 - val_loss: 0.5342 - val_acc: 0.7073\n",
            "Epoch 325/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5098 - acc: 0.7475 - val_loss: 0.5339 - val_acc: 0.7073\n",
            "Epoch 326/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.5092 - acc: 0.7454 - val_loss: 0.5337 - val_acc: 0.7073\n",
            "Epoch 327/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5085 - acc: 0.7454 - val_loss: 0.5333 - val_acc: 0.7073\n",
            "Epoch 328/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5083 - acc: 0.7475 - val_loss: 0.5327 - val_acc: 0.7073\n",
            "Epoch 329/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5075 - acc: 0.7515 - val_loss: 0.5323 - val_acc: 0.7073\n",
            "Epoch 330/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5068 - acc: 0.7515 - val_loss: 0.5320 - val_acc: 0.7073\n",
            "Epoch 331/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.5066 - acc: 0.7475 - val_loss: 0.5316 - val_acc: 0.7073\n",
            "Epoch 332/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.5059 - acc: 0.7536 - val_loss: 0.5313 - val_acc: 0.7073\n",
            "Epoch 333/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5055 - acc: 0.7515 - val_loss: 0.5309 - val_acc: 0.7073\n",
            "Epoch 334/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5050 - acc: 0.7556 - val_loss: 0.5307 - val_acc: 0.7154\n",
            "Epoch 335/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.5046 - acc: 0.7454 - val_loss: 0.5302 - val_acc: 0.7073\n",
            "Epoch 336/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5042 - acc: 0.7536 - val_loss: 0.5299 - val_acc: 0.7154\n",
            "Epoch 337/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.5037 - acc: 0.7495 - val_loss: 0.5296 - val_acc: 0.7154\n",
            "Epoch 338/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5030 - acc: 0.7475 - val_loss: 0.5293 - val_acc: 0.7154\n",
            "Epoch 339/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.5026 - acc: 0.7536 - val_loss: 0.5290 - val_acc: 0.7154\n",
            "Epoch 340/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.5023 - acc: 0.7454 - val_loss: 0.5287 - val_acc: 0.7154\n",
            "Epoch 341/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.5018 - acc: 0.7434 - val_loss: 0.5284 - val_acc: 0.7154\n",
            "Epoch 342/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.5012 - acc: 0.7495 - val_loss: 0.5281 - val_acc: 0.7154\n",
            "Epoch 343/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.5006 - acc: 0.7536 - val_loss: 0.5276 - val_acc: 0.7154\n",
            "Epoch 344/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.5002 - acc: 0.7536 - val_loss: 0.5274 - val_acc: 0.7236\n",
            "Epoch 345/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4998 - acc: 0.7515 - val_loss: 0.5271 - val_acc: 0.7236\n",
            "Epoch 346/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4996 - acc: 0.7556 - val_loss: 0.5269 - val_acc: 0.7154\n",
            "Epoch 347/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4988 - acc: 0.7515 - val_loss: 0.5266 - val_acc: 0.7154\n",
            "Epoch 348/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4983 - acc: 0.7495 - val_loss: 0.5263 - val_acc: 0.7154\n",
            "Epoch 349/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4983 - acc: 0.7536 - val_loss: 0.5262 - val_acc: 0.7154\n",
            "Epoch 350/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4979 - acc: 0.7556 - val_loss: 0.5258 - val_acc: 0.7154\n",
            "Epoch 351/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4973 - acc: 0.7536 - val_loss: 0.5254 - val_acc: 0.7154\n",
            "Epoch 352/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4966 - acc: 0.7556 - val_loss: 0.5250 - val_acc: 0.7154\n",
            "Epoch 353/1000\n",
            "491/491 [==============================] - 0s 52us/step - loss: 0.4962 - acc: 0.7556 - val_loss: 0.5248 - val_acc: 0.7154\n",
            "Epoch 354/1000\n",
            "491/491 [==============================] - 0s 50us/step - loss: 0.4960 - acc: 0.7637 - val_loss: 0.5245 - val_acc: 0.7154\n",
            "Epoch 355/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4957 - acc: 0.7576 - val_loss: 0.5242 - val_acc: 0.7154\n",
            "Epoch 356/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4948 - acc: 0.7576 - val_loss: 0.5240 - val_acc: 0.7154\n",
            "Epoch 357/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4945 - acc: 0.7576 - val_loss: 0.5237 - val_acc: 0.7154\n",
            "Epoch 358/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4941 - acc: 0.7617 - val_loss: 0.5234 - val_acc: 0.7154\n",
            "Epoch 359/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4936 - acc: 0.7617 - val_loss: 0.5230 - val_acc: 0.7154\n",
            "Epoch 360/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4937 - acc: 0.7556 - val_loss: 0.5229 - val_acc: 0.7154\n",
            "Epoch 361/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4928 - acc: 0.7617 - val_loss: 0.5226 - val_acc: 0.7154\n",
            "Epoch 362/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4923 - acc: 0.7617 - val_loss: 0.5223 - val_acc: 0.7154\n",
            "Epoch 363/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4919 - acc: 0.7597 - val_loss: 0.5222 - val_acc: 0.7154\n",
            "Epoch 364/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4921 - acc: 0.7597 - val_loss: 0.5221 - val_acc: 0.7073\n",
            "Epoch 365/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4916 - acc: 0.7556 - val_loss: 0.5216 - val_acc: 0.7154\n",
            "Epoch 366/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4909 - acc: 0.7576 - val_loss: 0.5213 - val_acc: 0.7236\n",
            "Epoch 367/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4910 - acc: 0.7637 - val_loss: 0.5210 - val_acc: 0.7236\n",
            "Epoch 368/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4906 - acc: 0.7556 - val_loss: 0.5209 - val_acc: 0.7073\n",
            "Epoch 369/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4895 - acc: 0.7637 - val_loss: 0.5206 - val_acc: 0.7154\n",
            "Epoch 370/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4894 - acc: 0.7658 - val_loss: 0.5204 - val_acc: 0.7154\n",
            "Epoch 371/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4890 - acc: 0.7637 - val_loss: 0.5203 - val_acc: 0.7073\n",
            "Epoch 372/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4883 - acc: 0.7637 - val_loss: 0.5202 - val_acc: 0.7073\n",
            "Epoch 373/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4881 - acc: 0.7536 - val_loss: 0.5199 - val_acc: 0.7073\n",
            "Epoch 374/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4879 - acc: 0.7617 - val_loss: 0.5195 - val_acc: 0.7073\n",
            "Epoch 375/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4874 - acc: 0.7637 - val_loss: 0.5195 - val_acc: 0.7073\n",
            "Epoch 376/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4871 - acc: 0.7597 - val_loss: 0.5191 - val_acc: 0.7073\n",
            "Epoch 377/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4870 - acc: 0.7617 - val_loss: 0.5190 - val_acc: 0.7073\n",
            "Epoch 378/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4862 - acc: 0.7597 - val_loss: 0.5189 - val_acc: 0.7073\n",
            "Epoch 379/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4864 - acc: 0.7597 - val_loss: 0.5186 - val_acc: 0.7073\n",
            "Epoch 380/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4854 - acc: 0.7576 - val_loss: 0.5182 - val_acc: 0.7073\n",
            "Epoch 381/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4856 - acc: 0.7556 - val_loss: 0.5180 - val_acc: 0.7154\n",
            "Epoch 382/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4851 - acc: 0.7637 - val_loss: 0.5179 - val_acc: 0.7073\n",
            "Epoch 383/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4844 - acc: 0.7617 - val_loss: 0.5177 - val_acc: 0.7073\n",
            "Epoch 384/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4842 - acc: 0.7637 - val_loss: 0.5175 - val_acc: 0.7073\n",
            "Epoch 385/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4835 - acc: 0.7617 - val_loss: 0.5172 - val_acc: 0.7154\n",
            "Epoch 386/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4834 - acc: 0.7617 - val_loss: 0.5171 - val_acc: 0.7073\n",
            "Epoch 387/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4830 - acc: 0.7597 - val_loss: 0.5168 - val_acc: 0.7154\n",
            "Epoch 388/1000\n",
            "491/491 [==============================] - 0s 48us/step - loss: 0.4826 - acc: 0.7658 - val_loss: 0.5166 - val_acc: 0.7073\n",
            "Epoch 389/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4824 - acc: 0.7678 - val_loss: 0.5164 - val_acc: 0.7154\n",
            "Epoch 390/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4818 - acc: 0.7597 - val_loss: 0.5162 - val_acc: 0.7154\n",
            "Epoch 391/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4818 - acc: 0.7658 - val_loss: 0.5160 - val_acc: 0.7154\n",
            "Epoch 392/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4809 - acc: 0.7617 - val_loss: 0.5158 - val_acc: 0.7154\n",
            "Epoch 393/1000\n",
            "491/491 [==============================] - 0s 55us/step - loss: 0.4816 - acc: 0.7617 - val_loss: 0.5156 - val_acc: 0.7236\n",
            "Epoch 394/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4806 - acc: 0.7637 - val_loss: 0.5154 - val_acc: 0.7154\n",
            "Epoch 395/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4799 - acc: 0.7617 - val_loss: 0.5153 - val_acc: 0.7154\n",
            "Epoch 396/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4798 - acc: 0.7617 - val_loss: 0.5152 - val_acc: 0.7154\n",
            "Epoch 397/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4795 - acc: 0.7617 - val_loss: 0.5150 - val_acc: 0.7154\n",
            "Epoch 398/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4788 - acc: 0.7617 - val_loss: 0.5149 - val_acc: 0.7154\n",
            "Epoch 399/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4789 - acc: 0.7597 - val_loss: 0.5147 - val_acc: 0.7154\n",
            "Epoch 400/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4781 - acc: 0.7617 - val_loss: 0.5145 - val_acc: 0.7154\n",
            "Epoch 401/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4784 - acc: 0.7637 - val_loss: 0.5143 - val_acc: 0.7154\n",
            "Epoch 402/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4777 - acc: 0.7658 - val_loss: 0.5142 - val_acc: 0.7154\n",
            "Epoch 403/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4779 - acc: 0.7597 - val_loss: 0.5140 - val_acc: 0.7236\n",
            "Epoch 404/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4773 - acc: 0.7617 - val_loss: 0.5139 - val_acc: 0.7154\n",
            "Epoch 405/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4772 - acc: 0.7658 - val_loss: 0.5138 - val_acc: 0.7154\n",
            "Epoch 406/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4766 - acc: 0.7699 - val_loss: 0.5136 - val_acc: 0.7236\n",
            "Epoch 407/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4761 - acc: 0.7617 - val_loss: 0.5135 - val_acc: 0.7154\n",
            "Epoch 408/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4759 - acc: 0.7699 - val_loss: 0.5133 - val_acc: 0.7236\n",
            "Epoch 409/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4760 - acc: 0.7617 - val_loss: 0.5132 - val_acc: 0.7236\n",
            "Epoch 410/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4752 - acc: 0.7678 - val_loss: 0.5131 - val_acc: 0.7236\n",
            "Epoch 411/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4751 - acc: 0.7678 - val_loss: 0.5129 - val_acc: 0.7236\n",
            "Epoch 412/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4752 - acc: 0.7617 - val_loss: 0.5128 - val_acc: 0.7236\n",
            "Epoch 413/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4744 - acc: 0.7678 - val_loss: 0.5127 - val_acc: 0.7236\n",
            "Epoch 414/1000\n",
            "491/491 [==============================] - 0s 52us/step - loss: 0.4742 - acc: 0.7719 - val_loss: 0.5126 - val_acc: 0.7236\n",
            "Epoch 415/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4736 - acc: 0.7699 - val_loss: 0.5125 - val_acc: 0.7236\n",
            "Epoch 416/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4735 - acc: 0.7719 - val_loss: 0.5124 - val_acc: 0.7236\n",
            "Epoch 417/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4733 - acc: 0.7739 - val_loss: 0.5123 - val_acc: 0.7236\n",
            "Epoch 418/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4733 - acc: 0.7719 - val_loss: 0.5121 - val_acc: 0.7236\n",
            "Epoch 419/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4729 - acc: 0.7699 - val_loss: 0.5120 - val_acc: 0.7236\n",
            "Epoch 420/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4729 - acc: 0.7719 - val_loss: 0.5120 - val_acc: 0.7154\n",
            "Epoch 421/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4724 - acc: 0.7719 - val_loss: 0.5118 - val_acc: 0.7236\n",
            "Epoch 422/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4722 - acc: 0.7719 - val_loss: 0.5117 - val_acc: 0.7154\n",
            "Epoch 423/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4718 - acc: 0.7719 - val_loss: 0.5117 - val_acc: 0.7398\n",
            "Epoch 424/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4717 - acc: 0.7760 - val_loss: 0.5115 - val_acc: 0.7236\n",
            "Epoch 425/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4714 - acc: 0.7780 - val_loss: 0.5114 - val_acc: 0.7236\n",
            "Epoch 426/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4712 - acc: 0.7719 - val_loss: 0.5113 - val_acc: 0.7236\n",
            "Epoch 427/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4712 - acc: 0.7719 - val_loss: 0.5112 - val_acc: 0.7236\n",
            "Epoch 428/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4706 - acc: 0.7739 - val_loss: 0.5111 - val_acc: 0.7154\n",
            "Epoch 429/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4701 - acc: 0.7780 - val_loss: 0.5110 - val_acc: 0.7236\n",
            "Epoch 430/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4703 - acc: 0.7760 - val_loss: 0.5109 - val_acc: 0.7236\n",
            "Epoch 431/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4698 - acc: 0.7780 - val_loss: 0.5108 - val_acc: 0.7480\n",
            "Epoch 432/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4696 - acc: 0.7760 - val_loss: 0.5108 - val_acc: 0.7398\n",
            "Epoch 433/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4696 - acc: 0.7780 - val_loss: 0.5108 - val_acc: 0.7480\n",
            "Epoch 434/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4692 - acc: 0.7719 - val_loss: 0.5105 - val_acc: 0.7154\n",
            "Epoch 435/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.4691 - acc: 0.7780 - val_loss: 0.5104 - val_acc: 0.7480\n",
            "Epoch 436/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4685 - acc: 0.7760 - val_loss: 0.5102 - val_acc: 0.7398\n",
            "Epoch 437/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4682 - acc: 0.7760 - val_loss: 0.5102 - val_acc: 0.7480\n",
            "Epoch 438/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4681 - acc: 0.7780 - val_loss: 0.5101 - val_acc: 0.7480\n",
            "Epoch 439/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4678 - acc: 0.7760 - val_loss: 0.5100 - val_acc: 0.7236\n",
            "Epoch 440/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4677 - acc: 0.7800 - val_loss: 0.5099 - val_acc: 0.7480\n",
            "Epoch 441/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4679 - acc: 0.7780 - val_loss: 0.5098 - val_acc: 0.7236\n",
            "Epoch 442/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4675 - acc: 0.7780 - val_loss: 0.5097 - val_acc: 0.7154\n",
            "Epoch 443/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4670 - acc: 0.7780 - val_loss: 0.5095 - val_acc: 0.7236\n",
            "Epoch 444/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4670 - acc: 0.7841 - val_loss: 0.5094 - val_acc: 0.7398\n",
            "Epoch 445/1000\n",
            "491/491 [==============================] - 0s 55us/step - loss: 0.4663 - acc: 0.7800 - val_loss: 0.5093 - val_acc: 0.7154\n",
            "Epoch 446/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4661 - acc: 0.7841 - val_loss: 0.5092 - val_acc: 0.7398\n",
            "Epoch 447/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4659 - acc: 0.7760 - val_loss: 0.5090 - val_acc: 0.7154\n",
            "Epoch 448/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4658 - acc: 0.7841 - val_loss: 0.5089 - val_acc: 0.7154\n",
            "Epoch 449/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4654 - acc: 0.7800 - val_loss: 0.5088 - val_acc: 0.7236\n",
            "Epoch 450/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4662 - acc: 0.7800 - val_loss: 0.5088 - val_acc: 0.7398\n",
            "Epoch 451/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4656 - acc: 0.7739 - val_loss: 0.5086 - val_acc: 0.7398\n",
            "Epoch 452/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4649 - acc: 0.7780 - val_loss: 0.5085 - val_acc: 0.7398\n",
            "Epoch 453/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4644 - acc: 0.7821 - val_loss: 0.5083 - val_acc: 0.7398\n",
            "Epoch 454/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4644 - acc: 0.7780 - val_loss: 0.5081 - val_acc: 0.7398\n",
            "Epoch 455/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4643 - acc: 0.7821 - val_loss: 0.5080 - val_acc: 0.7398\n",
            "Epoch 456/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4641 - acc: 0.7800 - val_loss: 0.5078 - val_acc: 0.7398\n",
            "Epoch 457/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4636 - acc: 0.7760 - val_loss: 0.5076 - val_acc: 0.7398\n",
            "Epoch 458/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4638 - acc: 0.7800 - val_loss: 0.5075 - val_acc: 0.7398\n",
            "Epoch 459/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4632 - acc: 0.7800 - val_loss: 0.5074 - val_acc: 0.7398\n",
            "Epoch 460/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4630 - acc: 0.7821 - val_loss: 0.5073 - val_acc: 0.7398\n",
            "Epoch 461/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4630 - acc: 0.7780 - val_loss: 0.5072 - val_acc: 0.7398\n",
            "Epoch 462/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4629 - acc: 0.7780 - val_loss: 0.5071 - val_acc: 0.7398\n",
            "Epoch 463/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4624 - acc: 0.7800 - val_loss: 0.5069 - val_acc: 0.7398\n",
            "Epoch 464/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4623 - acc: 0.7841 - val_loss: 0.5069 - val_acc: 0.7480\n",
            "Epoch 465/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4621 - acc: 0.7780 - val_loss: 0.5068 - val_acc: 0.7480\n",
            "Epoch 466/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4619 - acc: 0.7841 - val_loss: 0.5067 - val_acc: 0.7398\n",
            "Epoch 467/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4617 - acc: 0.7800 - val_loss: 0.5067 - val_acc: 0.7480\n",
            "Epoch 468/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4616 - acc: 0.7821 - val_loss: 0.5065 - val_acc: 0.7480\n",
            "Epoch 469/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4621 - acc: 0.7760 - val_loss: 0.5066 - val_acc: 0.7480\n",
            "Epoch 470/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4612 - acc: 0.7800 - val_loss: 0.5065 - val_acc: 0.7480\n",
            "Epoch 471/1000\n",
            "491/491 [==============================] - 0s 52us/step - loss: 0.4610 - acc: 0.7800 - val_loss: 0.5064 - val_acc: 0.7480\n",
            "Epoch 472/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4610 - acc: 0.7780 - val_loss: 0.5063 - val_acc: 0.7480\n",
            "Epoch 473/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4607 - acc: 0.7800 - val_loss: 0.5062 - val_acc: 0.7480\n",
            "Epoch 474/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4603 - acc: 0.7821 - val_loss: 0.5061 - val_acc: 0.7480\n",
            "Epoch 475/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4602 - acc: 0.7800 - val_loss: 0.5061 - val_acc: 0.7480\n",
            "Epoch 476/1000\n",
            "491/491 [==============================] - 0s 52us/step - loss: 0.4600 - acc: 0.7841 - val_loss: 0.5060 - val_acc: 0.7480\n",
            "Epoch 477/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4596 - acc: 0.7780 - val_loss: 0.5060 - val_acc: 0.7561\n",
            "Epoch 478/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4596 - acc: 0.7780 - val_loss: 0.5059 - val_acc: 0.7480\n",
            "Epoch 479/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4593 - acc: 0.7841 - val_loss: 0.5058 - val_acc: 0.7480\n",
            "Epoch 480/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4596 - acc: 0.7862 - val_loss: 0.5058 - val_acc: 0.7480\n",
            "Epoch 481/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4589 - acc: 0.7841 - val_loss: 0.5058 - val_acc: 0.7480\n",
            "Epoch 482/1000\n",
            "491/491 [==============================] - 0s 48us/step - loss: 0.4591 - acc: 0.7902 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 483/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4588 - acc: 0.7800 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 484/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4584 - acc: 0.7862 - val_loss: 0.5055 - val_acc: 0.7480\n",
            "Epoch 485/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4586 - acc: 0.7821 - val_loss: 0.5055 - val_acc: 0.7480\n",
            "Epoch 486/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4582 - acc: 0.7821 - val_loss: 0.5055 - val_acc: 0.7480\n",
            "Epoch 487/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4578 - acc: 0.7821 - val_loss: 0.5055 - val_acc: 0.7398\n",
            "Epoch 488/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4582 - acc: 0.7841 - val_loss: 0.5054 - val_acc: 0.7398\n",
            "Epoch 489/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4574 - acc: 0.7862 - val_loss: 0.5053 - val_acc: 0.7480\n",
            "Epoch 490/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4572 - acc: 0.7800 - val_loss: 0.5052 - val_acc: 0.7480\n",
            "Epoch 491/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4574 - acc: 0.7841 - val_loss: 0.5052 - val_acc: 0.7480\n",
            "Epoch 492/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4572 - acc: 0.7862 - val_loss: 0.5052 - val_acc: 0.7480\n",
            "Epoch 493/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4573 - acc: 0.7841 - val_loss: 0.5051 - val_acc: 0.7480\n",
            "Epoch 494/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4567 - acc: 0.7882 - val_loss: 0.5052 - val_acc: 0.7642\n",
            "Epoch 495/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4568 - acc: 0.7841 - val_loss: 0.5051 - val_acc: 0.7480\n",
            "Epoch 496/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4565 - acc: 0.7841 - val_loss: 0.5051 - val_acc: 0.7398\n",
            "Epoch 497/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4563 - acc: 0.7862 - val_loss: 0.5050 - val_acc: 0.7398\n",
            "Epoch 498/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4563 - acc: 0.7882 - val_loss: 0.5050 - val_acc: 0.7642\n",
            "Epoch 499/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4564 - acc: 0.7882 - val_loss: 0.5050 - val_acc: 0.7724\n",
            "Epoch 500/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4563 - acc: 0.7882 - val_loss: 0.5048 - val_acc: 0.7642\n",
            "Epoch 501/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4557 - acc: 0.7902 - val_loss: 0.5048 - val_acc: 0.7642\n",
            "Epoch 502/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4555 - acc: 0.7882 - val_loss: 0.5047 - val_acc: 0.7561\n",
            "Epoch 503/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4554 - acc: 0.7862 - val_loss: 0.5049 - val_acc: 0.7724\n",
            "Epoch 504/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4556 - acc: 0.7902 - val_loss: 0.5046 - val_acc: 0.7398\n",
            "Epoch 505/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4559 - acc: 0.7923 - val_loss: 0.5050 - val_acc: 0.7317\n",
            "Epoch 506/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4564 - acc: 0.7882 - val_loss: 0.5049 - val_acc: 0.7317\n",
            "Epoch 507/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4552 - acc: 0.7862 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 508/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4547 - acc: 0.7923 - val_loss: 0.5043 - val_acc: 0.7642\n",
            "Epoch 509/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4544 - acc: 0.7923 - val_loss: 0.5042 - val_acc: 0.7561\n",
            "Epoch 510/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4548 - acc: 0.7902 - val_loss: 0.5042 - val_acc: 0.7317\n",
            "Epoch 511/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4540 - acc: 0.7923 - val_loss: 0.5041 - val_acc: 0.7724\n",
            "Epoch 512/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4550 - acc: 0.7902 - val_loss: 0.5041 - val_acc: 0.7724\n",
            "Epoch 513/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4544 - acc: 0.7923 - val_loss: 0.5039 - val_acc: 0.7561\n",
            "Epoch 514/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4540 - acc: 0.7923 - val_loss: 0.5038 - val_acc: 0.7561\n",
            "Epoch 515/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4540 - acc: 0.7923 - val_loss: 0.5038 - val_acc: 0.7724\n",
            "Epoch 516/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4542 - acc: 0.7902 - val_loss: 0.5036 - val_acc: 0.7642\n",
            "Epoch 517/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4543 - acc: 0.7943 - val_loss: 0.5040 - val_acc: 0.7398\n",
            "Epoch 518/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4533 - acc: 0.7923 - val_loss: 0.5037 - val_acc: 0.7317\n",
            "Epoch 519/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4535 - acc: 0.7943 - val_loss: 0.5035 - val_acc: 0.7561\n",
            "Epoch 520/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4533 - acc: 0.7923 - val_loss: 0.5034 - val_acc: 0.7642\n",
            "Epoch 521/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4532 - acc: 0.7984 - val_loss: 0.5033 - val_acc: 0.7480\n",
            "Epoch 522/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4527 - acc: 0.7943 - val_loss: 0.5034 - val_acc: 0.7642\n",
            "Epoch 523/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4528 - acc: 0.7923 - val_loss: 0.5034 - val_acc: 0.7642\n",
            "Epoch 524/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4525 - acc: 0.7923 - val_loss: 0.5033 - val_acc: 0.7724\n",
            "Epoch 525/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4525 - acc: 0.7963 - val_loss: 0.5032 - val_acc: 0.7480\n",
            "Epoch 526/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4522 - acc: 0.7943 - val_loss: 0.5031 - val_acc: 0.7480\n",
            "Epoch 527/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4517 - acc: 0.7943 - val_loss: 0.5031 - val_acc: 0.7642\n",
            "Epoch 528/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4524 - acc: 0.7943 - val_loss: 0.5030 - val_acc: 0.7642\n",
            "Epoch 529/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4518 - acc: 0.7923 - val_loss: 0.5030 - val_acc: 0.7480\n",
            "Epoch 530/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4513 - acc: 0.7963 - val_loss: 0.5030 - val_acc: 0.7480\n",
            "Epoch 531/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4516 - acc: 0.7984 - val_loss: 0.5029 - val_acc: 0.7480\n",
            "Epoch 532/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4522 - acc: 0.7963 - val_loss: 0.5028 - val_acc: 0.7480\n",
            "Epoch 533/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4508 - acc: 0.7963 - val_loss: 0.5027 - val_acc: 0.7561\n",
            "Epoch 534/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4510 - acc: 0.7963 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 535/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4515 - acc: 0.7923 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 536/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4509 - acc: 0.7984 - val_loss: 0.5027 - val_acc: 0.7561\n",
            "Epoch 537/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4504 - acc: 0.7943 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 538/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4503 - acc: 0.7984 - val_loss: 0.5024 - val_acc: 0.7561\n",
            "Epoch 539/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4508 - acc: 0.7943 - val_loss: 0.5026 - val_acc: 0.7561\n",
            "Epoch 540/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4500 - acc: 0.7943 - val_loss: 0.5024 - val_acc: 0.7480\n",
            "Epoch 541/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4496 - acc: 0.7963 - val_loss: 0.5023 - val_acc: 0.7480\n",
            "Epoch 542/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4501 - acc: 0.7943 - val_loss: 0.5023 - val_acc: 0.7561\n",
            "Epoch 543/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4498 - acc: 0.7963 - val_loss: 0.5025 - val_acc: 0.7561\n",
            "Epoch 544/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4503 - acc: 0.7984 - val_loss: 0.5024 - val_acc: 0.7561\n",
            "Epoch 545/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4497 - acc: 0.8004 - val_loss: 0.5024 - val_acc: 0.7480\n",
            "Epoch 546/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4501 - acc: 0.7963 - val_loss: 0.5023 - val_acc: 0.7561\n",
            "Epoch 547/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4491 - acc: 0.7984 - val_loss: 0.5024 - val_acc: 0.7724\n",
            "Epoch 548/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4491 - acc: 0.7943 - val_loss: 0.5025 - val_acc: 0.7724\n",
            "Epoch 549/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4493 - acc: 0.7943 - val_loss: 0.5024 - val_acc: 0.7642\n",
            "Epoch 550/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4492 - acc: 0.7943 - val_loss: 0.5022 - val_acc: 0.7480\n",
            "Epoch 551/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4486 - acc: 0.7984 - val_loss: 0.5022 - val_acc: 0.7480\n",
            "Epoch 552/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4487 - acc: 0.7963 - val_loss: 0.5024 - val_acc: 0.7561\n",
            "Epoch 553/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4486 - acc: 0.7963 - val_loss: 0.5023 - val_acc: 0.7480\n",
            "Epoch 554/1000\n",
            "491/491 [==============================] - 0s 30us/step - loss: 0.4485 - acc: 0.7943 - val_loss: 0.5024 - val_acc: 0.7561\n",
            "Epoch 555/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4481 - acc: 0.7963 - val_loss: 0.5023 - val_acc: 0.7480\n",
            "Epoch 556/1000\n",
            "491/491 [==============================] - 0s 30us/step - loss: 0.4485 - acc: 0.7963 - val_loss: 0.5025 - val_acc: 0.7561\n",
            "Epoch 557/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4490 - acc: 0.7984 - val_loss: 0.5027 - val_acc: 0.7724\n",
            "Epoch 558/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4482 - acc: 0.7963 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 559/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4481 - acc: 0.7963 - val_loss: 0.5026 - val_acc: 0.7642\n",
            "Epoch 560/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4477 - acc: 0.7984 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 561/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4482 - acc: 0.7984 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 562/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4478 - acc: 0.7984 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 563/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4474 - acc: 0.7963 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 564/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4474 - acc: 0.7984 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 565/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4470 - acc: 0.7984 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 566/1000\n",
            "491/491 [==============================] - 0s 50us/step - loss: 0.4476 - acc: 0.7943 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 567/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4468 - acc: 0.7984 - val_loss: 0.5024 - val_acc: 0.7480\n",
            "Epoch 568/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4467 - acc: 0.7984 - val_loss: 0.5024 - val_acc: 0.7480\n",
            "Epoch 569/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4472 - acc: 0.8024 - val_loss: 0.5028 - val_acc: 0.7642\n",
            "Epoch 570/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4470 - acc: 0.7963 - val_loss: 0.5024 - val_acc: 0.7480\n",
            "Epoch 571/1000\n",
            "491/491 [==============================] - 0s 48us/step - loss: 0.4481 - acc: 0.7984 - val_loss: 0.5024 - val_acc: 0.7480\n",
            "Epoch 572/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4466 - acc: 0.8004 - val_loss: 0.5028 - val_acc: 0.7642\n",
            "Epoch 573/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4466 - acc: 0.7943 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 574/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4465 - acc: 0.7984 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 575/1000\n",
            "491/491 [==============================] - 0s 50us/step - loss: 0.4471 - acc: 0.7963 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 576/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4471 - acc: 0.7923 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 577/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4463 - acc: 0.7963 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 578/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4462 - acc: 0.8004 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 579/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4458 - acc: 0.8004 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 580/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4463 - acc: 0.8024 - val_loss: 0.5028 - val_acc: 0.7561\n",
            "Epoch 581/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4461 - acc: 0.7984 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 582/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4456 - acc: 0.7984 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 583/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4462 - acc: 0.7963 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 584/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4460 - acc: 0.7963 - val_loss: 0.5029 - val_acc: 0.7724\n",
            "Epoch 585/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4455 - acc: 0.7984 - val_loss: 0.5029 - val_acc: 0.7724\n",
            "Epoch 586/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4458 - acc: 0.7943 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 587/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4451 - acc: 0.7984 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 588/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4454 - acc: 0.8004 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 589/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4450 - acc: 0.7984 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 590/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4454 - acc: 0.7943 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 591/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4450 - acc: 0.8004 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 592/1000\n",
            "491/491 [==============================] - 0s 30us/step - loss: 0.4449 - acc: 0.7984 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 593/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4457 - acc: 0.7984 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 594/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4451 - acc: 0.8024 - val_loss: 0.5025 - val_acc: 0.7480\n",
            "Epoch 595/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4447 - acc: 0.8024 - val_loss: 0.5028 - val_acc: 0.7480\n",
            "Epoch 596/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4449 - acc: 0.8004 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 597/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4446 - acc: 0.8024 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 598/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4443 - acc: 0.8004 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 599/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4455 - acc: 0.7984 - val_loss: 0.5028 - val_acc: 0.7642\n",
            "Epoch 600/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4446 - acc: 0.8004 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 601/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4442 - acc: 0.8024 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 602/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4444 - acc: 0.7963 - val_loss: 0.5026 - val_acc: 0.7480\n",
            "Epoch 603/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4441 - acc: 0.7984 - val_loss: 0.5029 - val_acc: 0.7480\n",
            "Epoch 604/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4444 - acc: 0.7984 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 605/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4442 - acc: 0.7963 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 606/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4443 - acc: 0.8024 - val_loss: 0.5028 - val_acc: 0.7480\n",
            "Epoch 607/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4443 - acc: 0.7984 - val_loss: 0.5029 - val_acc: 0.7480\n",
            "Epoch 608/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4433 - acc: 0.8004 - val_loss: 0.5027 - val_acc: 0.7480\n",
            "Epoch 609/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4447 - acc: 0.7943 - val_loss: 0.5030 - val_acc: 0.7480\n",
            "Epoch 610/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4434 - acc: 0.8024 - val_loss: 0.5029 - val_acc: 0.7480\n",
            "Epoch 611/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4440 - acc: 0.8004 - val_loss: 0.5027 - val_acc: 0.7398\n",
            "Epoch 612/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4440 - acc: 0.7984 - val_loss: 0.5028 - val_acc: 0.7480\n",
            "Epoch 613/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4436 - acc: 0.8024 - val_loss: 0.5028 - val_acc: 0.7480\n",
            "Epoch 614/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4443 - acc: 0.7984 - val_loss: 0.5034 - val_acc: 0.7724\n",
            "Epoch 615/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4431 - acc: 0.8004 - val_loss: 0.5028 - val_acc: 0.7480\n",
            "Epoch 616/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4430 - acc: 0.8024 - val_loss: 0.5033 - val_acc: 0.7642\n",
            "Epoch 617/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4431 - acc: 0.8004 - val_loss: 0.5031 - val_acc: 0.7480\n",
            "Epoch 618/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4430 - acc: 0.7963 - val_loss: 0.5032 - val_acc: 0.7561\n",
            "Epoch 619/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4429 - acc: 0.8024 - val_loss: 0.5032 - val_acc: 0.7561\n",
            "Epoch 620/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4427 - acc: 0.7963 - val_loss: 0.5029 - val_acc: 0.7398\n",
            "Epoch 621/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4427 - acc: 0.8004 - val_loss: 0.5034 - val_acc: 0.7642\n",
            "Epoch 622/1000\n",
            "491/491 [==============================] - 0s 48us/step - loss: 0.4430 - acc: 0.7923 - val_loss: 0.5030 - val_acc: 0.7561\n",
            "Epoch 623/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4428 - acc: 0.8004 - val_loss: 0.5030 - val_acc: 0.7480\n",
            "Epoch 624/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4434 - acc: 0.8024 - val_loss: 0.5030 - val_acc: 0.7398\n",
            "Epoch 625/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4421 - acc: 0.8004 - val_loss: 0.5032 - val_acc: 0.7561\n",
            "Epoch 626/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4428 - acc: 0.8004 - val_loss: 0.5032 - val_acc: 0.7480\n",
            "Epoch 627/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4421 - acc: 0.8024 - val_loss: 0.5037 - val_acc: 0.7724\n",
            "Epoch 628/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4421 - acc: 0.8045 - val_loss: 0.5032 - val_acc: 0.7480\n",
            "Epoch 629/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4427 - acc: 0.8004 - val_loss: 0.5034 - val_acc: 0.7561\n",
            "Epoch 630/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.4423 - acc: 0.7984 - val_loss: 0.5032 - val_acc: 0.7480\n",
            "Epoch 631/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4423 - acc: 0.8004 - val_loss: 0.5031 - val_acc: 0.7398\n",
            "Epoch 632/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4427 - acc: 0.8004 - val_loss: 0.5031 - val_acc: 0.7398\n",
            "Epoch 633/1000\n",
            "491/491 [==============================] - 0s 30us/step - loss: 0.4422 - acc: 0.8024 - val_loss: 0.5036 - val_acc: 0.7642\n",
            "Epoch 634/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4421 - acc: 0.7984 - val_loss: 0.5033 - val_acc: 0.7480\n",
            "Epoch 635/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4416 - acc: 0.8045 - val_loss: 0.5032 - val_acc: 0.7398\n",
            "Epoch 636/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4413 - acc: 0.8024 - val_loss: 0.5032 - val_acc: 0.7398\n",
            "Epoch 637/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4414 - acc: 0.8024 - val_loss: 0.5033 - val_acc: 0.7561\n",
            "Epoch 638/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4421 - acc: 0.7984 - val_loss: 0.5032 - val_acc: 0.7561\n",
            "Epoch 639/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4417 - acc: 0.8004 - val_loss: 0.5032 - val_acc: 0.7398\n",
            "Epoch 640/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4425 - acc: 0.8004 - val_loss: 0.5035 - val_acc: 0.7642\n",
            "Epoch 641/1000\n",
            "491/491 [==============================] - 0s 50us/step - loss: 0.4414 - acc: 0.8004 - val_loss: 0.5036 - val_acc: 0.7561\n",
            "Epoch 642/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4420 - acc: 0.8004 - val_loss: 0.5042 - val_acc: 0.7724\n",
            "Epoch 643/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4417 - acc: 0.7943 - val_loss: 0.5034 - val_acc: 0.7561\n",
            "Epoch 644/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4417 - acc: 0.8004 - val_loss: 0.5033 - val_acc: 0.7398\n",
            "Epoch 645/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4415 - acc: 0.8004 - val_loss: 0.5033 - val_acc: 0.7561\n",
            "Epoch 646/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4411 - acc: 0.8004 - val_loss: 0.5032 - val_acc: 0.7480\n",
            "Epoch 647/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4408 - acc: 0.7984 - val_loss: 0.5034 - val_acc: 0.7480\n",
            "Epoch 648/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4415 - acc: 0.7984 - val_loss: 0.5032 - val_acc: 0.7398\n",
            "Epoch 649/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4412 - acc: 0.8024 - val_loss: 0.5032 - val_acc: 0.7480\n",
            "Epoch 650/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4410 - acc: 0.7984 - val_loss: 0.5032 - val_acc: 0.7398\n",
            "Epoch 651/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4421 - acc: 0.7984 - val_loss: 0.5033 - val_acc: 0.7480\n",
            "Epoch 652/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4419 - acc: 0.8004 - val_loss: 0.5034 - val_acc: 0.7480\n",
            "Epoch 653/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4410 - acc: 0.7963 - val_loss: 0.5033 - val_acc: 0.7480\n",
            "Epoch 654/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4429 - acc: 0.7923 - val_loss: 0.5034 - val_acc: 0.7480\n",
            "Epoch 655/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4413 - acc: 0.8004 - val_loss: 0.5033 - val_acc: 0.7480\n",
            "Epoch 656/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4405 - acc: 0.8024 - val_loss: 0.5035 - val_acc: 0.7480\n",
            "Epoch 657/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4404 - acc: 0.8065 - val_loss: 0.5034 - val_acc: 0.7480\n",
            "Epoch 658/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4407 - acc: 0.8024 - val_loss: 0.5042 - val_acc: 0.7724\n",
            "Epoch 659/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4411 - acc: 0.7963 - val_loss: 0.5037 - val_acc: 0.7642\n",
            "Epoch 660/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4406 - acc: 0.8024 - val_loss: 0.5033 - val_acc: 0.7398\n",
            "Epoch 661/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4400 - acc: 0.8004 - val_loss: 0.5035 - val_acc: 0.7480\n",
            "Epoch 662/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4402 - acc: 0.8024 - val_loss: 0.5034 - val_acc: 0.7480\n",
            "Epoch 663/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4403 - acc: 0.8004 - val_loss: 0.5040 - val_acc: 0.7724\n",
            "Epoch 664/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4401 - acc: 0.7984 - val_loss: 0.5037 - val_acc: 0.7642\n",
            "Epoch 665/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4409 - acc: 0.8004 - val_loss: 0.5036 - val_acc: 0.7480\n",
            "Epoch 666/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4404 - acc: 0.8004 - val_loss: 0.5035 - val_acc: 0.7480\n",
            "Epoch 667/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4397 - acc: 0.7984 - val_loss: 0.5042 - val_acc: 0.7724\n",
            "Epoch 668/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.4402 - acc: 0.7963 - val_loss: 0.5035 - val_acc: 0.7480\n",
            "Epoch 669/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4407 - acc: 0.7984 - val_loss: 0.5036 - val_acc: 0.7480\n",
            "Epoch 670/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4399 - acc: 0.8024 - val_loss: 0.5040 - val_acc: 0.7642\n",
            "Epoch 671/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4400 - acc: 0.7963 - val_loss: 0.5036 - val_acc: 0.7480\n",
            "Epoch 672/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4400 - acc: 0.8024 - val_loss: 0.5045 - val_acc: 0.7724\n",
            "Epoch 673/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4400 - acc: 0.7963 - val_loss: 0.5041 - val_acc: 0.7724\n",
            "Epoch 674/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4405 - acc: 0.7984 - val_loss: 0.5034 - val_acc: 0.7480\n",
            "Epoch 675/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4393 - acc: 0.7984 - val_loss: 0.5035 - val_acc: 0.7480\n",
            "Epoch 676/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4392 - acc: 0.8004 - val_loss: 0.5037 - val_acc: 0.7480\n",
            "Epoch 677/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4394 - acc: 0.8024 - val_loss: 0.5039 - val_acc: 0.7561\n",
            "Epoch 678/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4403 - acc: 0.7984 - val_loss: 0.5036 - val_acc: 0.7480\n",
            "Epoch 679/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4394 - acc: 0.8004 - val_loss: 0.5038 - val_acc: 0.7561\n",
            "Epoch 680/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4395 - acc: 0.8004 - val_loss: 0.5037 - val_acc: 0.7480\n",
            "Epoch 681/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4397 - acc: 0.8004 - val_loss: 0.5035 - val_acc: 0.7561\n",
            "Epoch 682/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4402 - acc: 0.8024 - val_loss: 0.5036 - val_acc: 0.7480\n",
            "Epoch 683/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4394 - acc: 0.8004 - val_loss: 0.5040 - val_acc: 0.7561\n",
            "Epoch 684/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4391 - acc: 0.8004 - val_loss: 0.5042 - val_acc: 0.7561\n",
            "Epoch 685/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4399 - acc: 0.7984 - val_loss: 0.5038 - val_acc: 0.7480\n",
            "Epoch 686/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4395 - acc: 0.8004 - val_loss: 0.5036 - val_acc: 0.7480\n",
            "Epoch 687/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4392 - acc: 0.8004 - val_loss: 0.5037 - val_acc: 0.7480\n",
            "Epoch 688/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4392 - acc: 0.7984 - val_loss: 0.5036 - val_acc: 0.7480\n",
            "Epoch 689/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4395 - acc: 0.8024 - val_loss: 0.5038 - val_acc: 0.7480\n",
            "Epoch 690/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4397 - acc: 0.7984 - val_loss: 0.5036 - val_acc: 0.7561\n",
            "Epoch 691/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4388 - acc: 0.8004 - val_loss: 0.5037 - val_acc: 0.7480\n",
            "Epoch 692/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4387 - acc: 0.7943 - val_loss: 0.5039 - val_acc: 0.7480\n",
            "Epoch 693/1000\n",
            "491/491 [==============================] - 0s 50us/step - loss: 0.4394 - acc: 0.8004 - val_loss: 0.5039 - val_acc: 0.7480\n",
            "Epoch 694/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4394 - acc: 0.8004 - val_loss: 0.5040 - val_acc: 0.7561\n",
            "Epoch 695/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4387 - acc: 0.7984 - val_loss: 0.5040 - val_acc: 0.7480\n",
            "Epoch 696/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4387 - acc: 0.7984 - val_loss: 0.5041 - val_acc: 0.7561\n",
            "Epoch 697/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4381 - acc: 0.7984 - val_loss: 0.5038 - val_acc: 0.7480\n",
            "Epoch 698/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4389 - acc: 0.8004 - val_loss: 0.5037 - val_acc: 0.7561\n",
            "Epoch 699/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4384 - acc: 0.7984 - val_loss: 0.5040 - val_acc: 0.7561\n",
            "Epoch 700/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4387 - acc: 0.7984 - val_loss: 0.5039 - val_acc: 0.7480\n",
            "Epoch 701/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4382 - acc: 0.8004 - val_loss: 0.5038 - val_acc: 0.7561\n",
            "Epoch 702/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4383 - acc: 0.8024 - val_loss: 0.5040 - val_acc: 0.7480\n",
            "Epoch 703/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4386 - acc: 0.7984 - val_loss: 0.5042 - val_acc: 0.7561\n",
            "Epoch 704/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4389 - acc: 0.8004 - val_loss: 0.5040 - val_acc: 0.7480\n",
            "Epoch 705/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4381 - acc: 0.7984 - val_loss: 0.5042 - val_acc: 0.7561\n",
            "Epoch 706/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4380 - acc: 0.7984 - val_loss: 0.5042 - val_acc: 0.7561\n",
            "Epoch 707/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4377 - acc: 0.8024 - val_loss: 0.5043 - val_acc: 0.7561\n",
            "Epoch 708/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4380 - acc: 0.7984 - val_loss: 0.5046 - val_acc: 0.7642\n",
            "Epoch 709/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4378 - acc: 0.7963 - val_loss: 0.5040 - val_acc: 0.7480\n",
            "Epoch 710/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4394 - acc: 0.8024 - val_loss: 0.5041 - val_acc: 0.7561\n",
            "Epoch 711/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4378 - acc: 0.8004 - val_loss: 0.5045 - val_acc: 0.7561\n",
            "Epoch 712/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4384 - acc: 0.7963 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 713/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4382 - acc: 0.7943 - val_loss: 0.5040 - val_acc: 0.7561\n",
            "Epoch 714/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4376 - acc: 0.8004 - val_loss: 0.5041 - val_acc: 0.7480\n",
            "Epoch 715/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4376 - acc: 0.8004 - val_loss: 0.5045 - val_acc: 0.7561\n",
            "Epoch 716/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4378 - acc: 0.8004 - val_loss: 0.5042 - val_acc: 0.7480\n",
            "Epoch 717/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4371 - acc: 0.7963 - val_loss: 0.5041 - val_acc: 0.7561\n",
            "Epoch 718/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4370 - acc: 0.8024 - val_loss: 0.5042 - val_acc: 0.7480\n",
            "Epoch 719/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4370 - acc: 0.7984 - val_loss: 0.5041 - val_acc: 0.7561\n",
            "Epoch 720/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4371 - acc: 0.8024 - val_loss: 0.5046 - val_acc: 0.7642\n",
            "Epoch 721/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4374 - acc: 0.7963 - val_loss: 0.5041 - val_acc: 0.7480\n",
            "Epoch 722/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4372 - acc: 0.7984 - val_loss: 0.5042 - val_acc: 0.7480\n",
            "Epoch 723/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4376 - acc: 0.7963 - val_loss: 0.5042 - val_acc: 0.7480\n",
            "Epoch 724/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4368 - acc: 0.8004 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 725/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4369 - acc: 0.7984 - val_loss: 0.5043 - val_acc: 0.7480\n",
            "Epoch 726/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4375 - acc: 0.8024 - val_loss: 0.5042 - val_acc: 0.7561\n",
            "Epoch 727/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4394 - acc: 0.7984 - val_loss: 0.5043 - val_acc: 0.7480\n",
            "Epoch 728/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4375 - acc: 0.8024 - val_loss: 0.5051 - val_acc: 0.7642\n",
            "Epoch 729/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4368 - acc: 0.7963 - val_loss: 0.5044 - val_acc: 0.7480\n",
            "Epoch 730/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4366 - acc: 0.7984 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 731/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4372 - acc: 0.8004 - val_loss: 0.5043 - val_acc: 0.7561\n",
            "Epoch 732/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4369 - acc: 0.8004 - val_loss: 0.5042 - val_acc: 0.7561\n",
            "Epoch 733/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4376 - acc: 0.8004 - val_loss: 0.5045 - val_acc: 0.7642\n",
            "Epoch 734/1000\n",
            "491/491 [==============================] - 0s 52us/step - loss: 0.4373 - acc: 0.7984 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 735/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4368 - acc: 0.7963 - val_loss: 0.5048 - val_acc: 0.7642\n",
            "Epoch 736/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4375 - acc: 0.8024 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 737/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4369 - acc: 0.8024 - val_loss: 0.5044 - val_acc: 0.7480\n",
            "Epoch 738/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4370 - acc: 0.7963 - val_loss: 0.5051 - val_acc: 0.7642\n",
            "Epoch 739/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4368 - acc: 0.7963 - val_loss: 0.5044 - val_acc: 0.7642\n",
            "Epoch 740/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4365 - acc: 0.8004 - val_loss: 0.5046 - val_acc: 0.7561\n",
            "Epoch 741/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4364 - acc: 0.7984 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 742/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4362 - acc: 0.8004 - val_loss: 0.5044 - val_acc: 0.7480\n",
            "Epoch 743/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4365 - acc: 0.7984 - val_loss: 0.5043 - val_acc: 0.7561\n",
            "Epoch 744/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4362 - acc: 0.8004 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 745/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4366 - acc: 0.7963 - val_loss: 0.5049 - val_acc: 0.7561\n",
            "Epoch 746/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4362 - acc: 0.8004 - val_loss: 0.5045 - val_acc: 0.7480\n",
            "Epoch 747/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4364 - acc: 0.7984 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 748/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4363 - acc: 0.7984 - val_loss: 0.5045 - val_acc: 0.7480\n",
            "Epoch 749/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4362 - acc: 0.7984 - val_loss: 0.5045 - val_acc: 0.7642\n",
            "Epoch 750/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4367 - acc: 0.8004 - val_loss: 0.5044 - val_acc: 0.7561\n",
            "Epoch 751/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4360 - acc: 0.7984 - val_loss: 0.5045 - val_acc: 0.7642\n",
            "Epoch 752/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4364 - acc: 0.7963 - val_loss: 0.5046 - val_acc: 0.7642\n",
            "Epoch 753/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4362 - acc: 0.7984 - val_loss: 0.5045 - val_acc: 0.7642\n",
            "Epoch 754/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4375 - acc: 0.7963 - val_loss: 0.5046 - val_acc: 0.7480\n",
            "Epoch 755/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4359 - acc: 0.8004 - val_loss: 0.5046 - val_acc: 0.7642\n",
            "Epoch 756/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4358 - acc: 0.8004 - val_loss: 0.5045 - val_acc: 0.7561\n",
            "Epoch 757/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4355 - acc: 0.8004 - val_loss: 0.5048 - val_acc: 0.7561\n",
            "Epoch 758/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4358 - acc: 0.7984 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 759/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4361 - acc: 0.7963 - val_loss: 0.5049 - val_acc: 0.7561\n",
            "Epoch 760/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4360 - acc: 0.8004 - val_loss: 0.5047 - val_acc: 0.7480\n",
            "Epoch 761/1000\n",
            "491/491 [==============================] - 0s 78us/step - loss: 0.4358 - acc: 0.7984 - val_loss: 0.5049 - val_acc: 0.7642\n",
            "Epoch 762/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4367 - acc: 0.8004 - val_loss: 0.5047 - val_acc: 0.7642\n",
            "Epoch 763/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4360 - acc: 0.7984 - val_loss: 0.5047 - val_acc: 0.7561\n",
            "Epoch 764/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4358 - acc: 0.7984 - val_loss: 0.5047 - val_acc: 0.7480\n",
            "Epoch 765/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4356 - acc: 0.7984 - val_loss: 0.5046 - val_acc: 0.7561\n",
            "Epoch 766/1000\n",
            "491/491 [==============================] - 0s 55us/step - loss: 0.4354 - acc: 0.8004 - val_loss: 0.5049 - val_acc: 0.7561\n",
            "Epoch 767/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4363 - acc: 0.8004 - val_loss: 0.5056 - val_acc: 0.7642\n",
            "Epoch 768/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4358 - acc: 0.7963 - val_loss: 0.5046 - val_acc: 0.7642\n",
            "Epoch 769/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4363 - acc: 0.7984 - val_loss: 0.5047 - val_acc: 0.7561\n",
            "Epoch 770/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4355 - acc: 0.7963 - val_loss: 0.5049 - val_acc: 0.7642\n",
            "Epoch 771/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4355 - acc: 0.8004 - val_loss: 0.5046 - val_acc: 0.7642\n",
            "Epoch 772/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4355 - acc: 0.8004 - val_loss: 0.5047 - val_acc: 0.7480\n",
            "Epoch 773/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.4354 - acc: 0.7984 - val_loss: 0.5047 - val_acc: 0.7480\n",
            "Epoch 774/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4349 - acc: 0.8004 - val_loss: 0.5049 - val_acc: 0.7561\n",
            "Epoch 775/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4351 - acc: 0.7984 - val_loss: 0.5048 - val_acc: 0.7480\n",
            "Epoch 776/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4366 - acc: 0.7984 - val_loss: 0.5048 - val_acc: 0.7642\n",
            "Epoch 777/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4353 - acc: 0.7984 - val_loss: 0.5048 - val_acc: 0.7642\n",
            "Epoch 778/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4352 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 779/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4348 - acc: 0.7963 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 780/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4348 - acc: 0.7984 - val_loss: 0.5049 - val_acc: 0.7561\n",
            "Epoch 781/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4353 - acc: 0.8004 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 782/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4357 - acc: 0.7943 - val_loss: 0.5049 - val_acc: 0.7480\n",
            "Epoch 783/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4348 - acc: 0.7984 - val_loss: 0.5050 - val_acc: 0.7480\n",
            "Epoch 784/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4348 - acc: 0.8004 - val_loss: 0.5049 - val_acc: 0.7480\n",
            "Epoch 785/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4357 - acc: 0.8004 - val_loss: 0.5051 - val_acc: 0.7561\n",
            "Epoch 786/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4350 - acc: 0.7984 - val_loss: 0.5049 - val_acc: 0.7480\n",
            "Epoch 787/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4353 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 788/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4346 - acc: 0.8004 - val_loss: 0.5053 - val_acc: 0.7561\n",
            "Epoch 789/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4346 - acc: 0.7984 - val_loss: 0.5051 - val_acc: 0.7480\n",
            "Epoch 790/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4349 - acc: 0.7984 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 791/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4353 - acc: 0.7963 - val_loss: 0.5051 - val_acc: 0.7480\n",
            "Epoch 792/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4348 - acc: 0.8004 - val_loss: 0.5055 - val_acc: 0.7561\n",
            "Epoch 793/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4360 - acc: 0.7984 - val_loss: 0.5062 - val_acc: 0.7642\n",
            "Epoch 794/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4358 - acc: 0.7984 - val_loss: 0.5051 - val_acc: 0.7480\n",
            "Epoch 795/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4348 - acc: 0.7984 - val_loss: 0.5050 - val_acc: 0.7480\n",
            "Epoch 796/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4343 - acc: 0.7943 - val_loss: 0.5053 - val_acc: 0.7561\n",
            "Epoch 797/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4342 - acc: 0.7984 - val_loss: 0.5052 - val_acc: 0.7480\n",
            "Epoch 798/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4349 - acc: 0.8024 - val_loss: 0.5049 - val_acc: 0.7561\n",
            "Epoch 799/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4344 - acc: 0.7963 - val_loss: 0.5049 - val_acc: 0.7642\n",
            "Epoch 800/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4347 - acc: 0.7943 - val_loss: 0.5049 - val_acc: 0.7642\n",
            "Epoch 801/1000\n",
            "491/491 [==============================] - 0s 32us/step - loss: 0.4345 - acc: 0.8004 - val_loss: 0.5049 - val_acc: 0.7642\n",
            "Epoch 802/1000\n",
            "491/491 [==============================] - 0s 48us/step - loss: 0.4347 - acc: 0.7984 - val_loss: 0.5052 - val_acc: 0.7480\n",
            "Epoch 803/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4345 - acc: 0.7984 - val_loss: 0.5049 - val_acc: 0.7642\n",
            "Epoch 804/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4347 - acc: 0.7963 - val_loss: 0.5060 - val_acc: 0.7642\n",
            "Epoch 805/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4344 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7480\n",
            "Epoch 806/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4349 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7642\n",
            "Epoch 807/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4345 - acc: 0.8004 - val_loss: 0.5051 - val_acc: 0.7480\n",
            "Epoch 808/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4342 - acc: 0.7984 - val_loss: 0.5053 - val_acc: 0.7480\n",
            "Epoch 809/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4340 - acc: 0.8004 - val_loss: 0.5053 - val_acc: 0.7480\n",
            "Epoch 810/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4350 - acc: 0.7984 - val_loss: 0.5049 - val_acc: 0.7561\n",
            "Epoch 811/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4342 - acc: 0.7984 - val_loss: 0.5049 - val_acc: 0.7642\n",
            "Epoch 812/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4340 - acc: 0.7984 - val_loss: 0.5049 - val_acc: 0.7642\n",
            "Epoch 813/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4336 - acc: 0.8004 - val_loss: 0.5050 - val_acc: 0.7642\n",
            "Epoch 814/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4341 - acc: 0.7984 - val_loss: 0.5051 - val_acc: 0.7480\n",
            "Epoch 815/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4339 - acc: 0.8004 - val_loss: 0.5051 - val_acc: 0.7642\n",
            "Epoch 816/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4344 - acc: 0.7963 - val_loss: 0.5051 - val_acc: 0.7642\n",
            "Epoch 817/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4340 - acc: 0.8004 - val_loss: 0.5051 - val_acc: 0.7642\n",
            "Epoch 818/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4338 - acc: 0.8004 - val_loss: 0.5051 - val_acc: 0.7561\n",
            "Epoch 819/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4340 - acc: 0.7984 - val_loss: 0.5051 - val_acc: 0.7561\n",
            "Epoch 820/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4342 - acc: 0.7943 - val_loss: 0.5050 - val_acc: 0.7642\n",
            "Epoch 821/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4338 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7724\n",
            "Epoch 822/1000\n",
            "491/491 [==============================] - 0s 50us/step - loss: 0.4351 - acc: 0.7923 - val_loss: 0.5051 - val_acc: 0.7642\n",
            "Epoch 823/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4337 - acc: 0.8004 - val_loss: 0.5054 - val_acc: 0.7480\n",
            "Epoch 824/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4345 - acc: 0.7963 - val_loss: 0.5052 - val_acc: 0.7480\n",
            "Epoch 825/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4334 - acc: 0.8004 - val_loss: 0.5055 - val_acc: 0.7480\n",
            "Epoch 826/1000\n",
            "491/491 [==============================] - 0s 51us/step - loss: 0.4339 - acc: 0.7984 - val_loss: 0.5051 - val_acc: 0.7480\n",
            "Epoch 827/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4339 - acc: 0.7963 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 828/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4340 - acc: 0.7943 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 829/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4339 - acc: 0.7943 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 830/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4336 - acc: 0.7963 - val_loss: 0.5058 - val_acc: 0.7561\n",
            "Epoch 831/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4334 - acc: 0.7963 - val_loss: 0.5060 - val_acc: 0.7642\n",
            "Epoch 832/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4346 - acc: 0.8004 - val_loss: 0.5053 - val_acc: 0.7480\n",
            "Epoch 833/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4335 - acc: 0.7963 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 834/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4339 - acc: 0.7963 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 835/1000\n",
            "491/491 [==============================] - 0s 51us/step - loss: 0.4338 - acc: 0.7963 - val_loss: 0.5052 - val_acc: 0.7724\n",
            "Epoch 836/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4338 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 837/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.4337 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7480\n",
            "Epoch 838/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4333 - acc: 0.7943 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 839/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4342 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7480\n",
            "Epoch 840/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4333 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7480\n",
            "Epoch 841/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4341 - acc: 0.7963 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 842/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4340 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7561\n",
            "Epoch 843/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4334 - acc: 0.8004 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 844/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4327 - acc: 0.8004 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 845/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4340 - acc: 0.7984 - val_loss: 0.5053 - val_acc: 0.7561\n",
            "Epoch 846/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4341 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7480\n",
            "Epoch 847/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4328 - acc: 0.8004 - val_loss: 0.5051 - val_acc: 0.7561\n",
            "Epoch 848/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4330 - acc: 0.8004 - val_loss: 0.5051 - val_acc: 0.7561\n",
            "Epoch 849/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4336 - acc: 0.7984 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 850/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4335 - acc: 0.7984 - val_loss: 0.5051 - val_acc: 0.7642\n",
            "Epoch 851/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4333 - acc: 0.7963 - val_loss: 0.5051 - val_acc: 0.7642\n",
            "Epoch 852/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4331 - acc: 0.7943 - val_loss: 0.5052 - val_acc: 0.7561\n",
            "Epoch 853/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4333 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7480\n",
            "Epoch 854/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4329 - acc: 0.7943 - val_loss: 0.5053 - val_acc: 0.7561\n",
            "Epoch 855/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4328 - acc: 0.7963 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 856/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4330 - acc: 0.7943 - val_loss: 0.5052 - val_acc: 0.7642\n",
            "Epoch 857/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4330 - acc: 0.7902 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 858/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4344 - acc: 0.8004 - val_loss: 0.5061 - val_acc: 0.7561\n",
            "Epoch 859/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4324 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7480\n",
            "Epoch 860/1000\n",
            "491/491 [==============================] - 0s 31us/step - loss: 0.4323 - acc: 0.8004 - val_loss: 0.5053 - val_acc: 0.7561\n",
            "Epoch 861/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4329 - acc: 0.7984 - val_loss: 0.5059 - val_acc: 0.7480\n",
            "Epoch 862/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4332 - acc: 0.7963 - val_loss: 0.5077 - val_acc: 0.7642\n",
            "Epoch 863/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4336 - acc: 0.7943 - val_loss: 0.5062 - val_acc: 0.7561\n",
            "Epoch 864/1000\n",
            "491/491 [==============================] - 0s 58us/step - loss: 0.4339 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7480\n",
            "Epoch 865/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4328 - acc: 0.7943 - val_loss: 0.5054 - val_acc: 0.7480\n",
            "Epoch 866/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4321 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7480\n",
            "Epoch 867/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4321 - acc: 0.8004 - val_loss: 0.5053 - val_acc: 0.7561\n",
            "Epoch 868/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4331 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7561\n",
            "Epoch 869/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4325 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7480\n",
            "Epoch 870/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4330 - acc: 0.8004 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 871/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4320 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7480\n",
            "Epoch 872/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4321 - acc: 0.8004 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 873/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4322 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7480\n",
            "Epoch 874/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4324 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7724\n",
            "Epoch 875/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4327 - acc: 0.8004 - val_loss: 0.5056 - val_acc: 0.7642\n",
            "Epoch 876/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4335 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 877/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4323 - acc: 0.8004 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 878/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4328 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 879/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4333 - acc: 0.7984 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 880/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4319 - acc: 0.8004 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 881/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4331 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 882/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4325 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 883/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4328 - acc: 0.7984 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 884/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4319 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 885/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4320 - acc: 0.8004 - val_loss: 0.5054 - val_acc: 0.7724\n",
            "Epoch 886/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4326 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 887/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4318 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7480\n",
            "Epoch 888/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4328 - acc: 0.8004 - val_loss: 0.5063 - val_acc: 0.7561\n",
            "Epoch 889/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4329 - acc: 0.7984 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 890/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4314 - acc: 0.7984 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 891/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4322 - acc: 0.8004 - val_loss: 0.5054 - val_acc: 0.7724\n",
            "Epoch 892/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4321 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7480\n",
            "Epoch 893/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4317 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7561\n",
            "Epoch 894/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4320 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7724\n",
            "Epoch 895/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4321 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7561\n",
            "Epoch 896/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4313 - acc: 0.7963 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 897/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4314 - acc: 0.8004 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 898/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4330 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 899/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4321 - acc: 0.7984 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 900/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4319 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7480\n",
            "Epoch 901/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4324 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7724\n",
            "Epoch 902/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4325 - acc: 0.7943 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 903/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4319 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7724\n",
            "Epoch 904/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4325 - acc: 0.7923 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 905/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4314 - acc: 0.7984 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 906/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4319 - acc: 0.7984 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 907/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4317 - acc: 0.7943 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 908/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4315 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 909/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4313 - acc: 0.7984 - val_loss: 0.5059 - val_acc: 0.7480\n",
            "Epoch 910/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4318 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7480\n",
            "Epoch 911/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4316 - acc: 0.7963 - val_loss: 0.5058 - val_acc: 0.7480\n",
            "Epoch 912/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4311 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 913/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4315 - acc: 0.7984 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 914/1000\n",
            "491/491 [==============================] - 0s 33us/step - loss: 0.4323 - acc: 0.7963 - val_loss: 0.5057 - val_acc: 0.7480\n",
            "Epoch 915/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.4314 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7480\n",
            "Epoch 916/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4319 - acc: 0.7984 - val_loss: 0.5060 - val_acc: 0.7561\n",
            "Epoch 917/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4314 - acc: 0.7943 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 918/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4311 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7480\n",
            "Epoch 919/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4315 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7724\n",
            "Epoch 920/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4318 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 921/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4313 - acc: 0.7963 - val_loss: 0.5057 - val_acc: 0.7480\n",
            "Epoch 922/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4316 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7724\n",
            "Epoch 923/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4310 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7724\n",
            "Epoch 924/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4311 - acc: 0.7963 - val_loss: 0.5061 - val_acc: 0.7561\n",
            "Epoch 925/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4311 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7724\n",
            "Epoch 926/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4315 - acc: 0.7963 - val_loss: 0.5056 - val_acc: 0.7724\n",
            "Epoch 927/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4314 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7642\n",
            "Epoch 928/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4308 - acc: 0.7943 - val_loss: 0.5059 - val_acc: 0.7480\n",
            "Epoch 929/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4316 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7724\n",
            "Epoch 930/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4311 - acc: 0.7963 - val_loss: 0.5057 - val_acc: 0.7561\n",
            "Epoch 931/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4321 - acc: 0.7943 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 932/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4311 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 933/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4308 - acc: 0.7943 - val_loss: 0.5057 - val_acc: 0.7561\n",
            "Epoch 934/1000\n",
            "491/491 [==============================] - 0s 43us/step - loss: 0.4311 - acc: 0.7963 - val_loss: 0.5069 - val_acc: 0.7642\n",
            "Epoch 935/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4312 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7724\n",
            "Epoch 936/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4310 - acc: 0.7984 - val_loss: 0.5063 - val_acc: 0.7561\n",
            "Epoch 937/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4306 - acc: 0.8004 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 938/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4312 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 939/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4319 - acc: 0.7963 - val_loss: 0.5058 - val_acc: 0.7561\n",
            "Epoch 940/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4301 - acc: 0.7984 - val_loss: 0.5056 - val_acc: 0.7724\n",
            "Epoch 941/1000\n",
            "491/491 [==============================] - 0s 48us/step - loss: 0.4307 - acc: 0.8004 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 942/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4307 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7724\n",
            "Epoch 943/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4323 - acc: 0.7943 - val_loss: 0.5062 - val_acc: 0.7561\n",
            "Epoch 944/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4308 - acc: 0.8024 - val_loss: 0.5052 - val_acc: 0.7642\n",
            "Epoch 945/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4305 - acc: 0.7902 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 946/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4310 - acc: 0.7984 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 947/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4309 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 948/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4309 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7724\n",
            "Epoch 949/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4310 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 950/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4312 - acc: 0.7943 - val_loss: 0.5064 - val_acc: 0.7561\n",
            "Epoch 951/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4305 - acc: 0.8004 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 952/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4317 - acc: 0.7943 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 953/1000\n",
            "491/491 [==============================] - 0s 55us/step - loss: 0.4305 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7561\n",
            "Epoch 954/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4305 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7561\n",
            "Epoch 955/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4311 - acc: 0.7923 - val_loss: 0.5057 - val_acc: 0.7561\n",
            "Epoch 956/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4309 - acc: 0.7984 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 957/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4298 - acc: 0.7963 - val_loss: 0.5054 - val_acc: 0.7642\n",
            "Epoch 958/1000\n",
            "491/491 [==============================] - 0s 40us/step - loss: 0.4313 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 959/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4315 - acc: 0.7943 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 960/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4300 - acc: 0.7963 - val_loss: 0.5053 - val_acc: 0.7642\n",
            "Epoch 961/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4303 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7561\n",
            "Epoch 962/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4298 - acc: 0.7984 - val_loss: 0.5056 - val_acc: 0.7724\n",
            "Epoch 963/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4305 - acc: 0.8024 - val_loss: 0.5058 - val_acc: 0.7724\n",
            "Epoch 964/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4306 - acc: 0.7943 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 965/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4305 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 966/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4307 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 967/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4306 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 968/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4307 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 969/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4305 - acc: 0.8024 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 970/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4323 - acc: 0.7963 - val_loss: 0.5067 - val_acc: 0.7561\n",
            "Epoch 971/1000\n",
            "491/491 [==============================] - 0s 46us/step - loss: 0.4301 - acc: 0.7963 - val_loss: 0.5061 - val_acc: 0.7561\n",
            "Epoch 972/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.4301 - acc: 0.7963 - val_loss: 0.5061 - val_acc: 0.7561\n",
            "Epoch 973/1000\n",
            "491/491 [==============================] - 0s 47us/step - loss: 0.4310 - acc: 0.7963 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 974/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4310 - acc: 0.7984 - val_loss: 0.5058 - val_acc: 0.7561\n",
            "Epoch 975/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4295 - acc: 0.7963 - val_loss: 0.5057 - val_acc: 0.7642\n",
            "Epoch 976/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4299 - acc: 0.7963 - val_loss: 0.5066 - val_acc: 0.7642\n",
            "Epoch 977/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4302 - acc: 0.7963 - val_loss: 0.5056 - val_acc: 0.7642\n",
            "Epoch 978/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4295 - acc: 0.7963 - val_loss: 0.5057 - val_acc: 0.7642\n",
            "Epoch 979/1000\n",
            "491/491 [==============================] - 0s 42us/step - loss: 0.4302 - acc: 0.7943 - val_loss: 0.5058 - val_acc: 0.7642\n",
            "Epoch 980/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4302 - acc: 0.7923 - val_loss: 0.5065 - val_acc: 0.7561\n",
            "Epoch 981/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4305 - acc: 0.7943 - val_loss: 0.5057 - val_acc: 0.7642\n",
            "Epoch 982/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4306 - acc: 0.7963 - val_loss: 0.5058 - val_acc: 0.7642\n",
            "Epoch 983/1000\n",
            "491/491 [==============================] - 0s 49us/step - loss: 0.4295 - acc: 0.7943 - val_loss: 0.5057 - val_acc: 0.7642\n",
            "Epoch 984/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4295 - acc: 0.8004 - val_loss: 0.5059 - val_acc: 0.7561\n",
            "Epoch 985/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4295 - acc: 0.7963 - val_loss: 0.5071 - val_acc: 0.7642\n",
            "Epoch 986/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4296 - acc: 0.7984 - val_loss: 0.5057 - val_acc: 0.7642\n",
            "Epoch 987/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4297 - acc: 0.7943 - val_loss: 0.5057 - val_acc: 0.7642\n",
            "Epoch 988/1000\n",
            "491/491 [==============================] - 0s 44us/step - loss: 0.4302 - acc: 0.7923 - val_loss: 0.5058 - val_acc: 0.7642\n",
            "Epoch 989/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4289 - acc: 0.7963 - val_loss: 0.5059 - val_acc: 0.7642\n",
            "Epoch 990/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4294 - acc: 0.7923 - val_loss: 0.5061 - val_acc: 0.7480\n",
            "Epoch 991/1000\n",
            "491/491 [==============================] - 0s 37us/step - loss: 0.4297 - acc: 0.7963 - val_loss: 0.5058 - val_acc: 0.7642\n",
            "Epoch 992/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4311 - acc: 0.7943 - val_loss: 0.5064 - val_acc: 0.7561\n",
            "Epoch 993/1000\n",
            "491/491 [==============================] - 0s 34us/step - loss: 0.4297 - acc: 0.7984 - val_loss: 0.5065 - val_acc: 0.7561\n",
            "Epoch 994/1000\n",
            "491/491 [==============================] - 0s 38us/step - loss: 0.4296 - acc: 0.7943 - val_loss: 0.5069 - val_acc: 0.7642\n",
            "Epoch 995/1000\n",
            "491/491 [==============================] - 0s 53us/step - loss: 0.4291 - acc: 0.7984 - val_loss: 0.5061 - val_acc: 0.7480\n",
            "Epoch 996/1000\n",
            "491/491 [==============================] - 0s 39us/step - loss: 0.4297 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7642\n",
            "Epoch 997/1000\n",
            "491/491 [==============================] - 0s 35us/step - loss: 0.4297 - acc: 0.7963 - val_loss: 0.5057 - val_acc: 0.7642\n",
            "Epoch 998/1000\n",
            "491/491 [==============================] - 0s 36us/step - loss: 0.4296 - acc: 0.8024 - val_loss: 0.5064 - val_acc: 0.7642\n",
            "Epoch 999/1000\n",
            "491/491 [==============================] - 0s 41us/step - loss: 0.4296 - acc: 0.7984 - val_loss: 0.5055 - val_acc: 0.7642\n",
            "Epoch 1000/1000\n",
            "491/491 [==============================] - 0s 45us/step - loss: 0.4304 - acc: 0.7943 - val_loss: 0.5056 - val_acc: 0.7642\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "5LFv7pLtTdgy",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 325
        },
        "outputId": "f3ad3279-328c-4a98-b376-0ba5add8d0d3"
      },
      "source": [
        "# Visualize training vs. validation loss to check whether the model is overfitting\n",
        "# (a widening gap between the two curves indicates overfitting).\n",
        "# Explicit Axes interface; labels are attached at plot time so the legend\n",
        "# can never get out of sync with the plotting order.\n",
        "fig, ax = plt.subplots()\n",
        "ax.plot(hist.history['loss'], label='Train')\n",
        "ax.plot(hist.history['val_loss'], label='Val')\n",
        "ax.set_title('Model loss')\n",
        "ax.set_ylabel('Loss')\n",
        "ax.set_xlabel('Epoch')\n",
        "ax.legend(loc='upper right')\n",
        "plt.show()"
      ],
      "execution_count": 17,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAb8AAAE0CAYAAAC8ZD1pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3XlcFPX/wPHX7C67C8gtLIp444nk\njUeeWVp5Zfq1LPP4VVpqZZqpHVpZWXRpWlpm5VlmZnl0Gh5lHmmamil5oyKIXArLsrvz+4OCJkBB\njuV4Px8PHrqf+czsez4c752Zz6EkJyerCCGEEFWIztUBCCGEEGVNkp8QQogqR5KfEEKIKkeSnxBC\niCpHkp8QQogqR5KfEEKIKkeSnxAVzKlTp/D19eWhhx4q0+Ns27YNX19fXn755WK9rxDlgSQ/Ia7B\n19cXX19f/Pz8OHHiRIH1Bg4cmFN38eLFZRihEKKoJPkJUQgGgwFVVVmyZEm+20+ePMmWLVswGAxl\nHJkQ4npI8hOiEPz9/WnXrh0rVqzAbrfn2b506VJUVaVPnz4uiE4IUVSS/IQopPvuu48LFy7w9ddf\na8rtdjvLly+nTZs2NG/evMD9T548ycMPP0yzZs0IDAwkLCyMkSNHcvDgwXzrp6WlMX36dJo1a4bF\nYqFdu3bMmzcPVS14RkKr1crbb79Nt27dCAkJoWbNmnTv3p3Fixdfdb/iKsq52Ww2Fi5cSLdu3ahX\nrx7BwcGEh4czePBgvvrqK03dgwcPcv/99xMREYHFYqF+/fp06tSJSZMmkZKSUmrnIyo/uUcjRCEN\nGjSI6dOns2TJEvr165dT/u233xIXF8f06dM5e/Zsvvvu27ePAQMGkJqayi233ELz5s05ceIE69at\n45tvvmHFihX07Nkzp35mZiYDBgxg7969NGvWjCFDhpCamsprr73Gzz//nO97pKWlMXDgQPbs2UNE\nRATDhg0DYNOmTTz++OPs3r2bd999twRb5PrO7eGHH2b16tU0adKEIUOG4Onpyfnz59m7dy/r16+n\nf//+QHbi69WrF4qi0Lt3b+rVq8fly5c5ffo0K1asYNy4cfj4+JT4+YiqQZKfEIXk6enJ4MGD+fjj\njzlz5gyhoaEALFmyhGrVqjFo0CDefvvtPPupqsrYsWNJSUnhnXfeyUlKAJs3b+aOO+7gwQcf5Pff\nf8fDwwOAefPmsXfvXm677TaWLVuGTpd9k2bixIl079493/imT5/Onj17mDlzJo899lhOeWZmJsOH\nD2flypX079+fW2+9taSapMjnlpKSwueff07Lli354Ycf8jwjTUxMzPn/ypUrsVqtLFu2jL59+2rq\npaWlYTQaS+w8RNUjtz2FKIIRI0bgdDpZtmwZAGfPnuWHH37gzjvvpFq1avnus3PnTv78809at26t\nSQ4A3bt3p2/fvly8eJGNGzfmlC9fvhxFUXjuuedyEh9A7dq1GTNmTJ73SEpKYuXKlURERGgSH4DJ\nZOLZZ58F4NNPP72+Ey9AUc9NURRUVcVoNKLX6/McLyAgIE+Zu7t7njIvLy9MJlMJnYWoiuTKT4gi\naNmyJRERESxfvpwpU6awdOlSHA4HI0aMKHCf/fv3A9C1a9d8t3fv3p1169axf/9+Bg8eTFpaGseP\nHyc4OJiwsLA89Tt37pynbM+ePdjtdnQ6Xb7j8P7ppHP06NFCnWdhFfXcvL296dOnD9988w2dO3em\nb9++dOzYkXbt2uX58DBo0CAWLFjAPffcQ//+/enatSvt27enUaNGJXoOomqS5CdEEY0YMYJJkybx\n7bffsmzZMsLDw2ndunWB9VNTUwEICgrKd7vFYgHI6cDxT/3AwMB86+d3nEuXLgHZz9/27dtXYCyX\nL18ucNv1KOq5AXz44YfMnTuX
1atX8+qrrwLg5uZGnz59mDVrFnXq1AGgTZs2fPPNN7z++uusX7+e\nVatWAdlXv4899hijR48u0XMRVYvc9hSiiIYMGYKHhwdPPPEEsbGxjBw58qr1vb29AYiPj893+4UL\nFzT1/vk3ISEh3/r5HeeffR588EGSk5ML/Pr999+vfYJFUNRzg+zbmE8++SS7d+/m8OHDLF68mF69\nerFu3ToGDx5MVlZWTt127drxySefcPLkSX744QeeeuoprFYrjz/+OCtXrizRcxFViyQ/IYrI29ub\nO+64g7Nnz+Lh4cGQIUOuWv+GG24AsqcHy8+WLVuA7FuqkP08q379+ly4cIG//vorT/38enu2bdsW\nnU7HL7/8UqRzKa6intt/1ahRg0GDBrFy5Urat29PTEwMf/75Z556RqORtm3b8sQTT7BgwQIA1q9f\nXxKnIKooSX5CXIfp06ezbNkyVq9efc3u9pGRkTRu3Jg9e/bk6XCyZcsW1q1bR0BAALfddltO+T33\n3IOqqjz77LM4nc6c8tOnT7Nw4cI871G9enWGDh3KgQMHePnll/MdiH/27NkSf+ZX1HO7ePFivmP/\nMjMzc26N/tPjdefOnWRkZOSp+8/V5D/1hLge8sxPiOsQEhJCSEhIoeoqisK7777LwIEDGTt2LF98\n8UXOWLivvvoKo9HIggULNH/Mx48fz4YNG9i4cSNdunShV69epKam8sUXX9CxY8c8A+0BXn31VY4f\nP84rr7zCp59+SqdOnbBYLDlXkLt37+bFF18s0Q4jRT23c+fO0bVrV5o1a0bz5s0JCQnhypUr/Pjj\njxw7doz+/fvToEEDAObMmcPWrVvp2LEjderUwcvLi7/++otvv/0Wd3f3Yk/sLao2SX5ClIHWrVuz\nefNmoqKi2Lx5M5s2bcLHx4fbb7+dSZMmERERoalvMplYu3Yts2fP5osvvmDBggXUrl2bSZMm0a9f\nv3yTn5eXF+vXr2fp0qV89tlnrF+/HqvVSmBgIHXq1GHGjBnccccdLj232rVrM336dLZt28bPP//M\nxYsX8fHxoX79+jz66KOa4RL3338/fn5+7Nmzh507d5KVlUWNGjW46667GD9+vPT6FMWiJCcnl96c\nR0IIIUQ5JM/8hBBCVDmS/IQQQlQ5kvyEEEJUOZL8hBBCVDmS/IQQQlQ5kvyEEEJUOZL8hBBCVDmS\n/EpATEyMq0MoV6Q98pI20ZL20JL2yKu020SSnxBCiCpHkp8QQogqR5KfEEKIKkeSnxBCiCpHVnUQ\nQogyYLfbuXLlSr7bzGZzznqGIlth2sTT0xOD4frSmCQ/IYQoZXa7nbS0NHx9fVEUJc92k8mE2Wx2\nQWTl17XaRFVVkpOT8fLyuq4EKLc9hRCilF25cqXAxCeuj6Io+Pr6Fng1fS1y5Vcctkz0h38jNHoD\n5ow0rNPecnVEQohyShJfyStOm0ryu162TDwfG4xyJQ33v4uU86dRa9R2aVhCCCGuTW57Xi+jCUfd\nRpoiw96fXRSMEEKIopDkd53S7U4WebTUlCm7NrsmGCGEqGBGjx7Nfffd57L3l9ue18nDoGO5TyvG\n/qvMePIIWedOodas47K4hBCiJPj6+l51+913382777573cd/8803UVX1uvcvLkl+xRAeVovoX5vR\nI/mPnDLjhhVkPjDNhVEJIUTxHTlyJOf/3377LY888oimrKBhCFlZWbi5uV3z+D4+PsUPshjktmcx\n9K/jzuIa3TVlhp+/Q3f6L9cEJIQQJcRiseR8/ZOo/lt29OhRfH19Wbt2LbfddhsWi4WVK1cSHx/P\nqFGjaNq0KTVq1KBjx46sWrVKc/z/3vbs1asX06ZN45lnnqFu3bq0aNGC5557rtSuDuXKrxg6BxsZ\nX7czB099RXh6LACKqmJaOoeMaW+BTu/iCIUQ5Znvh2fL9P2SR4WUynFnzpzJrFmzCA8Px2QykZ
GR\nQdu2bZk4cSLe3t58//33PPTQQ4SGhtKxY8cCj7N8+XImTJjApk2b2LFjBxMmTKB169b069evxGOW\nK79i0CkKgxpW46n6QzXl+qMHMK56D1x4P1sIIcrKuHHj6Nu3L3Xr1qVGjRrUqVOHcePGERERQd26\ndXnggQfo3bs3a9asuepxIiIieOKJJ2jQoAF33nknkZGRbN26tVRiluRXTKOaePJt9ZZ87X+Dptz4\n9acYVy2UBCiEqPRatWqleW2325k9ezadOnWibt26hISE8N1333HmzJmrHqd58+aa18HBwSQkJJR4\nvCDJr9hqVzMwINjB/zUZwwU3b80248ZPMK6YLwlQCFGpeXh4aF6/9tprLFq0iIkTJ7Ju3Tq2bdvG\nzTffTFZW1lWP89+OMoqi4HQ6SzxekGd+JeL+2llEJ/nRv8UTfPP7y/jZ03O2Gb9bDQ47tnsfAZ18\n1hBC5PrnGZzVaq1UE1vv2LGDvn37MmTIEACcTifHjh0jNDTUxZHlkr/GJaC6ERZ08WOPd31uvuEp\nEg3VNNuNm9ZiWhwFDruLIhRCiLLTsGFDNm3axK5duzhy5AiPPfYYcXFxrg5LQ5JfCbkl1Mwj4dXY\n51WXXi2fIsHNS7PdbdvXmOc8DZkZLopQCCHKxrRp02jevDl33HEHffv2JTAwkP79+7s6LA0lOTlZ\nHkgVU0xMDGFhYWQ5Ve7fcokvT1ppdiWW7/a9RHCWdjFGR/2mZEx8GbyvPntCRfZPe4hc0iZaVa09\nUlJSrjqou7Ld9iwJhW2Ta7VtQVx+5bdo0SIiIiKwWCx069aN7du3X7W+zWbjxRdfJCIigqCgIMLD\nw1mwYEHO9uXLl+Pr65vny2q1lvap4KZT+KCbP71DzfzhWYserZ7hhDlQU0d//DAes8ajxJ8r9XiE\nEELkz6XJb82aNUydOpVJkyaxdetW2rdvz5AhQ67aHXb06NFs2rSJOXPmsHv3bj766KM83WM9PDw4\ncuSI5qusPlUZdApLe/hzZz13Yjxq0KXVDH6rpp3rU3chFvdZ49CdPFomMQkhhNByafKbP38+w4YN\nY8SIETRu3JioqCgsFguLFy/Ot/6PP/7I1q1b+eyzz+jRowd16tShbdu2dOnSRVNPURTNNDwWi6Us\nTieHUa/wfjc/xjT1JM7kR8+WT7PJV5ugdSlJuL/0CPpfS2cApxBCiIK5LPnZbDb27dtHz549NeU9\ne/Zk586d+e6zYcMGWrVqxfz582nWrBmtW7dmypQpXL58WVMvIyOD8PBwmjVrxtChQ9m/f3+pnUdB\ndIrC7EgfZrbx5rLBg74RU1gR1ElTR8m04v72sxjXLIZSGssihBAiL5eN80tMTMThcBAYqH0mFhgY\nSHx8fL77nDx5kh07dmAymViyZAkpKSlMmTKFuLg4lixZAkBYWBjz5s0jPDycy5cvs2DBAvr06cNP\nP/1EgwYNCownJiamWOdT0P63u0NyPQNvnTAyoulDxBl9eTx2o6aO8cslZPyxn5MD/w+nyT3f41Q0\nxW3PykjaRKsqtYfZbMZkMl21Tln0S6hoCtMmqamp+eaMa3WoqlCD3J1OJ4qi8P777+f07omKimLQ\noEHEx8cTFBRE+/btad++fc4+kZGRdOnShYULF/Lqq68WeOzi9Dy7Vs+1GQ1VrphSeP/PK0xpeA/H\n3C289dcS3FRHTh2fmP20WPYaGY+9iBpcfgaCXo+q1pOvMKRNtKpae6SkpFy134H09syrsG3i7e19\nXYPnXXbbMyAgAL1en2fetoSEBIKCgvLdx2KxUKNGDU231kaNGgEQGxub7z56vZ6WLVty/PjxEoq8\n6BRF4eVIH3qHZn8jF4b04pYbphH/n+nQdOdP4/HcWPT7d7giTCGEqDJclvyMRiMtW7YkOjpaUx4d\nHU1kZGS++3To0IG4uDjNM75jx44BFJj5VVXl0KFDZd7p5b
8MOoUlPfy5Nyx7Drxtvk1p32YWe6rV\n1dRT0q9gfnMabl8tleeAQghRSlza23PcuHGsWLGCJUuWcOTIEZ588kni4uIYNWoUAGPGjGHMmDE5\n9QcPHoy/vz/jxo3j8OHD7Nixg6lTpzJgwICcZ4ezZ89m06ZNnDx5kt9//53x48dz6NAhRo8e7ZJz\n/DeTXuHtzr6MaeoJQKw5gG6tZrA8qLOmnqKqmD7/APO8GZCRnt+hhBBCFINLn/kNGjSIS5cuERUV\nxYULF2jatCmrVq2idu3aQN5bmdWqVWPt2rVMmTKFnj174uvry+23386MGTNy6qSkpPDoo48SHx+P\nt7c3ERERbNy4kTZt2pTpuRVEURRe6eBLE183Hv8lGas+uyPMb151eeXYCvTkTrhj2LMNj/OnyXjk\nBdQatV0YtRBCXJ8lS5bw9NNPc/r0aVeHoiHTm5WA6314/+XJDB7alkS6Pftb0DPpICsOvU11u3bo\nhuruiXXMUzhadcrvMOVOVevMUBjSJlpVrT0q4vRmd911F+np6Xz11Vd5th05coTIyEjWrFmTZ7ja\nf11v8qv005tVZQPquvPd7YHUqaYH4Ee/cCLbzGLvf58DZlzB/a3puH3xkTwHFEKUieHDh7Nt2zZO\nnTqVZ9vSpUsJDQ2le/fuZR9YCZHk52Lh/m782C+QDkFGAE65B9K11QyWWTrnqWta+1H2yhDpl/Ns\nE0KIktS7d2+CgoJYvny5pjwrK4tPP/2Ue++9F51Ox9NPP02bNm0IDg4mIiKCmTNnkpmZ6aKoC69C\njfOrrALMer7sU50JPyWx6ngGVr2RkU0e4lev+rz213IM5F7tGfZtx+OFcdnjAS21XBi1EKK4qo3o\nnv1vGb3f5Y83F7quwWDg7rvvZsWKFUydOhXd34txf/311yQmJnLPPfcA4OXlxTvvvENwcDB//vkn\nEydOxGw2M3Xq1NI4hRIjV37lhEmvsLCrH9Nb/b0OoKIwr1Yfeuc3HvDcKTyeexj94d9cEKkQoqoY\nPnw4sbGxbN68Oads2bJl9OzZk1q1sj98P/nkk0RGRlKnTh169+7NY489xueff+6iiAtPkl85oigK\nU1p680E3P0zZjwHZ4teM9m1msdurvrbulVTMUZMxbPrSBZEKIaqCBg0a0LlzZ5YtWwbA+fPn2bRp\nE8OHD8+ps2bNGnr37k2jRo0ICQnhmWeeKXDSkfJEkl85dGd9D9bcUh1vowJkjwfs0fIZVgZ11NRT\nHA7MS97EuOQtsNtdEaoQopIbPnw4GzZsICkpiRUrVuDn58dtt90GwC+//MIDDzzAzTffzCeffMLW\nrVuZPn06NpvNxVFfmzzzK6c6B5vY3C+IO7+7yIk0B1a9keFNx3HQM5QXT6zS1DVuWovu3Cms42dC\ntaJ3+RVCuMY/z+DK41CHfwwYMIApU6bw6aefsmzZMu666y7c3NwA2LlzJ6GhoUyePDmnfnkbz1cQ\nufIrx+p7G/ju9kDaVM/+QUNReKXOAAY1n8hlvXaGeMPh3/B47iGUc3m7JQshxPVyd3dnyJAhzJ49\nmxMnTmhueTZo0IDY2FhWr17NiRMneO+99/jiiy9cGG3hSfIr5wLd9ay7tTq31c79VPhVYFu6tJrJ\naXN1TV1d/Dk8nn9YJsYWQpSo4cOHk5ycTGRkJI0bN84p79evHw8//DBPPvkkXbp04aeffmLatGku\njLTwZIaXElAWs1U4nCrTd6Ww8PCVnLLqtlRWH3qLG1OOaOqqioJt6Fiy+vwPFKVU48pPVZu9ozCk\nTbSqWntUxBleXE1meBEA6HXZK8NPvsErp+yi0Zubb5jOB8HdNXUVVcX0ybuYFkdJRxghhMiHJL8K\nRFEUnm7tzSuRPuj+vqDL0hkY0/h+Hm9wL05F++1027oR8xtTIeNKPkcTQoiqS5JfBTSmWTWW9PDH\n/PdYQBSFuaG30rfFE6
S5eWjqGg79ivuLj6Bcii/7QIUQopyS5FdB9a3jztre1fF2y32m951/BJGt\nniPWU7twr/7MMdyffxjd6b/KOkwhhCiXJPlVYB0sJlbfEkCQe+638ahHTdrdMIMDfg01dXVJF3F/\n8RH0B3aXdZhCCFHuSPKr4NoHmfju9kDCfHLnK0gw+tApfDobg9pq6irWdMxvPIlh68ayDlOIKk9V\npWN9SStOm0ryqwTqehn4oW8gHS3GnLIMvYmBTR9lQWhvTV3F6cT8wasYP/8A5JdRiDLh6elJcnKy\nJMASpKoqycnJeHp6Xtf+Mr1ZJeFj1LH65gBGbb7Ed7HZa2k5FR3jG9xHjDGQ144tRyH3F8/41VKU\ni3Fk/t8UMLi5KmwhqgSDwYCXlxepqan5bk9NTcXb2zvfbVVVYdrEy8sLg+H60pgkv0rE003HJ70C\neHZ3KvMO5S54Oyf0Vk6Zq7Pyz3dwc+ROOOu2/XuU5ESsj84Cs0d+hxRClBCDwVDgYOz4+HhCQ0PL\nOKLyrbTbRG57VjI6ReGFdtqxgABrA9vRPWI6aWbtJynDH3txj3oCrqSVcaRCCOE6kvwqIUVRGNOs\nGou7+eesCwiw0yeMthEziPOuoamv/+sQ7q9MgrTkMo5UCCFcQ5JfJTawnjuf3BSAuz73EvCYRzA3\nhM/geHXtUAj9qaO4v/wYSnJiWYcphBBlTpJfJdcjxMzG26prxgImGr1o2+RJjlqaaOrqz57E/aVH\nURJlNhghROUmya8KaFXdyMZbq1PDI/fbnWrwoG2jJzhUs4Wmru5CLO4vTUCJP1fWYQohRJmR5FdF\nNPRxY32fQE0CTNebiWzwOPtqtdbU1V28kD0fqCyMK4SopCT5VSENfAys7xNIzX8lQKveSMf6j7Kz\nbgdNXV3yRdxffgxd7PGyDlMIIUqdJL8qpoGPgfW3BhLikdsNNEtnoGvth9lUt6umri41CfOrk+UW\nqBCi0pHkVwXV9zaw/tbq1PLMTYAOnZ4+dR7gm4a9NHV1KZdwf3Wy9AIVQlQqkvyqqHr5JEBV0dE3\nZCQ/tbhNU1eXcA5z1GS4nP/UTEIIUdFI8qvC6noZWNs7QFuoKPTwv5s/mvfQFOtjT+D+xlTIzCjD\nCIUQonRI8qviGvq4sfHW6poyVdHRtvpIzjVurynXH/sD8/znwGEvyxCFEKLESfITdAo2Ed0vUDMT\njE0x0KLGQyTX144DNOzfgemjN2Q5JCFEheby5Ldo0SIiIiKwWCx069aN7du3X7W+zWbjxRdfJCIi\ngqCgIMLDw1mwYIGmzpdffklkZCRBQUFERkaybt260jyFSqFVdSOLu/tpJsNOwUj3RhPJDNVOhea2\ndSPGLz4s4wiFEKLkuDT5rVmzhqlTpzJp0iS2bt1K+/btGTJkCGfOnClwn9GjR7Np0ybmzJnD7t27\n+eijj2jevHnO9l27djF69GiGDBnCtm3bGDJkCCNHjuTXX38ti1Oq0G6t7c6sdtolVw5aTQxuOQVH\n9WBNufHLJRg2ry/L8IQQosS4NPnNnz+fYcOGMWLECBo3bkxUVBQWi4XFixfnW//HH39k69atfPbZ\nZ/To0YM6derQtm1bunTpklPn3XffpUuXLkyePJnGjRszefJkbrzxRt59992yOq0K7eHm1RjVWLu2\n39dpnkzr/jTOatrlkEwfv4H+wK6yDE8IIUqEy5KfzWZj37599OzZU1Pes2dPdu7cme8+GzZsoFWr\nVsyfP59mzZrRunVrpkyZwuXLuQu37t69O88xb7rppgKPKfJ6JdKXbjVMmrI3LvqxbMAzqMbccsXp\nxDz/OXSxJ8o6RCGEKBaXJb/ExEQcDgeBgYGa8sDAQOLj819V4OTJk+zYsYODBw+yZMkSoqKi2LRp\nEw8//HBOnQsXLhTpmCIvo17hox7+1PfSa8rvP1eTnUOeRFVyHwwqGVcwvzkVJeVSWYcp
hBDXzeDq\nAIrC6XSiKArvv/8+Pj7Zz6aioqIYNGgQ8fHxBAUFXfexY2JiihVbcfcvj14JUxi138xlR3ayc6rQ\nO64RP3UdQostq3Lq6S5egFcm8dfwJ1ANbkDlbI/ikjbRkvbQkvbIqzhtEhYWdtXtLkt+AQEB6PV6\nEhISNOUJCQkFJjGLxUKNGjVyEh9Ao0aNAIiNjSUoKAiLxVKkY/7jWg11NTExMcXav7wKAz4KsPK/\nHxJx/j2y4YpD4V6/QezobsV981c5dT3PnqD59vVk/t+UStsexSFtoiXtoSXtkVdpt4nLbnsajUZa\ntmxJdHS0pjw6OprIyMh89+nQoQNxcXGaZ3zHjh0DIDQ0FIB27doV6Zji6nrVMvN8W21Hl0PJDkbW\nvhd7eFtNudvWjdIDVAhRIbi0t+e4ceNYsWIFS5Ys4ciRIzz55JPExcUxatQoAMaMGcOYMWNy6g8e\nPBh/f3/GjRvH4cOH2bFjB1OnTmXAgAE5z/nGjh3L1q1befPNNzl69ChvvPEG27Zt46GHHnLJOVYG\n45pX48567pqyz09nMevGSTiDQzXlpqVz8DgnHWCEEOWbS5PfoEGDePnll4mKiqJLly7s2LGDVatW\nUbt2bSD7VmZsbGxO/WrVqrF27VpSU1Pp2bMno0aNonPnzsybNy+nTmRkJIsXL2bFihV07tyZTz75\nhMWLF9O2bds87y8KR1EU3uzkm6cDzEtHnOy77xlUozm3rj2LeqsXQFpyWYcphBCFpiQnJ8s8VcVU\nVe7X/5WSRecv48l05JbV89KzM/gg/u/P0tS1N2+LdfIroNMjqs7PSGFJe2hJe+RVaZ/5iYqnoY8b\nUR18NWUn0hyMzmyF7ZbBmnLDoV8xrpEp0IQQ5ZMkP1Ek9zXyzDMDzPrTVt4KvwdHI+0k2MZ1y9Dv\n/akswxNCiEKR5CeK7OX2vtwQ4KYpm7Evnd/veQqnj7+m3PzeyyhxsQghRHkiyU8Umdmg8HEPf7yN\nuTO9ZDnhgd91XH54Juq/nvMpGVcwv/2MLIIrhChXJPmJ61LXy8DcTn6asn2JWcxIq8PZXtrnf/rY\nE5g+iJI1AIUQ5YYkP3HdBtZzZ2Bd7fi/+Ycu82Pjm8mK1E4u7rbzR9w2rizL8IQQokCS/ESxvN7R\nh1qeubc5nSrMjDFx6b5JOGrV09Q1frYI/aE9ZR2iEELkIclPFEuAWc/cztrhD6etOp49kIX1kVmo\nnl455YrqxPTuCyiXZIUNIYRrSfITxdYzxMzoxp6asg/+vMKqy35YH3pWswSSLi0Z87yZYM8q4yiF\nECKXJD9RIl5o551n+rNJvyRzpn5rbANHasr1x/7A+MmCMoxOCCG0JPmJEuHppmNhV3/c/vUTlZql\nMnlHMrZ+92KP0K6qYfz+cwz+uj0PAAAgAElEQVQ7NpVxlEIIkU2Snygx7YKMvNDOR1O28bSVr87Y\nsI6ZjrO6RbPNtDgK5ezJMoxQCCGySfITJeqBJp608HJoyh7elsRxpyfW8c/lrPQOoGRacX/7WchI\nL+swhRBVnCQ/UaL0OoWnGto0tz+v2FV6b0ggs05jMu99RFNfd/40psUyAF4IUbYk+YkS18BT5ZnW\n2tXfE6xOPj+egb17X7Ju7K3Z5rYrGrfvPy/LEIUQVZwkP1EqxodX0wx+B5jxawpJNpXM+ybiCG2g\n2Wb85F10Rw+UZYhCiCpMkp8oFTpFYVE37dyfFzKcTNmRDCYz1gnPobrnjg1UHA7M859DSU0q61CF\nEFWQJD9RajpYTDxxg5em7LPjGXzyVzqqpRbWB6dptumSL2J653lw2MsyTCFEFSTJT5SqJ27wItxf\nu/bfpF+SOXfFgaP1jdhuu1uzzXD4N1kBXghR6iT5iVJl1Cu828UP4396f878NQUA2+D/w9HkBu0+\n65ej3/tzWYYphKhiJPmJUtfC3y3P4PdVxzPYfM4K
egPWh57F6Rug2W5+/yWUC2fLMkwhRBUiyU+U\nif9r4klzP4OmbPxPyWQ6VFTfAKzjZqDqcn8clfQrmOc9C7bMsg5VCFEFlFjyU1WV9HSZqUPkz6BT\neLWDdumj2CsOFvxxGQBnowhsQx/SbNefPoZpyVtlFqMQouoocvJbv349zz//vKbs7bffJiQkhFq1\najFs2DBJgiJfnYNN3FlPu/L7C3tS2ZtgAyCr92Ds7bpptrtt+xrDlg1lFqMQomoocvJ76623iIuL\ny3m9b98+ZsyYQZs2bRg5ciTff/89c+bMKdEgReXxQjsffI256/vZVZjwcxJ2pwqKgvX/puCsEarZ\nx7T0LXQnj5Z1qEKISqzIye/YsWNERETkvP7ss8/w9/dn9erVvPHGG4waNYo1a9aUaJCi8qjpqeft\nG7WD3w8l2Xl1f1r2C3dPrOOfRzWac7YrWVmY5z6DknC+LEMVQlRiRU5+VqsVDw+PnNc//vgjN910\nEyaTCYAWLVpw9qz00hMF61fHnf/V197+fG1/GvsuZt/+dNaqR+aoSZrtusQLmN+aDla5pS6EKL4i\nJ7+QkBB+++03IPsq8M8//6Rnz5452y9duoTZbC5odyEAmNXehyD33B8/pwoPbk0i05G9uoO9083Y\nbhqo2UcfewLz+7NlBQghRLEVOfkNHTqUjz/+mLvuuos777wTPz8/+vTpk7N97969NGzYsESDFJVP\nkLuetztrb38eTbHz3uHLOa9tw8Zjb9tVU8fw61bc1i0rkxiFEJVXkZPf448/zuOPP865c+eoVasW\ny5Ytw8cnewBzUlIS27dv59Zbby3xQEXl0zvUzNAG2tufUfvTSLT+vRiuwYD1wWl5V4BYsxj9vu1l\nFaYQohIqcvLT6/U8/fTTbN26lfXr19OpU6ecbX5+fsTExDBx4sQSDVJUXi+298HbLbf3Z6pN5ald\nKbkVTO5YH52F6pm7PqCiqpgXvIgSd6YsQxVCVCIlNsh9165dfP/991y5cqWkDimqgOpmPZP/s/LD\nJ8cy+O6MNee1Glgj7wwwGVcwv/0sZGaUWaxCiMqjyMkvKiqKwYMHa8ruvvtu+vTpw9ChQ2nfvj2n\nT58usQBF5TemWTWa+WqnPpu4PZkUmzPntaN5G2x3/WcGmNgTmOc/B04nQghRFEVOfqtXr6Zx48Y5\nr7/++mu++eYbHn30URYtWoTNZuPVV18t9PEWLVpEREQEFouFbt26sX17wc9ytm3bhq+vb56vo0dz\nB0AvX7483zpWq7XA4wrXMukV5nfxQ5d795Oz6Q5m7E7R1Mu6ZTBZHW7SlBn278C0VCZVEEIUjeHa\nVbTOnTtHWFhYzuuvvvqKBg0aMGPGDABiYmJYtqxwvfHWrFnD1KlTef311+nQoQOLFi1iyJAh7Nix\ng9DQ0AL327FjB35+uT0Fq1evrtnu4eGRMxzjHzL8onxrVd3II+HVeOtAbm/Pj46mM6i+B11rZI8h\nRVHIHDUJ3Zlj6M+ezKnn9uOXOGo3xN6jXxlHLYSoqIp85acoCg6HI+f1li1buOmm3E/jNWvWJCEh\noVDHmj9/PsOGDWPEiBE0btyYqKgoLBYLixcvvup+gYGBWCyWnC+9Xp8nxn9vt1gsRThD4SpPtvQm\nzEf7eezJHcnZU5/9w+yB9dFZOL20k2Sbls5B99ehsghTCFEJFDn5NWzYkA0bsica/uGHH4iLi+Pm\nm2/O2X727Fl8fX0L2j2HzWZj3759mgHyAD179mTnzp1X3bd79+40btyY/v37s3Xr1jzbMzIyCA8P\np1mzZgwdOpT9+/cX5tSEi7kbFN7urP3ZOZxs591DlzVlqqUW1okvoxpyV4hXHHbMb89ASYwvk1iF\nEBVbkZPfhAkT2Lx5M3Xq1OHuu++mSZMmdO/ePWf7li1bNHN/FiQxMRGHw0FgYKCmPDAwkPj4/P+A\nBQcH88Ybb7B0
6VKWLl1KWFgYAwYM0DwnDAsLY968eaxYsYJFixZhMpno06cPx44dK+qpChfoYDFx\n13/G/r30Wxon0+yaMmeDpljHPKUp0yVfxPz6FLiSVupxCiEqNiU5ObnIc0Vt3ryZ7777Dm9vb+6/\n//6cZ25JSUlMmDCBoUOH0q/f1Z+/nD9/nqZNm7JhwwY6d+6cU/7KK6/w2Wef8euvvxYqliFDhqDX\n6/nkk0/y3e5wOOjSpQs33njjVTvixMTEFOr9ROlLtMH/9rqTas/tAdPB18Hc5pkoirZuzU2rsfzy\nraYsrXYjjg17THNlKISoWv7dNyU/Re7wAtm3Hf99tfcPPz+/Qnd2CQgIQK/X53k+mJCQQFBQUKFj\nadOmzVVXkdDr9bRs2ZLjx49f9TjXaqiriYmJKdb+lU1x2yMMeNHtChN+Ts4p25GsZ58+hP818NBW\nbjAFe1YGhl9zb397nT5K8x8/I/OhZ0BXYkNZi0V+RrSkPbSkPfIq7Ta57r8MycnJrF27lrlz5zJ3\n7lzWrl1LcnLytXf8m9FopGXLlkRHR2vKo6OjiYyMLPRxDhw4cNUOLaqqcujQIen0UsHcG+bBjcFG\nTdm0nSlcsjq0FXV6rGOewtGohabYbVc0xk/eLe0whRAV1HVd+c2ZM4fZs2eTmZmJ+q8Z9s1mM9Om\nTeORRx4p1HHGjRvHmDFjaNOmDZGRkSxevJi4uDhGjRoFwJgxYwBYuHAhAO+88w61a9emadOm2Gw2\nVq1axYYNG1iyZEnOMWfPnk27du1o0KABqampLFy4kEOHDvHGG29cz6kKF1EUhbc6+dL5y3gy/853\niZlOpu1KYWFXf21lo4mMR1/E48UJ6M6dyi3+9jNU/yCy+gwpw8iFEBVBkZPfkiVLmDlzJt26deOh\nhx7KGfB+5MgRFixYwMyZM/Hz82P48OHXPNagQYO4dOkSUVFRXLhwgaZNm7Jq1Spq164NQGxsrKZ+\nVlYWzz77LOfOncNsNufUv+WWW3LqpKSk8OijjxIfH4+3tzcRERFs3LiRNm3aFPVUhYs19HFjcoQX\nL/6W24Hl02MZ3BSSnvf2ZzVvMia9gvsL49AlJ+YUm1bOR/Wrjj2yR1mFLYSoAIrc4aVTp04EBQXx\nxRdfoPyn94GqqgwcOJCEhISrztRS2cj9eq2SbA+bQ6XbV/EcTs7t7VnLU8/uQRbcDUqe+rpTMbi/\n9CjKvxa9VQ1uWMfNxNG6c576ZUV+RrSkPbSkPfIqd8/8jh8/zu23354n8UH2raq+fftes3OJEIVl\n1CvMu1G77l/sFQev7kvNt76zThjWCc+j/mviA8WehXn+THRHfi/VWIUQFUeRk5+Pjw8nT54scPvJ\nkydz1vcToiS0CTTyQBNPTdncg5f5PdGWb31HeFsy/+9JTZliz8J9zlPo9+8otTiFEBVHkZNfnz59\neP/99/n00081nV1UVWXVqlUsWrRIFrMVJe6ZNt7U9Mj9cXWoMHZrEhn2/O/a2zvfgvU+7bqSypU0\nzG9OR39oT6nGKoQo/4qc/GbMmEHDhg1zOrv06dOHPn360LhxY8aOHauZ5FqIkuJt1PFaR+3UZ38k\n25myo+DhNfabBpB5xyhNmaI6MS2che6UTGogRFVW5OTn7+9PdHQ0L730Ei1atODSpUtcunSJFi1a\nMHv2bD7//HMSExOvfSAhiui22u7cUVc79dnSmHR+Tcj/9idA1oD7yBx8v6ZMl5KE+0uPyhWgEFXY\ndQ1yN5lMjB07ls8//5xdu3axa9cuPv/8cx588EGWL19O+/btSzpOIQCI6uiDxV37YztxezI2RwGd\nlhWFrH73Yrv9bm2xNR3z60+iP1S4afSEEJVL+Zj7SYhCqm7W815Xbe/PA5eyeHFv/r0//2Eb/AC2\nWwZryhSHHfOb0zD88kOJxymEKN8k+YkKp1tNM3fW097+nHvwMlvPZxa8k06H7Z
7xZN79sKZYycrC\ntPAl6QUqRBUjyU9USK939CXEI3csnwpM+iUZp3r1ORuy+vyPzDv/T1OmqE7Mc57CsGVDaYQqhCiH\nJPmJCsnXpOPdrn78e6qFmBQ77x2+cs19s/oPxzpykqZMcTgwL47CuOo9cDpLOFohRHlTqLk99+wp\nfK+4c+fOXXcwQhRF1xomhtR3Z9XxjJyyqTtT6F/HnZqe+qvsCfYe/cjMtGJaOV9TbtywAt35U1jv\nnwqeXqUStxDC9QqV/Hr16pXvdGb5UVW10HWFKK7JN3hpkh/AtF3JfNwj4Jr7ZvUZgjMgCPPCF1Gy\ncodLGPb+jMfMMWQ88RpqUM0Sj1kI4XqFSn7z58+/diUhXKCRrxsTW1TjzQOXc8q+PGll1bF8Vn7I\nh6NdNzL8AzG/9RS61KSccl38OTyeuZ/M/z2IvUf/crMorhCiZBQq+Q0bNqy04xDiuj3V2pu1JzM4\nkZa70O2TO5O5KcREgPnqtz8BnA2akfHsO5jnz0R/4khOuWJNx7zkLbL+3E/mmKfAcF3LXwohyiH5\nOCsqPINOYWnPAEz/ynNJmSojoy9hdxZuxS41sAYZT72NvWWnPNvcdkXj/uIElH8tlCuEqNgk+YlK\nIdzfjUkR2g4q2+JszDt4uYA98uFmxProLDLvfQTVzajZpD9+GI9nH8Dthy/gGsMphBDlnyQ/UWlM\njPCik0WbtN4+eJnYy/YC9siHTkfWzYPImLkAZ/VgzSYly4Zp6Rzcp49Cd/JoSYQshHARSX6i0nDT\nKSzu7o/Hv1Z4T8x08r8fEskq5O3Pfzhr1SdjxrvYW9+YZ5v+3EncZ43HuGohSrJM4i5ERSTJT1Qq\nwR56HmtRTVP2R5KdZ3enFPlYqrcf1kdewPrAVFSztueokmXDuGElHpPuwrh6EaQX4faqEMLlJPmJ\nSmdShBc9apo0Ze/+cYUvT2YUsMdVKAr2G/uQPusDHHUb5d1sz8K4bhmek4Zi2LpRngcKUUFI8hOV\njl6n8E4XP9z12skWJm5PJrOgpY+uQQ2sQcbMhVhHTUat5p1nu5J+BfMHr+L+wsPoD8oySUKUd5L8\nRKVUw0PPs220SepSppOb1yfgKOLzvxyKgr17X668sgzbbXfn6REKoD92GPeoyZjfmIru2OHrex8h\nRKmT5CcqrbHNPGngrR3k/vulLD7489qTX19VNW9sQ8eQ/spSbLfciWo05ali2L8Dj+cfwv35hzBE\nf4Uu01q89xRClChJfqLSUhSF724PxN+k/TGf8WsqfyRlFfv4aoAF2z0TSJ/1AfZWnfOtoz92GPNH\nbxA+ZzKmj15Hd/SArBohRDkgyU9UagFmPV/01k5yneFQeXBr0nU///sv1VIL62Mvkj5jAY6G4fnW\n0dsycYteh8eLE/B47E5Mi19Dv/cnuJJWIjEIIYpGJisUld4NAUbmdvblkZ+Tc8oOXspi9m+pzGjr\nU2Lv46zfhIyn30Z/aA9uG1Zg+GNvvvV0KUnotqzHbct6VEXBWbshjladcDRpiaN+UzCZSywmIUT+\nJPmJKuG+Rp5sPZ/J6n8tfzTn4GV6h5rpYMn7zO66KQqO8LY4wtuiJJzHbdvXGLZsQFfAYHhFVdGf\nikF/Kgb4GABH/aZkdb0VR3g71MAaJRebECKHJD9RZbzWwZftcZmcS89+5uZUYdimS/zYL5C6XiX/\nq6AG1sA2aDS2/vcR/80aQk//iWH/DhRr+lX30x8/jP54dk9RZ1BN7C3a42jXDUeDZpBP5xohRNFJ\n8hNVhq9Jx/wb/bjju9yrsEuZTh7YcomvbwvEoCulRZgNBlIatyKo7//IzLKh/3Mf+v070P+5H/2Z\nY1fdVRd/DuOmtbBpLarZg6zufcnq0R/VEgKyaLQogJJwHt350ziatoJ8huQISX6iiukRYmZsM08W\n/JE73GF3QhZR+9OY1irv4PUS52bE0aI9jh
btAVBSk9Dv+wX9kf0YdkZrVpT/L8WajvGbVRi/WYUz\nsCaOZq1x1qqLs2ZdnKH1UX38Sz9+UXJUtVQ+wOgP7ML9tSkAOBo0I+Ppt0GXz7qWdjtkZoCnV95t\n16AknMftxy9Bb8B2+zBw98g5ppJ4ATWopvbcVBX9n/tQLiXgDG2AcikeFB2GnT+iJF7A3qMfqt6A\n/lQMzpp1sLfseD2nXrRzSE5OlvmYiikmJoawsDBXh1FulPf2cDhV7vw+kc3nMjXln98SwE0hpdPZ\npLBtojtxBP3B3egP/oo+5iCKo/ArUjh9A3CG1EUNsOC0hKD6B+Gs3RCnjx94eper1ejL+8+IhtOZ\n23a2zOxFjf+bTJwOlAtnUb39QHVi2LsdNTAYR5OWcCUNw4FdoKo46oShS76I2/oV6I/sR/XxJ+PJ\nN7Eun0/A778AkD59Ls4GTSHLhmHfL7j9+BXOGrXBmo4u/hxqNS8c4e1Qki7iDG2AGhCE6lENJfEC\n5neez/MBytHkBhyhDTHs2QZ6PfaWHdEf2Y/+dO5dh8zB92f/x82E/uAu9EcPgNGEklb0OXH/Ybv9\nbpSUJHA6cNv+fZH3j7l3MjVu7nvd738tkvxKQIX6RS4DFaE94jMcdPwinsTM3DF3gWYdm/sHEeJ5\n7dXfi+q62iQzA/2hvRh2b0Z/YDe6tORr71MA1c0NJSt3bGNWp5vJ6j0EZ636oFPyvzIoRdfTHkpc\nLKhO1Bq1cwuzbNlXGtWDIeMKurMn0cXFoqSlYL+xN6pfdchIR3cxDmdI3ewklpqMYk1HdfdAH3sC\n/b5fsvdLOI8zMBjV7Inu4nnUat7ojx9BuZKabzzOmnVwBtVEdffE7ZcfitEa4r9stw/jvN5EwJ0j\nSu09JPmVgIrwx74sVZT2+PJkBiOiL2nKbghw4+vbquNhKNmrpGK3iS0Tw/bvMfzyA/q/DqHYiz9I\n/9/srTvjDKmHs3owzuBQ1OBa2bdRi3JbzukARZe9j8MOTidKahKqbwDoDWDPyv5XUTixby/1GjdG\nd+4UurMncYteh+rrj5KaAlk2dHGnUTKtOC21cDRpmX177O+OQk4fP+yRPdFdvIBh708l2g6ifHDU\nbsiRviOpHZl3SbGS4vLkt2jRIubOncuFCxdo0qQJL7/8Mp06dcq37rZt2+jXr1+e8l27dtGoUe6M\n+19++SUvvfQSJ06coF69ejz99NP57ldSKsof+7JSkdpj1p5UXvtdO9B8QF0zH3b3R1eCz2NKtE2s\n6eiPHEB35i90p/9Cd+EsutgTJZ4QVb0B3Nxw1m6IanBDsWaAmxHVyyf7fePP4QywoKRcuuZ7qx7V\nUGTZJ1EEB8fPpm67DqV2fJd2eFmzZg1Tp07l9ddfp0OHDixatIghQ4awY8cOQkNDC9xvx44d+Pn5\n5byuXr16zv937drF6NGjmTZtGv369WPdunWMHDmSb7/9lrZt25bq+YiKZ1orLw4kZfHtmdy5N788\naeXl39J4qnUZdIC5HmYPHDdE4rghMrfMbv/7KuoEysU4lMup2Qkq8QJKStI1h1fkR3HYwWHPfv5T\nAF3ihcIdSxJfkak6HcpVpsJz1G6YfSWdlYlyJQ1d0kXNdqclBNXLF3S6nO+hWs0b1WhGSU+DLBvO\nkHrZH2YuxKL6BGR3wsmyoWRmZD9f9PLJ89xP1enA0wtVb8gzflXVG1Acdpy+1XHWCEWxWdEfO4yj\n8Q04Qutn316OPQFmdxyNI3BaaqGkX8ZpqYXq6YXuYhyGn7/F0bITWb7amZlKmkuv/G666SaaN2/O\n3Llzc8pat27NgAEDmDFjRp76/1z5HTt2jICA/Btm1KhRJCUlsXbt2pyyAQMGUL16dT744IOSPwkq\n1pVOWaho7ZGW5aT3+gT+SNZ2Lnmvqx//a+BRwF5F4/I2uZKGLu4M+mOH0Z05hpKUgP7YHyjpxZzk\nu5Jz1A
lDfyoGR4Om6P9epUM1mXE0bYXu/Gns7XuguhmzO514emcneZ0O3Znj2VfNQSE4A4JQ/aqj\nVvPJviVs9sjtQPP3reFjp05Rv1k4GNzgcgoYjLk9KKuo0v6dcdmVn81mY9++fUyYMEFT3rNnT3bu\n3HnVfbt3747NZqNx48ZMnjyZrl275mzbvXs3Dz74oKb+TTfdxHvvvVdywYtKxctNx8peAdy0PoGL\n1txP2g9uTcLLTeHW2u4ujK6EeHrhbNAMZ4NmuWWqipJ4Af2R31GSL6JkpKPEn0UXF4su7gyKi1ei\nUBUdipr9/cjq0Q8y0rMH+esN2VcfHtWyOwWdjMFRNwzVxx/FmpHdM1Kvhyxbdl03I6olBCU1u8OQ\nI6w5OBwo9ixUkxlMpfD9LWxPfZ0O1TcAR8Kl3PF4Xr4lH4/Iw2XJLzExEYfDQWBgoKY8MDCQ+Pj4\nfPcJDg7mjTfeoHXr1thsNj799FMGDBjAhg0bcp4TXrhwoUjH/EdMTEwxzqb4+1c2FbE9ZjfS8dAB\nE1lq7rO+uzdd4s1mVm70L/5KDOW2TYLqZX/9m6rilpqE59lj6GyZ2a/T01B1euxmD5wmM06DGw73\naqiKgtvlFHT2LLKq+WLzzb59ps/MwGFyR3HYUZxOnG5GnEYTqqKgGtxQFR2qXl/8nqbt+1y7TqYK\npr/ncT0dW7z3KyXl9ufDhYrTJte6aqxQg9zDwsI0J9S+fXtOnz7N3LlzC+wkU5RjXy+X39IqZypq\ne4QBdp90xm5L0pQ/c9SdnwcGUacYU6BV1DahbeS161yHCtsepUTaI6/SbhOXjXoNCAhAr9eTkJCg\nKU9ISCAoKKjQx2nTpg3Hjx/PeW2xWIp9TFF13dXQgzFNPTVll+0qg79P5KLV4aKohBAlzWXJz2g0\n0rJlS6KjozXl0dHRREYW/tPmgQMHsFgsOa/btWtX7GOKqm1Wex+619ROIB2TYueeTZdwqjIsVojK\nwKW3PceNG8eYMWNo06YNkZGRLF68mLi4OEaNGgXAmDFjAFi4cCEA77zzDrVr16Zp06bYbDZWrVrF\nhg0bWLJkSc4xx44dy2233cabb77J7bffzvr169m2bRvffPNN2Z+gqJDcdApf3BLAoO8Sif7XFGg7\n4234f3SOk8Nq4GsqP1OFCSGKzqXJb9CgQVy6dImoqCguXLhA06ZNWbVqFbVrZ09fFBurfTCdlZXF\ns88+y7lz5zCbzTn1b7nllpw6/yTRWbNm8dJLL1GvXj0WL14sY/xEkSiKwkc9/OnwxQXOp2s7u9Rd\ncV4SoBAVnMtneKkM5GG1VmVqj+Opdrp9FU9aVt5fkz+HBhPsUbieipWpTUqCtIeWtEdelbbDixAV\nQX1vAxturZ7vtpar47Da5bOjEBWRJD8hriEiwJhvArQ6oM/GBFTpBCNEhSPJT4hC6Bxs4sveeafU\n25eYxT0/XiLTIQlQiIpEkp8QhdStppnN/QLzlG88bcWy5BxHkkt2VQUhROmR5CdEEbSsbmT1zflP\nqh75RTx/pUgCFKIikOQnRBH1qmVmw63VMeSz3F+fjRe5klX8eUCFEKVLkp8Q16FzsImfBuadMu+i\n1UnvjRdJsUkCFKI8k+QnxHVq4uuW7zPAg5eyqLP8PG/8nobDKR1hhCiPJPkJUQwtqxs5fU8NquVz\nD/T5Pak8tyfVBVEJIa5Fkp8QxeRt1PHrnRaa+uadLXDuwcvsis/MZy8hhCtJ8hOiBAR76Pnm9ry3\nQAFu2XCREftMsiKEEOWIJD8hSoiPUceJYTXy3fbHZT3+H50j3S4dYYQoDyT5CVGC/Ew6zg+vSX2v\n/Ce8rrn0PH8kyVhAIVxNkp8QJczdoLB7kIV8hgEC0GltPC//Jh1hhHAlSX5ClAK9TuHSyJrcVtuc\n7/ZX9qVx34+JMhRCCBeR5CdEKVEUhRU3BbD3Tku+2786ZaXeyvP8ckF6
gwpR1iT5CVHK6nsb+D4y\nPd9tqTaVWzdeZOL2JM6nO8o4MiGqLkl+QpQBXzdIHhVC/zr53wb98Eg6TT+N48uTGWUcmRBVkyQ/\nIcrQkp4BfNLLv8DtI6IvcfcPiSRnypAIIUqTJD8hylifUHeSR4UQ1cEHL7e8fUK/PmOl2ao4Fh2+\nLCtECFFKJPkJ4SIPNK3G/sEWPPKZFzTdrjJ5Rwqhy8+z+ZzVBdEJUblJ8hPChfzNek4Oq0FkkDHf\n7U4VBn6biO+HZ5n5awqqTJEmRImQ5CeEixn1Ct/eHkjsvTXoZMk/CQK8deAy435KljlChSgBkvyE\nKCequenYeFsgq3oFFFhnxV/p+H90js+O5T90QghROJL8hChnbgk1kzSyJoPruxdY54GtSbRaHcfs\n31LZFZ8pt0OFKCJJfkKUQ4qisKibPzvuCMJYwG/piTQHs/elccuGi4zZmiQJUIgikOQnRDnWxNeN\n+BEhfNDN76r1Vh3P4MYv49lyzirzhQpRCJL8hKgA7qzvQdzwmtxRt+BboYeS7Az4NpGAj88x4ack\nSYJCXIUkPyEqCLNB4cMe/lwcUZP5N/pete7SmHQCPj5Ht6/iSciQOUOF+C9JfkJUMAadwj1hniSP\nCuHD7n74mwr+Nd6fmMqWnXQAABcESURBVEXYJ3H4fniWD/+8wolUOz/EWmX6NFHlGVwdgBDi+t1R\nz4OBdd355FgGD21Lumrdib8k5/zf16jwY78g6nnpUZSClt0VovKSKz8hKjhFUbi7oQcX7qvJmKae\nhdon2abS+vML+H10jkd/TuJEqr2UoxSifJHkJ0QlYdIrvNLBl6SRNRnbrHBJEODjo+m0+vwCE7cn\nyewxospwefJbtGgRERERWCwWunXrxvbt2wu13y+//EJAQAAdO3bUlC9fvhxfX988X1arTA4sqgZF\nUZgd6UvyqBAODw1myFUGy//bh0eyZ4/x/fAs3b6K57szVhk7KCotlya/NWvWMHXqVCZNmsTWrVtp\n3749Q4YM4cyZM1fdLzk5mbFjx9KtW7d8t3t4eHDkyBHNl9mc/yKiQlRmNTz0vN/Nn6SRNdlwa3UG\nXmWoxL/tT8zifz8k4vfRObaezyzlKIUoey5NfvPnz2fYsGGMGDGCxo0bExUVhcViYfHixVfdb/z4\n8dx99920a9cu3+2KomCxWDRfQlRliqLQOdjERz38OTw0mOZ+he/r1v+bi/h+eBbfD8/y8ZErcjUo\nKgWXJT+bzca+ffvo2bOnprxnz57s3LmzwP0WLVpEQkICTzzxRIF1MjIyCA8Pp1mzZgwdOpT9+/eX\nWNxCVHQ1PPT8PNBC8qgQztxbgxGNPAq976Pbk/H7+9Zoz3XxfB+be2s0y6lKYhQVhsuGOiQmJuJw\nOAgMDNSUBwYGEh8fn+8+hw4d4pVXXuH7779Hr9fnWycsLIx58+YRHh7O5cuXWbBgAX369OGnn36i\nQYMGBcYTExNz/SdTAvtXNtIeeZXXNhkflP21N0XHmAOFfzyw92IWQ75P1JT5u6lMb2ijs7+DfNbo\n1Siv7eEq0h55FadNwsLCrrq9wozzy8zMZPTo0bzwwgvUrVu3wHrt27enffv2Oa8jIyPp0qULCxcu\n5NVXXy1wv2s11NXExMQUa//KRtojr4rQJmHA0LYQe9nO3IOXee/wlSIf41KWwuTDJgAig4xMivCi\nVy0Tuv+MJawI7VGWpD3yKu02cVnyCwgIQK/Xk5CQoClPSEggKCgoT/24uDiOHDnCuHHjGDduHABO\npxNVVQkICOCzzz7LcwsVQK/X07JlS44fP146JyJEJVOrmoFXO/jyagdfforLJGpfGluuo9PLzngb\n//shMU/5TSEmbvXWExYGaVlOqhkUGWgvypzLkp/RaKRly5ZER0czcODAnPLo6Gj69++fp37NmjXz\nDIP44IMPiI6OZtmyZdSuXTvf91FV
lUOHDhEeHl6yJyBEFXBjsIkb+2Rfyf1/e3ceFNWVL3D82zTN\npkizNjGAGsAA7qLgEscYn0McJ4s6lmR8ZoqXBCzNM1hxFJP3jGVMApIYdTITE4nR5PHKhZApNQ6a\nqDUBJWKe+y6iuERBwBYaGhq6+/3B0NoiGpX9/j5VVOm5p7vP+VUXP+65Z6kxW9lwrop1pyv5v5La\nh37PnVdq2HnFmbknr9iVrxyp5eXev359ohCPok2HPWfNmkVCQgKRkZFER0ezZs0arl27RlxcHAAJ\nCQkAfPbZZ2g0GiIiIuxe7+Pjg7Ozs115cnIyQ4cOJTg4mPLycj777DOOHz/OsmXLWq9jQnRCzmoV\nL/fuYktQp/S1/GfODfZff/hEeLvZe/TM3nNrC7bnergwLsCFMd2d8XdTc63KTGDX+l9ZdRYr31+u\npqzGwu97uOLR1KGHQjShTZPfpEmTKCsrIzU1laKiIsLDw9m4caPtLu7y5csP/J43b97kjTfeoLi4\nmG7dutG/f3+2bdtGZGRkczdfCEUL02r4/vf1jyiq6iyYzPDj1RpWnTCwt8j0yO+/pbCaLYWNN6f4\nbYAzOy7fGobNKDCS+VtvGToVD0Sl1+tlbvIjkofV9iQejSkxJlarlZXHDLzzc3mrfaaDCv7jyS70\ndFfj76amu5uaEA9HfF0c2nVyVOL343467YQXIUTnplKpeKOfO2/0c7eVXTeaOXOzjq8PX2X9L5pm\n/0yLFdJONT1LVa2C/t4aFgzsxrgAZ9trfr5u4vTNOiYEueDtov5XuZU910yUmyz8W4ALzur2mzzF\ng5PkJ4RoNb6uanxd1fhV1LIqpidmixUHFfxUbGJ/sYn3DpZT04Jn75qtcLCk9q6zUAFm74GfJvoR\nptXw4eEK3j9YAcCkXq6sedqr5RomWp0kPyFEm1E71N9NDdc5M1znzOx+7pjMVkqqLTip4WKFmfcP\nlvPDldbbX3TYt4032cg8byTzfP3s1PeiPJgZ0QWVSoW+xsLKYxUcL6vlT092YUx3F6xYcXO8NQHH\narW26yFXpZLkJ4RoV5zUKrp3qR969HFRk/FbH9s1s8XK0bJa/N3U1JitrD1dycdHDa3avrfzbvJ2\n3s1G5dsvN07Qfb003Ki2oHNz4M3+7jzeRU1Pd0cqai18d7GaCE8No/ydWqPZ4g6S/IQQHYbaQcVA\nn1vJ4p0hHrwzxAOo31u0qMqMm6OKnVdq+J+zVRwuNeHp7MD5ihYcS72HY2X1y0CuVJmZtqusyXpD\nPZwxnijG00lFiIcjU55wo6+Xhj3Xasg8b6Sfl4bZ/dw5d7MOlQqe6Nb0r+4asxW1Chwd5G7zXiT5\nCSE6BY2DioB/rQOcEuzGlODGG3bXWqyc0dfx3/tvsuuXGh53U3Olqm0S4+3231QD9Yky+5qJL09X\n2V3fVGBk4T1mzXZzUpEc5UFBuZlPTxiorLPS10tDTIAz/9nXHa2zAxcq6ig3Wejp7kg3WRcpyU8I\noRwaBxV9vDRkxvjYlTc8lztYYuK7i9XsuFTNkbLmWbzfGspNVmbm6O3KjpXVcqyslo+OND0srHN1\nIO7JLjirVWw8V0VBRR3P93Dl9b5dGeDtxNUqMyazlR7u9qmius6Kkxrbnq0F5XVoHLBtQnA3FbUW\nrhst97xrbU3toxVCCNGGGiakDPJxYpCPE/81uFuTdc+X1/H3C0YcVXDVaGbbxWoutNGw6qMqMlpI\nPlRhV7apwMimAuOver2rWoXRfGupeEygC1cqzQR0UdPfW4OTg4rxgS6U1liY8n0JNWYY97gzG8c1\n3pSgpNqMp5ODbRJUS5PkJ4QQD6BXN0fm9L+1dvH9qMZ1LFYrKmDH5RpO3qhFb7KQW2TiutFMQQdN\nlHdze+ID2H6pfkeeY2W1ZP3r30sO2A/Xfn+lBs+1vxDUVU2ohyNDfZ1IOVSBlfo70a3jfQj1aP41\n
oHeS5CeEEM2sYTgwJtCFmMCmz0hsGG5t2M2kqMrMjsvV7P6lBmOdFT9XB9adqX/+p3N1oMhoaZX2\nt4aLBjMXDWZ23raMpchoYWhmMaV/6t7iny/JTwgh2sidQ386NzXTe3dh+m2nW6wY6dnk64+W1XLg\nuokITw19vTQcLDHxv/lVGOusHCwxddi7zJk5N/h3TxUtueGbJD8hhOig+nlp6Od1a4hwhL8zI/yd\n7/maqjoLGgcVVyrrn1eeK6+j2mylu5uaaaFubL9Uzd8vGMlths3JH9aGc0b+7uDCiVCzbbu55ibJ\nTwghFKRh95me7o7M7NO10fWEiK4kRDQub9CwJZ3JAodKTHx1tgovZwee8ncmTOvIhYo6lh0x2B2A\nHOmjISbQxbZd3K/xRq/aFkt8IMlPCCHEA2iYjemshmidM9E6+zvNHu6OjO5+9+ec8wZ247KhjloL\n+Lupqai1kH21hu2XqwnsosbPVc1jbmqeDXShsCC/RfshyU8IIUSrCbhtLaCro5rJT7gx+YnGGxK0\nNFnmL4QQQnEk+QkhhFAcSX5CCCEUR5KfEEIIxZHkJ4QQQnEk+QkhhFAclV6vt96/mhBCCNF5yJ2f\nEEIIxZHkJ4QQQnEk+QkhhFAcSX5CCCEUR5KfEEIIxZHk9wjS0tLo378/Op2O0aNHs3fv3rZuUotY\ntmwZY8aMITAwkODgYKZOncqJEyfs6litVj744APCwsLw9/dnwoQJnDx50q6OXq8nPj6eoKAggoKC\niI+PR6/Xt2ZXWsSyZcvQarX8+c9/tpUpMR7Xrl1jxowZBAcHo9PpiI6OJicnx3ZdSTExm80sWbLE\n9vuhf//+LFmyhLq6Oludzh6PPXv2EBsbS3h4OFqtlvT0dLvrzdX/48eP87vf/Q5/f3/Cw8NJSUnB\nar3/IgZJfg8pMzOTpKQk3nzzTX788UeioqKYMmUKly5dauumNbucnBxeeeUVtm/fzubNm3F0dOTF\nF1/kxo0btjorVqzgr3/9KykpKezatQtfX18mTpxIRcWt87teffVVjhw5QkZGBhkZGRw5coSEhIS2\n6FKz2b9/P2vXrqVPnz525UqLh16vJyYmBqvVysaNG9m3bx9Lly7F19fXVkdJMVm+fDlpaWmkpKSQ\nl5dHcnIyq1evZtmyZbY6nT0elZWVREREkJycjKura6PrzdH/8vJyJk6ciJ+fH7t27SI5OZm//OUv\nfPLJJ/dtn6zze0hjx46lT58+rFy50lY2ePBgXnjhBd555502bFnLMxgMBAUFkZ6ezvjx47FarYSF\nhfHaa68xd+5cAIxGI6Ghobz77rvExcVx+vRpoqOjycrKYtiwYQDk5uYyfvx49u/fT2hoaFt26aHc\nvHmT0aNHs3LlSlJSUoiIiCA1NVWR8Vi8eDF79uxh+/btd72utJhMnToVT09PVq1aZSubMWMGN27c\nYMOGDYqLx+OPP87SpUuZNm0a0Hzfhy+++IJFixZx5swZW4JNTU1lzZo1nDhxApVK1WSb5M7vIZhM\nJg4dOsQzzzxjV/7MM8+wb9++NmpV6zEYDFgsFrRaLQCFhYUUFRXZxcPV1ZURI0bY4pGXl0fXrl2J\njo621Rk2bBhdunTpsDFLTEzkhRde4De/+Y1duRLj8d133xEZGUlcXBwhISE89dRTfP7557bhJ6XF\nZNiwYeTk5HDmzBkATp06RXZ2NuPGjQOUF487NVf/8/LyGD58uN2d5dixY7l69SqFhYX3bIMcZvsQ\nSktLMZvNdkM6AL6+vhQXF7dRq1pPUlIS/fr1IyoqCoCioiKAu8bj6tWrABQXF+Pt7W33l5hKpcLH\nx6dDxmzdunUUFBTw+eefN7qmxHhcuHCBL774gpkzZ5KYmMjRo0eZP38+APHx8YqLSWJiIgaDgejo\naNRqNXV1dcydO5dXX30VUOZ35HbN1f/i4mK6d+/e6D0arvXs2b
PJNkjyEw/krbfe4qeffiIrKwu1\nWt3WzWkTZ8+eZfHixWRlZaHRaNq6Oe2CxWJh0KBBtiH/AQMGUFBQQFpaGvHx8W3cutaXmZnJ+vXr\nSUtLIywsjKNHj5KUlERQUBAvv/xyWzdPIMOeD8Xb2xu1Ws3169ftyq9fv46fn18btarlLViwgG++\n+YbNmzfb/UWl0+kA7hkPPz8/SktL7WZhWa1WSkpKOlzM8vLyKC0tZdiwYXh7e+Pt7c2ePXtIS0vD\n29sbLy8vQDnxgPrvwJNPPmlX1rt3by5fvmy7DsqJycKFC3n99deZPHkyffr0ITY2llmzZvHxxx8D\nyovHnZqr/35+fnd9j4Zr9yLJ7yE4OTkxcOBAdu/ebVe+e/duu/HpzmT+/Pm2xNe7d2+7az169ECn\n09nFo7q6mtzcXFs8oqKiMBgM5OXl2erk5eVRWVnZ4WI2YcIE9u7dS3Z2tu1n0KBBTJ48mezsbEJC\nQhQVD6h/FpOfn29Xlp+fT2BgIKC870hVVVWjkRG1Wo3FYgGUF487NVf/o6KiyM3Npbq62lZn9+7d\nPPbYY/To0eOebVAnJSUtasY+KYa7uzsffPAB/v7+uLi4kJqayt69e/nkk0/w8PBo6+Y1q7lz57J+\n/XrWrl1LQEAAlZWVVFZWAvV/CKhUKsxmM8uXLyc4OBiz2czbb79NUVERy5cvx9nZGR8fH37++Wcy\nMjLo168fV65cYc6cOQwePLjDTN1u4OLigq+vr93Ppk2bCAoKYtq0aYqLB0BAQAApKSk4ODjg7+/P\nP//5T5YsWcKcOXOIjIxUXExOnz7Nhg0bCAkJQaPRkJ2dzbvvvsukSZMYO3asIuJhMBg4deoURUVF\nfP3110RERNCtWzdMJhMeHh7N0v/g4GC+/PJLjh49SmhoKLm5uSxcuJDExMT7/oEgSx0eQVpaGitW\nrKCoqIjw8HDef/99Ro4c2dbNanYNszrvNH/+fBYsWADUD0ckJyezdu1a9Ho9kZGRfPjhh0RERNjq\n6/V65s2bxz/+8Q8Axo8fz9KlS5t8/45kwoQJtqUOoMx4bN++ncWLF5Ofn09AQACvvfYaCQkJtgkL\nSopJRUUF7733Hlu3bqWkpASdTsfkyZOZN28eLi4uQOePR3Z2Ns8991yj8pdeeolPP/202fp//Phx\n5s6dy4EDB9BqtcTFxTF//vx7LnMASX5CCCEUSJ75CSGEUBxJfkIIIRRHkp8QQgjFkeQnhBBCcST5\nCSGEUBxJfkIIIRRHkp8Q4lcpLCxEq9XatugSoiOT5CdEO5Keno5Wq23y54cffmjrJgrRKcipDkK0\nQ0lJSfTq1atRed++fdugNUJ0PpL8hGiHxo4dy9ChQ9u6GUJ0WjLsKUQHpNVqmTNnDpmZmURHR6PT\n6Rg5cuRdh0ULCwuJi4ujV69e+Pv7M2bMGLZu3dqonslkIjU1laFDh+Ln50doaCgvvfQSJ0+ebFR3\n3bp1DBw4ED8/P8aMGcOBAwdapJ9CtBS58xOiHSovL6e0tLRRube3t+3f+/bt49tvvyUhIYGuXbuy\nbt06YmNj2bJlC8OHDwfqzzaLiYnBYDCQkJCAt7c3GzduZPr06axevZo//OEPQP1htLGxsezatYsX\nX3yR+Ph4qqqqyM7O5tChQ4SHh9s+NzMzk8rKSuLi4lCpVKxYsYLp06dz6NAhOdxXdBiysbUQ7Uh6\nejqzZs1q8vq1a9dwcXGx7Wq/Y8cOoqKiACgrK2Pw4MGEhYWRlZUFwFtvvcXf/vY3tmzZwqhRowAw\nGo08/fTT6PV6jh07hkajsX3u4sWLmT17tt1nWq1WVCoVhYWFDBgwAC8vL9sO+gDbtm3jj3/8I+vX\nr+fZZ59t9pgI0RLkzk+IdiglJaXRyehQf35ig0GDBtkSH4CXlxdTpkxh9erV6PV6tFotO3bsYMCA\nAbbEB+Dq6sorr7zCvHnzOH
z4MEOGDGHz5s1otVpmzJjR6DPvPBrm+eeftztSZsSIEQBcuHDhofsr\nRGuT5CdEOzR48OD7TngJDg5usuzixYtotVouXbp01zPVGhLrxYsXGTJkCOfPnyckJMQuuTYlICDA\n7v8NiVCv19/3tUK0FzLhRQjxQNRq9V3LrVZ5giI6Dkl+QnRQ586da7IsKCgIgMDAQM6ePduo3pkz\nZ+zq9erVi/z8fEwmU0s1V4h2RZKfEB3UwYMHycvLs/2/rKyMTZs2ER0dbRuKjImJ4fDhw+zdu9dW\nr7q6mjVr1qDT6Rg4cCBQ/xxPr9ezatWqRp8jd3SiM5JnfkK0Qzt37qSgoKBReWRkJCEhIQBEREQw\ndepU4uPjbUsdDAYDCxcutNVPTEzkm2++YerUqXZLHU6dOsXq1atxdKz/FRAbG8vGjRtZuHAhBw8e\nZMSIEVRXV5OTk8PEiROJjY1tnY4L0Uok+QnRDiUnJ9+1fOnSpbbkFx0dzahRo0hOTubChQuEhISQ\nnp7OyJEjbfV9fX3Jyspi0aJFpKWlYTQaCQ8P56uvvrKbCKNWq9mwYQMfffQRGRkZbN26FU9PT4YM\nGWK7OxSiM5F1fkJ0QFqtlri4ODlhQYiHJM/8hBBCKI4kPyGEEIojyU8IIYTiyIQXITog2U1FiEcj\nd35CCCEUR5KfEEIIxZHkJ4QQQnEk+QkhhFAcSX5CCCEUR5KfEEIIxfl/BtLypqGzJ78AAAAASUVO\nRK5CYII=\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "8Oj4jZIKTgGs",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 325
        },
        "outputId": "35e597b9-a5ea-4bd3-e148-1dcff19fb7c7"
      },
      "source": [
        "#visualize the training accuracy and the validation accuracy to see if the model is overfitting\n",
        "plt.plot(hist.history['acc'])\n",
        "plt.plot(hist.history['val_acc'])\n",
        "plt.title('Model accuracy')\n",
        "plt.ylabel('Accuracy')\n",
        "plt.xlabel('Epoch')\n",
        "plt.legend(['Train', 'Val'], loc='lower right')\n",
        "plt.show()"
      ],
      "execution_count": 18,
      "outputs": [
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAcgAAAE0CAYAAACo8aOIAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzs3XdYFNf6B/DvbKMJLCJFjYgSRBAV\nQUWNFTWxJHbsJRojtlxzNRqTG425MdHExPxiNzEklmjkKhYUu6jYK7ZYMIoKyIL0unV+f6Ary84u\nW6W9n+fhSXbmzJkz4+6+e86cwuTk5LAghBBCiAZeZReAEEIIqYooQBJCCCEcKEASQgghHChAEkII\nIRwoQBJCCCEcKEASQgghHChAEvIaPX78GGKxGNOmTasS+RBCdKMASWo0sVgMsVgMFxcXPHr0SGe6\nQYMGqdNGRka+xhISQqoqCpCkxhMIBGBZFps2beLcn5SUhJMnT0IgELzmkhFCqjIKkKTGq1u3Ltq1\na4etW7dCoVBo7d+8eTNYlkWfPn0qoXSEkKqKAiSpFcaPHw+JRIIDBw5obFcoFPjzzz8REhKCFi1a\n6Dw+KSkJ06dPR0BAANzc3ODr64v3338ft27d4kyfn5+Pzz//HAEBAfDw8EC7du2watUqsKzumR1L\nSkqwcuVKdOvWDQ0bNkSDBg3QvXt3REZG6j3OEDKZDL/88gvCw8MRGBgId3d3NG7cGAMGDMDBgwd1\nHpeamor58+cjJCQEnp6eaNy4Mbp164ZvvvkGcrncpLRisRj9+/fnPN+SJUsgFosRHx+vsV0sFqNl\ny5bIzc3F/PnzERgYCFdXV6xZswYA8ODBAyxatAjdu3eHj48P3N3dERgYiI8++ghPnz7VeX1xcXEY\nOXIkfH194e7ujoCAAAwfPlz9Pjl27BjEYjGmT5/OebxSqURAQAAaNmyI3Nxcnech1RMFSFIrDBky\nBI6OjlrNrIcOHUJaWhomTJig89iEhAR069YN27ZtQ8uWLfHRRx+hc+fO2LdvH3r16oXjx49rpJdK\npRg4cCDWrFkDsViMqVOnonPnzvjhhx/w2WefcZ4jPz8f/fv3x4IFC8CyLEaPHo0xY8YgLy8Ps2fP\n1vkFbajs7GzMnz8fBQUF6NGjB2bMmIF+/frh5s2bGDlyJH7//XetY65du4bOnTtj3bp1cHd3x5Qp\nUzBixAjUrVsXP/30EwoLC01KayqZTKYO6L1790ZERAQaNmwIAIiJiUFkZCQaNmyIoUOHYsqUKWje\nvDm2bNmCsLAwpKSkaOX37bffYvDgwYiPj0ePHj0wc+ZM9OjRA48fP8bmzZsBAGFhYWjSpAl27dqF\nnJwcrTwOHjyI1NRUDBkyBM7OzmZfI6la6KELqRUcHBwwbNgwbNy4EU+fPkWjRo0AAJs2bUKdOnUw\nZMgQrFy5Uus4lmUxdepU5ObmYs2aNRg9erR634kTJzB48GBMmTIFN27cgL29PQBg1apVuHr1Kvr1\n64ctW7aAxyv9Hfrvf/8b3bt35yzf559/jitXrmDRokX4+OOP1dulUinGjRuHbdu2YcCAAejbt69J\n1y8Wi3Hz5k11QHkpNzcXffr0wVdffYWRI0fCzs4OQGkwmjBhArKysrB27VqMGjVK4ziJRII6deoY\nndYcEokE/v7+OHDggPpevzRixAhMnz4dNjY2GtuPHz+OYcOG4YcffsBPP/2ksf37779Ho0aNcODA\nAbzxxhsax70MqAzDYNKkSViwYAH++usvTJ06VSPdyx8WkyZNMvv6SNVDNUhSa0yYMAEqlQpbtmwB\nUPolePToUQwdOlTnF/iFCxdw9+5dBAcHawRHAOjevTveffddPH/+HLGxsertf/75JxiGwVdffaUO\njgDg5eWFiIgIrXNkZ2dj27Zt
aNWqlUZwBAAbGxssXLgQALB9+3bTLvxFPuWDIwA4OztjzJgxyMnJ\nwdWrV9XbDxw4gCdPnuDtt9/WCngA4OHhoe7UZExac3399ddawREAGjRooBUcgdIaYPPmzbVq+evX\nr1fnVz44AtC4V2PHjoWtrS3++OMPjTSPHz/G8ePHERQUhDZt2phyOaSKoxokqTWCgoLQqlUr/Pnn\nn5g3bx42b94MpVKpt3n1+vXrAICuXbty7u/evTtiYmJw/fp1DBs2DPn5+Xj48CE8PT3h6+urlf6t\nt97S2nblyhUoFArweDwsWbJEa//LjkX379836Dp1uXPnDlasWIGzZ89CIpGgpKREY/+zZ8/U/3/5\n8mUAQK9evSrM15i05rC1tUVgYCDnPpZlERUVha1bt+LWrVvIycmBUqlU7xeJRBrpjSmzi4sLBg8e\njG3btuHcuXPo2LEjgNLWB5VKRbXHGowCJKlVJkyYgDlz5uDQoUPYsmULAgMDERwcrDN9Xl4eAMDd\n3Z1zv4eHBwCoO2i8TO/m5saZniufrKwsAKXPOhMSEnSWpaCgQOe+ily6dAkDBgyAQqFAt27d0Ldv\nXzg6OoLH4+HmzZuIjY2FVCpVp395PfXr168wb2PSmqNevXpgGIZz3+eff461a9fC09MTPXv2RP36\n9WFrawsA2Lp1q1ZHndzcXDg5ORnc9Dt58mRs27YNv//+Ozp27Ai5XI4tW7bAyckJQ4cONe/CSJVF\nAZLUKuHh4ViwYAHmzp2LlJQUrSbN8pycnAAA6enpnPslEolGupf/zcjI4EzPlc/LY6ZMmYLvv//e\ngKsw3g8//IDi4mLExMSgS5cuGvuWL1+u0UQMQN3hpGytUhdj0gKlz/XK1u7K0tcTVFdwzMjIwPr1\n6xEQEIBDhw7B0dFRY//OnTs5y5yZmYmCggKDgmRISAiCgoKwZ88eLF26FPHx8ZBIJPjwww/h4OBQ\n4fGkeqJnkKRWcXJywuDBg5GSkgJ7e3uEh4frTd+6dWsA0Bp28NLJkycBlDbfAoCjoyOaNm0KiUSC\nBw8eaKU/c+aM1ra2bduCx+Ph3LlzRl2LMR4+fAgXFxet4KivTABw9OjRCvM2Ji1Q2mEoOTmZc9+1\na9cMyqOspKQkqFQq9OjRQys4pqSkICkpSesYY8sMAB988AGkUim2bt2q7pwzceJEo8tLqg8KkKTW\n+fzzz7Flyxbs2LGjwq75oaGh8PPzw5UrV7Q6yZw8eRIxMTFwdXVFv3791NvHjBkDlmWxcOFCqFQq\n9fYnT56oO4eUVa9ePYwYMQI3b97EkiVLOCczSElJMesZpJeXF7Kzs7XGbW7atAnHjh3TSt+3b194\neXnh8OHD+Ouvv7T2p6enq8tpTFqgNDglJyfj8OHDGuk2btyICxcumHRtAHD+/HmNmmlBQQFmzZrF\neT9fdpZasGABZ7BOTU3V2jZs2DCIxWKsXr0aJ0+eRIcOHRAQEGB0eUn1QU2spNZp2LAhZ49OLgzD\nYO3atRg0aBCmTp2KXbt2oUWLFnj06BH27t0LkUiEdevWafSsnDlzJvbv34/Y2Fh06dIFvXr1Ql5e\nHnbt2oWOHTtqTVYAAN9//z0ePnyI7777Dtu3b0enTp3g4eGhroleunQJ33zzDZo1a2bSNU+bNg3H\njh1D3759MWjQIDg5OeHatWs4f/48Bg4ciD179mikF4lE2LhxI4YMGYKpU6di06ZNaNeuHWQyGR48\neIATJ04gMTERYrHYqLQA8NFHH+HYsWMYO3YsBg0aBDc3N/Xz13feeQeHDh0y6to8PDwwdOhQ7Ny5\nE126dEGPHj2Ql5eHuLg42NraomXLlrh586bGMWFhYZg7dy6WLVuGDh06oF+/fmjUqBEyMjJw+fJl\neHt7Y+vWrRrH2NnZYfTo0erJCaj2WPNRDZKQCgQHB+PEiRMYOXIkrl+/jhUrVuDUqVPo378/jh
w5\ngt69e2ukt7Gxwe7duzF9+nRkZWVh3bp1OH36NObMmcPZSxUobZrdt28fli9fjvr162Pfvn3qmopA\nIMCXX36JwYMHm3wNvXr1wl9//QU/Pz/s2rULmzdvho2NDWJiYvD2229zHtOmTRvEx8fjww8/REpK\nCtauXYtt27YhIyMDs2fP1nj2Zkzarl27qidd2Lt3LzZv3gxHR0ccOXJE3VRtrJUrV2LOnDkoLi7G\nhg0bcPz4cfTp0weHDx9WP+Mt7z//+Q927NiBjh074siRI1ixYgWOHj2KRo0a6ezZPG7cOACl0xcO\nGjTIpLKS6oPJyckxbw4rQgipJXbs2IHJkydj5syZWLx4cWUXh1gZBUhCCDGAUqlEWFgYbt68iatX\nr8Lb27uyi0SsjJ5BEkKIHufOncOZM2dw5swZXL9+HePHj6fgWEtQgCSEED1OnDiB7777DmKxGGPG\njNH5HJnUPNTESgghhHCgXqyEEEIIBwqQhBBCCAcKkIQQQggHCpCvSWJiYmUXoUqh+6GN7okmuh/a\n6J5osvb9oABJCCGEcKAASQghhHCgAEkIIYRwoABJCCGEcKAASQghhHCgAEkIqdESc+U4mlwCqZIm\nDSPGoblYCSE11qGnJRhzLBMKFmjtKkTce27gMUxlF4tUE1SDJITUWPPO50DxouJ4PVOOI8nSyi0Q\nqVaoBkkIqXZYlsXWB0W4nilHkKsQN7LkaOoowKTmDhDwXtUQHxcoNY6LfybFO41sX3dxSTVFAZIQ\nUu1se1CEGadztLYXKVh83MqxEkpEaiJqYiWEVDvTOYIjACy6kveaS0JqMqpBEkKsLilfgVlncpBe\nrMT8Nk5wFjH44lIe7PjAjx3FaOUqwrq/CzD/Qm6Zo+yB0ylGn0umZPHfK3k48LRYa9+q2wV4WqjA\nnqQSrX0+TnwAgIBhMLu1I0b42GPlzXysuFWAjBKVOt2wpnbY8VA7b3+xAO52fPydLUdnTxus6CyG\no9C8OohMyeKLS7k4niJFrzdsMEFsVnaVpkihwqfnc3FOIsNAb1t83sYJ/BdN4SdSS/DFpTzcypJD\nxANa1hViWQcxgt1EnHkdTS7Bl5dz4STi4eOGDHytWO4qUYPcsGEDWrVqBQ8PD3Tr1g1nz57Vm/5/\n//sfOnfujPr166NZs2aYMmUKJBKJRpo9e/YgNDQU7u7uCA0NRUxMjMZ+lmWxZMkSNG/eHJ6enujf\nvz/u3Llj8WsjhAD/vZKHk8+kuJOjwJRTWRgfl4VbWXJcypBj3vlcJOUrygVH0807n4NVtwvwT56S\ncz9XcASAf/KU+CdPiXu5Cnx0Ohunnkmx4HKeRnAEwBkcAeBOjgInn0mRUaLCrqRirLtdYN6FANj/\npBi/3CnEgzwF1v1diFNZfLPzrAz/+6cYmxOL8CBPgR9vFOCsRAYAUKpYTIvPxq0sOQBApgKuPJfj\n47PcLQRyFYup8dm4na3AOYkMyx9yB1FLqfQAGR0djfnz52POnDk4deoU2rdvj/DwcDx9+pQz/fnz\n5xEREYFRo0bh3Llz+PPPP3H37l18+OGH6jQXL17EpEmTEB4ejvj4eISHh+P999/H5cuX1Wl+/vln\nrF69Gt999x2OHz8ONzc3DB48GPn5+Va/ZkJqorQiJTKKuYNS9KNXQUWqBPJkr8Yknk+XIeYxd9Ax\nxR/3i8zOQ6YCBhx8blYe31zLxwWJFI/yFEgtVOJhngJXMmS4lC5DYq4cN7PkeFqgwOk0Ke7lyMGy\nLB7mKXA1Q4YbmTI8ylNg4olsjTwXJ4qQ8FyGv7PluJEpw4nUEvydLYekSImL6VLkyVTIKlEitbD0\n34FlWTzOVyDhuQwJz2UoedGlN71YiUvpMuTLVXheosStLDnOS6RIeC7DlQwZ7ubIkV6sxO2s0nIm\n5StQIFchKV8BFWvceNL0YiVmlQt4k05kIUeqwoM8BZ4Vqb
SOuZElV5+n7PXcyZbjeZkfLBdyrPuD\ngcnJyanU0bM9e/ZEixYtsGLFCvW24OBgDBw4EF9++aVW+pUrV2L9+vW4deuWetuWLVvw6aefIiWl\ntDlm4sSJyM7Oxu7du9VpBg4ciHr16uG3334Dy7Jo3rw5PvzwQ3zyyScAgOLiYvj6+uLrr7/GxIkT\nLX6diYmJ8PW1ZmNA9UL3Q1t1vierbuVj4eU88AAs7yTG+GYOGvvFvxvfVErMM7tVHZQoWay5Xaje\n5mnHw3Afe6y4ZXrttmt9G0S/7arRW1iXTfcL8a8z3LXBOoLSpuz/6nhunDquPo4kSxFxKgslSmBu\na0cM8LZDlz3pGumy3m9gtbGtlVqDlMlkSEhIQFhYmMb2sLAwXLhwgfOY0NBQSCQSHDhwACzLIjMz\nE9HR0ejdu7c6zaVLl7Ty7NmzpzrPx48fQyKRaKSxs7NDp06ddJ6XEMJNqmTxxaU8qFhAwQLfXdNs\nhWGNrHEQy1h+o0AjOAJAWrHKrOAIAKeeSXE4mbuZuiyWZfHNVd2dpgoUrM7gCJT2SJ5/IQclLxol\nll3PRzpHC0WB3Hrvr0rtpJOZmQmlUgk3NzeN7W5ubkhPT+c8pn379vjtt98wZcoUFBcXQ6FQoEeP\nHli7dq06jUQi0Zvny+eVXGmePXums7zmLs5Ji51qovuhrarekzwFcDRDgIZ2KrR3ViE+i49UKYOU\nEgYnMvko+1s7pUiJH+MfQsADYtMFKG0Rq57Pzgi30cey4F9HCVchkFjEwNOGxagGCjjwWSSX8OAq\nZBGdJoCk2PR/9y2XH+NZkY3GtvlnMlC+Xnf93kN42poWJCtqsal2vVjv3r2LTz/9FHPnzkVYWBgk\nEgkWLFiAjz/+GOvXr7fquc1p/qrOzWfWQPdDW1W9JwoVi3bREjzKL/313sxZgPu5Cr3HfP3ARu9+\nUv3dKXgV/CRS4HqeZX8ELUrUfg8lFmo3ero0bAzfukKLnvulSg2Qrq6u4PP5yMjI0NiekZEBd3d3\nzmOWL1+O4OBg/Otf/wIABAYGwt7eHn379sXChQvRsGFDeHh46M3Tw8NDva1Ro0YGnZeQ2urQ0xJ1\ncARQYXAk5HXKlWl38rGUSn0GKRKJEBQUhLi4OI3tcXFxCA0N5TymuLgYfL7mL5WXr1Wq0hvVrl07\nvXk2btwYHh4eGmlKSkpw7tw5neclpLa6RwGRVGHWDJCV3sQ6Y8YMREREICQkBKGhoYiMjERaWpq6\nJ2lERAQAqJtP+/Tpg1mzZuG3335Dz549kZaWhs8++wytW7dW1wanTp2Kfv364aeffkL//v2xb98+\nxMfH4+DBgwAAhmEwbdo0LF++HL6+vnjzzTfxww8/wMHBAcOGDauEu0BI1SWswotfOAoZjH7THnVt\neVhyzfQhWqcHuuN0mpRzLKanHQ9pxdb7EibmyZXV0E46ADBkyBBkZWVh2bJlkEgk8Pf3R1RUFLy8\nvAAAycnJGunHjBmDgoIC/Prrr/jiiy/g5OSErl27YtGiReo0LwPt4sWL8e2336JJkyaIjIxE27Zt\n1WlmzZqF4uJizJ07Fzk5OQgJCUF0dDQcHWkeR0LKMqQ7f2UZ2sQO33UonV7G1AAZ5CpEYN3Sv/IB\n8otgJ3zS2hF1/0iBijrjVknWrEFW+jjI2qKqdsCoLHQ/tFXFe5JRrITvX2mv9Zxt3YR4mKdElrTi\nL773m9nj/95yAWDYWMuOHiKcezGLy0tH33VD2xfTmpXP42WA3JpYqHP+V1ME1xPi6nO5ycf/0tUF\nK24VqGegsSZbPuBmx8fTAu5JICrbZ20c8WmQk1XyrvQaJCGk6vr9XmHFiSysqaMA+/q44bvTSfjp\nkeFTidnyoR4zx2VhiBNmt3JEerES6cUqlChZ+DoL4CzS3RXD5kV3h9G+DloBMkAswP+9JYavsxAl\nShYiHiBXAc9LVBDygI
YOfDwvUcGrDh8PchUoVrKob8+HXFW6L1emwr0cOXycSstQb2OqRv4zW9TB\nKo7p6n7p6oLhPvYY6G0Hj02pWvu56MqrrJB6Qkxs7oCZ5a7z0egGsOED93IU6Libe/idJYW6i3Av\nR44cA5tObfnWa+Go9KnmCCFVS1K+An8mFmLFzXx8a8ZzPQBoVIePvkauv/hRS0fYChgM8VTAqcwD\n0Pcaa+dTdgKV1Z1d9OZbR1Ca2N2Oj8C6QrR1E2kFxw+av5oBiM8AI33s1a+XdXDWSLu0gxjt3W3g\nYsNDfXs+XG358LQvzdtPLEQdIQ/ejgLwGAbNxEK0dhXB3Y6Phg6lUddZxEN7dxu42vIh4DFoVWao\nwptOAnzS2hG25UZO2PBY9PMqvQ82BgYGBwGDuUEVPzr6up0zhje1h1j0Kt+wBjawEzDgMQz8Xawz\nlKK8X7u5YESZ+67PofZFmNXSeo/FKEASQtROppYgaIcEM07nYOFl85aO8qrDx5rOLljczhldPEVo\n5szdYNXU8VUUWBTihJYvAoUtH1jX1QUBYgG6eIrwVVtnzuNferexHT70d4CvswAzWtTR2i9VVlwj\n+aS1I955wwZ+zgKsfEsMN7tXZRvja49xvvalwauVIzp7Wnai7J/fEiOknhBBrkKs6iyG2IaHNZ1d\nYPciEDoIGCxqJkOdMiuEBIi172kzZwGWhjqjW30bNBcLsKaLC5xFPOzrW0/nuSf62aOjhwgiPoP1\nXesiwEWATh4iLAnVvOdhDbTHJh7qVw9dPEVwt+NhjK89PgrUvveG+r9OYnjVERj03HvFW2LUte5c\n5dTESgh5Zd5501bUCHAR4OwgD537Y/qWzlr1flwWdie9mpjc25GPq8M8dR7Xz8sO/bzsdO73dnz1\nFWbDZ7Csw6v1oFaXa1L0sK94IHt9ez629+YOJPYCHlZWUEs1R5t6Ihx7T3Mc9pCm9hjS9FVtqvxM\nS+E+9viq3HRtF4eU/jtMDdAMVJ09bRDqLsKFdM1nsDkTG2q8fqeRLd7RUesf4G2H46lSjW2hHjbq\nf9+Xvm6nGVjX3i7AZxd1v7f29a2Hzp6vgm9zjsBf1v6+9fCWpw2sPfEU1SAJqWUOPCnGl5dycTlD\n84sys0Rp8pjHJe311+5e+qyNI8rWDcoGNEOUbea0FzCY6OegM+3CkFcdN8QiBgMa6w601dWk5g6w\nF7y6o+WbgcuzE5j3vG64jx2cyzTBftXWsM4xSj3z8da356GDu2ZVcHATOziJdJe1o4eVq44vUA2S\nkFrkSHIJRh3LAlC6ePDlIR5o4iQAy7IYdiSzwuP5TOmXk59YCDs+AwXLorOnDbo1MOw5o59YiL19\n6mHf42J08rRB7zeMez45ubkDRDwGd3LkGOfrACc9HWxmBdZBHQGDpAIFJvk5wNbM4FAVOYt4ONLf\nDZsTC+EvFmJ8M/3P7uzM7NBiL+Dh6LtuiLxbiKZOAkzS8wOlLK4hMovbOeFZkQof+jtoNanWEfJw\nuL8bfr9bCB8nAdzseJh4IhsBYgGiertabfWO8ihAElKLTI9/tb6gkgW+v56PtV1c8LhAiWsGDDtI\nHtvA7FpIl/o26FLftLlaGYbBBAO/lPk8BlMCTH8eVl20qCvE0lDDauJ9vWxx4OmrlTiC6xnf8cbX\nWYglBp5PfR43zRqfDR+YGai/c01zsVA9xhUABjcxrOOOJVETKyG1SEaJ5tjChOcyfH0lF0E7JBUe\nO7m5g9nBkVSukT726s5StnztZ4XW8paHSN0symeAX7vWfS3nNRfVIAmpxe7kKHAnR//4uEUhTmjn\nLkKn1/Tch1iPiM/gxAA3nE2TwcdJgCZOrycEMAyDPe/Uw5k0KRo48OEnfj1DRsxFAZIQotf7fg4Q\n21BjU01hL+Chl5HPfi1BxGfQo+HrP6856F1PSC3B6ulJqI89NauSWooCJCG1xPVM0+bt
FFlxKi9C\nqjIKkIRUY39nyzHs8HN02i3B8ZQSjX2n06QIP/wcfz0oAsuyWHDJ+EkA3rLwbDGEVCf0DJKQaupy\nhgy99mWoXw85nImfO4kxwc8BOx4WYfLJ0iEdR1KkWHErH39nGzcJwHuNbbH4NfVyJKQqogBJSDW1\n6LJ2jXDW2RxM8HNQB8eXjAmOh/vXQ3t308YpElKTUBMrIdXU6TQZ53ZD1kXUxduRj+B61KxKCEAB\nkhDygocdDwf7uRm0kgIhtQEFSEIIAOC/7ZzhacCKF4TUFhQgCamG4p9JK05kJCFVHAnRQAGSkGpI\n39p6pqqJq10QYg7qxUpINXAjU4ZsKYt7OXJkSlW4lWXaoH9d6ggYdDNxhQ1CaioKkIRUcZ+ez8H6\nO4UWz5fPAPXt+RDxgC+CneAgpAYlQsqq9E/Ehg0b0KpVK3h4eKBbt244e/aszrTTpk2DWCzW+mvQ\noIFRaeLj4znT3L9/36rXSoixMkuUVgmOAPBodH3cGu6Jq8M8MaTp619rj5CqrlJrkNHR0Zg/fz5+\n/PFHdOjQARs2bEB4eDjOnz+PRo0aaaVfunQpFi1apLHtnXfeQadOnYxK89L58+fh4uKifl2vXj3z\nLogQC7ubY9zsN4Zq7SqEk6jSfx8TUqVVaoBcvXo1Ro8ejQkTJgAAli1bhmPHjiEyMhJffvmlVnpn\nZ2c4O7+a+ur8+fNISkrC+vXrjUrzkpubG1xdXS15SYRY1PNyCxybq1t9G/iJBZjVUv9q7oSQSmxi\nlclkSEhIQFhYmMb2sLAwXLhwwaA8Nm7cCH9/f4SGhpqUpnv37vDz88OAAQNw6tQp4y6AkNfgWZHS\novnt6VMP33cQo6EDjXckpCKVFiAzMzOhVCrh5uamsd3NzQ3p6ekVHp+bm4vdu3dj/PjxRqfx9PTE\n8uXLsXnzZmzevBm+vr4YOHCg3uefhLxOz4qUGH88E/MvmD6cY2moMyb5Oahff9yyjiWKRkitUW17\nsUZFRUGlUmHkyJFGp/H19YWvr6/6dfv27fHkyROsWLGC81nlS4mJiWaV2dzjaxq6H9pe3pMv74kQ\nm1Hxx/O75lI8KmKw7onm/KmRrUrQUliEsHpAsJAHAYBgpyIkJlb847MqofeINronmsy5H2XjAJdK\nC5Curq7g8/nIyMjQ2J6RkQF3d/cKj9+4cSMGDBig0cnGlDQvhYSEIDo6Wm+aim6mPomJiWYdX9PQ\n/dBW9p7EnjZswvEBrRuDzwDrnqSpt3Wtb4MhIQ3Vr5sZkpG0GKJdf0B0YDtUzi6Q9x4GRloMUcwW\nAICs/ygoWoVCdCQaqnqekA2ZCNjYaWTBv3QCwhP7wTo6Q957KIQn9wEyKWSD3wfr8Yb2ORVyiKIj\nwfvnDlR+rSEbNB7gvWr6LXv6e14XAAAgAElEQVQ/BGePQHAlHspmLSHvPRRMznOIdkYCLAvZ0Elg\nXT2AwnyIdv4GJj8X8gHjoGrU9EW5TkJ4+iD4f199dW6lAqom/lC+GQDZ0A8AUfUYA6r1uVEqINy/\nDfyk+1B5NAQvPRWsgxOgUgLSEsjfGwOV15v6My3Ig82ODRDG7eXcrXwzEPwHt7S2y3oOgqJDT4j2\nbobg5kWt/axACNWbAYBCAf6D2wAAVV03qBr5QOXZqPQ9ZGsPwelDEJw9DNalHmThU8CKDe8XYu3v\nkUoLkCKRCEFBQYiLi8OgQYPU2+Pi4jBgwAC9x165cgW3bt3CkiVLzEpT1s2bN+Hh4WFY4Qmxorhy\nCx/rYy9g4CziYX6QI75LyIeHHQ8Lgp2MPqdo9yaIDmwHAPBys2Gz41fN/fu3QbR/m8Y22ajp6v9n\nnj2B7eqvwLAsAEB47qh6H+/xAxQv+UPrnMKyed5NAFvHEfK3h2ml4z26C9v13wAABJdPQVXXDaLD\n0eDfv1G6X5KM4gWrYbN1FYSnDwEA+PdvouinKDBp
T2G3SrvDHwDwH9wq/eLn8yEbHqHz3lRlgpP7\nYbPzN537+feuo+j//qfxw6M8m80/Q3j+mO48OIIjAIiO7Ybo2G6dxzEKOfh3r2ts42VlgJeVAVw/\nD7AqKDr2hu2vr76jmdxslHzyvc48X7dKbWKdMWMGIiIiEBISgtDQUERGRiItLQ0TJ04EAERElL5p\ny/dA/eOPP+Dj44MuXbrozFtfmjVr1sDLywv+/v6QyWSIiorC/v37sWnTJgteHSGmmXU2x+C09i+m\nh5vfxgn/buUIHgMITViNQxS7reJEZdMfjNIIkPxH99TBsTx+ahKYzHSwrpotQ8LjezReC07u5wyQ\nNn+u0nhtu/ZrMMpXnZf4D24DKqU6OAIAL+c5eIm3IDy2q8JrER6JrrYB0nbjT3r383KzwL+TAGWL\nEJ1p9AVHaxId3gnWRbMPCldNtDJVaoAcMmQIsrKysGzZMkgkEvj7+yMqKgpeXl4AgOTkZK1j8vPz\nER0djXnz5unMt6I0crkcCxcuRGpqKmxtbdXnffvtty1zYYSY4HExg5VnsvGkwLCeq23qCTWCoQ2/\nEudSVVYwXlOlfU28nEzN188lnIcyWZqPYcoGx1f5awdnRlYCXk6W/nIBYGSWn/i9SikuquwS6Mb1\nb1mFVHonncmTJ2Py5Mmc+/bv36+1zdHRESkp+p/PVJRm1qxZmDVrlnEFJcSKEp7LMOyKHQD9X2a9\nG9pAhdJa4n/bGt+UajWV/UXHEYABgOXTcBZd96ZK4HEMpGBZgKkaE+dXeoAkhBi2OkfOxIYVpqk0\nFX0JW/sLj9UxoQKfvuKYimr3lYnr300uqzKdpmiuKUJes6PJJZhyKgsb7hSAffHc7pxEpveYfwVW\n7TGMnM2eGgmsHCA5mlgBAAKhdc9bHVR27V4frrLJ9X8WXif6eUXIa/QgV47wI5lgAUT9UwwnEQ/D\nffRPFL6qsxijKkhT6SqqQaosO2WeweenJlbr33szMBz/boxcBh0/d147qkES8hp9eTlP48P/1eU8\nvekdBAzG+jqAb0LP1NeqolqKtb+kdTSxstTEWrWfQVbxGiQFSEKsIOG5DJ12SxAYlYboh6863lxI\n1/zwpxQpcUFSyb0odQzPMEpFX8KWOIcejK4ATAGyajexSjnG/FKAJKRm+/xiLv7OViC5UImPz+ag\nUF76Bc61OseQw5la2156LU1NlvhCqrAGaeUvaV0BUkABkqsZs6pgpMXa26pQgKR3DyFmkqtY3MmW\n4w0HPura8sGyLM6W6XSTJ2dxOLkEHT24e+YVKirxiYtSAZutqypOx4F/OR7KtqUTcVT0Jcz/528I\nV38FfvJDAEDxzEXcCWVS2GxbA979G/Bs0gLwmW1QWew/f197o4q1WA1ScGIfhEd2QlW/MaQTPgb/\n3k2Idv8B1rkuWHFd8G9cBC8vW+MY6egZYIWiCgfzc1HVdQMvKwMq57rg5b4ay9nGlLJfOgl5x56w\n3fA9+LevgJGVQOXkAl5eNpQNvU3I0XKEJ7WH8tkv/NCgY2VvDwXa97F0kTRQgCTEDDIliz6xGbj6\nXA5XGx729KmHg0+1m40mnsjmOLry8W9egjAuxqRjbdcvRmGrvaVd8iuoQdpu+E7jtd2qRZzpBOeP\nqWfYqZ/8CEWdexlUFqYwX3ujSgnWAr1Ymcx02P7+AwCAn/wIrLMLhKcOgJGVAE//0XmczdbVJp+T\n92JyhLLB0VT8e9dhE/UrBNfOvMr/RTDnpySZnX9lEZ45YvUASU2shJhhV1Ixrj6XAwAypSr890ou\nFl/V3/HGGNbummMTafq8l4xMCub5i0nSLdSMZ/ubZnlsNq8wPTOlknsgupGEh3dovBYd3VUaHKsR\n4cl9lV0Ei2MK82CbkWrVc1CAJMQMUf9oznxzONmyHW6s3fjKFGs/AzLq+JfP/qzUEYTh6sRhKJXS\nIuMvmao8VVst
Z+1JEChAEmKGWv8BellzrIIdQRil8vX00CWVhrVAC4E+tf7zTYg5eFYenzi0iV3F\nicxiZgB5+QveakMJzCifpQJbVR4mUcuxepbxsgQKkISYwdofoE9aO1r5DGZ6ETyq5FACSwW2qnht\npJSVa5DUi5UQM1hrhamv2zphUBM7NKpTxT+iL4NHVaxlqZS6JzE3RlW8NgIAYBkKkIRUWXwrfD4j\nu7lgSNMqPvfqC+pJyqtiLUuptMgUd1WydkwAUBMrIVUaz8yBGO962Wq87tnQBgO8rf3c0YKqcA2S\nUVmok04VvDZSytqddKgGSYgZTK1B/jPKE3YCBvYCHgrkKihUQOI//6CtfwMwVWSxWIMolYC0pEpN\nD6amVBpe+2NZQCYFbDR/sEBaXDVrx6SUlWuQFCAJMYBMyeJoSgka2PMRVE8EFcvieIoUOx6aNo7Q\n1fbVB7uOsDTKioWwanAUbV8P4eEdUDVojJJ/fQ3Wrb7Zedr9+KkFSqYPY/JsCcJDUVC2bG9Q2jrv\n9zDtJKRSsVZezowCJCEVYFkWAw89xzmJDAyANV1ccCS5BNGPzBtk/zoxKUkQxW4DAPCfPIDwwHbI\nxn9cyaUykImtpLysDKgKLDerEal6aBwkIZXsfLoM515MPs4CmBafXa2CIwCIjuzUfH1sdyWV5PXi\n37xY2UUgVmTtTjpUgySEQ8JzGTbdL4K/iwDZUssu9jstwMGi+dV45rQ6y+UWKwYxn8rJBXCoA96z\npwAAVmQDRiaFsoE3FD3eA/9uAgRX4g3Or8p00mFZtnp1HiDERNlSFd6JzYDUhL4Z84IckVKoxL0c\nOS5naH45d/QQYVhTO0xoRgHSKGZ0RGUsMQ6SmKXglwOAjWE9s+VvD+XcXmdCd+4Dqso4yBYtWmD4\n8OEYPnw4AgICrFkmQirVr3cKTAqOY3zt8XkbJ/Vr8e8pGvt/6epS9Qf+E2Jp1gxiVq60GVzy4OBg\nrFu3Dp07d0aXLl2wevVqSCQSswuwYcMGtGrVCh4eHujWrRvOnj2rM+20adMgFou1/ho0aKBOEx8f\nz5nm/v37Gnnt2bMHoaGhcHd3R2hoKGJiTFsTj9Q8TwpM69Y/PaCOxut1XVzUM+2M8bWn4EhqJys3\ng1qTwSXfsmUL7t27h+XLl8PR0RELFixAixYtMHToUOzYsQPFJiybEx0djfnz52POnDk4deoU2rdv\nj/DwcDx9+pQz/dKlS3Hv3j2NP29vbwwaNEgr7fnz5zXS+fj4qPddvHgRkyZNQnh4OOLj4xEeHo73\n338fly9fNvoaCHmpRV3NxXlHvmmPS0M8cOI9N6x6S1xJpapGdNUGqJm0eqsNARIAnJ2d8f777yM2\nNhYJCQn47LPPkJqaiilTpqBZs2aYPn06Tp48aXB+q1evxujRozFhwgT4+flh2bJl8PDwQGRkpM7z\ne3h4qP8ePXqEpKQkTJgwQSutm5ubRlp+mfEya9euRZcuXfDJJ5/Az88Pn3zyCTp37oy1a9caczsI\nqVBTJwGC6ono+b0hdM16QzPZVG9Wfk5oTSaX3MvLC3PmzMGOHTswaNAgFBQUYNu2bRg8eDACAwOx\nZs0aKPW8sWUyGRISEhAWFqaxPSwsDBcuXDCoDBs3boS/vz9CQ0O19nXv3h1+fn4YMGAATp06pbHv\n0qVLWuft2bOnweclhFgDd4CkuVCruWr849CkhyL5+fnYs2cPoqKicObMGfD5fPTr1w+jRo2CSCTC\nH3/8gf/85z+4c+cOVq5cyZlHZmYmlEol3NzcNLa7ubkhPT29wjLk5uZi9+7dWLhwocZ2T09PLF++\nHMHBwZDJZNi+fTsGDhyI/fv3o1OnTgAAiURi0nkTExMrLJc1j69pqur9yMsTwdiPhqeNyiLXU1Ee\ngoJc2GRKwIAFCwZSVw8o6jhzpuXJpLCTlD6ucE99ivKNvCknDsNPJjW7zJbCyu
VaIzpUBXkQFBdU\nSnmIZVjic9HGSvn7+vrq3W/wt4BSqcSRI0cQFRWFgwcPori4GEFBQViyZAmGDRuGunXrqtO+/fbb\nWLx4MdavX68zQJorKioKKpUKI0eO1Nju6+urcdHt27fHkydPsGLFCnWANFVFN1OfxMREs46vaary\n/ch8kAHAuLlFvw51ha+PeStwVHRPhAf/B5ttq7W2S4dMgnzgeI1tvAe3Yf/9TL3n8/v9W9MKaiW8\nl4svl0HBsfqz9ufcmvkbHCCbNWuG7OxseHp6YsqUKRg1ahT8/Px0pvf390dBge43t6urK/h8PjIy\nMjS2Z2RkwN3dvcLybNy4EQMGDICLi0uFaUNCQhAdHa1+7eHhYfJ5Sc2WVaLEmTTjguO37Z0xtKmV\nV+BQKTmDIwDYREdC3ncEILJRbxMd2G7d8hBSCxj8DLJnz57YuXMnbt++jUWLFukNjgAwdOhQZGdn\n69wvEokQFBSEuLg4je1xcXGczxTLunLlCm7duoXx48frTffSzZs34eHhoX7drl07k85Lar54I4Pj\n96HOmN6iDnjWfs5SwWoZTLk5RwWXT+lIScjro2jR1iL5yLv2097WsZdF8tbH4BrkL7/8YvGTz5gx\nAxEREQgJCUFoaCgiIyORlpaGiRMnAgAiIiIAAOvXr9c47o8//oCPjw+6dOmileeaNWvg5eUFf39/\nyGQyREVFYf/+/di0aZM6zdSpU9GvXz/89NNP6N+/P/bt24f4+HgcPHjQ4tdIqo872XJMiMsy6hh7\n4WvqgGCBdQ1ZW3swJUUWKIyec9jYgpGWaG1XNmkO/qO76tequm7gZb1qxVE51wXrWvojlv/wTuk2\nvoCz2bWysLb2YG3twct5zrlf2dAb8t5DILhyGvxblwGwYF78uym9fMDW9QCTnw0mNwu85xLIu78H\nVQMv2GwtbRlQtGwH2NgZ9OOGrVM6IYWidUfIwwZAeHQXeJKU0qnbFIrSc2SkqtOyfAGULULA5GZB\ncPuK3ryVvoGlS3zx+OAn3tLar3KuC0ZaDKaEe2ifPGwgUJgHCG0gC/+wwmsxhHTkNLBCEQS3LgMq\nFZQtQiAN/xB4Zv5YfH0MDpAHDhzA8ePHsWzZMs79c+fORc+ePdGnTx+DTz5kyBBkZWVh2bJlkEgk\n8Pf3R1RUFLy8vAAAycnJWsfk5+cjOjoa8+bN48xTLpdj4cKFSE1Nha2trTrPt99+W53mZTBevHgx\nvv32WzRp0gSRkZFo29Yyv3ZI9cOyLD44YVxwBAAHQfUJkMXzl0PVpDmY9FQ4zB1tcj4FG0+YXRZD\nJCYmImj5v8EUWfY5pLJZSxT/xzp9IwBA0WOAUenl74QbnFbXc2rpmy2MOme14+AI2fiPOXoGVJEA\nuWLFCjRt2lTn/pKSEvz8889GBUgAmDx5MiZPnsy5b//+/VrbHB0dkZKSwpG61KxZszBr1qwKzztw\n4EAMHDjQ8IKSGkWpYsHnvQpuyYVK/J1jfG3FXvCaxnipLDBYXijS/G8txfJpRiNiGIM/3X///TeC\ngoJ07m/dujXu3r2rcz8hVcUP1/PhsSkVgVFpuJ4pQ+TdQrT8n2m/RF/bCC8LjAVkXwRGtpYHSAgo\nQBLDGPxOUSgUKCnRfrbwUnFxMaTSqjOmihAuTwsUWHy1tENLcqES317Nw6Fk3e/bAY1tsTDECT5O\nAhQoWDTa8kxjv8ICTZ+GYCo6jyHloBpkKapBEgMZXIMMCAjAvn37wHJ8EFUqFWJiYtC8eXOLFo4Q\nS5EqWfx+txBtozVrivqCIwAsCHHCm85CMAwDRyEPw31eDeew4zPoUt9Gz9EWVFETqwE1TPblMBCh\nUH/CqsQavYMpQBIDGRwgp06diosXL2LcuHG4fv06pFIppFIpEhISMHbsWFy+fFnd65SQqkSpYuGx\nKRX/Ppdj9DJWvs6awWRhsBP6NLJFq7pCrO
/qAkfh63oGWUHBDXlG+bLmaOVV2Ks6lppYiYEMfqcM\nHToUDx8+xNKlSxEbG6uxj2EYfPrppxgxYoTFC0iIuWKf6n40oM9Ab1utbW/UEeCvXq7mFskoTNpT\nzu72ZQlP7oOqcTNAIdfd3CqoRjVHa6IaJDGQUe+UuXPnIjw8HDExMUhKSgIAeHt747333oO3t7cV\nikeI6a5myPDr3UJse2Da2D8Rr/InWeZfPQPbVV+CqWA8oGj/NgMyo8AAgO4DMZjR7xRvb2989NFH\n1igLIRaTVqTEO7EZkJsxOkJQBQKk7bqvKwyONZmyaXMIbl6yaJ4qzzcsmh+puarvQl2E6PF/N/PN\nCo4A0KeRdhPr68Y1K40p5KGay7vJu79nkXytTTZ8KlgLdtRhbe0g7znYYvmRms2oGuSxY8ewatUq\nJCQkIC8vj7NHa1aW8bOREGKu2CfFWHwlD3Vtefi/TmKckxg3p2p5rV2FeNer8gOkKZSNm0HVwAtM\nUQH4Ny5A/u4YyN4bq5FGOmYmVG6e4KU/gyKkM3hpT8F7+hDC+AOVVGpuKi8flHy6HPyEc1AGBIN1\nEkMYtw+8B7egCA0rfX3qAHiP7gIMA6V/MFgXVwiunAZTXAhZ76GQjYiA4Nwx8FKTSuf0tDNv1RVS\nexgcIPfv349x48ahefPmGDp0KH777TeEh4eDZVns378fvr6+6Nu3rzXLSginEgWLqaeykScv/cH2\nn0t5yJWZXn3c26ceOriLNGbaqU4UXfpA3nuI/kQiG8jfHaN+qWzdAQCqXIAEAKV/Gyj9X60IKG2i\nOZyMa2o3abkpQBVd6buJGM/gALl8+XIEBQXh8OHDyM3NxW+//YYxY8agW7duSEpKQq9eveDj42PN\nshLC6UaWTB0cAeCQib1WX+r6usY2Wglby4dxEGIpRk01N2zYMAgEAvD5pR9ApbJ0bJa3tzcmTZqE\nn376yTqlJEQPqy81Vd3wKUASYgkGB0gbGxvY2pY+k3FwcADDMBqLDjds2BCPHj2yfAkJqUCxwnLT\nvYlFNSDYUg2SEIswOEA2bdoUDx48AAAIhUL4+flh79696v2xsbHw9PS0fAlJrXc6TYo/7hUiq4R7\nNhlLBsifOoktlleloRokIRZhcIDs1asXoqOjIZfLAQDTpk1DbGwsgoODERwcjMOHD2PSpElWKyip\nnXY+LMK7B57j47M56Lo3AzKldjAsMjBA9vOyxUQ/7h6MfRrZ4uu2Thjobce5v1qhGiQhFmFwJ525\nc+di6tSpELyYx3D8+PGwtbXFnj17wOfzMXfuXIwaNcpqBSW10wcns9X/n1yoxI6HRRjt66CRplCh\nu8dqqLsIh/q7aWz7/Z72zDqve/o4a2KpBkmIRRgUIJVKJdLS0lCnTh0wZTpEDB8+HMOHD7da4Ujt\nVsQR+G5kyTG63DZJse4AacgzxZZ1a9gcpVSDJMQiDGpiValUaNOmDf78809rl4cQteXXCwxK998r\neTr3OYsqfovPblXH4DJVC2bUIJWNm1mwIIRUbwYFSKFQCE9PT43aIyHW9sONfIPS+TjpDggNHSoO\nFjVumIgZNUh5n3C9+0umfmFy3oRUNwY/gxwzZgy2bt2KDz74QD3cgxBr+DtbjrHHMg1Or2uNR087\nHsY3c+DeWYaghsVHc2qQik69USx2Be+fO2DysqDyCYCiRQgEV05D5fkGVM2DLFhQQqo2gwPkm2++\nCZVKhXbt2mHUqFHw9vaGnZ12j7/Bg2kiYGKe7xLy8DDf8JWNdXXSuR7uCRt+xdFPZECaasXMTjrK\ngGAoA4I1tim6v2tWnoRURwYHyClTpqj/f9myZZxpGIahAEnMtifJsKniMoqV2J1UjGyp9jCPng1t\ndAbHd71sse9J6TmcRAy6eFbvqeXKo16shFiGwQEyJibGKgXYsGEDVqxYAYlEgubNm2PJkiXo1KkT\nZ9pp06
Zh2zbthWHt7e2RmpoKANi7dy9+//133LhxA1KpFH5+fpgzZw769eunTv/nn39ixowZWvmk\npaVR83E18SBXjrbR6Zz7OriL8G17Z53HftXWGTkyFTJLVPgi2Am2Na2NlXqxEmIRBgfIzp07W/zk\n0dHRmD9/Pn788Ud06NABGzZsQHh4OM6fP49GjRpppV+6dCkWLVqkse2dd97RCKhnzpxB165d8cUX\nX8DFxQVRUVEYO3Ys9u3bp5HO3t4e165d08iLgmP18e013R14DpYb91iej7MA+/rqT1OtUQ2SEIsw\naj1IS1u9ejVGjx6NCRMmAChtuj127BgiIyPx5ZdfaqV3dnaGs/OrmsH58+eRlJSE9evXq7d99913\nGsfMnz8fhw8fxv79+zUCJMMw8PDwsPQlETM9zlfo3b/u70K42PAQ/aj4NZWoGqIaJCEWYXCAfO+9\nilcgZxhGY35WfWQyGRISEvDRRx9pbA8LC8OFCxcMymPjxo3w9/dHaGio3nQFBQUQizXn2CwuLkZg\nYCBUKhVatmyJzz//HK1btzbovMQ6VCyLznu4m03LWqKn9khANUhCLMTgAKlSqbTGQSqVSjx9+hQp\nKSlo2rQp6tevb/CJMzMzoVQq4eam2dTl5uaG9PSKvyRzc3Oxe/duLFy4UG+6X3/9FampqRgxYoR6\nm6+vL1atWoXAwEAUFBRg3bp16NOnD06fPq13TcvExMQKy6WPucfXNOXvx9VcHvLlxjdzv1mUhsWP\ntmNYxkVsadgdD+6OAMu3YuOISgm3i8dgl56MvDdbIiegncZuO8lTuF49BZVQhIz2PSF3qquVBb+4\nEO4XjoBleEjv8DacHtxEy4NbISgugMxRjMJGviYXL+lpMqTFhvcCrsroM6ON7okmc+6Hr6/+z5nB\n3yL79+/Xue/gwYP4+OOP8c033xheMjNFRUVBpVJh5MiROtPs2bMHCxcuRGRkJLy8vNTb27dvj/bt\n26tfh4aGokuXLli/fj2+//57nflVdDP1SUxMNOv4mobrftx8WAQgm/sAHfgqJY4lLEZDWelxY1NO\nQH7BGdIJ/7ZUUbWIdmyA6Oj/AACuN86huLE3lEEvmu+Li+Dw48dgigsBAPWSH6D4m0itPGy/mw3B\n31cBAPXjNTvAifJzIPr7ksnla9y0KViPN0w+vqqgz4w2uiearH0/DF7NQ58+ffpg+PDh+Oyzzww+\nxtXVFXw+X2NNSQDIyMiAu7t7hcdv3LgRAwYMgIuLC+f+PXv2YOrUqVi3bh369u2rNy8+n4+goCA8\nfPjQ4PKTqsGv+Jk6OL4kPL7HqucUHt2l8Vpw5oj6//lJ99TBEQD4yQ+B/BzNDGRSdXC0CnoGSYhF\nWCRAAkCTJk20eoXqIxKJEBQUhLi4OI3tcXFxFT5TvHLlCm7duoXx48dz7t+1axciIiKwZs0aDBw4\nsMKysCyL27dvU6edSpZSaHyzoL1SaoWS6Fc2AAIALzPt1Qup9hhORlGu45FcZo1ivSIUWTd/QmoJ\nizyoUSgU2LVrF1xdjVsyaMaMGYiIiEBISAhCQ0MRGRmJtLQ0TJw4EQAQEREBABq9VAHgjz/+gI+P\nD7p06aKV586dOxEREYGvv/4anTp1gkQiAVAakF/WNpcuXYp27drBx8cHeXl5WL9+PW7fvo3ly5cb\nfe3EMj45l4MNdwsrTliOrUpuhdKYgSv4qTQDP2PlAMlSgCTEIgwOkFwD64HSzjKXL1+GRCIx+hnk\nkCFDkJWVhWXLlkEikcDf3x9RUVHq54XJyclax+Tn5yM6Ohrz5s3jzDMyMhIKhQKfffaZRpPvW2+9\npX6Ompubi1mzZiE9PR1OTk5o1aoVYmNjERISYlT5iWU8ylPoDI6/dXPRWBOyPDuVlWtjBnnVeY0z\n+CnL1YypBklItWBwgDx16pRWL1aGYSAWi9GhQweMHz8eYWFhRhdg8uTJ
mDx5Muc+ro5Bjo6OSElJ\n0Zmfvs5ELy1ZsgRLliwxvJDEqg4l655ark8jW/ynjSO+0TG0w0ZXDZJlgcpYpUPG0eSrogBJSHVk\ncIC8efOmNctBajGhjifh+/vWg4OQhxmBdXAhXYajKZrBh88ADYQ6nlsqFYDg9S+EzFWDZBQKsBWk\nsWwhatjUeYRUkkqdSYcQABDytL/Q33QS4K0Xk4jbC3jY8XY9zmMFp+0Brr5hclmlBEhDnkFavQZJ\nCLEIg3uxbtq0CePGjdO5f/z48di6datFCkVqF676jsGVIB3Bxmq1tPLBDgBUZZbbMuAZpNVrkIQQ\nizA4QEZGRuodBuHp6YkNGzZYpFCkdilRai9XtTDEyaBjdQYbawUhOcczT8WrbZzloRokIdWSwU2s\n//zzj3pScS7+/v7466+/LFIoUrsUKbQDZFs3jo4mCjlsIn+A8MwhAICycTMweVncmVo4CPHu34D9\nN//i3pf2BHYLS9dL5T++r7Wf/+A2IBBCtH0dBLevWLRchBDrMThAMgyDrCwdX0YAsrKyoFJxr+xO\niD5LruVpbatvrz0bjPDEPnVwBLiD0UuMTAbtsGsimVRncAQARi7XWxabrauhcq4LXq7uzw8hpOox\nuIm1devW2LlzJ6RS7TSDpE8AACAASURBVG7sJSUl2LFjB1q1amXRwpGaL6VQiZJyLZCLdDSv2mz+\n2fCMLViDFFyJNzsPY4Oj0utNqNwaGH0eRYu2Rh9DCOFmcICcPXs27t69i379+iEmJgYPHjzAgwcP\nsHfvXvTr1w/379/H7NmzrVlWUgMdfqo9BtLFxgIzIFqyibXI+Bl+zCWd8G+UfDhf535Wx5JWsgFj\nrVUkQmodg5tYe/TogTVr1mDevHkazyJZloWjoyNWrlyJXr16WaWQpOayF2p3V/WqY/hk20WLSqch\ntP3xU/DKTApu7Z6i8q79IOs/CkxxkXobLyUJtr8aNwGFonkQZCOnAWDx9PFjNGrSFCr3hoCdPQCg\nYF0seBnPoHJ1L+3sw+OXToIAAA6OYHIygZIiMEUFUL3RFLAxfrkwQgg3o8ZBjhw5Ev3798fx48eR\nlJQEAPD29kZYWBgcHR2tUT5Sw9nytQOkJ8fzR11UTfxK/+vjD17CuVc7rBwglb4twXo20nzOacIa\nlCoff/U1FCn4UDUut3SPnT1UXrrXKGVdSseHWux5KyFEzehPtKOjo0ErZBBiiHy5dseu5mIT5q8o\nN72a1ccackznxopoijdCahKDH/bExsZi7ty5OvfPnTsXBw8etEihSO0x87TmWondG9hozflrCK0V\nLCpjxQxT5kClqh8hVZbBAXLlypUoKirSub+kpAQ//2xEL0NCONQRmDiP6GsOkBByTGNHk4QTUqMY\nHCD//vtvBAUF6dzfunVr3L171yKFIrWDitWuPkk5ZtUxRPkanSWbWBklx+w5HLVcWoeRkJrF4ACp\nUChQUqJ7WaLi4mLOMZKE6JIn0w6GHJPqGMaaNUhD86IASUiNYnBviICAAOzbtw8zZ87UekakUqkQ\nExOD5s2bW7yApObKlWl30Ck/LyuTlw37uWPAlOhu3gcAiGw0X5cLasLDOyA8+D8whfkV5sXyeICo\nzHAJhY41J8szoRcrIaTqMrgGOXXqVFy8eBHjxo3D9evXIZVKIZVKkZCQgLFjx+Ly5cuIiIiwZllJ\nDZPDESDLN7EKY/+qODhCfxMrk54Kmz9XgZcpMSgvRqUCU1L06s/QAGnKOoy0dCMhVZbBP3mHDh2K\nhw8fYunSpYiNjdXYxzAMPv30U4wYMcLiBSQ1Vy5HE2s9W83fbKID23UeL+s99NWL8rW3MkGNl/rY\ntALqoWzWknt7Y1/wHycanI+8+3uWKhIhxMKMahOaO3cuwsPDERMTozFRwHvvvQdvb288fPgQTZs2\ntUY5SQ3E1cQ6P8iwZa4AQN4n/NWL
8lOvlV1iysI9WqVj/wXY2nPuk42aDtufPgMj1f28Xp229xCw\n7sbPt0oIeT2Mfmji7e2Njz76SP06MzMTO3fuRFRUFK5evap3xQ9CyuIKkG3qcQyfKKfg9+MAr9zT\nAV65AFlmkWJTerTKeg+FbOgkzY18gfazznKU/m1Q+EuZ8cDSklfBmi8oLZetnWnNsYSQ18qkXgXF\nxcXYv38/oqKicOLECcjlcvj4+GDmzJmWLh+pwco3sUb4Oxg2SUD54AjtybsZc2uQ9g6AnYPxx5VH\nc6MSUm0ZHCBZlkVcXBy2b9+O2NhYFBQUgGEYjBs3DjNnzoSvr2/FmRBSRvkapLM5q3hYuAZJYxoJ\nIRUGyISEBGzfvh27du2CRCKBj48Ppk+fjuDgYIwcORI9e/ak4EhMkiMtFyBFZgRISz+DpABJSK2n\n9xupffv2CAsLQ0xMDMLDwxEXF4fLly/js88+Q7NmzSxSgA0bNqBVq1bw8PBAt27dcPbsWZ1pp02b\nBrFYrPXXoIFmR4fTp0+jW7du8PDwQOvWrREZGWnWeYl15Ms1m1gdOZa+MpieGqQpAZJqkIQQvQEy\nMTERXl5eWLx4Mb744gu9U82ZIjo6GvPnz8ecOXNw6tQptG/fHuHh4Xj69Cln+qVLl+LevXsaf97e\n3hg0aJA6TVJSEoYPH4727dvj1KlTmD17NubNm4c9e/aYfF5ieTlyYNsDzTGJJs/DCuitQZo07RwF\nSEJqPb0BcsWKFfDy8sIHH3wAX19fRERE4MiRI1CW/XVuhtWrV2P06NGYMGEC/Pz8sGzZMnh4eHDW\n+ADA2dkZHh4e6r9Hjx4hKSlJYwHn33//HZ6enli2bBn8/PwwYcIEjBo1CqtWrTL5vMTytqZo91bl\nWjzZUGy5cZBM2fdoSbHxGQr191YlhNR8egPkuHHjsHfvXty8eRNz5szBrVu3MHz4cDRr1gxffvkl\nGIYxaWkiAJDJZEhISEBYWJjG9rCwMFy4cMGgPDZu3Ah/f3+Ehoaqt128eFErz549e+LatWuQy+UW\nOS8x3x6J9uNvO77ln0HyL56A6PAOo7OjJlZCiEG9WBs0aIBZs2Zh1qxZuHXrFqKiohAdHQ2WZTF7\n9mwcPHgQffv2RY8ePeDgYFjX+MzMTCiVSri5uWlsd3NzQ3p6eoXH5+bmYvfu3Vi4cKHG9vT0dHTv\n3l0rT4VCgczMTLAsa/J5ExMNnyHFGsfXJFly7YH2WWnJSCzU7LjThuNYrvvolCaBT5nXhXl5eJiY\niBabV5hUvtSMDORVwr8XvUc00f3QRvdEkzn3o6IOpkaPgwwMDERgYCC++uorxMfHY/v27YiJicHW\nrVtha2uLZ8+emVxYY0RFRUGlUmHkyJGv5XxAxTdTn8TEROrtW9bpFK1Nfk284OtS8UQBXPeRX5Kt\n8drBzha+Pk0hyjNt4grP9m/Bw9XDpGNNRe8RTXQ/tNE90WTt+2FymxbDMOjatStWr16NxMREREZG\natXc9HF1dQWfz0dGRobG9oyMDLi7/3979x5VVZk/fvx9ONy84VHiYipaiAkioiig5lhoy8rStPyJ\nFbYwFSe66BoDtBnzazailJOOliaSkpiSkcu0tEaZwRtQk6ZZ3kWj5BJ6QBA8Auf3h+PJzdlcROAA\n5/Nai7U4z3723s9+1ll8eJ79XFxrPX/Dhg2MHTuWTp06KdJdXV1Vr2lra4uzs/Nd31fcvbJq9rRq\nezeDdNRGsd5QX2T8RuDD1V7GqLGhLHwOxiYOjkKI5qdB9udxcHBg/PjxjB8/vs7n2Nvb4+/vT2pq\nqmIUampqKmPHjq3x3P/+97/8+OOPLF682OxYYGAgO3bsUKSlpqYyYMAA7P63C3x97ysaRuKpEtX0\nNtq7GaSjspJONaNXy4c+wvXIN+t9LyGEdbiLURF3LzIykk2bNpGYmMjJkyeJjo4mJyeH8PBwACIi\n
IlS30Fq/fj2enp4MHz7c7Fh4eDiXLl0iJiaGkydPkpiYyKZNmxTL4NV2X9G4ojIKVdPbN/A8yGqn\nd8gAHCFEHVh0h9cJEyZw+fJl4uLiyM3Nxdvbm+TkZDw8PADIzs42O+fq1aukpKQQFRWles2ePXuS\nnJzMvHnzSEhIwN3dnSVLljBu3Lg631c0vUmebWhn18CjWKsJkDJCVQhRFxbfAn3atGlMmzZN9djO\nnTvN0jp06MCvv5oP8Ljdgw8+SFpaWr3vK5reB8M71Z6pJqrvIKtpQdpLgBRC1M6iXazCOlVtKN7f\nQYuN2nxao/pgHlUqLUjpYhVC3A0JkKLJDXZRBqhXfDuoZzSa7xdZrTtoQUoXqxCiLizexSqsT0WV\nhqF3J5WvYXER2gt1nwBsNoq19Bra8yfUM0uAFELUgQRI0eQMlcoIaWej7F7VHs3Acflf0ZSrz2NU\nVaUFaXM5D4eklapZjbYSIIUQtZMuVtHkblTpOa36TtJuV/KdBUcA2zv4X6/q+0ohhFAhAVI0ufJa\nWpCawuqXhyt7UX16j7GTC5W6e2q9d0WP3tCumneeQghxG+liFU3OUFE1QCqPVzf61PDo/6P8wdHq\nF7Wxoewvsdh9kYSN/nfl9a4WYnPpIjeGPoJhwtR6l1sIYV0kQIomd6PKIJ2qLciqo09Llm2p09qo\nlR69ZAk5IUSDkS5W0eRumLUgq3SxVm1ByqhTIYQFSIAUTS6nVDlKx2yFuSoBUuYtCiEsQQKkaFKH\nfzd/v1hbF6u0IIUQliABUjSpD44Xm6UpWpAV5WgqKkwfjRob0MqrciFE05MAKZqMocJI8rlSs3RF\nC7Lq/Ec7e1Bbp1UIIRqZBEjRZLZlmQdHULYgtSePVjko3atCCMuQvivRZNadKFFN1/yvhWj3r89x\n+Hi54pgM0BFCWIq0IEWTqboGK0DCiD/2gbT9j/n+nzg4NmaRhBCiWhIgRZOpugYrwP1Of3RiaEqu\nmh0v7ze4MYskhBDVki5W0WSqrsEK0Nb2tgE4VaZ3GMZMlqXhhBAWIy1I0WSqrsEKygBZdQUdw5hn\nwdau0cslhBBqJECKJlN1DVaAdrcPYZUFAoQQzYh0sYomUV5p5JfiCrP0Ntr/tSArK833gJQAKaxM\neXk5JSXqo70BHB0dKSwsbMISNW91qY927dpheyf7xd5GAqRoEv/+7bpqusOtvYurBEejLBAgrEx5\neTlXr15Fp9OZpj5V5eDggKOjjOy+pbb6MBqN6PV6OnToUK8gafEu1vj4ePz8/HBzc2PEiBEcPHiw\nxvwGg4G3334bPz8/XF1d8fX1ZfXq1abjY8aMQafTmf0EBweb8iQlJanmKSsra7TntHbvqywx16O9\n9o8/BNK9KqxcSUlJjcFR3DmNRoNOp6uxVV4Ti7YgU1JSiImJ4d133yU4OJj4+HgmTpxIeno63bt3\nVz1n6tSp/Pbbbyxfvpz777+f/Px8Skv/WKFl48aNGAx//LG9fv06w4YN46mnnlJcp23bthw+fFiR\nJv+ZNa2PHups+r3qAB1ZIEBYIwmODe9u6tSiAXLVqlU8++yzvPDCCwDExcWxZ88eEhISePNN841v\n9+7dS1paGocPH8bZ2RmAHj16KPJ06tRJ8Tk5OZlr167x/PPPK9I1Gg1ubrVvwisaRns78y/pQJfb\ngqC0IIUQzYzFulgNBgNHjhwhJCREkR4SEkJGRobqOTt37mTAgAGsWrUKHx8fBg4cSFRUFMXF5t13\nt2zYsIFRo0bRrVs3RXppaSm+vr74+PgwadIkfvjhh7t/KFEtvUFlCOtttD8fUSZIgBRCWJjFWpAF\nBQVUVFTg4uKiSHdxcSEvL0/1nKysLNLT03FwcCAxMZHCwkKioqLIyckhMTHRLP+ZM2c4cOAASUlJ\ninQvLy9WrlyJr68vxcXFrF69mkcffZT9+/fj6elZbZlPnz5djy
dtuPNbKqMR0i61VaS91fu6oj66\nHv2W2zu4y4uLrLK+rPGZa2JN9eHo6IiDg0Ot+axprERERATl5eWsW7eu2jx1qY+ioiLVuOLl5VXj\neS1qFGtlZSUajYa1a9fSsWNH4Ga37IQJE8jLy8PV1VWRf8OGDbi7uzN69GhFemBgIIGBgabPQUFB\nDB8+nDVr1rB06dJq719bZdbk9OnTd3V+S3axuBzIVaR1dTQq6sM+Q9k1buvgaHX1Zc3fETXWVh+F\nhYW1joMoKytrVmMldDpdjccnT57MBx98UO/rr1ixAqPRWO0z17U+nJycqh3XUhOLBUhnZ2e0Wi35\n+fmK9Pz8fLNAd4ubmxtdunQxBUeA3r17A5Cdna04z2Aw8Mknn/DCCy/UOrxXq9Xi7+/PuXPn6vs4\nogYXVeY/+nRQLsyqMSingRjGTG7UMgkh7t7JkydNv+/evZtXX31VkVZd8Lpx4wZ2drWvknX733pL\nsNg7SHt7e/z9/UlNTVWkp6amEhQUpHpOcHAwOTk5ineOZ8+eBTD772Dnzp0UFBQQFhZWa1mMRiPH\njx+XQTuN5OLVcsXnR7s7oq06ZqfqIB372ruahBCW5ebmZvq5Fcyqpp06dQqdTse2bdt4/PHHcXNz\n45NPPiEvL4/w8HC8vb3p0qULQ4YMITk5WXH9qVOnMmXKFNPnUaNGMXfuXP72t7/Rs2dP+vXrx//9\n3/9hNNY8xqG+LNrFGhkZSUREBAEBAQQFBZGQkEBOTg7h4eHAzf5ngDVr1gDwzDPPEBcXR2RkJDEx\nMRQWFhITE8O4cePM3mWuX7+eESNG0LNnT7P7xsbGMnjwYDw9PSkqKmLNmjUcP36cZcuWNe4DW6nf\ny5StxfudtOaZZBSrEKp0H/3apPfTh3dtlOsuWLCARYsW4evri4ODA6WlpQwaNIjZs2fj5OTEN998\nw5///Ge6d+/OkCFDqr1OUlISr7zyCnv27CE9PZ1XXnmFgQMH8uSTTzZ4mS0aICdMmMDly5eJi4sj\nNzcXb29vkpOT8fDwAG52m96uffv2bNu2jaioKEJCQtDpdIwZM8ZsSkhWVhZpaWkkJCSo3rewsJDX\nXnuNvLw8nJyc8PPz48svvyQgIKBxHtTKzf+uSPFZZ2/ecSHzIIVo3SIjI3niiSfM0m6ZPn06qamp\npKSk1Bgg/fz8eP311wHo2rUrGzduJC0trfUFSIBp06Yxbdo01WM7d5pvoOvl5cXnn39e4zV79uzJ\nlStXqj2+ePFiFi9efGcFFfVy5br5JpAdVQKktCCFaN0GDBig+FxeXs4777zD9u3b+e2337hx4wbX\nr19n1KhRNV6nb9++is/u7u5mY1kaisUDpGjdfrx8wyytncqiAdKCFKJ1a9tWOdXrnXfeIT4+nsWL\nF9OnTx/atWvHG2+8oVgJTU3VwT0ajYaKCvOBgA1BAqRoVG9/X2SWdu2GEaoOYJMWpBCqbn8n2Nym\nedyN9PR0nnjiCSZOnAjcnMZ39uzZek3HaCwSIEWjGnwhg5fOH6RNpYECu/asvvcRfOx647V+Ce2z\nzwBgePJ5tOdPKE+UAClEq9arVy92795NZmYmHTt2ZNWqVeTk5EiAFNbB5tQx/pHxjiLtmbxMnLI6\nYFPwx8IB9l9sNDtXuliFaN3mzp1LdnY248ePp23btkyZMoWxY8dy6dIlSxfNRKPX6xtnAolQsLZV\nQQDsvtiIw9b4ep1bvHIbdKh5lY7Wxhq/IzWxtvooLCysdWJ8a+pibQh1rY+61K0ai+8HKVqx63ex\nZqSVBUchRPMjAVI0mvLr12vPpKLsxagGLokQQtw5CZCi0RjK6hcgcZAuJCGE5UmAFI1GX1y/LlYZ\noCOEaA4kQIpG8WtJBYeyq9/IukYSIIUQzYAESNEoPjpRgmOl+So6dSEtSCFEcyDzIEWDuVZeyYpj\nxcQeuQrA9noGSGlBCiGaAw
mQosHMOqAn+Vyp6XN9W5ASIIUQzYF0sYoGc3twxGgkRH+8XteRLlYh\nRHMgAVI0iOIbym2txv3+Xf0vJgFSCKuSmJho2ge4OZEAKRrE+8eVI1YDrp6v13WMtnYYO9z5klBC\niKYXGhrK2LFjVY+dPHkSnU7H3r17m7hUDUcCpGgQfz98VfG5Pu8fjRoNhiefB4c2DVUsIUQjCgsL\nY9++fVy4cMHs2Mcff0z37t156KGHmr5gDUQCpGgUDnUMkOV+QVxblMC1txMoWbmNG0+90MglE0I0\nlNGjR+Pq6kpSUpIi/caNG2zZsoXnn38eGxsb/vrXvxIQEIC7uzt+fn4sWLCA6/VcirIpyShW0Sjq\n2oKsvLcHld3vb+TSCNFytX/hoT9+b4L7FW/4d53z2traMnnyZDZt2kRMTAw2NjfbXF999RUFBQU8\n99xzAHTo0IH3338fd3d3Tpw4wezZs3F0dCQmJqYxHqHBSAtS1JuhwsjWc9dYfuwqNhrlsfsdK+p2\nERmQI0SLFhYWRnZ2Nv/+979NaRs3biQkJIRu3boBEB0dTVBQED169GD06NHMmjWLzz77zEIlrjtp\nQYp6+/O+K3x2vlT1WP86jrORKR1CtGyenp4MGzbMFBQvXbrEnj17SEhIMOVJSUlhzZo1nD9/npKS\nEsrLy02tzeas+ZdQNEslNyr5PEs9OAK0NdZxkI4ESCFavLCwMHbu3MmVK1fYtGkTnTp14vHHHwfg\n0KFDTJ8+nUceeYTNmzeTlpbGvHnzMBgMFi517SzegoyPj2fFihXk5ubSp08fFi9ezNChQ6vNbzAY\niIuLY8uWLeTk5ODq6srLL7/MzJkzAUhKSiIyMtLsvJycHMXO03d6X6F0tqicSqP6sZB7HbDNq+OX\nXwKkEDW6/Z1gWVmZ4u9YczFu3DiioqLYsmULGzduJDQ0FDs7OwAyMjLo3r07c+bMMeW/ePGipYp6\nRywaIFNSUoiJieHdd98lODiY+Ph4Jk6cSHp6Ot27d1c9Z+rUqfz2228sX76c+++/n/z8fEpLlS2Z\ntm3bcvjwYUXa7V+q+txXKP312yLF5x7ttYzr2Yau7bQ869UWzbd1C5DSxSpEy9emTRsmTpxIbGws\ner2esLAw0zFPT0+ys7PZunUrAQEBfPPNN3z++ecWLG3dWTRArlq1imeffZYXXrg5tD8uLs7Ud/3m\nm2+a5d+7dy9paWkcPnwYZ2dnAHr06GGWT6PR4Obm1mD3vVuawsv0X/xnNBpN7ZlbiF2VRoy3tSBt\nNGB720gdTbmyi9Wo0aAxqjQ5JUAK0SqEhYWxbt06goKCeOCBB0zpTz75JC+99BLR0dFcv36dkJAQ\n5s6d2+xHsIIFA6TBYODIkSO88sorivSQkBAyMjJUz9m5cycDBgxg1apVbN68GUdHR0aNGsX8+fNp\n3/6PAdClpaX4+vpSWVlJv379mDdvHv3796/3fRuCTUV5o13bEszCmhGoVMl4S9v2UHLVLNloLwFS\niNbA398fvV6vemzhwoUsXLhQkTZ9+nTT71OmTGHKlCmNWr76sFiALCgooKKiAhcXF0W6i4sLeXl5\nqudkZWWRnp6Og4MDiYmJFBYWEhUVRU5ODomJiQB4eXmxcuVKfH19KS4uZvXq1Tz66KPs378fT0/P\net1X3B2jVkuF9wBsv0szP+ba1QIlEkKI2ll8kM6dqKysRKPRsHbtWjp2vDmPIC4ujgkTJpCXl4er\nqyuBgYEEBgaazgkKCmL48OGsWbOGpUuX3tX9T58+Xb9yFxURcFd3brnKHdty6aGnKPL05b7sLNrm\n3Hw5X2Fnz++DHua360aoZ722RvX9jrVW1lQfjo6OODg41JqvrKysCUrTctSlPoqKilQbQF5eXjWe\nZ7EA6ezsjFarJT8/X5Gen5+Pq6ur6jlubm506dLFFBwBevfuDUB2drbqeVqtFn9/f86dO1fv
+95S\nW2VWJ+9aOW3/tL5e57YEgS727HjsHvWDWls62djQCagMHEpx+Q0wGjl97jxeDzxA/Wq0dTp9+nS9\nv2OtkbXVR2FhYa0jVJvrKFZLqWt9ODk51WsApsXmQdrb2+Pv709qaqoiPTU1laCgINVzgoODycnJ\nobj4j50jzp49C1DtwxuNRo4fP24atFOf+941jQaDjV2r/XHv6HhzsI3aT9XJwLZ26ulCCNHMWPSv\nVGRkJJs2bSIxMZGTJ08SHR1NTk4O4eHhAERERBAREWHK/8wzz9C5c2ciIyP5+eefSU9PJyYmhnHj\nxpneKcbGxrJnzx6ysrI4evQoL7/8MsePH2fq1Kl1vq+4MyO7yn+0QojWx6LvICdMmMDly5eJi4sj\nNzcXb29vkpOTTRtnZmdnK/K3b9+ebdu2ERUVRUhICDqdjjFjxiimZhQWFvLaa6+Rl5eHk5MTfn5+\nfPnllwQEBNT5vg3N3kbD8M7ltGvXFEsNNx1bDYR0dSTUU7anEkK0Phq9Xl/NeiiiIVnb+5TaSH2Y\nkzpRsrb6KCkpAW4udFLdnGl5B6lUW30YjUauXbsGQLt27e74+i1qFKsQQrRW7dq14/r16xQVFVWb\np6ioCCcnpyYsVfNWl/qo6+hgNRIghRCimXBwcKjxj3leXp4sh3mbxq4PGUoohBBCqJAAKYQQQqiQ\nACmEEEKokAAphBBCqJBpHkIIIYQKaUEKIYQQKiRACiGEECokQAohhBAqJEAKIYQQKiRACiGEECok\nQDay+Ph4/Pz8cHNzY8SIERw8eNDSRWoUy5Yt4+GHH6Z79+54enoyadIkfvrpJ0Ueo9HI4sWL6dOn\nD+7u7owZM4aff/5ZkUev1zNjxgw8PDzw8PBgxowZ6PX6pnyURrFs2TJ0Oh2vv/66Kc0a6yMnJ4eZ\nM2fi6emJm5sbQUFB7N+/33TcmuqkoqKCRYsWmf4++Pn5sWjRIsrLy015Wnt9HDhwgNDQULy9vdHp\ndCQlJSmON9TzHz9+nMcffxx3d3e8vb1ZsmQJRmPtEzgkQDailJQUYmJi+Mtf/kJaWhqBgYFMnDiR\nX375xdJFa3D79+/nxRdfZPfu3Wzfvh1bW1ueeuoprly5YsqzfPlyVq1axZIlS9i7dy8uLi6MHz+e\nq1evmvJMmzaNo0ePsnXrVrZu3crRo0cVe4K2RN9++y3r16+nb9++inRrqw+9Xs/o0aMxGo0kJyeT\nkZHB0qVLTXu5gnXVyXvvvUd8fDxLliwhMzOT2NhY1q5dy7Jly0x5Wnt9lJSU4OPjQ2xsLG3amG+b\n1xDPX1RUxPjx43F1dWXv3r3Exsbyz3/+k5UrV9ZaPpkH2YhGjhxJ3759WbFihSlt4MCBjBs3TrGH\nZWtUXFyMh4cHSUlJPPbYYxiNRvr06cP06dOZM2cOAKWlpXh5efHWW28RHh7OyZMnCQoKYteuXQQH\nBwNw6NAhHnvsMb799tsWufVRYWEhI0aMYMWKFSxZsgQfHx/i4uKssj4WLlzIgQMH2L17t+pxa6uT\nSZMm0alTJ1avXm1KmzlzJleuXGHLli1WVx9du3Zl6dKlPPfcc0DDfR/WrVvHggULOHXqlCkIx8XF\nkZCQwE8//VTt1mIgLchGYzAYOHLkCCEhIYr0kJAQMjIyLFSqplNcXExlZSU6nQ6ACxcukJubq6iP\nNm3aMHToUFN9ZGZm0r59e4KCgkx5goODadeuXYuts1mzZjFu3Dj+9Kc/KdKtsT527txJQEAA4eHh\n9OrViwcffJAPP/zQ1NVlbXUSHBzM/v37OXXqFAAnTpxg3759PPLII4D11UdVDfX8mZmZDBkyRNFC\nHTlyJJcuXeLCXVyDhgAACJFJREFUhQs1lkG2u2okBQUFVFRUKLqPAFxcXMjLy7NQqZpOTEwM/fr1\nIzAwEIDc3FwA1fq4dOkScHPrGmdnZ8V/dBqNhnvuuadF
1tmGDRs4d+4cH374odkxa6yPrKws1q1b\nx0svvcSsWbM4duwY0dHRAMyYMcPq6mTWrFkUFxcTFBSEVqulvLycOXPmMG3aNMA6vyO3a6jnz8vL\n49577zW7xq1jPXv2rLYMEiBFg5s3bx7p6ens2rULrVZr6eJYxOnTp1m4cCG7du3Czs7O0sVpFior\nKxkwYIDp9UL//v05d+4c8fHxzJgxw8Kla3opKSls3ryZ+Ph4+vTpw7Fjx4iJicHDw4MpU6ZYungC\n6WJtNM7Ozmi1WvLz8xXp+fn5uLq6WqhUjW/u3Ll89tlnbN++XfGfmZubG0CN9eHq6kpBQYFidJnR\naOT3339vcXWWmZlJQUEBwcHBODs74+zszIEDB4iPj8fZ2ZnOnTsD1lMfcPM78MADDyjSevfuTXZ2\ntuk4WE+dzJ8/n5dffpmnn36avn37EhoaSmRkJP/4xz8A66uPqhrq+V1dXVWvcetYTSRANhJ7e3v8\n/f1JTU1VpKempir6y1uT6OhoU3Ds3bu34liPHj1wc3NT1EdZWRmHDh0y1UdgYCDFxcVkZmaa8mRm\nZlJSUtLi6mzMmDEcPHiQffv2mX4GDBjA008/zb59++jVq5dV1QfcfDd05swZRdqZM2dMO8Jb23fk\n2rVrZj0sWq2WyspKwPrqo6qGev7AwEAOHTpEWVmZKU9qaipdunShR48eNZZBGxMTs6ABn0ncpkOH\nDixevBh3d3ccHR2Ji4vj4MGDrFy5ko4dO1q6eA1qzpw5bN68mfXr19OtWzdKSkooKSkBbv6zoNFo\nqKio4L333sPT05OKigreeOMNcnNzee+993BwcOCee+7hu+++Y+vWrfTr149ff/2V2bNnM3DgwBYz\nbP0WR0dHXFxcFD+ffvopHh4ePPfcc1ZXHwDdunVjyZIl2NjY4O7uzn/+8x8WLVrE7NmzCQgIsLo6\nOXnyJFu2bKFXr17Y2dmxb98+3nrrLSZMmMDIkSOtoj6Ki4s5ceIEubm5fPzxx/j4+ODk5ITBYKBj\nx44N8vyenp589NFHHDt2DC8vLw4dOsT8+fOZNWtWrf9EyDSPRhYfH8/y5cvJzc3F29ubv//97wwb\nNszSxWpwt0arVhUdHc3cuXOBm10fsbGxrF+/Hr1eT0BAAO+88w4+Pj6m/Hq9nqioKL766isAHnvs\nMZYuXVrt9VuSMWPGmKZ5gHXWx+7du1m4cCFnzpyhW7duTJ8+nYiICNMgC2uqk6tXr/L222+zY8cO\nfv/9d9zc3Hj66aeJiorC0dERaP31sW/fPp588kmz9MmTJ/PBBx802PMfP36cOXPm8P3336PT6QgP\nDyc6OrrGKR4gAVIIIYRQJe8ghRBCCBUSIIUQQggVEiCFEEIIFRIghRBCCBUSIIUQQggVEiCFEEII\nFRIghRAN5sKFC+h0OtNyaUK0ZBIghWhhkpKS0Ol01f7861//snQRhWgVZDcPIVqomJgY7rvvPrN0\nX19fC5RGiNZHAqQQLdTIkSMZPHiwpYshRKslXaxCtFI6nY7Zs2eTkpJCUFAQbm5uDBs2TLUL9sKF\nC4SHh3Pffffh7u7Oww8/zI4dO8zyGQwG4uLiGDx4MK6urnh5eTF58mR+/vlns7wbNmzA398fV1dX\nHn74Yb7//vtGeU4hGou0IIVooYqKiigoKDBLd3Z2Nv2ekZHB559/TkREBO3bt2fDhg2EhobyxRdf\nMGTIEODm3nijR4+muLiYiIgInJ2dSU5OJiwsjLVr1/LMM88ANzc8Dg0NZe/evTz11FPMmDGDa9eu\nsW/fPo4cOYK3t7fpvikpKZSUlBAeHo5Go2H58uWEhYVx5MgR2UBatBiyWLkQLUxSUhKRkZHVHs/J\nycHR0dG0m8HXX39NYGAgAJcvX2bgwIH06dOHXbt2ATBv3jzef/99vvjiC4YPHw5AaWkpDz30EHq9\nnh9//BE7OzvTfRcu
XMirr76quKfRaESj0XDhwgX69+9P586dTTsnAHz55Zc8++yzbN68mUcffbTB\n60SIxiAtSCFaqCVLlvDAAw+Ypdvb25t+HzBggCk4AnTu3JmJEyeydu1a9Ho9Op2Or7/+mv79+5uC\nI0CbNm148cUXiYqK4ocffmDQoEFs374dnU7HzJkzze5ZddugsWPHKrYbGjp0KABZWVn1fl4hmpoE\nSCFaqIEDB9Y6SMfT07PatIsXL6LT6fjll19U9+S7FXwvXrzIoEGDOH/+PL169VIE4Op069ZN8flW\nsNTr9bWeK0RzIYN0hBANTqvVqqYbjfJGR7QcEiCFaMXOnj1bbZqHhwcA3bt35/Tp02b5Tp06pch3\n3333cebMGQwGQ2MVV4hmRQKkEK3Y4cOHyczMNH2+fPkyn376KUFBQaZuz9GjR/PDDz9w8OBBU76y\nsjISEhJwc3PD398fuPleUa/Xs3r1arP7SMtQtEbyDlKIFmrPnj2cO3fOLD0gIIBevXoB4OPjw6RJ\nk5gxY4ZpmkdxcTHz58835Z81axafffYZkyZNUkzzOHHiBGvXrsXW9uafidDQUJKTk5k/fz6HDx9m\n6NChlJWVsX//fsaPH09oaGjTPLgQTUQCpBAtVGxsrGr60qVLTQEyKCiI4cOHExsbS1ZWFr169SIp\nKYlhw4aZ8ru4uLBr1y4WLFhAfHw8paWleHt7k5iYqBi8o9Vq2bJlC++++y5bt25lx44ddOrUiUGD\nBplamUK0JjIPUohWSqfTER4eLjtrCFFP8g5SCCGEUCEBUgghhFAhAVIIIYRQIYN0hGilZNUaIe6O\ntCCFEEIIFRIghRBCCBUSIIUQQggVEiCFEEIIFRIghRBCCBUSIIUQQggV/x+GQh5PV7b39QAAAABJ\nRU5ErkJggg==\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "HJ3A1FhgcUiO",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 173
        },
        "outputId": "c2a7453f-d7a1-474d-f03c-7d7e0d9b923e"
      },
      "source": [
        "#Make a prediction & print the actual values\n",
        "prediction = model.predict(X_test)\n",
        "prediction  = [1 if y>=0.5 else 0 for y in prediction] #Threshold\n",
        "print(prediction)\n",
        "print(y_test)"
      ],
      "execution_count": 19,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "[0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 1, 1, 0, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 0, 0, 1, 1]\n",
            "[0. 0. 0. 0. 1. 1. 1. 1. 1. 0. 0. 0. 1. 0. 0. 1. 1. 1. 0. 0. 0. 0. 0. 1.\n",
            " 1. 0. 0. 1. 1. 1. 1. 0. 0. 0. 0. 0. 0. 0. 0. 1. 1. 0. 0. 1. 0. 0. 0. 1.\n",
            " 0. 0. 1. 0. 0. 0. 1. 0. 0. 0. 0. 1. 0. 0. 1. 0. 0. 0. 0. 0. 0. 1. 0. 1.\n",
            " 0. 0. 0. 1. 0. 1. 0. 0. 1. 0. 0. 1. 0. 0. 0. 1. 0. 1. 0. 0. 0. 0. 0. 0.\n",
            " 0. 1. 1. 0. 0. 0. 1. 1. 1. 0. 0. 1. 1. 0. 0. 1. 1. 0. 0. 0. 0. 0. 1. 1.\n",
            " 0. 0. 0. 0. 0. 0. 0. 0. 0. 1. 1. 0. 1. 0. 1. 0. 0. 0. 1. 0. 0. 0. 0. 0.\n",
            " 1. 1. 1. 0. 0. 0. 0. 0. 1. 1.]\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "YQsQHhjaT1GB",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 272
        },
        "outputId": "087ea5a4-3ae6-4300-f7fa-cb10488cab60"
      },
      "source": [
        "#Evaluate the model on the training data set\n",
        "from sklearn.metrics import classification_report,confusion_matrix, accuracy_score\n",
        "pred = model.predict(X_train)\n",
        "pred  = [1 if y>=0.5 else 0 for y in pred] #Threshold\n",
        "print(classification_report(y_train ,pred ))\n",
        "print('Confusion Matrix: \\n',confusion_matrix(y_train,pred))\n",
        "print()\n",
        "print('Accuracy: ', accuracy_score(y_train,pred))\n",
        "print()\n",
        "\n",
        "#Print the predictions\n",
        "#print('Predicted value: ',model.predict(X_train))\n",
        "\n",
        "#Print Actual Label\n",
        "#print('Actual value: ',y_train)"
      ],
      "execution_count": 20,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "              precision    recall  f1-score   support\n",
            "\n",
            "         0.0       0.81      0.87      0.84       398\n",
            "         1.0       0.73      0.63      0.68       216\n",
            "\n",
            "    accuracy                           0.79       614\n",
            "   macro avg       0.77      0.75      0.76       614\n",
            "weighted avg       0.79      0.79      0.79       614\n",
            "\n",
            "Confusion Matrix: \n",
            " [[348  50]\n",
            " [ 79 137]]\n",
            "\n",
            "Accuracy:  0.7899022801302932\n",
            "\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "bICvE12-Tg7a",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 272
        },
        "outputId": "b96693d8-848e-4a3e-a5d9-3db1c0a5e1fa"
      },
      "source": [
        "#Evaluate the model on the test data set\n",
        "from sklearn.metrics import classification_report,confusion_matrix, accuracy_score\n",
        "pred = model.predict(X_test)\n",
        "pred  = [1 if y>=0.5 else 0 for y in pred] #Threshold\n",
        "print(classification_report(y_test ,pred ))\n",
        "print('Confusion Matrix: \\n',confusion_matrix(y_test,pred))\n",
        "print()\n",
        "print('Accuracy: ', accuracy_score(y_test,pred))\n",
        "print()\n",
        "\n",
        "#Print the predictions\n",
        "#print('Predicted value: ',model.predict(X_test))\n",
        "\n",
        "#Print Actual Label\n",
        "#print('Actual value: ',y_test)"
      ],
      "execution_count": 21,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "              precision    recall  f1-score   support\n",
            "\n",
            "         0.0       0.83      0.80      0.82       102\n",
            "         1.0       0.64      0.67      0.65        52\n",
            "\n",
            "    accuracy                           0.76       154\n",
            "   macro avg       0.73      0.74      0.74       154\n",
            "weighted avg       0.76      0.76      0.76       154\n",
            "\n",
            "Confusion Matrix: \n",
            " [[82 20]\n",
            " [17 35]]\n",
            "\n",
            "Accuracy:  0.7597402597402597\n",
            "\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "rv6Dd5tfT_T7",
        "colab_type": "code",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 51
        },
        "outputId": "7b859ea0-ad64-4e35-f7a8-fcf2a5a03f8c"
      },
      "source": [
        "#Evaluate the model on the test data set\n",
        "\n",
        "#model.evaluate returns a list of [loss, metric] values: the loss is the\n",
        "#first element (index 0) and the accuracy is the second element (index 1).\n",
        "#To output only the accuracy, access index 1 of the returned list.\n",
        "model.evaluate(X_test, y_test)[1]   "
      ],
      "execution_count": 22,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "154/154 [==============================] - 0s 109us/step\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "0.7597402566439145"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 22
        }
      ]
    }
  ]
}
