{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "colab": {
      "name": "Tensor Networks in Neural Networks",
      "provenance": [],
      "collapsed_sections": []
    },
    "kernelspec": {
      "name": "python3",
      "display_name": "Python 3"
    }
  },
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "8XnVMPBXmtRa",
        "colab_type": "text"
      },
      "source": [
        "# TensorNetworks in Neural Networks.\n",
        "\n",
        "Here, we have a small toy example of how to use a TN inside of a fully connected neural network.\n",
        "\n",
        "First off, let's install tensornetwork"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "7HGRsYNAFxME",
        "colab_type": "code",
        "outputId": "9189982e-51fe-4bb3-9124-2f01a3c933c8",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 423
        }
      },
      "source": [
        "!pip install tensornetwork\n",
        "\n",
        "import numpy as np\n",
        "import matplotlib.pyplot as plt\n",
        "import tensorflow as tf\n",
        "tf.enable_v2_behavior()\n",
        "# Import tensornetwork\n",
        "import tensornetwork as tn\n",
        "# Set the backend to tesorflow\n",
        "# (default is numpy)\n",
        "tn.set_default_backend(\"tensorflow\")"
      ],
      "execution_count": 4,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Collecting tensornetwork\n",
            "\u001b[?25l  Downloading https://files.pythonhosted.org/packages/d5/84/4421ac1add2011e50e8d85dc1a8446f5eeae8ad404cb4df6d4d598a61383/tensornetwork-0.2.1-py3-none-any.whl (232kB)\n",
            "\r\u001b[K     |█▍                              | 10kB 20.6MB/s eta 0:00:01\r\u001b[K     |██▉                             | 20kB 3.1MB/s eta 0:00:01\r\u001b[K     |████▎                           | 30kB 4.5MB/s eta 0:00:01\r\u001b[K     |█████▋                          | 40kB 3.0MB/s eta 0:00:01\r\u001b[K     |███████                         | 51kB 3.7MB/s eta 0:00:01\r\u001b[K     |████████▌                       | 61kB 4.3MB/s eta 0:00:01\r\u001b[K     |█████████▉                      | 71kB 5.0MB/s eta 0:00:01\r\u001b[K     |███████████▎                    | 81kB 5.6MB/s eta 0:00:01\r\u001b[K     |████████████▊                   | 92kB 6.3MB/s eta 0:00:01\r\u001b[K     |██████████████▏                 | 102kB 4.9MB/s eta 0:00:01\r\u001b[K     |███████████████▌                | 112kB 4.9MB/s eta 0:00:01\r\u001b[K     |█████████████████               | 122kB 4.9MB/s eta 0:00:01\r\u001b[K     |██████████████████▍             | 133kB 4.9MB/s eta 0:00:01\r\u001b[K     |███████████████████▊            | 143kB 4.9MB/s eta 0:00:01\r\u001b[K     |█████████████████████▏          | 153kB 4.9MB/s eta 0:00:01\r\u001b[K     |██████████████████████▋         | 163kB 4.9MB/s eta 0:00:01\r\u001b[K     |████████████████████████        | 174kB 4.9MB/s eta 0:00:01\r\u001b[K     |█████████████████████████▍      | 184kB 4.9MB/s eta 0:00:01\r\u001b[K     |██████████████████████████▉     | 194kB 4.9MB/s eta 0:00:01\r\u001b[K     |████████████████████████████▎   | 204kB 4.9MB/s eta 0:00:01\r\u001b[K     |█████████████████████████████▋  | 215kB 4.9MB/s eta 0:00:01\r\u001b[K     |███████████████████████████████ | 225kB 4.9MB/s eta 0:00:01\r\u001b[K     |████████████████████████████████| 235kB 4.9MB/s \n",
            "\u001b[?25hRequirement already satisfied: numpy>=1.16 in /usr/local/lib/python3.6/dist-packages (from tensornetwork) (1.17.5)\n",
            "Collecting graphviz>=0.11.1\n",
            "  Downloading https://files.pythonhosted.org/packages/f5/74/dbed754c0abd63768d3a7a7b472da35b08ac442cf87d73d5850a6f32391e/graphviz-0.13.2-py2.py3-none-any.whl\n",
            "Collecting h5py>=2.9.0\n",
            "\u001b[?25l  Downloading https://files.pythonhosted.org/packages/60/06/cafdd44889200e5438b897388f3075b52a8ef01f28a17366d91de0fa2d05/h5py-2.10.0-cp36-cp36m-manylinux1_x86_64.whl (2.9MB)\n",
            "\u001b[K     |████████████████████████████████| 2.9MB 45.0MB/s \n",
            "\u001b[?25hRequirement already satisfied: opt-einsum>=2.3.0 in /usr/local/lib/python3.6/dist-packages (from tensornetwork) (3.1.0)\n",
            "Requirement already satisfied: scipy>=1.1 in /usr/local/lib/python3.6/dist-packages (from tensornetwork) (1.4.1)\n",
            "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from h5py>=2.9.0->tensornetwork) (1.12.0)\n",
            "Installing collected packages: graphviz, h5py, tensornetwork\n",
            "  Found existing installation: graphviz 0.10.1\n",
            "    Uninstalling graphviz-0.10.1:\n",
            "      Successfully uninstalled graphviz-0.10.1\n",
            "  Found existing installation: h5py 2.8.0\n",
            "    Uninstalling h5py-2.8.0:\n",
            "      Successfully uninstalled h5py-2.8.0\n",
            "Successfully installed graphviz-0.13.2 h5py-2.10.0 tensornetwork-0.2.1\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "display_data",
          "data": {
            "text/html": [
              "<p style=\"color: red;\">\n",
              "The default version of TensorFlow in Colab will soon switch to TensorFlow 2.x.<br>\n",
              "We recommend you <a href=\"https://www.tensorflow.org/guide/migrate\" target=\"_blank\">upgrade</a> now \n",
              "or ensure your notebook will continue to use TensorFlow 1.x via the <code>%tensorflow_version 1.x</code> magic:\n",
              "<a href=\"https://colab.research.google.com/notebooks/tensorflow_version.ipynb\" target=\"_blank\">more info</a>.</p>\n"
            ],
            "text/plain": [
              "<IPython.core.display.HTML object>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "g1OMCo5XmrYu",
        "colab_type": "text"
      },
      "source": [
        "# TensorNetwork layer definition\n",
        "\n",
        "Here, we define the TensorNetwork layer we wish to use to replace the fully connected layer. Here, we simply use a 2 node Matrix Product Operator network to replace the normal dense weight matrix.\n",
        "\n",
        "We TensorNetwork's NCon API to keep the code short."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "wvSMKtPufnLp",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "\n",
        "class TNLayer(tf.keras.layers.Layer):\n",
        "\n",
        "  def __init__(self):\n",
        "    super(TNLayer, self).__init__()\n",
        "    # Create the variables for the layer.\n",
        "    self.a_var = tf.Variable(tf.random.normal(\n",
        "            shape=(32, 32, 2), stddev=1.0/32.0),\n",
        "             name=\"a\", trainable=True)\n",
        "    self.b_var = tf.Variable(tf.random.normal(shape=(32, 32, 2), stddev=1.0/32.0),\n",
        "                             name=\"b\", trainable=True)\n",
        "    self.bias = tf.Variable(tf.zeros(shape=(32, 32)), name=\"bias\", trainable=True)\n",
        "\n",
        "  def call(self, inputs):\n",
        "    # Define the contraction.\n",
        "    # We break it out so we can parallelize a batch using\n",
        "    # tf.vectorized_map (see below).\n",
        "    def f(input_vec, a_var, b_var, bias_var):\n",
        "      # Reshape to a matrix instead of a vector.\n",
        "      input_vec = tf.reshape(input_vec, (32,32))\n",
        "\n",
        "      # Now we create the network.\n",
        "      a = tn.Node(a_var)\n",
        "      b = tn.Node(b_var)\n",
        "      x_node = tn.Node(input_vec)\n",
        "      a[1] ^ x_node[0]\n",
        "      b[1] ^ x_node[1]\n",
        "      a[2] ^ b[2]\n",
        "\n",
        "      # The TN should now look like this\n",
        "      #   |     |\n",
        "      #   a --- b\n",
        "      #    \\   /\n",
        "      #      x\n",
        "\n",
        "      # Now we begin the contraction.\n",
        "      c = a @ x_node\n",
        "      result = (c @ b).tensor\n",
        "\n",
        "      # To make the code shorter, we also could've used Ncon.\n",
        "      # The above few lines of code is the same as this:\n",
        "      # result = tn.ncon([x, a_var, b_var], [[1, 2], [-1, 1, 3], [-2, 2, 3]])\n",
        "\n",
        "      # Finally, add bias.\n",
        "      return result + bias_var\n",
        "  \n",
        "    # To deal with a batch of items, we can use the tf.vectorized_map\n",
        "    # function.\n",
        "    # https://www.tensorflow.org/api_docs/python/tf/vectorized_map\n",
        "    result = tf.vectorized_map(\n",
        "        lambda vec: f(vec, self.a_var, self.b_var, self.bias), inputs)\n",
        "    return tf.nn.relu(tf.reshape(result, (-1, 1024)))"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "V-CVqIhPnhY_",
        "colab_type": "text"
      },
      "source": [
        "# Smaller model\n",
        "These two models are effectively the same, but notice how the TN layer has nearly 10x fewer parameters."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "XPBvnB95jg4b",
        "colab_type": "code",
        "outputId": "4baa5bda-6077-4d66-ee42-48a894a68966",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 255
        }
      },
      "source": [
        "Dense = tf.keras.layers.Dense\n",
        "fc_model = tf.keras.Sequential(\n",
        "    [\n",
        "     tf.keras.Input(shape=(2,)),\n",
        "     Dense(1024, activation=tf.nn.relu),\n",
        "     Dense(1024, activation=tf.nn.relu),\n",
        "     Dense(1, activation=None)])\n",
        "fc_model.summary()"
      ],
      "execution_count": 19,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Model: \"sequential_6\"\n",
            "_________________________________________________________________\n",
            "Layer (type)                 Output Shape              Param #   \n",
            "=================================================================\n",
            "dense_15 (Dense)             (None, 1024)              3072      \n",
            "_________________________________________________________________\n",
            "dense_16 (Dense)             (None, 1024)              1049600   \n",
            "_________________________________________________________________\n",
            "dense_17 (Dense)             (None, 1)                 1025      \n",
            "=================================================================\n",
            "Total params: 1,053,697\n",
            "Trainable params: 1,053,697\n",
            "Non-trainable params: 0\n",
            "_________________________________________________________________\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "bbKsmK8wIFTp",
        "colab_type": "code",
        "outputId": "a4f49baa-1087-4d2d-b00e-2c8349c51086",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 255
        }
      },
      "source": [
        "tn_model = tf.keras.Sequential(\n",
        "    [\n",
        "     tf.keras.Input(shape=(2,)),\n",
        "     Dense(1024, activation=tf.nn.relu),\n",
        "     # Here, we replace the dense layer with our MPS.\n",
        "     TNLayer(),\n",
        "     Dense(1, activation=None)])\n",
        "tn_model.summary()"
      ],
      "execution_count": 27,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Model: \"sequential_8\"\n",
            "_________________________________________________________________\n",
            "Layer (type)                 Output Shape              Param #   \n",
            "=================================================================\n",
            "dense_20 (Dense)             (None, 1024)              3072      \n",
            "_________________________________________________________________\n",
            "tn_layer_4 (TNLayer)         (None, 1024)              5120      \n",
            "_________________________________________________________________\n",
            "dense_21 (Dense)             (None, 1)                 1025      \n",
            "=================================================================\n",
            "Total params: 9,217\n",
            "Trainable params: 9,217\n",
            "Non-trainable params: 0\n",
            "_________________________________________________________________\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "GWwoYp0WnsLA",
        "colab_type": "text"
      },
      "source": [
        "# Training a model\n",
        "\n",
        "You can train the TN model just as you would a normal neural network model! Here, we give an example of how to do it in Keras."
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "qDFzOC7sDBJ-",
        "colab_type": "code",
        "colab": {}
      },
      "source": [
        "X = np.concatenate([np.random.randn(20, 2) + np.array([3, 3]), \n",
        "             np.random.randn(20, 2) + np.array([-3, -3]), \n",
        "             np.random.randn(20, 2) + np.array([-3, 3]), \n",
        "             np.random.randn(20, 2) + np.array([3, -3]),])\n",
        "\n",
        "Y = np.concatenate([np.ones((40)), -np.ones((40))])"
      ],
      "execution_count": 0,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "crc0q1vbIyTj",
        "colab_type": "code",
        "outputId": "863fb1d3-c2cd-44e6-bca4-919955ce8868",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 1000
        }
      },
      "source": [
        "tn_model.compile(optimizer=\"adam\", loss=\"mean_squared_error\")\n",
        "tn_model.fit(X, Y, epochs=300, verbose=1)"
      ],
      "execution_count": 29,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Train on 80 samples\n",
            "Epoch 1/300\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/indexed_slices.py:424: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n",
            "  \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "80/80 [==============================] - 1s 8ms/sample - loss: 0.9842\n",
            "Epoch 2/300\n",
            "80/80 [==============================] - 0s 238us/sample - loss: 0.9290\n",
            "Epoch 3/300\n",
            "32/80 [===========>..................] - ETA: 0s - loss: 0.8913"
          ],
          "name": "stdout"
        },
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/indexed_slices.py:424: UserWarning: Converting sparse IndexedSlices to a dense Tensor of unknown shape. This may consume a large amount of memory.\n",
            "  \"Converting sparse IndexedSlices to a dense Tensor of unknown shape. \"\n"
          ],
          "name": "stderr"
        },
        {
          "output_type": "stream",
          "text": [
            "80/80 [==============================] - 0s 260us/sample - loss: 0.8794\n",
            "Epoch 4/300\n",
            "80/80 [==============================] - 0s 228us/sample - loss: 0.8258\n",
            "Epoch 5/300\n",
            "80/80 [==============================] - 0s 230us/sample - loss: 0.7643\n",
            "Epoch 6/300\n",
            "80/80 [==============================] - 0s 209us/sample - loss: 0.6869\n",
            "Epoch 7/300\n",
            "80/80 [==============================] - 0s 200us/sample - loss: 0.5934\n",
            "Epoch 8/300\n",
            "80/80 [==============================] - 0s 307us/sample - loss: 0.4874\n",
            "Epoch 9/300\n",
            "80/80 [==============================] - 0s 249us/sample - loss: 0.3713\n",
            "Epoch 10/300\n",
            "80/80 [==============================] - 0s 266us/sample - loss: 0.2546\n",
            "Epoch 11/300\n",
            "80/80 [==============================] - 0s 220us/sample - loss: 0.1580\n",
            "Epoch 12/300\n",
            "80/80 [==============================] - 0s 211us/sample - loss: 0.1004\n",
            "Epoch 13/300\n",
            "80/80 [==============================] - 0s 247us/sample - loss: 0.0962\n",
            "Epoch 14/300\n",
            "80/80 [==============================] - 0s 260us/sample - loss: 0.1127\n",
            "Epoch 15/300\n",
            "80/80 [==============================] - 0s 335us/sample - loss: 0.1125\n",
            "Epoch 16/300\n",
            "80/80 [==============================] - 0s 292us/sample - loss: 0.0933\n",
            "Epoch 17/300\n",
            "80/80 [==============================] - 0s 250us/sample - loss: 0.0785\n",
            "Epoch 18/300\n",
            "80/80 [==============================] - 0s 280us/sample - loss: 0.0740\n",
            "Epoch 19/300\n",
            "80/80 [==============================] - 0s 228us/sample - loss: 0.0751\n",
            "Epoch 20/300\n",
            "80/80 [==============================] - 0s 268us/sample - loss: 0.0759\n",
            "Epoch 21/300\n",
            "80/80 [==============================] - 0s 337us/sample - loss: 0.0722\n",
            "Epoch 22/300\n",
            "80/80 [==============================] - 0s 319us/sample - loss: 0.0670\n",
            "Epoch 23/300\n",
            "80/80 [==============================] - 0s 304us/sample - loss: 0.0660\n",
            "Epoch 24/300\n",
            "80/80 [==============================] - 0s 275us/sample - loss: 0.0642\n",
            "Epoch 25/300\n",
            "80/80 [==============================] - 0s 245us/sample - loss: 0.0617\n",
            "Epoch 26/300\n",
            "80/80 [==============================] - 0s 269us/sample - loss: 0.0603\n",
            "Epoch 27/300\n",
            "80/80 [==============================] - 0s 365us/sample - loss: 0.0594\n",
            "Epoch 28/300\n",
            "80/80 [==============================] - 0s 207us/sample - loss: 0.0570\n",
            "Epoch 29/300\n",
            "80/80 [==============================] - 0s 235us/sample - loss: 0.0554\n",
            "Epoch 30/300\n",
            "80/80 [==============================] - 0s 253us/sample - loss: 0.0550\n",
            "Epoch 31/300\n",
            "80/80 [==============================] - 0s 257us/sample - loss: 0.0530\n",
            "Epoch 32/300\n",
            "80/80 [==============================] - 0s 375us/sample - loss: 0.0510\n",
            "Epoch 33/300\n",
            "80/80 [==============================] - 0s 289us/sample - loss: 0.0494\n",
            "Epoch 34/300\n",
            "80/80 [==============================] - 0s 282us/sample - loss: 0.0479\n",
            "Epoch 35/300\n",
            "80/80 [==============================] - 0s 307us/sample - loss: 0.0468\n",
            "Epoch 36/300\n",
            "80/80 [==============================] - 0s 246us/sample - loss: 0.0453\n",
            "Epoch 37/300\n",
            "80/80 [==============================] - 0s 240us/sample - loss: 0.0439\n",
            "Epoch 38/300\n",
            "80/80 [==============================] - 0s 250us/sample - loss: 0.0430\n",
            "Epoch 39/300\n",
            "80/80 [==============================] - 0s 368us/sample - loss: 0.0418\n",
            "Epoch 40/300\n",
            "80/80 [==============================] - 0s 349us/sample - loss: 0.0398\n",
            "Epoch 41/300\n",
            "80/80 [==============================] - 0s 335us/sample - loss: 0.0392\n",
            "Epoch 42/300\n",
            "80/80 [==============================] - 0s 296us/sample - loss: 0.0370\n",
            "Epoch 43/300\n",
            "80/80 [==============================] - 0s 294us/sample - loss: 0.0355\n",
            "Epoch 44/300\n",
            "80/80 [==============================] - 0s 347us/sample - loss: 0.0347\n",
            "Epoch 45/300\n",
            "80/80 [==============================] - 0s 261us/sample - loss: 0.0337\n",
            "Epoch 46/300\n",
            "80/80 [==============================] - 0s 292us/sample - loss: 0.0318\n",
            "Epoch 47/300\n",
            "80/80 [==============================] - 0s 262us/sample - loss: 0.0306\n",
            "Epoch 48/300\n",
            "80/80 [==============================] - 0s 299us/sample - loss: 0.0295\n",
            "Epoch 49/300\n",
            "80/80 [==============================] - 0s 216us/sample - loss: 0.0282\n",
            "Epoch 50/300\n",
            "80/80 [==============================] - 0s 383us/sample - loss: 0.0274\n",
            "Epoch 51/300\n",
            "80/80 [==============================] - 0s 310us/sample - loss: 0.0255\n",
            "Epoch 52/300\n",
            "80/80 [==============================] - 0s 282us/sample - loss: 0.0242\n",
            "Epoch 53/300\n",
            "80/80 [==============================] - 0s 219us/sample - loss: 0.0238\n",
            "Epoch 54/300\n",
            "80/80 [==============================] - 0s 213us/sample - loss: 0.0216\n",
            "Epoch 55/300\n",
            "80/80 [==============================] - 0s 244us/sample - loss: 0.0204\n",
            "Epoch 56/300\n",
            "80/80 [==============================] - 0s 304us/sample - loss: 0.0197\n",
            "Epoch 57/300\n",
            "80/80 [==============================] - 0s 275us/sample - loss: 0.0178\n",
            "Epoch 58/300\n",
            "80/80 [==============================] - 0s 223us/sample - loss: 0.0170\n",
            "Epoch 59/300\n",
            "80/80 [==============================] - 0s 231us/sample - loss: 0.0158\n",
            "Epoch 60/300\n",
            "80/80 [==============================] - 0s 276us/sample - loss: 0.0147\n",
            "Epoch 61/300\n",
            "80/80 [==============================] - 0s 285us/sample - loss: 0.0139\n",
            "Epoch 62/300\n",
            "80/80 [==============================] - 0s 411us/sample - loss: 0.0131\n",
            "Epoch 63/300\n",
            "80/80 [==============================] - 0s 223us/sample - loss: 0.0120\n",
            "Epoch 64/300\n",
            "80/80 [==============================] - 0s 293us/sample - loss: 0.0110\n",
            "Epoch 65/300\n",
            "80/80 [==============================] - 0s 234us/sample - loss: 0.0105\n",
            "Epoch 66/300\n",
            "80/80 [==============================] - 0s 229us/sample - loss: 0.0097\n",
            "Epoch 67/300\n",
            "80/80 [==============================] - 0s 254us/sample - loss: 0.0088\n",
            "Epoch 68/300\n",
            "80/80 [==============================] - 0s 339us/sample - loss: 0.0083\n",
            "Epoch 69/300\n",
            "80/80 [==============================] - 0s 303us/sample - loss: 0.0076\n",
            "Epoch 70/300\n",
            "80/80 [==============================] - 0s 268us/sample - loss: 0.0070\n",
            "Epoch 71/300\n",
            "80/80 [==============================] - 0s 243us/sample - loss: 0.0065\n",
            "Epoch 72/300\n",
            "80/80 [==============================] - 0s 219us/sample - loss: 0.0060\n",
            "Epoch 73/300\n",
            "80/80 [==============================] - 0s 227us/sample - loss: 0.0055\n",
            "Epoch 74/300\n",
            "80/80 [==============================] - 0s 349us/sample - loss: 0.0053\n",
            "Epoch 75/300\n",
            "80/80 [==============================] - 0s 268us/sample - loss: 0.0049\n",
            "Epoch 76/300\n",
            "80/80 [==============================] - 0s 281us/sample - loss: 0.0045\n",
            "Epoch 77/300\n",
            "80/80 [==============================] - 0s 233us/sample - loss: 0.0043\n",
            "Epoch 78/300\n",
            "80/80 [==============================] - 0s 216us/sample - loss: 0.0039\n",
            "Epoch 79/300\n",
            "80/80 [==============================] - 0s 267us/sample - loss: 0.0036\n",
            "Epoch 80/300\n",
            "80/80 [==============================] - 0s 386us/sample - loss: 0.0034\n",
            "Epoch 81/300\n",
            "80/80 [==============================] - 0s 303us/sample - loss: 0.0033\n",
            "Epoch 82/300\n",
            "80/80 [==============================] - 0s 304us/sample - loss: 0.0031\n",
            "Epoch 83/300\n",
            "80/80 [==============================] - 0s 318us/sample - loss: 0.0029\n",
            "Epoch 84/300\n",
            "80/80 [==============================] - 0s 277us/sample - loss: 0.0025\n",
            "Epoch 85/300\n",
            "80/80 [==============================] - 0s 343us/sample - loss: 0.0023\n",
            "Epoch 86/300\n",
            "80/80 [==============================] - 0s 236us/sample - loss: 0.0022\n",
            "Epoch 87/300\n",
            "80/80 [==============================] - 0s 257us/sample - loss: 0.0021\n",
            "Epoch 88/300\n",
            "80/80 [==============================] - 0s 232us/sample - loss: 0.0019\n",
            "Epoch 89/300\n",
            "80/80 [==============================] - 0s 273us/sample - loss: 0.0017\n",
            "Epoch 90/300\n",
            "80/80 [==============================] - 0s 279us/sample - loss: 0.0016\n",
            "Epoch 91/300\n",
            "80/80 [==============================] - 0s 289us/sample - loss: 0.0016\n",
            "Epoch 92/300\n",
            "80/80 [==============================] - 0s 366us/sample - loss: 0.0015\n",
            "Epoch 93/300\n",
            "80/80 [==============================] - 0s 343us/sample - loss: 0.0013\n",
            "Epoch 94/300\n",
            "80/80 [==============================] - 0s 285us/sample - loss: 0.0012\n",
            "Epoch 95/300\n",
            "80/80 [==============================] - 0s 265us/sample - loss: 0.0012\n",
            "Epoch 96/300\n",
            "80/80 [==============================] - 0s 236us/sample - loss: 0.0011\n",
            "Epoch 97/300\n",
            "80/80 [==============================] - 0s 264us/sample - loss: 9.3151e-04\n",
            "Epoch 98/300\n",
            "80/80 [==============================] - 0s 240us/sample - loss: 8.6390e-04\n",
            "Epoch 99/300\n",
            "80/80 [==============================] - 0s 265us/sample - loss: 8.3163e-04\n",
            "Epoch 100/300\n",
            "80/80 [==============================] - 0s 268us/sample - loss: 7.5102e-04\n",
            "Epoch 101/300\n",
            "80/80 [==============================] - 0s 230us/sample - loss: 7.2688e-04\n",
            "Epoch 102/300\n",
            "80/80 [==============================] - 0s 362us/sample - loss: 6.5222e-04\n",
            "Epoch 103/300\n",
            "80/80 [==============================] - 0s 368us/sample - loss: 6.5107e-04\n",
            "Epoch 104/300\n",
            "80/80 [==============================] - 0s 282us/sample - loss: 6.3698e-04\n",
            "Epoch 105/300\n",
            "80/80 [==============================] - 0s 296us/sample - loss: 5.3166e-04\n",
            "Epoch 106/300\n",
            "80/80 [==============================] - 0s 305us/sample - loss: 4.7975e-04\n",
            "Epoch 107/300\n",
            "80/80 [==============================] - 0s 308us/sample - loss: 4.7437e-04\n",
            "Epoch 108/300\n",
            "80/80 [==============================] - 0s 372us/sample - loss: 4.9682e-04\n",
            "Epoch 109/300\n",
            "80/80 [==============================] - 0s 346us/sample - loss: 4.4769e-04\n",
            "Epoch 110/300\n",
            "80/80 [==============================] - 0s 304us/sample - loss: 4.0579e-04\n",
            "Epoch 111/300\n",
            "80/80 [==============================] - 0s 234us/sample - loss: 4.2932e-04\n",
            "Epoch 112/300\n",
            "80/80 [==============================] - 0s 325us/sample - loss: 4.0622e-04\n",
            "Epoch 113/300\n",
            "80/80 [==============================] - 0s 303us/sample - loss: 3.3058e-04\n",
            "Epoch 114/300\n",
            "80/80 [==============================] - 0s 334us/sample - loss: 3.3755e-04\n",
            "Epoch 115/300\n",
            "80/80 [==============================] - 0s 303us/sample - loss: 3.0059e-04\n",
            "Epoch 116/300\n",
            "80/80 [==============================] - 0s 266us/sample - loss: 3.4185e-04\n",
            "Epoch 117/300\n",
            "80/80 [==============================] - 0s 292us/sample - loss: 2.6187e-04\n",
            "Epoch 118/300\n",
            "80/80 [==============================] - 0s 278us/sample - loss: 2.4878e-04\n",
            "Epoch 119/300\n",
            "80/80 [==============================] - 0s 321us/sample - loss: 2.4584e-04\n",
            "Epoch 120/300\n",
            "80/80 [==============================] - 0s 296us/sample - loss: 2.2617e-04\n",
            "Epoch 121/300\n",
            "80/80 [==============================] - 0s 260us/sample - loss: 2.0133e-04\n",
            "Epoch 122/300\n",
            "80/80 [==============================] - 0s 291us/sample - loss: 1.8818e-04\n",
            "Epoch 123/300\n",
            "80/80 [==============================] - 0s 273us/sample - loss: 1.6604e-04\n",
            "Epoch 124/300\n",
            "80/80 [==============================] - 0s 269us/sample - loss: 1.7035e-04\n",
            "Epoch 125/300\n",
            "80/80 [==============================] - 0s 364us/sample - loss: 1.5003e-04\n",
            "Epoch 126/300\n",
            "80/80 [==============================] - 0s 312us/sample - loss: 1.4791e-04\n",
            "Epoch 127/300\n",
            "80/80 [==============================] - 0s 295us/sample - loss: 1.2680e-04\n",
            "Epoch 128/300\n",
            "80/80 [==============================] - 0s 253us/sample - loss: 1.2657e-04\n",
            "Epoch 129/300\n",
            "80/80 [==============================] - 0s 272us/sample - loss: 1.2066e-04\n",
            "Epoch 130/300\n",
            "80/80 [==============================] - 0s 323us/sample - loss: 1.1825e-04\n",
            "Epoch 131/300\n",
            "80/80 [==============================] - 0s 267us/sample - loss: 1.0894e-04\n",
            "Epoch 132/300\n",
            "80/80 [==============================] - 0s 333us/sample - loss: 1.0311e-04\n",
            "Epoch 133/300\n",
            "80/80 [==============================] - 0s 253us/sample - loss: 1.0394e-04\n",
            "Epoch 134/300\n",
            "80/80 [==============================] - 0s 258us/sample - loss: 9.2189e-05\n",
            "Epoch 135/300\n",
            "80/80 [==============================] - 0s 248us/sample - loss: 8.4650e-05\n",
            "Epoch 136/300\n",
            "80/80 [==============================] - 0s 256us/sample - loss: 8.3155e-05\n",
            "Epoch 137/300\n",
            "80/80 [==============================] - 0s 279us/sample - loss: 8.0666e-05\n",
            "Epoch 138/300\n",
            "80/80 [==============================] - 0s 250us/sample - loss: 7.7635e-05\n",
            "Epoch 139/300\n",
            "80/80 [==============================] - 0s 259us/sample - loss: 7.7501e-05\n",
            "Epoch 140/300\n",
            "80/80 [==============================] - 0s 240us/sample - loss: 7.6662e-05\n",
            "Epoch 141/300\n",
            "80/80 [==============================] - 0s 249us/sample - loss: 6.8760e-05\n",
            "Epoch 142/300\n",
            "80/80 [==============================] - 0s 270us/sample - loss: 6.5677e-05\n",
            "Epoch 143/300\n",
            "80/80 [==============================] - 0s 293us/sample - loss: 6.1257e-05\n",
            "Epoch 144/300\n",
            "80/80 [==============================] - 0s 272us/sample - loss: 6.8409e-05\n",
            "Epoch 145/300\n",
            "80/80 [==============================] - 0s 387us/sample - loss: 7.6242e-05\n",
            "Epoch 146/300\n",
            "80/80 [==============================] - 0s 238us/sample - loss: 6.2456e-05\n",
            "Epoch 147/300\n",
            "80/80 [==============================] - 0s 235us/sample - loss: 5.8871e-05\n",
            "Epoch 148/300\n",
            "80/80 [==============================] - 0s 300us/sample - loss: 7.4420e-05\n",
            "Epoch 149/300\n",
            "80/80 [==============================] - 0s 316us/sample - loss: 5.8465e-05\n",
            "Epoch 150/300\n",
            "80/80 [==============================] - 0s 270us/sample - loss: 5.1686e-05\n",
            "Epoch 151/300\n",
            "80/80 [==============================] - 0s 234us/sample - loss: 5.3041e-05\n",
            "Epoch 152/300\n",
            "80/80 [==============================] - 0s 269us/sample - loss: 5.4061e-05\n",
            "Epoch 153/300\n",
            "80/80 [==============================] - 0s 330us/sample - loss: 5.1879e-05\n",
            "Epoch 154/300\n",
            "80/80 [==============================] - 0s 328us/sample - loss: 4.6496e-05\n",
            "Epoch 155/300\n",
            "80/80 [==============================] - 0s 234us/sample - loss: 5.5916e-05\n",
            "Epoch 156/300\n",
            "80/80 [==============================] - 0s 362us/sample - loss: 4.6813e-05\n",
            "Epoch 157/300\n",
            "80/80 [==============================] - 0s 288us/sample - loss: 5.6480e-05\n",
            "Epoch 158/300\n",
            "80/80 [==============================] - 0s 270us/sample - loss: 5.2724e-05\n",
            "Epoch 159/300\n",
            "80/80 [==============================] - 0s 225us/sample - loss: 4.6230e-05\n",
            "Epoch 160/300\n",
            "80/80 [==============================] - 0s 300us/sample - loss: 5.5393e-05\n",
            "Epoch 161/300\n",
            "80/80 [==============================] - 0s 310us/sample - loss: 5.3761e-05\n",
            "Epoch 162/300\n",
            "80/80 [==============================] - 0s 290us/sample - loss: 4.5535e-05\n",
            "Epoch 163/300\n",
            "80/80 [==============================] - 0s 240us/sample - loss: 5.9120e-05\n",
            "Epoch 164/300\n",
            "80/80 [==============================] - 0s 244us/sample - loss: 6.0277e-05\n",
            "Epoch 165/300\n",
            "80/80 [==============================] - 0s 277us/sample - loss: 6.0580e-05\n",
            "Epoch 166/300\n",
            "80/80 [==============================] - 0s 268us/sample - loss: 5.4407e-05\n",
            "Epoch 167/300\n",
            "80/80 [==============================] - 0s 289us/sample - loss: 5.3495e-05\n",
            "Epoch 168/300\n",
            "80/80 [==============================] - 0s 231us/sample - loss: 3.6709e-05\n",
            "Epoch 169/300\n",
            "80/80 [==============================] - 0s 212us/sample - loss: 4.9593e-05\n",
            "Epoch 170/300\n",
            "80/80 [==============================] - 0s 273us/sample - loss: 4.0684e-05\n",
            "Epoch 171/300\n",
            "80/80 [==============================] - 0s 272us/sample - loss: 4.6589e-05\n",
            "Epoch 172/300\n",
            "80/80 [==============================] - 0s 293us/sample - loss: 4.0371e-05\n",
            "Epoch 173/300\n",
            "80/80 [==============================] - 0s 420us/sample - loss: 4.5683e-05\n",
            "Epoch 174/300\n",
            "80/80 [==============================] - 0s 343us/sample - loss: 3.7670e-05\n",
            "Epoch 175/300\n",
            "80/80 [==============================] - 0s 217us/sample - loss: 3.4601e-05\n",
            "Epoch 176/300\n",
            "80/80 [==============================] - 0s 284us/sample - loss: 2.3439e-05\n",
            "Epoch 177/300\n",
            "80/80 [==============================] - 0s 238us/sample - loss: 2.2799e-05\n",
            "Epoch 178/300\n",
            "80/80 [==============================] - 0s 344us/sample - loss: 2.0087e-05\n",
            "Epoch 179/300\n",
            "80/80 [==============================] - 0s 272us/sample - loss: 2.3606e-05\n",
            "Epoch 180/300\n",
            "80/80 [==============================] - 0s 251us/sample - loss: 1.9352e-05\n",
            "Epoch 181/300\n",
            "80/80 [==============================] - 0s 248us/sample - loss: 2.3715e-05\n",
            "Epoch 182/300\n",
            "80/80 [==============================] - 0s 263us/sample - loss: 2.4014e-05\n",
            "Epoch 183/300\n",
            "80/80 [==============================] - 0s 249us/sample - loss: 2.4038e-05\n",
            "Epoch 184/300\n",
            "80/80 [==============================] - 0s 341us/sample - loss: 2.1474e-05\n",
            "Epoch 185/300\n",
            "80/80 [==============================] - 0s 243us/sample - loss: 1.9672e-05\n",
            "Epoch 186/300\n",
            "80/80 [==============================] - 0s 313us/sample - loss: 1.7820e-05\n",
            "Epoch 187/300\n",
            "80/80 [==============================] - 0s 221us/sample - loss: 1.5253e-05\n",
            "Epoch 188/300\n",
            "80/80 [==============================] - 0s 217us/sample - loss: 1.8206e-05\n",
            "Epoch 189/300\n",
            "80/80 [==============================] - 0s 279us/sample - loss: 1.6989e-05\n",
            "Epoch 190/300\n",
            "80/80 [==============================] - 0s 304us/sample - loss: 1.5647e-05\n",
            "Epoch 191/300\n",
            "80/80 [==============================] - 0s 275us/sample - loss: 1.5527e-05\n",
            "Epoch 192/300\n",
            "80/80 [==============================] - 0s 274us/sample - loss: 1.6099e-05\n",
            "Epoch 193/300\n",
            "80/80 [==============================] - 0s 283us/sample - loss: 1.7626e-05\n",
            "Epoch 194/300\n",
            "80/80 [==============================] - 0s 249us/sample - loss: 1.4902e-05\n",
            "Epoch 195/300\n",
            "80/80 [==============================] - 0s 296us/sample - loss: 1.4011e-05\n",
            "Epoch 196/300\n",
            "80/80 [==============================] - 0s 381us/sample - loss: 1.3903e-05\n",
            "Epoch 197/300\n",
            "80/80 [==============================] - 0s 360us/sample - loss: 1.2729e-05\n",
            "Epoch 198/300\n",
            "80/80 [==============================] - 0s 370us/sample - loss: 1.2183e-05\n",
            "Epoch 199/300\n",
            "80/80 [==============================] - 0s 268us/sample - loss: 1.0802e-05\n",
            "Epoch 200/300\n",
            "80/80 [==============================] - 0s 222us/sample - loss: 1.0722e-05\n",
            "Epoch 201/300\n",
            "80/80 [==============================] - 0s 232us/sample - loss: 1.0619e-05\n",
            "Epoch 202/300\n",
            "80/80 [==============================] - 0s 218us/sample - loss: 9.9713e-06\n",
            "Epoch 203/300\n",
            "80/80 [==============================] - 0s 306us/sample - loss: 1.0005e-05\n",
            "Epoch 204/300\n",
            "80/80 [==============================] - 0s 213us/sample - loss: 9.7906e-06\n",
            "Epoch 205/300\n",
            "80/80 [==============================] - 0s 272us/sample - loss: 9.3796e-06\n",
            "Epoch 206/300\n",
            "80/80 [==============================] - 0s 214us/sample - loss: 9.2135e-06\n",
            "Epoch 207/300\n",
            "80/80 [==============================] - 0s 343us/sample - loss: 8.6900e-06\n",
            "Epoch 208/300\n",
            "80/80 [==============================] - 0s 245us/sample - loss: 8.2565e-06\n",
            "Epoch 209/300\n",
            "80/80 [==============================] - 0s 216us/sample - loss: 7.7426e-06\n",
            "Epoch 210/300\n",
            "80/80 [==============================] - 0s 224us/sample - loss: 9.4238e-06\n",
            "Epoch 211/300\n",
            "80/80 [==============================] - 0s 218us/sample - loss: 9.5810e-06\n",
            "Epoch 212/300\n",
            "80/80 [==============================] - 0s 266us/sample - loss: 8.3745e-06\n",
            "Epoch 213/300\n",
            "80/80 [==============================] - 0s 226us/sample - loss: 7.6993e-06\n",
            "Epoch 214/300\n",
            "80/80 [==============================] - 0s 403us/sample - loss: 7.3743e-06\n",
            "Epoch 215/300\n",
            "80/80 [==============================] - 0s 251us/sample - loss: 6.9396e-06\n",
            "Epoch 216/300\n",
            "80/80 [==============================] - 0s 308us/sample - loss: 6.7948e-06\n",
            "Epoch 217/300\n",
            "80/80 [==============================] - 0s 239us/sample - loss: 6.5376e-06\n",
            "Epoch 218/300\n",
            "80/80 [==============================] - 0s 203us/sample - loss: 6.8594e-06\n",
            "Epoch 219/300\n",
            "80/80 [==============================] - 0s 252us/sample - loss: 7.7030e-06\n",
            "Epoch 220/300\n",
            "80/80 [==============================] - 0s 435us/sample - loss: 7.1884e-06\n",
            "Epoch 221/300\n",
            "80/80 [==============================] - 0s 333us/sample - loss: 6.7609e-06\n",
            "Epoch 222/300\n",
            "80/80 [==============================] - 0s 343us/sample - loss: 6.4750e-06\n",
            "Epoch 223/300\n",
            "80/80 [==============================] - 0s 301us/sample - loss: 7.2972e-06\n",
            "Epoch 224/300\n",
            "80/80 [==============================] - 0s 318us/sample - loss: 6.4829e-06\n",
            "Epoch 225/300\n",
            "80/80 [==============================] - 0s 287us/sample - loss: 6.7412e-06\n",
            "Epoch 226/300\n",
            "80/80 [==============================] - 0s 269us/sample - loss: 5.8594e-06\n",
            "Epoch 227/300\n",
            "80/80 [==============================] - 0s 270us/sample - loss: 5.3580e-06\n",
            "Epoch 228/300\n",
            "80/80 [==============================] - 0s 226us/sample - loss: 5.8172e-06\n",
            "Epoch 229/300\n",
            "80/80 [==============================] - 0s 246us/sample - loss: 5.3314e-06\n",
            "Epoch 230/300\n",
            "80/80 [==============================] - 0s 258us/sample - loss: 5.2835e-06\n",
            "Epoch 231/300\n",
            "80/80 [==============================] - 0s 385us/sample - loss: 4.9139e-06\n",
            "Epoch 232/300\n",
            "80/80 [==============================] - 0s 358us/sample - loss: 5.5161e-06\n",
            "Epoch 233/300\n",
            "80/80 [==============================] - 0s 245us/sample - loss: 5.2079e-06\n",
            "Epoch 234/300\n",
            "80/80 [==============================] - 0s 292us/sample - loss: 4.9370e-06\n",
            "Epoch 235/300\n",
            "80/80 [==============================] - 0s 278us/sample - loss: 4.8542e-06\n",
            "Epoch 236/300\n",
            "80/80 [==============================] - 0s 360us/sample - loss: 5.0252e-06\n",
            "Epoch 237/300\n",
            "80/80 [==============================] - 0s 280us/sample - loss: 6.0323e-06\n",
            "Epoch 238/300\n",
            "80/80 [==============================] - 0s 276us/sample - loss: 5.8640e-06\n",
            "Epoch 239/300\n",
            "80/80 [==============================] - 0s 215us/sample - loss: 5.8375e-06\n",
            "Epoch 240/300\n",
            "80/80 [==============================] - 0s 202us/sample - loss: 5.0084e-06\n",
            "Epoch 241/300\n",
            "80/80 [==============================] - 0s 253us/sample - loss: 5.3107e-06\n",
            "Epoch 242/300\n",
            "80/80 [==============================] - 0s 396us/sample - loss: 6.9470e-06\n",
            "Epoch 243/300\n",
            "80/80 [==============================] - 0s 257us/sample - loss: 5.1279e-06\n",
            "Epoch 244/300\n",
            "80/80 [==============================] - 0s 347us/sample - loss: 4.6312e-06\n",
            "Epoch 245/300\n",
            "80/80 [==============================] - 0s 308us/sample - loss: 4.6249e-06\n",
            "Epoch 246/300\n",
            "80/80 [==============================] - 0s 258us/sample - loss: 4.0908e-06\n",
            "Epoch 247/300\n",
            "80/80 [==============================] - 0s 400us/sample - loss: 4.6141e-06\n",
            "Epoch 248/300\n",
            "80/80 [==============================] - 0s 271us/sample - loss: 5.3335e-06\n",
            "Epoch 249/300\n",
            "80/80 [==============================] - 0s 252us/sample - loss: 4.5967e-06\n",
            "Epoch 250/300\n",
            "80/80 [==============================] - 0s 270us/sample - loss: 4.4175e-06\n",
            "Epoch 251/300\n",
            "80/80 [==============================] - 0s 297us/sample - loss: 3.7273e-06\n",
            "Epoch 252/300\n",
            "80/80 [==============================] - 0s 208us/sample - loss: 3.5868e-06\n",
            "Epoch 253/300\n",
            "80/80 [==============================] - 0s 295us/sample - loss: 3.8783e-06\n",
            "Epoch 254/300\n",
            "80/80 [==============================] - 0s 246us/sample - loss: 4.6820e-06\n",
            "Epoch 255/300\n",
            "80/80 [==============================] - 0s 219us/sample - loss: 3.8402e-06\n",
            "Epoch 256/300\n",
            "80/80 [==============================] - 0s 286us/sample - loss: 3.6388e-06\n",
            "Epoch 257/300\n",
            "80/80 [==============================] - 0s 234us/sample - loss: 3.3160e-06\n",
            "Epoch 258/300\n",
            "80/80 [==============================] - 0s 255us/sample - loss: 2.9783e-06\n",
            "Epoch 259/300\n",
            "80/80 [==============================] - 0s 351us/sample - loss: 3.0353e-06\n",
            "Epoch 260/300\n",
            "80/80 [==============================] - 0s 327us/sample - loss: 3.2506e-06\n",
            "Epoch 261/300\n",
            "80/80 [==============================] - 0s 242us/sample - loss: 3.5131e-06\n",
            "Epoch 262/300\n",
            "80/80 [==============================] - 0s 284us/sample - loss: 2.9673e-06\n",
            "Epoch 263/300\n",
            "80/80 [==============================] - 0s 231us/sample - loss: 3.2775e-06\n",
            "Epoch 264/300\n",
            "80/80 [==============================] - 0s 242us/sample - loss: 2.9067e-06\n",
            "Epoch 265/300\n",
            "80/80 [==============================] - 0s 249us/sample - loss: 2.9360e-06\n",
            "Epoch 266/300\n",
            "80/80 [==============================] - 0s 307us/sample - loss: 3.1205e-06\n",
            "Epoch 267/300\n",
            "80/80 [==============================] - 0s 241us/sample - loss: 2.4764e-06\n",
            "Epoch 268/300\n",
            "80/80 [==============================] - 0s 235us/sample - loss: 2.8608e-06\n",
            "Epoch 269/300\n",
            "80/80 [==============================] - 0s 264us/sample - loss: 2.5285e-06\n",
            "Epoch 270/300\n",
            "80/80 [==============================] - 0s 242us/sample - loss: 2.7170e-06\n",
            "Epoch 271/300\n",
            "80/80 [==============================] - 0s 224us/sample - loss: 2.9606e-06\n",
            "Epoch 272/300\n",
            "80/80 [==============================] - 0s 292us/sample - loss: 2.6021e-06\n",
            "Epoch 273/300\n",
            "80/80 [==============================] - 0s 332us/sample - loss: 2.4352e-06\n",
            "Epoch 274/300\n",
            "80/80 [==============================] - 0s 231us/sample - loss: 2.7566e-06\n",
            "Epoch 275/300\n",
            "80/80 [==============================] - 0s 235us/sample - loss: 2.1990e-06\n",
            "Epoch 276/300\n",
            "80/80 [==============================] - 0s 263us/sample - loss: 2.5519e-06\n",
            "Epoch 277/300\n",
            "80/80 [==============================] - 0s 268us/sample - loss: 2.5170e-06\n",
            "Epoch 278/300\n",
            "80/80 [==============================] - 0s 269us/sample - loss: 1.9573e-06\n",
            "Epoch 279/300\n",
            "80/80 [==============================] - 0s 276us/sample - loss: 2.6911e-06\n",
            "Epoch 280/300\n",
            "80/80 [==============================] - 0s 240us/sample - loss: 2.3851e-06\n",
            "Epoch 281/300\n",
            "80/80 [==============================] - 0s 233us/sample - loss: 2.4802e-06\n",
            "Epoch 282/300\n",
            "80/80 [==============================] - 0s 253us/sample - loss: 2.2609e-06\n",
            "Epoch 283/300\n",
            "80/80 [==============================] - 0s 253us/sample - loss: 3.1520e-06\n",
            "Epoch 284/300\n",
            "80/80 [==============================] - 0s 309us/sample - loss: 2.7761e-06\n",
            "Epoch 285/300\n",
            "80/80 [==============================] - 0s 368us/sample - loss: 2.4314e-06\n",
            "Epoch 286/300\n",
            "80/80 [==============================] - 0s 269us/sample - loss: 2.3167e-06\n",
            "Epoch 287/300\n",
            "80/80 [==============================] - 0s 242us/sample - loss: 2.2000e-06\n",
            "Epoch 288/300\n",
            "80/80 [==============================] - 0s 267us/sample - loss: 2.7738e-06\n",
            "Epoch 289/300\n",
            "80/80 [==============================] - 0s 236us/sample - loss: 2.7103e-06\n",
            "Epoch 290/300\n",
            "80/80 [==============================] - 0s 290us/sample - loss: 3.5895e-06\n",
            "Epoch 291/300\n",
            "80/80 [==============================] - 0s 386us/sample - loss: 2.0682e-06\n",
            "Epoch 292/300\n",
            "80/80 [==============================] - 0s 271us/sample - loss: 2.2282e-06\n",
            "Epoch 293/300\n",
            "80/80 [==============================] - 0s 227us/sample - loss: 2.2646e-06\n",
            "Epoch 294/300\n",
            "80/80 [==============================] - 0s 254us/sample - loss: 1.6006e-06\n",
            "Epoch 295/300\n",
            "80/80 [==============================] - 0s 277us/sample - loss: 2.0118e-06\n",
            "Epoch 296/300\n",
            "80/80 [==============================] - 0s 291us/sample - loss: 1.7880e-06\n",
            "Epoch 297/300\n",
            "80/80 [==============================] - 0s 352us/sample - loss: 1.5494e-06\n",
            "Epoch 298/300\n",
            "80/80 [==============================] - 0s 308us/sample - loss: 1.6165e-06\n",
            "Epoch 299/300\n",
            "80/80 [==============================] - 0s 232us/sample - loss: 2.1783e-06\n",
            "Epoch 300/300\n",
            "80/80 [==============================] - 0s 283us/sample - loss: 1.9141e-06\n"
          ],
          "name": "stdout"
        },
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "<tensorflow.python.keras.callbacks.History at 0x7f5c4b7f0358>"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 29
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "n-aNP4n3sqG_",
        "colab_type": "code",
        "outputId": "1c42b67a-5d5f-491c-845b-e964129d6714",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 265
        }
      },
      "source": [
        "# Plotting code: visualize the decision boundary learned by tn_model.\n",
        "h = 1.0  # step size of the evaluation grid (mesh resolution)\n",
        "x_min, x_max = X[:, 0].min() - 5, X[:, 0].max() + 5\n",
        "y_min, y_max = X[:, 1].min() - 5, X[:, 1].max() + 5\n",
        "xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n",
        "                     np.arange(y_min, y_max, h))\n",
        "\n",
        "# here \"tn_model\" is the trained model's prediction (classification) function\n",
        "Z = tn_model.predict(np.c_[xx.ravel(), yy.ravel()])\n",
        "\n",
        "# Put the result into a color plot\n",
        "Z = Z.reshape(xx.shape)\n",
        "plt.contourf(xx, yy, Z)\n",
        "plt.axis('off')\n",
        "\n",
        "# Plot also the training points\n",
        "plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired)"
      ],
      "execution_count": 30,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "<matplotlib.collections.PathCollection at 0x7f5c4bdcdbe0>"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 30
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADnCAYAAAC9roUQAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO2deZycZZXvf8+71dpb9d6dTmffyU4I\nCQkkEHaYYRFlUNxHvXodHR1n3PXe0evV0Y+OdxxwQRRUFER2wRAgARJISEJCVrInnU56767qWt/l\nuX9Ub9X1VndV9Vtrn+/nw+dDd1e99VR16tunznPOeRjnHARBEER2EHK9AIIgiMkESZcgCCKLkHQJ\ngiCyCEmXIAgii5B0CYIgsog01g9vqPsfVNpAFDSR+U0xX/fNtMV87WtmcfcJT43EfD1tSkdSj7Wp\n7kiKq7OOO0v35OyxiXjmNrXG/8MaYEzpEsRko9CES7ItPEi6BFGAkGwLF5IuQUyQbEa5JNvChzbS\nCKJAIOEWByRdomhJdRMtnXxutqJcEm7xQNIliDQh4RLpQNIliDyGhFt8kHQJwoTxUgvZiHJJuMUJ\nSZcoSkbnc8djdD4315BwixeSLjEpMetES5ZMR7kk3OKGpEsUHWZR7ujKhXQh4RIThZojiKIi1bSC\nGYnyuZkULsl28kCRLlE0pCvcZPK5JFzCKki6xKQj1XwuCZewEkovEEXBWFGuVflcKyHZTl4o0iUK\nHivyuIOMzudmIsol4U5uSLpEQTNR4Wa7PpeES1B6gZhUTKQ+dyKQbIlBKNIlCpZkotx8zOcSkxuS\nLlGQWJnHTUQuzzwjiheSLlFwZEq4yZ6HliqUWiBGQtIlJg3jDS3PBCRcYjQkXaJooXwukY+QdImC\nIhu5XKugKJcwg6RLFCUTPQ9toptoJFwiESRdgrAYEi4xFiRdomBINrWQy1wuCZcYD5IuUVSYCTdX\nXWgEYQZJlygako1wM5XPpSiXSAaSLlEQ5HvVAgmXSBaSLlEUJIpys5FaIOESqUDSJQqeVDbO8u2o\ndWLyQaMdibwnk6mFicxboAiXSAeKdImiJZ1ZC8luopFwiXShSJcoaLJdk0uyJSYKRbpEUZLMBlqq\nqQUSLmEFFOkSBUu2NtBItoSVUKRLFB1WlomRcAmrIekSeY0VlQtmUa5ZaoGO5yGyAUmXKEhy2QxB\nEBOBpEsUDWbCpWYIIt8g6RIFx0TLxDJ1ACVBJANVLxBFwUTTCoWUzw2eE9H5vA3hCyJkj4GqTWG4\n5mq5XhaRJBTpEkVLshtohUTwjIhz97sQOC5B9wsInZNw/iEnvG/LuV4akSQkXYIoIDqes4OrDMBw\nZM9VhvZn7OA8d+sikoekS6SNZJNR3lwNxW3P2mMmezJEKlFuotRCPtbohlpF0+/rfgYjlOXFEGlB\nOd1JRnlzNRa9dw0802vQfbIN7/xxO/rOdqZ8nUvuXotL7loDQzcgyiJObzuMHf/5HAxVz8CqiUGk\nUgNqZ7x4mQQIuTsajkgBku4konp+I6757t0QZQmCKKC0qRJNa+Zg85d/j84jrUlfZ+Y1l2DRey6H\nZB/OIzavmwc1FMHO/3rBsvWm2xhRjLncQSqvDqPtccdAiiEKkzkqrgiD0efWgoB+TZOIVZ+6FrJd\ngSBGf+2CKEC2K1j1yWtTus6i966B7FBivifZZMzatBiCbP7xN1NMdLBNIVUtAEDZchXV14cg2DmY\nHP2vYk0YVZvCuV4akSQU6U4iPDPrzL8/y/z7ibCXOU2/zwQG2a4grAZTXptVTIZmiIorIii/PALd\nzyA4OAQqXCgoSLqTiEggDJvJppfqj4+SmMAw58ZlmH3DMgiSiJMvHcDhJ3ZCD2voOHweDStnQBBi\nPyiF+oII+zIn3HSaIoopyh0J
EwGplMoVChFKL0wijjy5C2ooNhJUQxEcenJn3G2v/OrtWP7RjfDM\nqEX51Cosvnstrv/BB8AEhj2/fhl6WIOhGwAAbnBooQh2/sy6fG4ypHMyBEHkGop0i5iKGTVYeOdq\nlE6pRPuBczj8xE44KtyYec0l0FUNoizh1JYDOPDI9pj7eWbWomH5dEj24bytZJNR2uhB0+o5OLv9\nKJ79nw9g8T9cgaq5DfBd6ME7j7yO9oMt2X6KY1Ism2dEcUHSLVIaVszAVV+7A4IsQhAFVEyrxqxN\ni/HsP/0aex98Be66MvRf7DNNB1TPnwKw+A0q2WlDzSVNOLv9KLznu/HaD57KxlOB4lBQVlMKzanC\nH7CmGHW81EI+1ugSxQFJNwfM//tLsei9a2Avc8Lb0o23fvEizu86YeljrP7sDTElXaIsgQkCln3o\nKmz77l/GzL0Gu/uHUgcj0cIq/B1eS9c5HqtuXo4F6+ZB13SIsohTZy7guc1vQtNj64FHpxYKJZcb\n6WIInpEguTicszSw7BZ/EDmApJtlLnnfGiy6a7jkqqypEld+5XZs+eafAM5RPa8RgS4fzrx+BHo4\nvSEmtjInHOWuuO8LooC6Jc3j3r9l13HoEQ2GXY7ZLOMGx6ktB9JaUzrMvXw25q+dA0kWIQ2Uok2b\nWoeN65fhzyd3p3XNfBEu50DbE3Z431IAgQOMQVA4pn7CD6U6/g8eUTyQdLOIIAlY9J7L42tc7TI2\nfOMOMEGAKIvQIxou/cQmvPClh9F7JvW8pBZMvKEU8Y3/8dxQdbzwxYdw5dfuQEl9OTjnCHuDePV7\nTyDUF0h5Pemy+KoFkG2x9VCyLGH+nGZIp/dCM1KTUz4dr+7bL6NvtwJow3MU9AjQ8hsnpn+h3yy7\nQxQJJN0sYitxgInmBSOy0wY28E4TJBHc4Fj/ldvw1Cd+bnr70kYP7OUudJ9si5OsHtFw+tXDaL5i\nHqQR0lJDERx8/M2k1uo9342nP/ULuGrKIMgifOe7k7qfldhd5iVijAGyKEEzzP+45PsGmtrLcPHP\ndkAdZVbOoPUKiLQLsNVStFuskHSzSMgbNM2VAhgS7tDXAoO7tgyumlL424fzqPZyFzZ+6z0ob66G\noekQJBF7H9qKw4/Hln29+dPnobjtqF86DYaqQ5BFvPvsXhx7bm9Ka/a396V0eyu5cKINUxdOiasH\n7veHEFSHhZtMqVim0grcALS+aJOCmMTcH24AZ+9zgYfNQ1kmIKbFlyg+SLpZhOsG3nlkOxbfvTYm\nxcANDiYkeqPFfn/DN+5AxcxaiJIIIBrFLn3/evSd7kDrnlNDt9PCKl7+1qNwVZfCWV0K77mujDYu\nJEvl7Dos//AGVM6uR6DLh/1/eB2ntx4yve2uZ/eiYVYdRFmCKAkwDAO6buDFrbtHvyxDTPTAyVRS\nC953JLQ/4YARZoABuBaoqH9PcMzBM4ETInS/gIRPQOCw1dPQoGKGmiOyzMFHd2DPAy8h0OWDoRvo\nOd2O45v3QQurMbfjBkeo1w9XdQlEJfq30V1XjooZg8IdRnYomH/7ZaaP5+/wouNQS14I1zOzFtd+\n//2oWzINituO8uZqXP65GzHv1pWmt+9r9+Lx/3gGR988hvbOXhw7eR6PPP4S3mFtST9mpiLc4BkR\nF//ohN4vgKsMXGfwH5bR+jvzFulBtN4x3nKMo/69waQrGLR+hvAFAYY6/m2J/IEi3Rxw9Jk9OPrM\ncEQlKhLKmqpQMa0akkOBHtYgKiJsZS5s/PZdYIKAN376V3jPd8PQdMAW32zvqIivVsg3ln5gPURF\nionqZbuCpfeux9Fn94CPSL0MThjr7/Fj+593xrYAu4bvP1ZqIZOVCt1bFfBRsuMaQ+CEBLWXQS43\nb9G1N+mA2Y8EjqrrQnDPH79ixQgDF/7ogP+oDCZGKyGqrg3Bs4468goBinTzAD2i4fkv/hZbv/
M4\n9v/+NWgRFRyAbJehuOyQHQou/+yNAI/P/QKAFtHQ8uax7C04zZRj5ez6uPwsADBBgNPjnuCiJr6B\nlkpqQe0WYfZCMIlD8yZ+W9nqDLjmqWDyCPOKHHKFgYq1yUnzwqNR4XKNwQgz8AhDx1/t6HxRodMj\nCgCKdPMFDrTuOQUtrGH+bavi5CTIAmZfvxRv/eolXPrxqyEqMpjAoEVUhPuCOPyXXRlfomdmLS77\n9PWomtsAPaLi2Av7sftXW5IeXO672AuHiVwZYwh5rS1Fy3Q9rmO6hnCbABix4uUaA5N4NBI9KUKQ\nAanMgL3OQNmlEdjqDDTcHUTPdh29byrgKkPJJRFUbgwnNS1MDwD+w1Hhxv6AoetFO/xHZEz5mD+p\nTT0iN5B08wxbqQNm4YogimhYMR0zrl4EQRSgRzT4O7049fJBHHnqLUT6M3tWi6umDNf94ANDG4CS\nXcHs65fAXVuKl7/9WFLX2P+7V3HV1++M6ZTTQhG8+/zbYzaCTPTI9WS42fc2uvcpEF0cJQvVcU9h\n8FwZhnevHD0ih0cFyGSOskvDOHefG0YYABh0AGqHiOBxjt43FdTcGkT5KhWedZGE6QBDBXz7ZPjf\nlSCVGyi/TIVSGU296AEh8edTzhA6J+L4/ypF1fXRdAPV++YfJN08o+NQCwQ5/tdiaDocnhKIA51Z\ngiTCVVWK1t0nMy5cINq6LEix73bJJqN+2XS468rRf7F33Gu07jmF7T9+Fpd+4hoobju4znH02T3Y\n88DLSa9j5GQxS6aKcY41zx7B2UNucCM6MrH9SQemfMwPR1PiCF4u52j+bD86/2ZH4LgE0cVReVUY\n/UclRMuHR9uOgavRa5dcokJ0mF/XCANn/p8baq8AHmGAwNG73YbGDwTgmqtBrjDARA6eMMfDAB3o\n+psdop2jfBXtsuUbJN08I9QXwIE/bcfCO1YPRZVaSIWoSBBGlZWJNgnLP7IBf/vX32V8XZ6ZtRDN\n/hioOkqneJKSLgCc3noIp7cdgs3tQCQQjtk8mwgj87mpdJ5598q4eGj4+Bs+sM91/jdOzPyKb8wj\ncBQPR8P7YqtCOl6wDUW+pghA4ISEkkXmkX33qzao3cJw+sBg4EZ042zm13xgIlBzcwhtTzjGrOfl\najTdQNLNP0i6ecj+372GzqOtmHfLSihuOzoOn8fsG5ZBcca2DzPGULOwCYIsZvxAyK53L6B6fmOc\neAVZRN/ZrtQuxpHTEraRG2a9O2VTeRlhhnCrCPuU1F5XuYxD60n8cx4BLj7qgCAH4ZobL17ffpN8\nLQBDZYi0CbDVGyhbqUIqN3D+QddABYW5fDUf5RbyEapeyFNa3zqJl775Jzz/hd9i/+9fhaSYF29y\nztG4cmbG13P4yV3QIxq4MZxv1kIqWnYez2nXWqrEVSgYCcTEot1jqeLZEI6tTDC5sBEScP4hJyJd\n8Y8tKAnuywE24m+ua5aO5k/3g9kGfmj2SBRS5SUk3TzGXuHCyn+8Bjf+5MPQEkSyhqZDTjCjwEoC\nnT48/4WHcHHfaeiajrAviMNP7sJr33/S8scaeQqwVZtod5buMS0JK10RMZUkEznsjal/enDP01Bz\ncxCCjUenh2Hwv1i4AfTtVOK+X77GZD2MQ640hjbTBrHVG2j8oD9hCZ+thjrb8hH6W5in2MqcuOW/\nPgrFbR/6SM85j5/RwBja9p3Jypp6z3Rg81f+kJXHMiPRJpppPpdzMAO4o2LsWRNlK1T49ssInpHA\nIwPRIQMa7gmkPdu2fLWKspUq1B4BvgMSOjfbAD2+xEvtiY95SpepCJ4S4d0zMPIRDGAc4MC5XzpR\nsTYS00DhnK5DKuXQ+oAY+0oclVfTCcGZ5jHvctPvf3WM+5B08wRBFlHeXI2wNwB/uxcLblsFxWWP\nyaEyxobEyw0OLazi8BO7sj5YXLLLqFnYBC2souNQS0zKYS
KMjHDTxuCoebUbVTv7IKoGTla4UfN3\nIbjnmW9cMRGY8tEAAsfFaBWCm6N0qQqpZGLPiUmAUm2gZLGKrhftcbEukzlcs+PXxBhQd0cInqvC\n8B+X0LXZDiMYzedG2kQEz0jwXBVC1dXRPzpMABo/5Me5X7iim4A6AB0QJI7enQqkUp5UXlrzMXj3\nytB8DM4ZOlxztTE3EScbieSaDiTdPGDWtYtx6Sc2gfPozN2u4xch2+WhmQsj0cMquk+2w9/pxfG/\nvo0Lb5/O6lpnXL0Iqz9z/cC0NAY9rGLLN/6I7hPJz0MwwxLhAqjb0oXKvV4IajRKVLtFtD7sxJSP\n+uGcbi4fxgDXbB2u2dZ/HFcqOUqXqfC+PbxhxyQOudxAydLElQVKJUf/QQYjxGI21niEofslOyou\nVyE6oyq3NxiY9VUful9T0Pk3O8ABIyTAf5ghcFxC44f8cM1K/NwCJ0W0/NoFGNHmjt43OOwNOqZ8\n3A9hEhrCSsGaMQlf0vyiZlETVn3q2phDIKvmNEALRmAYRnzbLGN47T+eQv+F5Eq0rKRsahVWfyb2\nGCC4bNj03bvx6D3/CUNLfefJKtkCwLWeQ6jc44WgxcaV0fIpG5wfz94A9pHU3h6EY7qG3h0KjDBD\nyWIVnvXjd6D5j0imlRVM5AidE2OqH5gE9B+QR6UxhmuDp3+h3/QxuAG0/s4ZrQke/F6EIXReRO8b\nCjxXFP88h0xLdjQk3Ryz4LZVEJXYd58oixAke9z+i65q6Dp2MaPCnbp2LhbffQWcVSXofLcVex98\nBT0n2wEAs69bEtcgAQBMFFC/fAbO7zye0mOlItxk8rlyv56wRDbSkbvDx5gQzR2XrUitZlYq4wP5\n3FGtxpxBdMf/gQu1mj/HSLsA3yEJooPD0azHpA3CFwUYkfgXjasM3j1yUUo325IdDUk3xzirSkxn\n6TLGwMHBOYce1iDIItRABKe3HcpYXe6cG5dhxcevhjwQdTesmIHaRVPx13/+DXpPd8BW6oQgmbyx\nGYOSQgVFsrJNtXJBLRETbOTn14xargG+d2T4DkZzyBWXRWCrj5do+ZoIfO/IsdPMWDQ1YWuIv73o\n4ND7zV+BC39wgDEGpnA0fdQ/9Hhj5W2L6ZDMXIt2JCTdHHP+rZMon1YDySR/yxiDrurRI34YYC9z\nYvmHN2DG1ZfghX95yFLxMlHA8g9vGBIugGhqwyZh2QevxMvffgzn3jyGqWvnxp3xJkgiLu7PTgXF\nWHBZQMfqclS90QtRHf6YwGSgalPmW6WTwdCAc/e5EG4Tox/pGYf3LQW1twXjImFHk47a24Joe9IB\nhmgqQKnS0fihgOlMBc/6MDo320elJKK5bR5h0Q9OYeDcL13RbjsRUGoNiC4DWiR2sDpTOMpX5XeU\nm08iTQWSbo458uSu6Mf2MvMocnDWwiCyQ0H51CrM2LAQx/+237J1OD1u09SBIAiomtMAADi3/Si6\nblmBytn1kB3KUAXFocffRLDLPGc4mnRyuCNTC+PRvq4CmlNE8xvt0P0Mtnod1TeHYJ+SH2eO9e2W\nEb4oDouRR/OubX+JzmQQRpXulq1QUbJYRfiCCNHBxzwpuGJdBFo/Q+92G5iI6AyIuCKM6OMFTolw\nzdLBGNB4bwDnfu4CN6LVD4wBrrkqSlNMh4xHoUoyVTZfnIevLkz8c5Jujgl7g3j607/Cyo9txIyN\ni8BGbZyZ1ebKDgVTr5hnqXRD3gASjaTqH+g44wbH5q/8AdOvWoBpVy6AGozg2HN7cTHJOuFUhJso\ntTDukBvG0L2yDBs3nkj6sbJJ/37ztmMmAMGzommVgSADjqnjf6phAlBzUxhV14Sh9ghof8aGwLH4\nBgzOASM4vAZ7g4GZX/Wh/6AMvZ/BMV2DvTEq98kiykRsvjjP8muSdPOAcF8Ar//wGbiqy1A1r2Ho\nBF9d06NDbkbJ0DCMpI
5STwU9rOH45v2Ydc0lMZUUWiiC/b9/behrrhs4ueUATm45kNL1raxSGCTf\nT/01Q7ANdqiN3hxDUvN0k3uM6LB0rV8wfSxdZXixaiF076hPVjNG/H92S7+zRiYkmiok3Tzixa8/\ngkveuwazro1WCZx59Qia1s6BsyJ2s82IaDj6bPKnHCTLrvs2g+sGZl+/FEB0tsLuX27B+V3ZjRpH\nRrmppBYKgfLVKvzvjtocA4fo4NGjfCziz51LsaDtlGmfP1cYdFdx7JLlg0RThaSbRxiqjn0Pv4p9\nD7869L2jz+7Bpu/cDcmhAODRI9d/sxUdh1osf3yuG9h132bs/tVLsLntCPUFLOk2syKtUCy45mjw\nrA+je6sNEKMxKJM5pnzEn1YHWKKP/4KRWOA84cnTuafQJHq6pTrl+5B085y+s5147N6fomZhExSX\nDe0HWzI+tNxQdQR7/JZcy0rhWjK0PA+oujaM8ssjCJyM1s46Z2pJl2clm2M17CIilQrsHbGvGRcA\n7+zcHGJaCEJNR6KpQtItBDjQfuBcrleRMhMVbrGlFkYilXCULkm+OiCdDa2Wm6sx/XetYDqHoAOG\nxKDbBbRd5Un5WsmQz1LNhkyThaRL5BSr0gmZPogyF0y0ciDYYMe7n5wKz14vbF0RBKbY0bO4FIZt\nYpNs8kmu+STTZCHpEhkhmSh3IsItxMqFZLGyTEsrkdC+Pv3INteCzSep2s7Gl9+lA0m3CCidUoma\nBVMQ7OlH655Tlp07li5WCHd0aqFY8rljkQ81sdmWbD5I1SqZJgtJt5BhwNrP34zm9fPBDT7UIfbC\nlx6G73x3Vpdi5TyFZIR7uqU6JtrdfHFeTIphLIGZnSCRLvkgymTIRcSaa6FmW6bJQtItYGZefQmm\nXjFvqJkCACSHjA1fvxNPffLnGXvcdBod0pEtMHaEO554E1EoohyLyShRIH9Fmgok3QJmzk3L44fP\nCALctWUoaaiAr3WMY2lTYKLdZKmmEgZJJqWQrnjzgVznS4Hci7SQJFpyxpoTUki6BYzZyRIAwA0j\n4c+SwaqW3XSj21TJtnjzQZZmkECTxyqBpgNJt4A59cpBlDZ4Yk9yAKCFNfSd7Uz6OlbPRbBCtqlu\nnI0n3nwVZSJIoMmRS3mmC0m3gDn61G5MX78AJY0VkB026BENhm7gte8/mVT7biaG0KSbShhkIlUK\nZuLNB3Ip0HyWZyEK0wpIugWMFlbx3OcfxNS181C/dBr8HV4c/9s+BDp9Ce+TC9EOkknhDjJavJmA\nJDp5hWlG2YnUjron6RY4hmbg9NZDOL310Ji3K3bZjiRd8eZCprmS6GSSZqpSzDQk3SImE6IF8le2\nIxkt3mwLNZsyLSSB5psAcwFJtwjJtWyB3Ap3kEyINlsyzbVISY6Zg6Sbp2RKnMmS6lyEZEq/xhOt\nWUqgmD7yZ1OkJM38haSbQXItzmRIZ+hMKrW1Y4k210NrMiFXK8TqKS9BdVU5evr60d4xdoMLybXw\nIOmmSCGIdCQTHZ2YavNCOtHseLe3Itq1WrCZiFoFQcAt11+O5qZaGEb0QNLOrj48/vQ22A8nd9oy\nkTzK4dzMqCbpDlBoMh3EyuNtrBbsILmKaK0QbTZTAqtXzse0xlpI0vDbsqaqHDesWIqXD782xj2L\nj1wJMRsUtXQLVaSJsEqw6bTeprrxZaVo04l20xVutiRrlhZYeu8MSKPatyVJxLTFUyGIAowcj+xM\nh2KWZ7oUpHSLTaZmFJJggdznZ9Mhk4JNJ9cqyuYHpTHGoqdBW3dY8IQgkU6MvJHuZBBpIqwQbLqD\nY3IZwQKJj9kZ3cKbSrRrFuVaKdhMbV61HGmNRrVC7HE63a090NXsGJeEmnkyJt3JLNHxKATJpitX\nq6Z7bao7ktbshJHCnahos10Z8OZTu1E/qw6yIkFSJGiqDkPX8eqf3pjwtUmm+cOY0iVx
WkcuRJtJ\nwSYtV87hPBeC3K/DP8UOrTT5v/Opineiws11+ZW/N4DHvvck5lw2C7XTqtFzoReHd7yLQF/Q9PYk\n0sIkb9ILxUi+lWsNkopkJxK5yr0qZjzcCjEY/WjMDKB7WQkubKoCWOqR+lgphlQ3znIt2ETC5ACO\n7j6BoyNvm5UVEdmCpGsh2Y5mrZZsuoJNdObYqV+5EfEKAB9+ThVv+xCY4kDfAnfSaxov2h0tXLMo\n1wrJUmRJWAFJNw1yVRubyqZXrkQ7SKRTgNoVK1wAEFWOyl19SUt3PMYSbrqiJbkSmYSkOworhTqS\nfKqNnehmVzKn6RphAIL5z4RI4dWbEoRVTCrpZkqoI5nImV/pTt5KRrZWVBWkcnS5rc4AEzg4Yl8P\nJnH0zbcmyiWIQqQopZvvch0k000IVpVvpSLbQZgI1N8VROvvneA6AIOByRxyuYGuS8ssWRdBFCIF\nLd1syBWYuGAzGcGasanuCJjGwYIGDLuQVqXAIOkIdxD3Ag3T/qkfvTsUqL0MrnkaSpep2O1nKHnX\nD6VPQ7DehkCjbUJrJIhCoiCkWyhyBbIv2EEGo1qmGmh4phPlB/oBzqGWSmi9oRr9M5wpXW8ish2J\nUm2g5tbQ0NdqD8Pcn52BGDLADA7OGIINNpx+Xz24lCAJnCTZmJtQt3QaFt5xGZxVJWh96yQO/vlN\nhHr9GX9conjIS+kWc3oAsE6wZjQ92Y6S434IA12jtl4NzY9exIkPNiJUZ4Pcq8KzxwulT0P/dAd6\nF7rB5WHZJSPbwCkRPa8q0PoEOOdq8KyNQHQlJ7wLjzgh+XQIQzfncJ4Po3p7L9rXe5K6RiYZq3Jh\n9o3LcOnHr4Zkj1ZMlDZ4MOOaS/D0//glQj0kXiI58kK6hSDZXAg21Zys5NVQcjwwJNxBmM5RvaMH\n3UtLMe3Ri4DOIRhAyTE/qt7oxYkPNeL2mn1JPUbvLhntTzrAVQBgCF8U4d2loPlz/ZDGEa8eBPxn\npRHCjSJoHBX7fKbSzZdj1AVZxMqPDQsXAERFgsKAhXesxu5fbsnh6ohCIifSJcnGYtWGl+LVwCXE\nTaNiHKjt6UPl0z7o6nBUK6ocUl8E694+Alw7/vUNDWh/2gGuDr+2XGPQ/EDPNgXVN4xTF2swIMGv\nhen5fbhi+dQqcCN+jaIsoXHlDJIukTRZkW4h5GSzNc7QKsEOMjIdoE9jOKGXxJVpQeBQagxEuuJH\nB3KNwbdfRvW14zcSRNoEMERbVWPQGfoPy+NKV3RxhCsV2NsjMSs0RFjWLJEpQn0BCAlyzoEuX8zX\ndUuaseD2y+DwuHF+1wkcfmInwl7z+QnE5MNy6WZLsED2o9lUJZuJki3OgeApEWqPAHujDpQO3050\ncpStDqPvTdtwNMo4BBkov74dX90AABYRSURBVDyM/oOy6fUFJbkoU3TyaPmX2c/cyTU8tNxagxkP\ntQI6h6hx6DKDViKhbV1FUvfPFYFOHzoOn0fNwikQ5eG3jR7RcO6NY0Nfz7lpOVZ+bONQGqJ8ahVm\nXbsYT3/6Vwj3BVDWVIlp6xdAUEScff0out69MObj1ixqwqK7Loe7thxtB87iwB93wN/eN+56BVnE\norsux+xrl0CQRZx57Qj2PfQqwj6Sf65JWrrZlKkZudj4yvV4w9GbWpqP4ez9Lmh9AgAOcAbnbA2N\n7w+ADQSxNTeHoVRydG9TYAQFOKZrqL4xBFuNAaVGR/iCGNOay2SOisuTe13kCg5bo47QOTGaKhhx\nDc/65K4RqrXh6KenovwdH5QeFYEpdnjnusGl/C8Z2/qdx3Hl125H9bxGCKIANvDfpZ/YhIW3X4bn\nv/QQVnx0Y1ze11biwILbViHY3Y/lH9kQva/AMP/WlTj2/D7sun+z6eNNu3I+1nzuJoiKDCYwlDRU\nYPr6BXjmsw+g/0LvmGvd+O27ULNgCiRb9A/t7OuX
onHFTDz5yZ/DyNJsXsKcMaWbC9FaIVegMDa+\nEpGoguDCI47oPAODYTA5GjgmoXubgsoN0efLGFCxJoKKNfHPv/EDAZz9uQt6QIiGzAZDyRIVpSvU\npNfWeG8A5x90InxRBBMAbgCVm0Jwz9OSvobuENG1qjzp22cTJgpYcs86zL1pOSSHgvaD57Dr/s3o\nPd2BSH8Im//t91j5j9dg3q0roic6iNHfg6u2DDf/18fAjfiIX1QkNK2eDXdd+ZAEAUCyK5h9/RKc\neuUgOo+2xq5DYFj1qetiBS6JYA6Gpe9fj9d+8FTC51A5px7V8xpjHkuUJdjLnZi2fj5ObjmQ9utD\nTJycbKRZJdZB0hUskNnyrXSISSXoQP8hCf1HZAg2A4GTUkyECQBcZeh9c1i6YyF7OGZ8qR+BkyI0\nrwBHsw6lMrU5CJKbo/kzfkQ6BGj9DLZ6HaI9pUvkNVd88RY0rZ49JLu6xc24/of34ulP/gL+Di8A\nYPrGRRDE2Pw4Yww2tx2GZh5Fcs6jf+hGISoSpl4xN066zqqSGGkOIogC6pY0j/kcKmfVmfaayE4b\nquc1knRzTEala7VcgYkJdhArjpzJpGyBaKXAuZ+7EL4ggkcYIHAggR9HVhOMBxMA1ywdEz1wS6k2\noEz8ZPS8wlXuRNOauTGHQzKBQZRFzL9tFd76+YsAANlunhtnjCHsC8JW4ow578zQdOhhzbTrjhsc\nhhb/i430h6LnopkQHKcmuL+tD4ZJpYUWUuG70DPmfYnMY5l081WwQH5KdpBEqYS+3TLCreKwUI0E\nr6/A4V6YfHpgED0I+PbL0P0CnDM02Jv1guvEteKI9ZGU15TBiGjAqBN5RVlC3ZJm2CtcCPX40X2i\nDdXzG8FGvWDc4Dj+wj7ULW6GZ1Zd9L6KBDCGytn1puVyhmbg9CuH4r6vBiI4u/0omi6fExPxqqEI\nDj66Y8zncWHvKYS9QUiKBEESh9Zm6DpOvPhOUq8FkTlSlm4m5ApMDsGOZLzOL9/bcoIIlgMiAJ2B\nKRyik6MqiXKvkQRPizj3gAvgHFxlYLINzlkaGj8QAJtYJ25B09fhhWByIi/nHGVTKnH7g5/Gma2H\nsP3Hz+Lv7vvHOInqqoZ3n9uLt3+7DdXzG7Hpe/fE5H0Hr6VHtGjdncCw96Gt6D1j/m92x4+fgyCL\naLx0FgxVhyAyvPOH13F62+Exnwc3OJ7/4m9xxRdvRc3C6JFbfec68foPn6bStTxgTOnms2CtOpU2\nG4IFUp9lIJh/ggVTgNIlERgqg2O6jrJlEQgp7HdyAzj/kBM8PLwZxyPRDTnvHhllK1OPmpPlMe/y\njF3bCvp7/GjdfRINK2bERJeMsWjECqB53Xz0d3jxwr8+jI3ffA8kuwzOObSwhu0/fBqemXUon1YN\nd105uG6SD+Icbe+cxbk3jqHlzWMIdPribzOAFlax9d8fh73CBafHDW9LN7Rwcr+fYFc/Nn/595Cd\nNgiiQKVieUTGN9ImWwQLWDMspnx1BIFTUjSfOwSH6OCovT2UdkQabhVgmETQXGXo26VkVLqFwLbv\nPYEVH9mI2dcvgWiT41IIkl3GvFtWYN9D2/DIXT+CZ0YtBEmEu74c6/7172Ho0Vz54Mf60XCDo+vY\nRbz7bPL/RkI9/rRnO6iB3J4FR8RjuXTzJYotJMGa4ZqvoWxVGH1v2KKbaIyBiRyNH/JPMAXAzLvK\nCACAoerYdf9m7Lp/Mz7w7JdN87Cyc+CjBQe6T7TBXVeOa//vPQPR8XCEzE2qFQzdwKlXDmZo9UQh\nMGHp5oNkMy3YTIl1LBgDam8Jw7M2gsBJCaKTwzVHA5vgb8zWoA/IIL5VuGylNZ9KCpnSKZVY8v51\nqJ7XADUUgeKMz92MLu+asWGhaaWBHtHAGIuWkTEGQRSw59cvo+9sZ8bWT+Q/Kb+Fcy3ZTAo2F3Id\nD9nDUeYZ/siv
B4HeHTb0H5IgujkqrggPlIAlR6RDANdNwjcOOGcl3+BQjJTXluGm77wXoiJBEKMf\nJzjn4AaHIArR0i9Vx86fvRBzP8Vtj6vbHWTvb7Yi5A1AlES07DqOYFd/xp8Hkd+MK918yclONtma\noYeAM//phuYVwLVokiBwXELVdSF41pn/nsIXBES6BdjqdCiVHL79smlugUlA/xE56ZbgYmTljUsh\n2iQIwnD+hjEGXdfRfeIiuo+34eBjb8TVurbsPI7ZNyyD7IgvYTu74+i4LbsAINokNK+dB3ddOXpO\ntqFl53HTqWZE4TOmdHPZ6QUUZ9pgEM3H0LNdQeisCFudgfK1YSiesd9kvTuUEcIFAAauAp0v2FG+\nKraKQQ8CLb92Idw60K6rA+4FKuQqw1S6XB+IgvnkPTmndlpNjHCH4Bwvf+vRhE0JF/edwfm3TqBx\n5UzIDgXc4NDCKo4+szsp4brrynHDj+6FZJMh2RVooQj8HV789Z9/SxthRYglG2nFXr5lNZEuAWd+\n6gJXGbjGEDjJ0btTQdPH/XBMTZwq8B+WRwh3GCYCoRYRzpnD9237iwOhFhHQ2ZBj+w/LKFsZARMB\nPjqTYAC9OxWEWkQ0fdyfsGQtl5xuyWwLXMAbgKPEvKc54h9bftv+z1/QtHoOpm9YCD2i4cTm/bi4\n70xSj7vmn2+CrdQ5lNKQnTaU1Fdg6b3rses+82E4AFA+rRpzb1oOh8eNljeP4eTLB2mYTQGQlnSt\nkiyQPdEOkmvhAkD7M3YYITY87ctg4BGg7XEHpn1uOOdnhKNdY5EuAfYp+sD4xPhNMCMcrb8dhGtA\n/wEZGJW75SqD74AMz1VhdG+1DZz+gIHrMUAFwudFdL1kQ/V1uY+wsn1qxNsvHsD6910OeUSNrhZS\ncfKlA9GGhhHYSh2wlTjgu9gbrcflwLkd7+LcjndTekzJJqNm/pQh4Q4iKhKmX7UwoXSnXbUAa/7p\nJgiyAEEUUb9sOubdein++oXfRFuOibwlKelaKVkgu6LNB8mOJnBcihmvOEi4TYARAQQl+lH/7M9c\nMDQGHhnsPjPAZIyQ5QA8evbY9C/6IDqiqQKTaqXoTSMMVZvCcM3RcPa/XXHr4BqDd49iuXSz0Rjh\na2Yxh1P2zbSh7ETyz+PUvjNwlTmx4rrFAOcQJBGntx3Czvv+NnQb2angin+5FQ3LZ8DQDRiajl33\nv4iTW7LXXisqEi7/7I2QRsyAkB0KShsrMPu6pTjy1FtZWwuROmNKt9DSBoPko2hHItg49IhJmoBh\naC7uhT86oAeHo2EeYdB0AbZ6DeEWCbHRLoMRBvreUuBZF83tKtUGIm2jdtQZh2tu1NhylQEIMJ97\nk9rgsaLiwLbDOP7AFrhryhDs9UMdlVZY/2+3oW5JM0RFwuCre9lnroO/vQ9t75xN+fG0sIr2gy2o\nWdQUE+3qEQ0nXzKfBlY5u950hKRkVzDtygUk3TwnIx1pJNmxKV8dQfcrtpjZCkziKFmsgonRTbDQ\neTE+GtYZIh0imMJHdapFUwfBs8OSrbsziHO/cEVPetAZmMTBFI7qG6LHoUsuDluNgfAFASMFzkSO\nkqWTt4IBiDZIeM93x33fUelG7YBwRyIpMhbeuTot6QLA6z96JrqR5lAg2WToYRW+i73Y9/A209ur\nwTCY2YYfotPJiPxmQtLNtlwHKTTJjqZyQxiRNgH9h2QwiYPrDPapOmr/fqA/fozqgWglgkmULEUl\nOohjqo7pn/ehZ7uCcJsIR7OO8tURSO7hj9/1dwdw9r9d4FpU2kzhkD0GKq/OfT43H3FUuGGo8VPI\nmMDgqilNcK/x8bf34fEP/wxT18yFu7YMPafa0br7pGnJWEl9BURFQrDHH1fepgYjOPrM7rTXQWSH\npKWbK8EOUuiiHQkTgYZ7glC7QwhfFCFXGrDVDgtTtEelGTwz6mgdiaN0ZQTBEx
LCbeKIjTIOJkYj\n6JHIHo6amxML1FZrYOaXffC+LUPtFmCfqsM9TxtKcRCxeM91xW14AdHpYslWKiTCUHWc3ho/4nEQ\ne4ULG775HlRMq4ah6WCiANUfBhOF6OA5WcThv+zE+V0nJrQOIvOMKV1KE2QW2cMhe8x3muvfG8CZ\nn7lhRAbGL0qAUqOj6uowjLURtD9jR//haKODrd5A3Z0BSCWpF9MLNqD8ssk95CZZtLCKtx9+FUvu\nWTfUCGFoOrSgigPjzLidKBu/9Z6h4ToYqK7QEMGeB19BoNOH9kMtaQ/FIbJLTo7rmWxyTQfZwzHj\n33zwH5KiHWW1OnyHZJz491JwA5A9Bho/6Idzug7B2lnexBgc+vOb8LX2YOGdq+HwuHFhzym888jr\nGW3vLW30oHxqddzkMlGRUbd4Gl75349l7LEJ68mqdEm2qSFIQMniaCR8/iEH/EeGmyPUThGtD7kw\n9dP9sNfnb7kB16MlcmVdPvinOqCV5uTvfEpE5jdBOXwu4c/TqcedCPZy58DIyNiOFSYwOCvdWVsH\nYQ1ZeQeQbCeG5mUxwh2Ea0D3KzY03J2fA6rDFwWc+4ULhsrQyDvAdKBzVRnaNlZmbQ2p1urmI90n\n2kwH6mhhFS07j+dgRcREyKh0SbbWoHYL0SqH0S3AnCHSnp/n63ADaHnABb0/2u0mDjQjV77Vh8BU\nO3yzXLldYAEx56blYAID53xoqLoWVhHuC1BNbgFiuXRJtNajVBumMxcgcNib8rPXPnRejDZ3jKp/\nE1UOzx6vJdINT41YfjhlvjH9qgVY8v51MbXBhmGg53QHtnztEarLLUAskS6JNrOILo6yVRH07VJG\nNFRwCDLguSo/PzpzNdphZ1ZPIYTzNwedb1zyvrWQ7bF/WARBQMW06qTPSyPyi7SlS6KdGJEuBt0n\nwFavJ3WwZM0tIcgeAz2v2qAHGRzTNdTcGBp3HGSusDfppvMfdJmhbwFt/iSLw2P+WjHGIDttCPcF\nsrwiYqIkLV2SrDXoAYbzv3UidE4EEzm4wVB5TQiVV43dessEwLMuknBYuZVofgbf2zI0L4Nzhg7n\nbC3lc9kEGai7I4iLjzmircgGgy4zhKsV9Cwpyci6E1HIm2mdR1rRsGJG3HFAkf4Qwl4SbiEypnRJ\ntNbT+jtHdEaCzobytF0v2mGrMeBekPuRfIFTIloecAFGdOJYz3YOe6OOpo/5Uz6frXSpClu9jr6d\nCk51V8I7xwnvPDe4OP6U9GyPdcxX9vz6ZdQsaoo5QkgLRbDz/s10umiBkv9Fk0WE5mUInpZM59x2\nb7PlRLrcAPoPS/DulsEZEDwRe+w7jzCEWkT0vKHAc0XqUbat1kDNLSFs89ZauexJQ8+pdvz187/B\n4nuuQNXcBvgu9OKdR17HxbdP53ppRJqQdLOI7meACMDErZov+2fkcB4dIdl/SB4h2vjwiasM3j1y\nWtIFsjNL12rGa5DIJr1nOrDtu3/J9TIIi8jPIs8iRak2zAeICRzuudmPckNnRfQflEeNiTSXf6o5\nXYIgzKG3UhZhElBzaxBM5hiKKEUO0cnh2ZD9jR7/u1L8KRQmMJmj7NLCnrHbNzOJEhGCyAKUXsgy\nZStVyJUGerbZoPYyuOZoqFgXO+c2Wwh2DiaZHFIJHv2XwaMRrmuOhrJLqSaUIKyApJsDnNN1OKfn\nvtyndImKzhdMTr+VgdqbgzAiDI7pOhx52vVGEIUISXcSI5VyNPxDAK1/cIKNSOU2vD8A15zcl68R\nRDFC0p3kuBdomPUNLwInJDAGOGZqEOhfBUFkDHp7ERBkwD0vfyJbaoywBofHjUs/sQlTLpsFbhg4\n9coh7P7lS1ADhdmdVyxQ9QJRVPiaE9c753sFgyCLEGRrDqgTbRJu/MmHMHXNHEg2GbLDhplXX4Lr\nvn+PJdcn0ociXYJIQLYaJNz15Vjz+ZtQs6
AJ4BwX9p7G9h8/i2B3+kcATVu/AIrLHnPEj6hIKKmv\nQN2S5gkfpEmkD0W6BJFDJLuMG3/0QdQsaIIgChAkEfXLpuGGH90bN+QmFTwzaocOzxwJEwWUN1dP\nZMnEBCHpEkQOmbZ+AUSbHHO0uyCJUNwONF46K+3r9p7tgBqMb2jhuoG+lq60r0tMHJIuQeSQ0ike\n04hUVESUNlakfd1TLx+EFlZh6MMD43VVQ6DThwt7T6V9XWLikHSJvGKyVS50n2gzrSYwVB09J9vT\nvq4WUvHXzz2IC3tPwdAN6JqOs9vfxfP/8hCNhMwxtJFGTCrybaD52dePYNm9V0KQJYgDlQt6RIO3\ntQcX9p2e0LX72/qw5et/jM4wItHmDRTpEhmlEMc6ZhNDM/Dc5x/EyS3vDJwGEcSx59/GC1962DpR\nknDzCop0CSLHhL1B7PjJc9jxk+dyvRQiC1CkSxQEp1tyU+YUmd+Uk8cliheSLkEQRBYh6RIEQWQR\nki4x6cj3GQxEcUPSJQiCyCIkXSJvmGyNEcTkhKRLEASRRUi6BEEQWYSkSxQdYw0yTweq1SWshKRL\nTEpSrWAg8RJWQdIlMgbNXSCIeEi6BJEkFO0SVkDSJQiCyCIkXYJIAYp2iYlC0iUIgsgiJF0iLyik\nbjSKdomJQNIlJi00+IbIBSRdgkgDinaJdCHpEgRBZBGSLkGkCUW7RDqQdImMQN1oBGEOSZcgJgBF\nu0SqkHQJgiCyCEmXKEqsHu9IEFZB0iUmNVbU6lKKgUgFki6RcwqpG40gJgpJlyAsgKJdIllIugRB\nEFmEpEtYDtXoEkRiSLoEYRGUYiCSgaRLEASRRUi6xKSHRjwS2YSkSxAWQikGYjxIukRBE54ayfUS\nCCIlSLoEQRBZhKRLEBZDKQZiLEi6BEEQWYSkS+QUmrtATDZIuoSl5FM3WirjHa0uG6MUA5EIki5B\nEEQWIekSBEFkEZIuYRn5lFrIByjFQJhB0iUIgsgiJF2CIIgsQtIlCILIIiRdghggE9PGKK9LjIak\nS1gCbaIRRHKQdAmCILIISZcoalLpSiOIbEDSJYgMQ3ldYiSMc57rNRAEQUwaKNIlCILIIiRdgiCI\nLELSJQiCyCIkXYIgiCxC0iUIgsgiJF2CIIgs8v8BRe9w6XmrfDQAAAAASUVORK5CYII=\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "BMxSJo5gtOmQ",
        "colab_type": "text"
      },
      "source": [
        "# Comparison with a standard fully connected network"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "NKQx7stYswzU",
        "colab_type": "code",
        "outputId": "53794514-0b80-4f67-8596-bfd87bb173c1",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 265
        }
      },
      "source": [
        "# Train the fully connected baseline on the same (X, Y) data used above.\n",
        "fc_model.compile(optimizer=\"adam\", loss=\"mean_squared_error\")\n",
        "fc_model.fit(X, Y, epochs=300, verbose=0)\n",
        "# Plotting code, feel free to ignore.\n",
        "# h is the mesh step size: the grid resolution used to draw the decision boundary.\n",
        "h = 1.0\n",
        "# Pad the plotting window by 5 units beyond the data range on each side.\n",
        "x_min, x_max = X[:, 0].min() - 5, X[:, 0].max() + 5\n",
        "y_min, y_max = X[:, 1].min() - 5, X[:, 1].max() + 5\n",
        "xx, yy = np.meshgrid(np.arange(x_min, x_max, h),\n",
        "                     np.arange(y_min, y_max, h))\n",
        "\n",
        "# Evaluate fc_model's prediction on every point of the mesh grid\n",
        "# (np.c_ flattens and stacks the grid into an (n_points, 2) input batch).\n",
        "Z = fc_model.predict(np.c_[xx.ravel(), yy.ravel()]) \n",
        "\n",
        "# Put the result into a color plot\n",
        "Z = Z.reshape(xx.shape)\n",
        "plt.contourf(xx, yy, Z)\n",
        "plt.axis('off')\n",
        "\n",
        "# Plot also the training points\n",
        "plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired)\n"
      ],
      "execution_count": 31,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "<matplotlib.collections.PathCollection at 0x7f5c4bbec390>"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 31
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAV0AAADnCAYAAAC9roUQAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjMsIGh0\ndHA6Ly9tYXRwbG90bGliLm9yZy+AADFEAAAgAElEQVR4nO3dZ3Sk13kf8P99yzRgCnrv23vlFpHL\nthRJkxItUbKKHduy5RwnclySnJMPzsmHnJyTHMd2jm05dhw7tiRLMiVS7G1ZRC5F7pLcxm3YjrLo\ndXp7y82HWcxiMO8AM5g+eH7fFsDMvAAWfzx43ufeyzjnIIQQUhhCsS+AEELWEgpdQggpIApdQggp\nIApdQggpIApdQggpIGm5dz784H+n0QYCU//tYl8CACC6uSPvr+HpM2f08b4ultHHRzqjGX38Yt3t\n06t+bK480nyl2JdQFv546ysp/2NQpUtWVIiwKwWZBm6mKHAJsEKlS8iC6OaOola8+Q7+1QRuJlVu\nOQcuhW1uUaVL0rZWKl5yFwVu7lGlSzJSjIqXqtzCo7DNH6p0ScbWesVLgUuyQZUuWZVCVbylWOUW\nQjECl8K2MKjSJau2FiveQlS5FLiVjSpdkpV8VrylVuUWqq1QSBS2hUeVLsnaWqx486WQVS4FbnFQ\npUtyYiF4c1X1rsUqt1CBS2FbXFTpkpxa61UvBS5ZCVW6JOcyCd5S2ddhOZnur5ApCty1hUKXFFUx\nKuNMWgv5bisUInApbEsLtRcIyQEKXJIuCl1CUki3yqXAJZmg0CVrSrqthXz2cSlw1zbq6RKShUyr\n3HwHLoVt6aNKl5Al8tVWoMAlAIUuWUPSaS1Q4JJ8o/YCIXmWz8ClsC0/VOkSckc+qlwKXLIUVbpk\nTcjVvrmlsHsYhW15o0qXEKRX5ZZCH5cCt/xR6BKSBxS4JBUKXVLxctFaKHYflwK3clDokjVvpdZC\nsfu4FLiVhUKXkGUUu49LgVt5aHqBrGm53GMhl4FLYVu5qNIlFS2bfm6x+rgUuJWNQpesWfk+EWI1\nKHArH4UuIQaKUeVS4K4NFLqkYuVqFdpyKHBJpih0yZq0XGsh3SqXApesBk0vkIqU7yo3F4FLYbs2\nUaVLKs5KgZttlUuBS7JBlS4hGcg2cClsCVW6pKLks8qlwCW5QKFLKkY++7gUuCRXKHRJRcj2/LPl\nqlwKXJJLFLqk7FGFS8oJhS5ZE3Ixl5spClxihEKXlLVsq9x8thUIMUIjY6RspRu4xdjYJp9VrnU0\njKb35mCdiiDqkjF5pBb+PlveXo/kFoUuWbPyUeXmu6VgGwmj50djYAoHAyAFI+h6dgIjTzbAs9We\n19cmuUHtBVKW8lnllmrgAkDzOzMQ7gTuAkHlaHl7FuA8769PskehS1ZNMkmoaXbBbDMV+1IMFfLm\nWaFumlkmja9bCmoQInpBroFkh9oLa0xNsws7j25DbUsN5sbmcO7ti3BPejJ+nl2PbMeuh7dB13UI\nooiBc4P44CcnoWv5/8EvpZtnhZ5QUO0SxDkl6e26yKCbqIYqBxS6a0hjdwMe/92jEEUBgijA1ehA\n17YOvPa3b2N6aCbt51m/vxc7H9oKyXT3v0/Pzi4oURUfPfdJPi49Y4W4eVaMkbCpe11ofX0GonK3\nlaDLDDP3OAGh9E7CIMnoV+MacvhL+yGbJAhi7NsuiAJks4xDv7w/o+fZ+fA2yGY54W2SScKGe/ri\nz50vpbDy7JHmK0WbwXVvd2DygVpoZgG6xKDLDLN7nZg6UluU6yGZo0p3DalrN/7BrO/I7AfWWm0x\nfDtjDLJZQiSYn8UG6ch3H7cUFjzM3uPC7D4npIAGzSqAS1Q7lRMK3TUkGlIMb3pFQ8k9QiYwbDq0\nHpsOrocgCbhxagAXj/dDUzRM
Dk6jfVMrBCHxhz3sj+Q1cIu93LcUAjdOYFDt9ONbjuhX5Bpy6RdX\noETUhLcpEQWXjvcnfezDv34E9zy5F3VttahpcmH3I9vx5O89CiYwfPrqWWiKFr9pxnUe6+f+rLj9\n3Hy1FYrZTiCVh35VVrDa1hrseHALnI1OTN6awsXj/bBWW7B+fx80VYMoibhxegDn3r6Y8Li6tlq0\nbWyFbL7730MySXA2ONC1tR2DF27jhT9/Dbs/vx0NnfXwzvpw7q0LmBzI37LZbPbJzQaFLck1Ct0K\n1b6pFQ//5v0QJQGCIKC2xYX19/Thxf/1Gk69dhb22mr45vyG7YDG7nowgwwzWWQ09zVh8MJteKa9\neO+HHxbgMwFMVhNammrh9QURCIaT3r9S4NL+CqSUUOgWwdYjm7Dr4e2wVJvhmfbi5IunMNI/ltPX\n+NxXDkBeNNIlSiKYwLD/id1453vHEQnOpXxsyBuCrifP26pRFf75QE6vcyX3PLkHW+7bBE3XIIoi\nBobG8dpbH0PVtLQen21bId/keQVVI2GoNhH+HiuNfa0BFLoFtuvotoSRK1ejE0d/4368+ffvgnOO\nxq4GBD1BDJwfhqakFyxLWarMsNmtSW8XBAEt65pWfPzw5dFYz9akJ9ws45zjxumBVV3Tamw8tB6b\nP7cBkixCgggA6O5sxkNHduPYz08BKN7G5FnjHC1vzKD2vA+cAWCAbhJw69daEa0rzRV+JDfoRloB\nCaKAHQ8Zz7ge/dYDePTbD2Hf47tw+OkD+MZ/eRo1za5VvY4SVVO+L53pAl3T8cp3j8E96YUaVaFE\nFPjn/Xj9/7yDsD/5z/t82fHAlqSvlSxL2LyhC+IK88DZBm6+q1xnfwA1570QVA5R4RCjHJJfQ9dP\nJ2gPhQpHlW4BmW1mCCn+fDRZZLA7jVSTKIDrHA/9+n147k9eNvx4Z4MDlmoL5sbmkiYSNEXDrc+G\n0LOzE5J891usRBRceO9yWtfqmfbiZ//zZVTXVEGQBHinfWk9LpcsVcY3zxgDZEmCuz151G0lRa9w\nAcheFW2vTkFc8ruR3XmfeVZBpJ6q3UpFoVtA4UDYsFcKIB648X8LDPbaalTXVCX0Ua3VFnz+2w+i\nptkFXdMhiAJOvX4OF99PHPv68NmPYbaa0LqhGbqqQ5AE9J+4jisnrmd0zYXu4S42fnMSnVvbk+aB\n/YEwwpEoAONfYKmq3ExWnKWNc8heFZpFhG5O4w9HnaP3+6MQoimqWQYIUdq4ppJR6BYQ1zk+e/si\ndj2yPeHPZs55UuimcvS3HkBtay3ERauQ9j62E/MTboxeHY+/TY2qOPYPP0eVy4ZqVxXcU56irhRb\nUN9ei/1P7kF9Rx2CniDOHruAW+cGDT/201fPomVDCyQJEEUBuq5D03S8/f7plL3cbAM3E45+P1rf\nmIEY1QHO4V1fhdEnG5cN36qhEMSQluLXBcAFhlBz/haBkOKj0C2wz969hGhEwa6j22GttsA95cH0\n8Az6dvckbCDDdY6QP4wqlw0hfxiaosFeW4261pqEwAUA2Sxj+/1bEkJ3QcAdRMAdzPvnlY66tho8\n8Z1HIcmxSQqz1YT7vnYQVrsFlz5Iri49U158/5k3sX/3JrS21GPe7cMnp/sxNeMGDEI3F4GbbpVr\nHQ2j46UpCOrditVxPQDh+UkMfb0l5eNMXhVIUeRyBox8sTHtCQYxoEIKaIjWyOAy3Z4pFxS6RdD/\n4TX0f3gt/m9RFuFqdKKmpQayWYKmxBYuWKrNePTbD4EJDL949mN4prwpt0602o33Qyglex/bBVEW\nwBaFimyWseexnbj84VVwPTGNPH1mwBfEO8fPJLw9k4UQ+QhcAGg4MQ+mJl6voAHVQyFIXhWqw/hH\nK9hqATMIXV0AJu+vhW991YqvLUR1tL80BfuNILgIgMceO3tgdTdeSWHRr8cSoCkaXv7um3jne+
/j\n7LHzUBUVHByySYbJaoJslnHvVw/GPtigDaEqKoYvjxTuglc5SlrfUZfUnwUAQWCwOZJH3DJhVOXm\n86aZaV41/DJwkUH2LTM90mCCd50Nmnz30boIKE4Js/udab1228uxwBW02NSDqHA0vzuLhuNzNPlQ\nBqjSLRUcGL06DjWqYdv9W5LCSRQFbDywDh+/fBoHn9oH6c5iB1XREA5Ekm6k5UNdWw0OP30AjZ31\nUFUN1z6+gU9ePg1NTe/Gj2/WZxiujDGEA5H0niODtkImMh0RC3RaYJ6JQljyqTONg0sM7S9Oomo4\nBF1mUOwywo0mzO+0I9Joxu0vNaHuUw9qz8ZGxjybqjD1uZq0WgRCSIPjeixwE96uA02/mIfjRhC3\nfq0F3CRm9PmQwqHQLTGWKrNhtSKIAto3tWL9/l4IggBN1eB3B3DzzCAu/+JK3m+SVddU4cnvPArZ\nErsBKJskbDywDtW1VXjrH95L6znOHDuPo996IGGlnBJRcfXk9aSFINnuKJZpW8EyEUHVUAiaTYR3\nY9WKpzBMH6qB66IfLKLH2wWazDC3047eH4xCiNw9x8wyq6J6MIS6M16MPVqP+V0OzB5wpWwHMFWH\n85If9lshRJ0i5nc7Ea2Jfd2lkA4uADBYN8M4YB2PYMufD2LygTrMHHAa/mVEiotCt8RMDk5DMNgf\nVdd02BxWiFKsghFEAVWuKoxcGSvIVMLWI5uSrksySWhb3xLfx2Elo1fH8cEzJ3DwqX0w20zQdY7+\nj67i01fPpnUN6Va5GQVuUz86np+E43oQ0Dm4yND65gwGvtGCUFvqPrnqkHDjt9vR9P4cqgdDUG0i\nZg65UHUzACHKk1oPDABTOVrfnIFnUxV0i3ElKkR09P3TCGSPClHh0AWg/pQXQ083w99nQ9QlgYsM\nUIzbCAwA04DG43PQLAzzu9JrWZDCodAtMWF/GJ+9cwk7Hry7GkuJqpBkEQJbEnqyiP1P7sZr//ut\nvF9XXVttPPAX0zQdzkZHWqELALfODuLWuUGYrSZEw0rSzTMgv/vmLlhoJ7gu+mG/HoSwEGIaB8DR\n9ewErvx+17KVouKSMfJU4rLqpvfmDG+ULeAMqBoMwbep2vD99Z+4YXKr8akIQQegc3S8NIn+P+gG\nBIbxo3Voe2MmYXJiKVHhaPzATaFbguhGWgk6e+w83vnecQxfHsHkwBT6P7wGJWqw0ThjaO5tzPsR\nOQAwc3sWmpr8N60oCZkfbMljy5EzCdxcVrmL+7c157wJ540tEKI6rOPp9ZkXi6aYWog/r8LR8coU\nqm8aj/E5+wOGYcpUDvNM7PN173Rg8GvN0GSWavoMAJa9oUeKh0K3RI1cGcOxv/85Xv6rN3H22HlI\nkvEPM9c5Oja35f16Ln1wBZqiJQSlGlUxfGkkJ6vWPH3mrAN3JUabkbPl7gGuYhBg+rALupy6OmYA\nxAhH13MTkOcNTvVN8VimI+FGW6Dbhpu/2QbdlDp4dYn6uaWIQreEWe0WHHhqH576o8ehKsZVi65z\nmCyy4ftyKeAO4uW/ehNjNyagqRoiwQguHu/PyZ66uWonpKpylzv5YX67PWF8awEXGUItmV+Xf10V\nxo7WQTMz6EIstw1DUeOoPetNevPsPmdS8HIGRGrk+M20BZFGMwZ/pTm2S5mBMO3fUJKop1uiLFVm\nfPk/PgmT1RTvpRotF2YMGLsxUZBrmp9w4/W/fTunz7maEyHSaitwDkHneLjtWtLHLja/ww5nvx+2\nkTAEJTbuxRkw/OWmVe9tO7/HCfdOB2S3AvtVP5qPz4Mt6cwIOmDyJP8idW+rhu12CDUX/LEpBc7B\nGQPjHN0/GsPsfmfCAopgpxVqtQjZl7i0WBOB6XtrVnX9JL8odEuEKAlwNbsQCUTgnw9g2/2bIVtM\nCTevGGPx4OU6h6qouHj8SsGX+UomCc29jVCjKiYHpsFXMZ
Cfr5tlTOc4ePY69lwagElREXVJGH+0\nHr51KVZ6iQyD32hB1WAoPoXg2VoNtTq7Hw0uMkTrTPButqP5AzeW1ruazODvNVgQwhjGfqkR04dq\nUD0YRNPxeYghHZYZBZYZBbaRMKYPuzB9b23844e+1oKeH46BaRzQOQQNgMRQe84LtVpEqHXl1YqS\nX4Xrog+SX0Ogywpfn402VM8TCt0SsOFAHw4+tR8AhyAImBmZg2yWIMkG0wKKhtnROfjdQVw9eR1j\n1wtT5S5Yt7cXn/vqAeiaDsYANarhzf/7LmZHU59EsVS6gbuaKvfIJ/3YcXUY8p2bfma3is7nJjHw\nzRYEO1KsemMMgR4bAj22tK4rE0qNDPe2ajgv+eM37HSRQXFIcG81nmBYeJxwlUMM6wkLIUSFo/FD\nN+b2OqFZY/8/wk1mXPmDbtR94kbT+3MAj/WN7deDqB4MYfCrzct+blVDIXQ9Mw7GAUGNtT3CTWYM\nfLMVnPrCOUc93SJr7m3EoS/dA5NFhsligmSS0NBZhypXVcptIN/70Yf4+Q8+KHjgupqcuPersWOA\nzFYTTBYTbA4rHv/do2lNUCx3s2y1FgeupKjYcWUoHrgLBJWj8fh8Tl83E6O/1ICxxxoQbDUjXC9j\n+nMu3PxWO7jBPPZijhvGkwxcBKxjkSVvY3BeCUDQ7q7SZohNS7Qem0n9Ipyj4/lJiAqPv5YY5bCO\nR1B7NsOpFJIWqnSLbNv9myEtmX8VJTEWYkt+3jRVw8zILHyz6c3Erkb3jk7sfmQ7qpw2TA3P4tRr\nZzE3FgusjQfWGYarIDK0bWzB7cujKZ8307BdzcRCdSgCnmKu1jxbxG0tGYN7hx3uHfaMHqbYJXCG\n5LlfHVBtyX8FWSaMR9zMMwqqr/mhW0UE2ywJbQPLVBSCkvzLXVA5XOf9mN1Pm+jkGoVukVU5bQm7\nbi1gjIGDg3MOVdEgSgKiYQU3zw5BEIWUu41lY9Oh9Tjwxb3xRRntm1rQ0teIl//yDcyNu2GpMqes\naE2W1HfKc1HdpnPzzG+zQDKYAeOI/QleKpjG4ez3w34tANUmYm6PA5HG5Oub3e+E80ogYTczncU2\nxwk3J3+9NasIIWB8rl7nC5MAY9BNAga+0RJ/PS6w1FtN0vYNeUHthSK7fWUs5TgYY+zO6RCxULZW\nW3DPk7vx5O89mvMFEbGTgvckbK4uCAJEWcTex3cBAIYujUCJJM+WCqKA8ZvGrY7VBG4mWzcupkoi\nTm5blzQCxiWGySO1q3rOXGNq7OSI1tem4eoPoO6MF+v+cRSuC8njY6FWC0Z/qR6aiUEzMegSQ6TB\nhMFvtBqulJs+YDBuhjuzwQri57D1/GgcuDNvHamXodnEpNzVZIb53Y4cfdZkMQrdIrv0QWyzGi1F\n5SpKIkRRjO86Jptl1DQ7sW5vT06vw+awGu75IAgCGjrrAQBDF29j5vZsPHi5zqFEFJx/9xKCnlDS\nYwsZuAtO7l6PiYfqELWL0EWGYKsZA99sQXgVM7f54DrvhXk6Gr+ptnDzqu31GTCDY3rc2x3o/6Nu\nDHyzFde/3Y4bv9MBJcWqt9mDLszudUCXYiFtNL8b6/PqqBq68/1iDENfbYZmEWLBLsYWaPj6bJjf\nnlk7hKSH2gtFFglE8PyfvoIDX9yLdXt7k1oNRrO5sllG945OXPvkZs6uIxyIpNxmwD8f6yFzneO1\nv30bfXt60LurC0pExZUT1zFuMCecy8DNaPUZY5jb58TcvtLcc8DZHzBcdswFwDYaNpwy4JKw7OY7\ncYxh4uF6TN1bC9mjoPntWTgGkn8ZgnOIkbsBH24y48rvd8FxLQApoCHQYS2ZX1L58NbEpry/xh9v\nTf0+Ct0SEA5E8P6PP0JVTRUaO+vjx/Zomhbb5GZJFum6jmiOdxbTFA3XPrmJ9fv7ErdejKo4e+xC\n/N9c57hx6hZunLqV8r
mKUeGWC90sxP/kT8Cx4naSmbxGpNEMOagZvhZTgcCS8TkuC/BsTb+yLURw\nVSoK3RLy5t+9g51Ht2HDPesgCAIGPhtC944O2OyJN9s0VUf/R8uvtFqNE89/Cq7p2HhwPYDY3gof\nv3Qat/tTTyUslevAzcUG5aVkbo8D1beCCdUuB6CZBYRac1ddsqgO83TU8HQLRZLwhm8r4MvZy5EM\nUOiWEE3VceaN8zjzxvn42/o/uobHf/coZLMEzmMr1069dhaTg7k/iobrHCdeOIVPXjkDk9WMsD+c\n0WqzQla4+TyKJ5/8vTbMHHSh4SN3fDpAlwUMfr0lJxuOL1Sg5oiCzRiE0WiCTivNiopCt8S5Jz34\n8X99Ds09jTBZTJgYmEI0lN/qT1N1hHwGvcAUVjsStlLgVlqVu2DqSC3m9jhQNRyGZhHg77amveQ2\n3T/rI2YZc85qNMwnlrMaY7jZ2ZTiUaQQKHTLAQcmbk0V+yqSZDN/u1Z6uKmo1RI8W1IvA15stf3T\nY/ftwFdePwlR45B0HYooIGKW8eG+jat6PpIbFLokY9kudkgncCu1yk1Hrm5STTa48E9feQDbrwyj\n1u3HWFMNLq9vR9SU/61A82lwpKHYl5AVCl2StlysLFvrFa6RfE4CBGwWnNyzIW/Pv5xyD8d8odCt\nAM5GB5p6GhHyhjBydczwGJzVytUGNZmE7UpV7ko30d6a2JTxkeqFVo4jVxSiuUGhW84YcOTrh9G7\nswuc39mnIarilb8+Bu905vNA+djjNpdhC5T21EI5BSkFaPFQ6Jax9ft60bOjM76YAohtMP7Itx7A\nc3/y8rKPLcSJu+kGbrr920wCt5wCMJcoTEsfhW4Z23x4Q8IGNUBsrwR7bTUc9XZ4Z2LVbiECdrFc\nV7dAaVe4+UZBWlkodMuYaHCyBADo4Aj1WOFxFm4CINMbZJlMJ1Ra4K61EDUPV84BmbmYqqHQLVOe\nPjMuDY7gUIMDspz4bVRUDbPzyVsF5tpqJhEy/U9bToFbTmFaSUFYSLn4ulHolgmjFsHZC9exaX0H\nXM5qmEwyVFUD5xyvvXVyVYdFriSbca/VVAilErilHKYUnuWHQrcEpduDVVUNP3z2HazvbUNnexN8\n/iAuXRmAz5/+Et7l5GKmdrV/jhUzcIsZshSilY9Ct8iyvcml6zqu3riNqzduZ30tuV64UA6BW6iA\npTAlCyh0C6DQ0wOZyMcKsVIN23wGbDmFqn0o962n1VirqxMpdHOklIN1sXz9R8/mrm6+wjbXIVvs\nYC2VsMyVfH8+pRrqFRm65RKA+ZbP/3TZjs7kI2hzGbKFDNhKC9NSkc3XNZ8/O8uGLoVX+SjlgF2Q\nz/ZBrgI3X2GbSQDUuuxoqHdh3uPH1PR8Xq6HLC+d79dqf+YqstKtVIX6cymX2yoW4qZYLgI312G7\nmipLEAR84bFD6Opogq7HDiSdmfXgZy8fRySq5PT6SPZWW0lT6JaYQveh8rVvbanM2KYj28DNVXvg\n4L7N6Gpvgizd/bFsrHfh6P178epbJ3PyGqT4KHQLrFjN/XxvCl5OIbtgtWGbrx7sjq19SasLJUnE\n+r42CO8I0HU9xSNJOaHQzZFSulOaz4AttXBdTWsh07At1I0uSTLeS4MxBkFgWG3mOm9GMvp4upeT\nXxS6KZRSiKZC1WtmSjVsFwwOjWN9XzsEQUh4+/SMG1VXgwW7jkxD2ggFd2oVH7rlEJ7poIDNTikH\n7kLInfnxp+j8942QTRIkkwRV0aBrGj76wYmCXUuupBvcazGcyyp0KyVAl1OIAxkLGrCco21iDlWh\nCMaaauCvsubsqdNpLZRC2KYbQAF3EM/+jxex4cA6NHU3YH7cjf4T1xD05GYvjVKUztem0oK5qKG7\nFkI0lUoK11TnkcluBb3/PAYxpAEAmA7M7bZj/JF6gC3/vc/FyQ+ZBG4uwjYXf5ZHglFc
+PllXMj6\nmSpHpQVzzkN3LQfpUoU8RrwQAZvpYY9dP5mA5FUhLMqzmnM+BNut8GypXvG1lgvelarcfAduLgKW\n5M5y349SC+RlQ5cCdGWFDNbFCtkiWM3JuqY5BeZ5JSFwAUBUOOo+9awYuoWQbthSwJa3lb5/hQ7l\nsurpFkOxQnWxYt3kyuYYcyGqgwup35dP6VS5tN8BWbA4lAsRwGs2dEshTFMp9iRBNmG7INxoAhcY\ngMRw0yUGz+biV7npoip3bVn6/c5HCFdU6JZykKZS7IBdLBdhGycwjHyhEZ3PT4JpHIwDmsygOCTM\n7nfm7nUIyaN8hHDJh245BmkqhQxYUdMgqToiJslwUiCnAZuCb0MVrn+7HXWnvZC9Cnx9VXBvqwYX\nGezXAjB5VIRazAi2mZOuMRfTC4TkWi5CuCihW0lBulSxK1dJ1fDgiUvYfHMUjHOoDhFjjzfA32sr\nyvVE60wY/3x9/N+yR0Hv90chhnUwnYMzhlCrGYNfbwGXUjSBl8h2V7FsbqC1rm/G9ge3oMppw8iV\ncVz4+SWE/OGsroeUr9WEcM5Ct5KD1Eixw9XII81X0PnsBOw3AxBio7EQ3Sq6fjqBm7/RhnCzGbJb\nQe0ZL0weFf4eK9xbq8Hl9MJuge12CPUfeyD7VPh6bZjd74RmM943YKmOF6cg+bRFUw0cttEIGj5y\nY+pIbUbXYSSfm49vOrQeB764D7I59mPjbHBg/b4e/OxPX0HIR8FL0rsHsGzorrUgXawUQxVYvi0g\neVXYbwTjgbuAaRwNJ+Yxt8uB7p9OABqHoAP26wHUn3Tj5m+2QbekF5quc160HZsBUzgYAMtkBLXn\nvLj+Ox0rBq8Q1mAdDSeNkQkqR81nvpyEbq4s/eERJQEHvrg3Hrixt4kwWU3Y8eBWfPzS6UJfIilT\nJd/TzadyDNblmLwquARgaehywDyroOPFKQjK3cQTFQ7mVlH/sQdT968ceEzlaH1rJuE5BA1ASEP9\nSTcmH6pb/vE6gBSj30y7+5z57OeudlTM1eQE58mPFSUR7ZtaKXRJ2io2dEs1UBfk40ZWpE4GU5Pf\nrgux95nmkk8fEDQOZ78/rdA1Txv/5SNogONGYMXQ1WwiInUmWKaiCdmriyiJxRLLCfkjEETjNkzA\nk7gDWMu6Zmx/YDNsDhtu94/g4vEriARo9IzElF3olnqYLpXzcOUcVcNhyB4FoRYLIg13e5iaVcTs\nXgdqz3oh3qlGOQAuMczuc8JxzXh7QF1Ob+WhZhNi1aoBtSq9/0ojX2xE7w/GAI1DVDk0mUG1S5i8\nryatxxeCUV8u6AlicmAazb2NEBfte6spGoYv3o7/e9PhDTjwhbttCFeTExvuWYfn//QVhAMRuBod\n6N3dDVESMXB+GDO3Z5e9ltraEZUAABTjSURBVObeRux4aCscdXaM35zEZ+9chH8+sOLnIIgCdj68\nDRsPrIMgChg4P4Qzb3yGSHDttgxLRcmGbrmF62L5GseS/Cp6/nkMsjdWzjIO+HqtGP5yMyDGgnPi\naB2iNTLqT7ohhXUEOi2YeKgOkXoTwvUyLJPRhJ6qJjPM7UtvblZxygg1m2EdC0NYFL6azDBzIL3n\nCDeZcfU7nXBd8ME0ryDYboF3YzW4FLv+UmwtLHj3+8fx8G/cj8buegiCACYwMJHh4Jf2Y9uDW/Hq\nd9/EgS/sSej7SrII2MzYdv9mhHxh7H9iNwRRAGMMW+7dhKsfX8fJF04Zvl7v7i7c97XDkCQRTGCw\n19nRu7sbL/z5q/DN+pe91ke//RCaehogmWLXsungenRsasNzf/ISNJVOoCimooZuOQerkXzPvra/\nNAXznAK2KDvst0Ko/9iNmcN3KkUWC1GjIB1+ujlp1y/PlmrM77CnfQ1DX2lC908mYJmKgosA04Cp\n+2rgW1eV9nNoVhGz97jS/vh05WJyQRAY9j6+C5sP
b4BsljA5MIUTL5zC/LgbkWAUr/3NWzjw1F5s\nvXcTGGNgd+aL7TVV+NJ/eAJcTw52SRbRta0D9rpqSIuO45HNEjYeWI+bZwYxPTyT8BjGGA5/6R7I\npsU37gQwQcLex3bivR9+mPJzqO+oQ2N3fTxwY48VYbVb0LOrGzdO3Vr114dkryChW2nhulTewlbn\ncFwLwH4jCM3MUDUUSghcIHbnv+6M927oLkNxybj6nU5UDYUg+zQE2y2I1sgZXZJWJeHmt9phmo1C\nCmgIN5mhmzMbOStljz98AH3drfGwa+lrxhf+3aN47k9eRsAda8+s29uT1N9ljMFsM0PXjKtIzjm4\nwbskWUTPjs6k0K1y2SCakn88BUFAy7rmZT+H+vZawwUxsllGY1c9hW6R5SR0Kz1UjeS7qmUqR88P\nR2GZjEJUeGzzmBR/FTI1gz+bGUOgO/uFEtE6E6LL3zfLWDqthVwct56KvdqKdb1tCWeVMYFBlERs\nu38zPn4xNqEgm4x/UTHGEAlEYK4yJ/Z9NR1qVDPcQphzDs0gqCPBKIQUew6HfMtvau6fD4AbHKim\nRFV4Z3zLPpbkX1qhuxZDdalCLJtdzHXeGw9cAClvYOkC4N2YeYgKYQ3O/gCkoIZAl9VwKe5aU+ty\nQNW0pAMiRUlE67oWWO0WhHxhzIzOoam7Id5aWMB1jquf3ETLuibUt9UCLPZYxoCGjjrDcTld03Hr\n7GDS25WIgsGLw+ja1pHQklAiKs6/e2nZz2P06jjCgQgkWYpX5Fzn4JqO61TlFt2yobuWw7bQIbuU\n65I/HriLcQBcAAQ9dgNLs4qYPJJZyWkbCaP7x2MAj7UndInB32PF8NPNgLB2g3fe44NoMBbGOYez\n0YGv/ecv4da5IXzwzEf4yn96KunjNE1D/0fXcPr1c2joasAT3zma0PddeC5N0cAR+x136vVzmJ9w\nG17PB8+chCiKaN/cCl3VwUQB594+j1vnhpb9PDjneOW7b+L+b34OzT2NAAD3lBfv/+hDGl0rASU7\nvVBIxQ5YI3qKpbm6zODeVg0hyhHssMC93Q7dlEFPlXN0PjsBMZq4SKJ6IATXBR/cOx3ZXnrZ8vqC\nGByeQE97c8JNKMZYbAoBQM/OLvjnA3j1r4/hkd9+ELJJAuccalTD+//yIerba6G3uFBdWw2u8eSf\nMA6M35rE8MURDF8eifeJjahRFW//0/uw2i2wOazwTPugRg0GsQ0EPSG8/jdvQ7bIEARGo2IlZM2F\nbikGrJG5PQ5UDYcSql0OQLMIGHu8YdWtAMtEFIKS3KsQFY7ac8UL3VLZVezVYydxdMtWbDy4HpIs\nJrUQZJOErfduxJk3PsMP/vMzqGuthSAKsNdX46Ffuy9+Iy3VQgrOOWZuz6L/o2tpX1PIF1713g5K\nOHlBDCmuig3dcgnXVHzrbZjb7UDdGS84A8AALjIMfa0lq94rM1jKSu7SNB0nXziFky+cwm//2a8Z\nfoxsvnMjjQOzo3Ow11bjiX/7SEJ1DMBw2bCu6bh5ZjDXl03KSFmHbrkH67IYw8Qj9Zjd70TVUAia\nVYS/zwYuZtdzDTWbDacgdAGY35X+vG6lqnXZceRf3YPGrnooEQUmS/Ls79LxrnV7e8AMeuGaooEJ\nDLqqAwxggoBPXz0L96Qnb9dPSl9ZhG5Fh+sKFJcMt+vuiJIQ1lB3ygvH9QDUKhEz+50I9KQ/vWCe\nVQyrXcYBX7c1J9dcrupqHfjm0w9DkkQIwp27/pyDcw5BEKBrOjRVw0c/+zThcSarKf7xS5167RzC\n/jAEScDt/lEEPcuPe5HKV5Khu5ZDdjlCRMe6fxiB7NcgqBwcQPVgCBP312L2gPEKL/NUBOZ5FaFG\nE5QaGc4rfsPxMy4yOK4H014SnEul0s+998D2hMAFYjfRNFXD7MgcZkbmcOG9y0mzrsOXR7Hp0Pq7\nbYf4g4HBC8Mr
LtkFAPHOIgl7nR2zo3O43T9quLqNlL+SCN21GLKSX0XtaQ9sIxFEGk2Y2e+E4lp+\ndVjtaU88cIHY2CdTOJrfm8P8LkfCyjAhrKH7mQlYJyLgQmzO17vehkhtitfQOcxzCsD5mp3XbW2u\nS1mxHvuH91IuShi/MYHb/WPo2NwK2SyD6xyqouLyh1fTClx7bTW+8AePQZIlyGYJSkRFwB3AS3/5\nBt0Iq0BFCd21GLKLmeYV9P2/EQiKDkEDqoZDqDnrxcCvtiLUZkn5OMf1QDxwF+MiYJ2IINB1tz3Q\n9vo0rOPhhA3NHdeDmN1pBxdZ0io2pgM1Zz2wjocx8M3WjE+TqAT+YBg2m/HXPxpefuTq3R8cR9fW\nDvTt6Yam6rj2yU2M35hI63WPfP0QLDZzfOLBZJEh1Nmx77FdOPHCpykfV9viwqbDG2BzWDF8aQQ3\nzwzQZjZloCChu9ZDdqnmt2YgRvT4PgqCDkDnaHttGjd+pyP+cUJUh/OyHya3glCzGWqVGBuqX/J8\nQpQDi5aSMo3DcTWQdIKEoHK4rgYwfciFhhPuePDeGY6AqMbCu+GjeUzdn+M1vmXg49OX8ehD98C0\neAVYVMWNU7egKYlfTHOVGRabGd5ZX6wNwIGhi7cxtGibx3RIJglNPY1JI2aSLKJvb3fK0O3d040j\nv3IIgihAEAW0bWjB1vs24aW/fCPpWklpyXnoUsCurHoweeMaALBMR8EUHVwWYJqNou97o2Aqh6hw\naCYG1SqAS8lVKjjQ8eI0rv0bC3SLGD/y3IigcEwdqYWv14re748ZHJ0D1Jz3r8nQvXZjBPYqGz63\nfys4j83a3jo3iBPP3w0+2SzjgV/9HNo2toLrOvQ7I2b5WF6barpPlEXc99WDCSNqslmGo8GBjQfX\n4fIHV3N+LSR3VhW6FKzZ0U0CRINqhAuIj4R1vDQFMaTHq1oxysE0DaEGE2wTiScvMMSq4prPfJg9\n4IJuEmInNCw56YEjtv8uAERrTYCApKN9gLU9y3v6s2sYeOkSqmuqEfKFEF3SU33o1+9Dy7rmOyvU\nYqvUDj99AL55PyZuTmX8empUxcTAFJqXVLuaouHm6QHDx9S31xneZJNNEnp3dVPolrhlQ5fCNT/m\n9jrQ8JE7oT+ri4B7SzUgsNgBjhOR5DaCBpjnFOgyS9qXQVQ5bKNhLJxDMPJEA3p+OAZ25xBKXWLQ\nZYaJO0fqaDYRkXoTLJPJR+fMby3c0TmlMrmwmKbq8Ex7k95uc1rRsq4pviR4gSSL2PHg1lWFLgAc\n//FH+MLvPwbZLEEySVCjGnxzPpx+4zPDj1ciiuFcMABEQ7Tct9SVxPTCWjN1uAbm6Sgc14Oxm1oa\nR7DNjPFH72xbyJhh7xZAbEMazWClk4SEo3tCbRZc/9cdqDvlgWU6imC7BbN7nNCq7gbG7aea0Pv9\nUTDtbgsj6pQxfW/pnMpbSqx2a2yhw9LJMIGhuib9TdyX8s8H8Mx/ex7d2zthr6vG7NgcRq+MG65o\nc9TbIcoiQr4wRDlxvE2JKLj8IVW5pY5CtxhEhttfbobsVmCZiiJaKyNSfzcwdbOAUJsF1pHE48p1\nkWFuZzXsg2GYp6PxI3NiO48xzO1O3DdBccmYOFqf8jIiDSZc/b2uuzfrWi3wrret6Z3GluOZ8oIZ\n7KmgqRrGr6c3qZCKrum4dW4w5futdgse+a0HUdvigq7pYCJDNKxAuPO9EkQRF97vx0j/WFbXQfKP\nQreIFJeccjb39hcb0fe9UfCoHt9+MVInY+reWszt19H81iwc1wNgAMKNJow80Qi1OvNvp24WML97\n7e4slgk1quL06+ew97Gd8YUQuqZDiaj47OfL73Gbrc//9oOoa6tdMuWg4tNXzyDoCWFyYGrVm+KQ\nwqLQLVGxo3W6YL8egMmtINxgguOqH1v+YghM54i6JAz+SjOCHVbwTLZ2JFm5+H
4/fLN+7HhwC6wO\nK0avjuPc2xfyurzX2eBATbMreaxMEtG6rhlv/+P7eXttknsUuiWMSwzezbGbWh3PTsBxMwDhznaq\n5jkV3c9N4uZvtCHcZC7iVa5Aj+3VK4Y0BDqtUB3F+y/n62JZnwgMrG4eNxuWauOz15jAUOXM/ugl\nUlgUumVA8qlw3AgmLXaAytFwwo3bv9xUlOtaiXk6ip4fjsX372UaMHOPE5MPrb0Z4GzMjs4b7s+r\nKiqGL48W4YpINujv0jJg8qjgBr8eBQ6YZ0p0RIhzdP/LGKSABjHKIUY5BI2j7pQH9huBYl9dWdl8\neD0YYwnTDKqiIuyP4PIvaKyz3FDoloFIrQxmsIiBC4gdKFmCrOMRiGE9aexNVDhqzyTPwJYST1/p\nfE379nRjz6O7IC46xULXdcyNufH8n71Cx/CUIQrdMqDZRMztskOXFx1wiNiCh+lDNcW7sGUIBodq\nxt8XoU1Z0rXr6HbI5sQ/cwRBQG2LC0okvfPSSGmhnm6RyPMK5ICGcIMpYUvGVMY/X4+oS0b9Jx6I\nYQ2BDgsmHq5fcTvIYgm2mmO/GZbQZAbPlsKteCt3NkeKjeUZg8kiI0yn+5YdCt0CE0Maun46Aet4\nBFyM3VyaPFKDmZUqVsYwe8CVcrPynF5jUIPrkg+SPzZx4O+1ZrzHLpcFjD7RgPZXpuMb8GgyQ6TB\nhPmddCxQuqaGZtC+sTVp2W80FEU4SIFbjih0C6zjZxOwjt3Z5/bOX4eNH8wjUmeCb8Pql5Lmiu12\nCN0/Hgfjsa0gtU89CLWYMfjN1ozPZ/NstSPcaEbtOS8kvwbvBhu8m6qzPudtLfn01bNo7m1MWPKr\nRNXYlo9rd1+iskahW0CST0XV7UjS6JeocNSfdBcndDmH41oQrgve2PE/Q+GEzXREhcM2FkHtGQ9m\n92deZUcaTBh/JPVS5FyKdEZhHk4+SLKczY3N46W/eAN7Ht2Bhs46eGf8OPf2BYxlueyYFA+FbgGJ\nIQ1chOF2ilKgCBtPc472F6fguBaIB61R8SSoHK7z/lWFLsne/IQb73zveLEvg+QITS8UULTOZLh1\nmC4A/r7CryyyjUbgXBS4QIqdzYCK+Z/i60qvtVFKY2OkslTIj1J54CLD2CP10GUWryh1EdAsIqYP\nF76KrL4VBFtmtGuBJjPM7aKbX4TkArUXCsy904FojYz6j90weVX4emyYOeCEVlX4b4VmFuITFItx\nxA67BGJbRvp7rJjfSTuREZILFLpFEOy0YrgzxfxlAXm2VKP5vTks7eTqEjD+SD3EKEeg04JQa+oT\nigkhmaHQXcNUu4ThLzWh48XJ2BsYAA4MP90Mfy/tXkVIPlDornG+DVXo/8NuVA2FAMYQ6LKCSzRH\nS0i+UOgScFmAf13xF2aUGk+fGc6b5bvqy+aw4uAv70PnlnZwnePm2UF8/PJpKEtOOCaFRaFLSIkQ\nJQGcw3DD8oyfSxbx1B8+DqvdGt+Ld93+XtR31OGFP3816+cnq0ehS9acXJ0gkSv2umoc+dohNPU0\nAgBGr43jg2dOIOhd/RFAvbu6IVtMCZufS5IIR70dLeuaMX6DVrQVC83pElJEkknCF//gcTT1NkIQ\nBQiigLYNLfjC7z+WtMlNJuraamCyJO9AJwgMNc3ObC6ZZIlCl5Ai6t3dDWnRZjYAIIgCzDYTOja3\nrfp55yc8UCLJvVtd5/BMlfYm8pWOQpeQInLW2+PHuS8mSiKc9atfBXjzzADUqJrQH9ZUDUFPEKPX\nx1f9vCR7FLqELCPfezDMjs4jajBNoKk6ZsfmV/28alTFi3/xBkavj0PXdWiqhsELt/Hyd9+kLSGL\njG6kEVJEgxeGse+XdkGUBIhSbO21pmjwzngxluXNLv+cH2/+3bvxRS+kNFClSypKpLO8DmrUNR0v\n/sXruHHqFiKhKMKBCK6cvI5X//pY7oKSAr
ekUKVL1qRSGhuLBCL44Ccn8cFPThb7UkgBUKVLCCEF\nRKFLyApoQ3OSSxS6hBBSQBS6hKSBql2SKxS6hKSJgpfkAoUuIYQUEIUuWbPSPRl4Map2SbYodAkh\npIAodAnJEFW7JBsUuqTilNtSYLK2UOgSsgpU7ZLVotAlRfPWxKZiX8KqbqYRkg0KXUJWiapdshoU\nuoQQUkAUuoRkgapdkikKXUIIKSAKXUKyRNUuyQSFLlnzaIKBFBKFLiGEFBCFLqlIhV6VRi0Gki4K\nXUIIKSAKXUIIKSAKXUKQm5tp1GIg6aDQJWWlu3262JdASFYodAnJIap2yUoodAkhpIAodAkhpIAo\ndAnJMWoxkOVQ6JKKlekCCVoOTAqBQpcQQgqIQpcURSkc1ZNP1GIgqVDoEkJIAVHoErII9XVJvlHo\nEpIn1GIgRih0CSGkgCh0CSGkgCh0SUUr9GbmS1GLgSxFoUvIEnQzjeQThS4heUbVLlmMQpcQQgqI\nQpcQQgqIQpcQA7nu61KLgSyg0CWEkAKi0CUVr9hjY4QsRqFLSIFQi4EAFLqEFBQFL5GKfQEkc19x\nnEl627PePUW4ksrm62KwD/FiXwapMBS6JcooWNP5eArf0ufpM8N5M1LsyyBFQqFbJJmGaqbPS+Fb\n2ih41y4K3TzJV6hm8vqlGryVflQPIcthnFPPihBCCoWmFwghpIAodAkhpIAodAkhpIAodAkhpIAo\ndAkhpIAodAkhpID+P12kN0gpBotEAAAAAElFTkSuQmCC\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": []
          }
        }
      ]
    }
  ]
}