{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2.1.0\n",
      "sys.version_info(major=3, minor=6, micro=9, releaselevel='final', serial=0)\n",
      "matplotlib 3.1.3\n",
      "numpy 1.18.1\n",
      "pandas 1.0.1\n",
      "sklearn 0.22.1\n",
      "tensorflow 2.1.0\n",
      "tensorflow_core.python.keras.api._v2.keras 2.2.4-tf\n"
     ]
    }
   ],
    "source": [
     "import matplotlib as mpl\n",
     "import matplotlib.pyplot as plt\n",
     "%matplotlib inline\n",
     "import numpy as np\n",
     "import sklearn\n",
     "import pandas as pd\n",
     "import os\n",
     "import sys\n",
     "import time\n",
     "import tensorflow as tf\n",
     "\n",
     "from tensorflow import keras\n",
     "\n",
     "# Record the Python and library versions this notebook ran with,\n",
     "# for reproducibility.\n",
     "print(tf.__version__)\n",
     "print(sys.version_info)\n",
     "for module in mpl, np, pd, sklearn, tf, keras:\n",
     "    print(module.__name__, module.__version__)\n",
     "    \n",
     "# English summary of the (comment-only) outline below:\n",
     "# 1. Using a layer object as a plain function (functional API).\n",
     "# 2. Defining a custom layer class: subclass keras.layers.Layer; store the\n",
     "#    layer attributes (activation + unit count) in __init__; create the\n",
     "#    trainable kernel/bias in build(); implement the forward pass in call().\n",
     "# 3. Building a parameter-free data-processing layer with\n",
     "#    keras.layers.Lambda(fn), which applies fn to the data.\n",
     "'''\n",
     "1. layer的函数作用\n",
     "2. 自定义layer类\n",
     "    流程：\n",
     "    # 1. 创建class，继承自 keras.layers.Layer\n",
     "    # 2. 在init函数中指定 layer的属性（激活函数 + neural个数）\n",
     "    # 3. 重写build函数，创建成员变量 kernel + bias，就是这一个layer所需要的训练的参数集合\n",
     "    # 4. 重写call，根据上面创建的训练参数 kernel + bias ，完成正向计算\n",
     "3.  创建对数据处理层次（没有训练数据，只需要对数据进行处理即可）  \n",
     "    ## 当需要自定义层次，为了对数据进行处理，没有需要训练的数据时（只是完成函数式的计算）\n",
     "    ## 可以使用 keras.layers.Lambda() ，传入一个lambda表达式或者函数（功能是对每个数据进行处理）\n",
     "    customized_softplus = keras.layers.Lambda(lambda x : tf.nn.softplus(x))\n",
     "    print(customized_softplus([-10., -5., 0., 5., 10.]))\n",
     "'''"
    ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor: shape=(10, 100), dtype=float32, numpy=\n",
       "array([[0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.],\n",
       "       [0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0.]], dtype=float32)>"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "## 函数式API，每个layer对象都就可以当作一个单独的函数来使用\n",
    "## layer的输出结果是[None，num_of_neural]\n",
    "\n",
    "# layer = tf.keras.layers.Dense(100)\n",
    "\n",
    "layer = tf.keras.layers.Dense(100, input_shape=(None, 5))\n",
    "layer(tf.zeros([10, 5]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[<tf.Variable 'dense_1/kernel:0' shape=(5, 100) dtype=float32, numpy=\n",
       " array([[ 1.86769530e-01, -2.36812457e-01, -5.16436398e-02,\n",
       "         -1.48240626e-01,  1.70920864e-01, -2.26178601e-01,\n",
       "          1.09826028e-03, -6.07571006e-02, -1.70103073e-01,\n",
       "         -2.17173889e-01, -1.56562686e-01, -1.31182998e-01,\n",
       "          4.86384183e-02,  2.26630718e-02, -1.07071385e-01,\n",
       "          1.09372035e-01,  6.93879277e-02, -2.03916356e-01,\n",
       "         -4.62349504e-02,  4.52593416e-02, -6.41764402e-02,\n",
       "         -9.35252458e-02, -1.22784227e-01,  1.33482769e-01,\n",
       "          2.08572164e-01, -2.12609544e-01, -1.76293761e-01,\n",
       "          1.07925072e-01,  2.37118796e-01, -2.50164270e-02,\n",
       "         -5.34447879e-02, -6.58439398e-02, -1.19483195e-01,\n",
       "         -1.21609941e-01, -1.56374842e-01,  1.39295474e-01,\n",
       "          1.28970072e-01, -1.48382097e-01, -1.04562268e-01,\n",
       "         -1.98104680e-01,  1.89350262e-01,  9.91653949e-02,\n",
       "         -3.70629132e-02, -2.22392842e-01, -1.09476373e-01,\n",
       "          2.28395566e-01,  2.38480762e-01, -1.42413676e-01,\n",
       "         -8.52066725e-02,  1.31372944e-01,  1.22395307e-02,\n",
       "         -1.49008632e-02,  2.35355362e-01,  8.07071477e-02,\n",
       "          1.34443894e-01,  7.36192018e-02,  1.91887155e-01,\n",
       "         -1.04586497e-01, -1.46033242e-01, -1.55067310e-01,\n",
       "          1.16266981e-01, -1.15403756e-01,  2.84133703e-02,\n",
       "          5.75708598e-02,  7.29310066e-02, -1.32935524e-01,\n",
       "         -1.07127815e-01,  1.02901146e-01, -1.55248091e-01,\n",
       "          6.17101789e-03,  5.66617697e-02, -1.40704229e-01,\n",
       "          4.45600003e-02,  1.83785364e-01, -1.56708062e-03,\n",
       "          7.11567849e-02, -2.13966608e-01,  2.27907255e-01,\n",
       "         -2.07748741e-01, -1.65005505e-01, -1.77255511e-01,\n",
       "          6.51004761e-02,  1.40245453e-01,  1.79346129e-01,\n",
       "          1.72359541e-01, -2.71517783e-02,  1.97006747e-01,\n",
       "         -1.39350876e-01,  2.25180551e-01,  9.77024436e-03,\n",
       "          1.19100258e-01,  1.27878144e-01, -1.07938588e-01,\n",
       "          1.76170990e-01, -2.31908217e-01, -1.28772929e-01,\n",
       "          1.86973944e-01,  1.71634123e-01, -4.81268317e-02,\n",
       "         -1.78239375e-01],\n",
       "        [-3.57603878e-02, -2.10500747e-01, -6.45631403e-02,\n",
       "          1.04700878e-01,  1.84694931e-01,  1.68832019e-01,\n",
       "         -8.32840800e-04,  9.36069638e-02,  1.77418441e-02,\n",
       "         -1.58099622e-01,  1.02239802e-01,  4.00515050e-02,\n",
       "         -1.82913840e-01, -9.50261503e-02, -4.49134558e-02,\n",
       "          1.13443479e-01, -1.18672587e-01,  1.12174466e-01,\n",
       "         -1.11313462e-02, -7.58662075e-02,  1.43084034e-01,\n",
       "          9.30304974e-02,  8.54255259e-03, -2.03713298e-01,\n",
       "         -1.90223604e-02, -1.51141182e-01, -2.04485148e-01,\n",
       "         -1.31742537e-01,  1.56564847e-01, -2.07947865e-01,\n",
       "         -7.19162077e-02, -1.62461802e-01,  2.03583077e-01,\n",
       "         -1.40899152e-01,  7.33395964e-02,  2.20954418e-04,\n",
       "          1.00470915e-01, -1.23157360e-01,  2.00796619e-01,\n",
       "          4.95370179e-02,  6.94406480e-02, -8.52707326e-02,\n",
       "         -6.13911450e-03, -1.91938549e-01, -1.06372833e-02,\n",
       "          8.88599902e-02, -3.14674526e-02, -2.00102106e-01,\n",
       "          1.83251753e-01, -1.84828222e-01,  1.38310358e-01,\n",
       "          6.96848780e-02, -1.64612427e-01, -4.91651297e-02,\n",
       "          2.22548142e-01,  2.28110656e-01, -5.54834306e-02,\n",
       "         -1.55276760e-01, -1.19933039e-01, -2.37942055e-01,\n",
       "         -1.94655001e-01, -1.87022507e-01,  4.28809226e-03,\n",
       "          5.97380549e-02,  2.25461617e-01,  9.43563133e-02,\n",
       "          3.97073179e-02,  1.81312546e-01, -1.62640482e-01,\n",
       "         -7.24694282e-02,  1.66012749e-01,  1.59474298e-01,\n",
       "          2.23649606e-01,  1.47886142e-01,  1.17505267e-01,\n",
       "          5.33694476e-02,  1.46202639e-01,  2.05684498e-01,\n",
       "         -8.65946263e-02,  9.02408510e-02, -9.99327302e-02,\n",
       "          2.58162767e-02,  7.76067227e-02, -6.51689768e-02,\n",
       "          2.22321495e-01, -1.66997924e-01,  7.42881447e-02,\n",
       "         -9.11465883e-02, -1.93166524e-01, -1.08402461e-01,\n",
       "         -2.20372096e-01,  1.12255588e-01, -2.10556388e-02,\n",
       "         -1.42120510e-01,  7.72367865e-02,  2.34095439e-01,\n",
       "          1.41440481e-02,  2.32421890e-01, -2.02847973e-01,\n",
       "          1.55787632e-01],\n",
       "        [ 8.48973244e-02,  1.74304321e-01, -1.18065894e-01,\n",
       "         -1.60225853e-01, -7.03434646e-03, -2.59400457e-02,\n",
       "         -1.69512287e-01,  2.04746559e-01, -4.92147803e-02,\n",
       "         -2.09777460e-01,  8.26485902e-02, -1.68825522e-01,\n",
       "          4.54294235e-02,  8.17328691e-05,  2.15776697e-01,\n",
       "         -1.97799310e-01, -6.35740310e-02, -1.40115142e-01,\n",
       "         -5.07559776e-02,  1.58029512e-01, -5.53971529e-03,\n",
       "          7.43031949e-02, -1.17930420e-01,  2.31071219e-01,\n",
       "          1.63535789e-01, -4.48450148e-02,  1.75314799e-01,\n",
       "         -8.13090354e-02,  5.37090749e-02, -6.08329624e-02,\n",
       "          8.81366283e-02, -8.75964463e-02, -1.81594670e-01,\n",
       "          1.44626424e-01,  1.53834209e-01, -2.83423662e-02,\n",
       "          4.25688773e-02,  5.67957610e-02, -2.25110352e-03,\n",
       "          8.12774152e-02, -4.17846590e-02,  1.27424315e-01,\n",
       "         -4.91743684e-02,  2.36720189e-01,  3.87813598e-02,\n",
       "         -7.74489045e-02, -3.97284180e-02,  1.52286574e-01,\n",
       "         -1.70877606e-01,  2.30210572e-02,  9.14978236e-02,\n",
       "         -1.17134802e-01, -6.36632890e-02,  1.14009067e-01,\n",
       "         -9.76574570e-02, -1.76739037e-01,  1.30201921e-01,\n",
       "         -4.03134525e-03, -1.29809111e-01, -5.92297465e-02,\n",
       "         -1.50059000e-01, -1.32625595e-01, -1.61867768e-01,\n",
       "          4.88721281e-02, -8.39135051e-02, -2.80585885e-02,\n",
       "          5.98369390e-02, -9.52465534e-02,  6.17738515e-02,\n",
       "         -1.78454131e-01, -5.60627580e-02, -4.91929054e-03,\n",
       "         -6.42697513e-03,  1.64371982e-01,  2.63521224e-02,\n",
       "         -1.53050274e-01,  1.11522228e-02,  2.36126140e-01,\n",
       "         -9.19868350e-02, -2.08330899e-02, -1.46401525e-01,\n",
       "         -1.61505178e-01,  1.74930140e-01, -1.85370177e-01,\n",
       "         -5.02226353e-02,  1.68541536e-01,  2.20614269e-01,\n",
       "         -1.30112663e-01, -6.41853958e-02, -1.53065920e-02,\n",
       "         -1.30219817e-01,  1.93827406e-01,  2.13394120e-01,\n",
       "          6.64111823e-02,  1.46489307e-01, -1.35940582e-01,\n",
       "          1.01217195e-01, -4.65080142e-02, -1.65766254e-01,\n",
       "         -2.15434119e-01],\n",
       "        [-1.91505522e-01,  6.01951927e-02, -1.41500935e-01,\n",
       "          1.35637447e-01,  1.50545701e-01, -2.04398572e-01,\n",
       "         -2.20596880e-01, -4.59918231e-02, -7.90336579e-02,\n",
       "          9.09823924e-02,  4.46020514e-02, -1.97991729e-02,\n",
       "         -1.29006371e-01,  4.20834273e-02,  3.50838155e-02,\n",
       "          1.15538821e-01,  1.52850598e-02,  9.40918922e-03,\n",
       "         -2.37149566e-01, -1.36337101e-01, -1.40915722e-01,\n",
       "          5.38554341e-02,  2.36245647e-01, -1.60628408e-02,\n",
       "         -6.64069802e-02, -1.90855294e-01, -1.85748488e-02,\n",
       "          1.33668974e-01,  1.35154948e-01,  1.46190375e-02,\n",
       "         -1.36030301e-01,  2.02879593e-01, -1.01746023e-02,\n",
       "         -4.39893156e-02,  1.06309041e-01, -3.72516662e-02,\n",
       "         -7.00707138e-02, -9.04951543e-02, -1.29024088e-01,\n",
       "         -1.46095753e-02,  2.12728724e-01,  2.23470137e-01,\n",
       "         -3.54734361e-02, -4.27146703e-02, -1.73036665e-01,\n",
       "         -2.13027477e-01, -1.11132532e-01,  6.92771524e-02,\n",
       "          9.30276364e-02, -1.41725898e-01, -1.06609628e-01,\n",
       "          1.78766295e-01,  9.30177122e-02, -2.27749720e-01,\n",
       "         -1.22720055e-01, -7.33838826e-02,  4.60043699e-02,\n",
       "         -3.89792323e-02,  1.42863169e-01,  2.23501757e-01,\n",
       "          1.31372675e-01,  6.14956766e-02,  3.99599224e-02,\n",
       "         -4.26476449e-02,  8.31740648e-02, -1.54266953e-01,\n",
       "         -1.99641287e-03,  6.31608218e-02,  4.09395248e-02,\n",
       "         -1.22873709e-01, -1.74684107e-01,  2.80043334e-02,\n",
       "          6.46744519e-02, -1.16960742e-01,  2.12321892e-01,\n",
       "         -2.21377283e-01, -2.09637597e-01,  1.42033711e-01,\n",
       "         -1.52723640e-01, -8.09682161e-02,  9.25535113e-02,\n",
       "         -2.84543633e-03,  1.73185810e-01,  7.21322745e-02,\n",
       "         -1.76383466e-01, -6.96772337e-03, -1.30287692e-01,\n",
       "          1.97388515e-01,  9.01450962e-02,  8.60769004e-02,\n",
       "          6.35366887e-02,  1.85413554e-01, -2.34454721e-01,\n",
       "          2.30504885e-01, -1.63713545e-01,  1.47728905e-01,\n",
       "         -2.00336799e-01, -1.44844204e-01, -3.73329967e-02,\n",
       "         -7.79274255e-02],\n",
       "        [ 4.35982794e-02, -9.15473104e-02,  3.68946642e-02,\n",
       "         -7.53418803e-02, -8.34866762e-02,  7.83064216e-02,\n",
       "          1.42545119e-01, -1.25085950e-01, -1.11863747e-01,\n",
       "          2.11839870e-01,  1.78851932e-02, -1.62840396e-01,\n",
       "          1.44705549e-01, -9.19004381e-02, -4.37396914e-02,\n",
       "          1.27551451e-01,  2.11423978e-01,  2.06313893e-01,\n",
       "          5.86729795e-02, -1.76969528e-01, -1.11348540e-01,\n",
       "         -1.64178193e-01,  2.13465914e-01, -2.99728662e-02,\n",
       "         -1.21647671e-01,  7.01680630e-02, -1.25980735e-01,\n",
       "          1.29207835e-01, -1.30387992e-01,  1.77940741e-01,\n",
       "          1.06380984e-01, -2.74294466e-02,  1.94348380e-01,\n",
       "         -1.76820606e-02, -1.77238077e-01,  1.69240430e-01,\n",
       "         -1.55628294e-01,  1.86972812e-01,  9.01875645e-02,\n",
       "         -6.70231283e-02, -1.40004903e-02, -2.10887671e-01,\n",
       "         -2.05523446e-01, -2.04558894e-01, -2.38501266e-01,\n",
       "          7.66417235e-02,  9.26810652e-02,  1.66685089e-01,\n",
       "          6.68680519e-02,  5.14303297e-02,  2.08713159e-01,\n",
       "         -1.47129565e-01, -1.65458947e-01, -1.21645778e-02,\n",
       "          1.40897945e-01, -2.35661194e-01,  1.03199854e-01,\n",
       "         -7.42632300e-02, -1.31688118e-01, -2.58191079e-02,\n",
       "          1.62019417e-01, -1.51683748e-01, -1.60945624e-01,\n",
       "          1.21513352e-01, -2.32237175e-01,  2.93476135e-02,\n",
       "         -5.16067743e-02, -2.05541909e-01,  1.15433052e-01,\n",
       "         -3.55049521e-02,  1.73971519e-01,  8.47640783e-02,\n",
       "         -1.69983506e-01, -1.69956982e-02, -4.11490202e-02,\n",
       "          7.31092840e-02,  2.17189148e-01,  1.17535904e-01,\n",
       "         -1.12300068e-02, -1.66406572e-01, -2.77805328e-02,\n",
       "         -1.61565185e-01,  6.20840043e-02,  4.96756881e-02,\n",
       "          9.08246487e-02,  1.58324555e-01,  1.18334904e-01,\n",
       "         -6.20903969e-02, -1.08517244e-01,  4.09514755e-02,\n",
       "         -6.32209033e-02, -1.68129981e-01, -2.14593858e-02,\n",
       "         -1.85604751e-01,  9.68155712e-02,  2.01674953e-01,\n",
       "          1.68963835e-01, -1.06922925e-01, -1.53113589e-01,\n",
       "         -2.69612521e-02]], dtype=float32)>,\n",
       " <tf.Variable 'dense_1/bias:0' shape=(100,) dtype=float32, numpy=\n",
       " array([0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,\n",
       "        0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],\n",
       "       dtype=float32)>]"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
    "source": [
     "# layer.variables\n",
     "# x * w + b\n",
     "## Inspect the layer's trainable parameters (kernel + bias).\n",
     "layer.trainable_variables"
    ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      ".. _california_housing_dataset:\n",
      "\n",
      "California Housing dataset\n",
      "--------------------------\n",
      "\n",
      "**Data Set Characteristics:**\n",
      "\n",
      "    :Number of Instances: 20640\n",
      "\n",
      "    :Number of Attributes: 8 numeric, predictive attributes and the target\n",
      "\n",
      "    :Attribute Information:\n",
      "        - MedInc        median income in block\n",
      "        - HouseAge      median house age in block\n",
      "        - AveRooms      average number of rooms\n",
      "        - AveBedrms     average number of bedrooms\n",
      "        - Population    block population\n",
      "        - AveOccup      average house occupancy\n",
      "        - Latitude      house block latitude\n",
      "        - Longitude     house block longitude\n",
      "\n",
      "    :Missing Attribute Values: None\n",
      "\n",
      "This dataset was obtained from the StatLib repository.\n",
      "http://lib.stat.cmu.edu/datasets/\n",
      "\n",
      "The target variable is the median house value for California districts.\n",
      "\n",
      "This dataset was derived from the 1990 U.S. census, using one row per census\n",
      "block group. A block group is the smallest geographical unit for which the U.S.\n",
      "Census Bureau publishes sample data (a block group typically has a population\n",
      "of 600 to 3,000 people).\n",
      "\n",
      "It can be downloaded/loaded using the\n",
      ":func:`sklearn.datasets.fetch_california_housing` function.\n",
      "\n",
      ".. topic:: References\n",
      "\n",
      "    - Pace, R. Kelley and Ronald Barry, Sparse Spatial Autoregressions,\n",
      "      Statistics and Probability Letters, 33 (1997) 291-297\n",
      "\n",
      "(20640, 8)\n",
      "(20640,)\n"
     ]
    }
   ],
   "source": [
    "from sklearn.datasets import fetch_california_housing\n",
    "\n",
    "housing = fetch_california_housing()\n",
    "print(housing.DESCR)\n",
    "print(housing.data.shape)\n",
    "print(housing.target.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(11610, 8) (11610,)\n",
      "(3870, 8) (3870,)\n",
      "(5160, 8) (5160,)\n"
     ]
    }
   ],
   "source": [
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "x_train_all, x_test, y_train_all, y_test = train_test_split(\n",
    "    housing.data, housing.target, random_state = 7)\n",
    "x_train, x_valid, y_train, y_valid = train_test_split(\n",
    "    x_train_all, y_train_all, random_state = 11)\n",
    "print(x_train.shape, y_train.shape)\n",
    "print(x_valid.shape, y_valid.shape)\n",
    "print(x_test.shape, y_test.shape)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "scaler = StandardScaler()\n",
    "x_train_scaled = scaler.fit_transform(x_train)\n",
    "x_valid_scaled = scaler.transform(x_valid)\n",
    "x_test_scaled = scaler.transform(x_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor([4.5417706e-05 6.7153489e-03 6.9314718e-01 5.0067153e+00 1.0000046e+01], shape=(5,), dtype=float32)\n"
     ]
    }
   ],
   "source": [
    "# tf.nn.softplus : log(1+e^x)\n",
    "## 当需要自定义层次，为了对数据进行处理，没有需要训练的数据时（只是完成函数式的计算）\n",
    "## 可以使用 keras.layers.Lambda() ，传入一个lambda表达式或者函数（功能是对每个数据进行处理）\n",
    "customized_softplus = keras.layers.Lambda(lambda x : tf.nn.softplus(x))\n",
    "print(customized_softplus([-10., -5., 0., 5., 10.]))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Model: \"sequential\"\n",
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "customized_dense_layer (Cust (None, 30)                270       \n",
      "_________________________________________________________________\n",
      "customized_dense_layer_1 (Cu (None, 1)                 31        \n",
      "_________________________________________________________________\n",
      "lambda (Lambda)              (None, 1)                 0         \n",
      "=================================================================\n",
      "Total params: 301\n",
      "Trainable params: 301\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n"
     ]
    }
   ],
   "source": [
    "# customized dense layer.\n",
    "## 自定义layer\n",
    "# 流程：\n",
    "# 1. 创建class，继承自 keras.layers.Layer\n",
    "# 2. 在init函数中指定 layer的属性（激活函数 + neural个数）\n",
    "# 3. 重写build函数，创建成员变量 kernel + bias，就是这一个layer所需要的训练的参数集合\n",
    "# 4. 重写call，根据上面创建的训练参数 kernel + bias ，完成正向计算\n",
    "class CustomizedDenseLayer(keras.layers.Layer):\n",
    "    def __init__(self, units, activation=None, **kwargs):\n",
    "        ## 初始化函数中，设置神经元个数，与激活函数\n",
    "        self.units = units\n",
    "        self.activation = keras.layers.Activation(activation)\n",
    "        super(CustomizedDenseLayer, self).__init__(**kwargs)\n",
    "    \n",
    "    ## 根据传入的数据，能够构建出layer中索要训练的参数\n",
    "    ## 构建出kernel 和 bias 参数矩阵的shape\n",
    "    def build(self, input_shape):\n",
    "        \"\"\"构建所需要的参数\"\"\"\n",
    "\n",
    "        # x * w + b. input_shape:[None, a] w:[a,b]output_shape: [None, b]\n",
    "        self.kernel = self.add_weight(name = 'kernel',\n",
    "                                      shape = (input_shape[1], self.units),\n",
    "                                      ## 矩阵数据的初始化\n",
    "                                      initializer = 'uniform',\n",
    "                                      trainable = True)\n",
    "        self.bias = self.add_weight(name = 'bias',\n",
    "                                    shape = (self.units, ),\n",
    "                                    ## 初始化为zeros\n",
    "                                    initializer = 'zeros',\n",
    "                                    trainable = True)\n",
    "        super(CustomizedDenseLayer, self).build(input_shape)\n",
    "    \n",
    "    def call(self, x):\n",
    "        \"\"\"完成正向计算\"\"\"\n",
    "        return self.activation(x @ self.kernel + self.bias)\n",
    "\n",
    "model = keras.models.Sequential([\n",
    "    CustomizedDenseLayer(30, activation='relu',\n",
    "                         input_shape=x_train.shape[1:]),\n",
    "    CustomizedDenseLayer(1),\n",
    "    ## 添加自定以数据处理layer\n",
    "    customized_softplus,\n",
    "    # keras.layers.Dense(1, activation=\"softplus\"),\n",
    "    # keras.layers.Dense(1), keras.layers.Activation('softplus'),\n",
    "])\n",
    "model.summary()\n",
    "model.compile(loss=\"mean_squared_error\", optimizer=\"sgd\")\n",
    "callbacks = [keras.callbacks.EarlyStopping(\n",
    "    patience=5, min_delta=1e-2)]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Train on 11610 samples, validate on 3870 samples\n",
      "Epoch 1/100\n",
      "11610/11610 [==============================] - 1s 93us/sample - loss: 2.8135 - val_loss: 2.6260\n",
      "Epoch 2/100\n",
      "11610/11610 [==============================] - 1s 61us/sample - loss: 2.1538 - val_loss: 2.0095\n",
      "Epoch 3/100\n",
      "11610/11610 [==============================] - 1s 58us/sample - loss: 1.6691 - val_loss: 1.5977\n",
      "Epoch 4/100\n",
      "11610/11610 [==============================] - 1s 57us/sample - loss: 1.3563 - val_loss: 1.3099\n",
      "Epoch 5/100\n",
      "11610/11610 [==============================] - 1s 77us/sample - loss: 1.1160 - val_loss: 1.0580\n",
      "Epoch 6/100\n",
      "11610/11610 [==============================] - 1s 106us/sample - loss: 0.9136 - val_loss: 0.8712\n",
      "Epoch 7/100\n",
      "11610/11610 [==============================] - 1s 76us/sample - loss: 0.7838 - val_loss: 0.7756\n",
      "Epoch 8/100\n",
      "11610/11610 [==============================] - 1s 82us/sample - loss: 0.7186 - val_loss: 0.7322\n",
      "Epoch 9/100\n",
      "11610/11610 [==============================] - 1s 78us/sample - loss: 0.6836 - val_loss: 0.7078\n",
      "Epoch 10/100\n",
      "11610/11610 [==============================] - 1s 75us/sample - loss: 0.6613 - val_loss: 0.6911\n",
      "Epoch 11/100\n",
      "11610/11610 [==============================] - 1s 69us/sample - loss: 0.6451 - val_loss: 0.6777\n",
      "Epoch 12/100\n",
      "11610/11610 [==============================] - 1s 77us/sample - loss: 0.6318 - val_loss: 0.6663\n",
      "Epoch 13/100\n",
      "11610/11610 [==============================] - 1s 77us/sample - loss: 0.6202 - val_loss: 0.6553\n",
      "Epoch 14/100\n",
      "11610/11610 [==============================] - 1s 85us/sample - loss: 0.6095 - val_loss: 0.6446\n",
      "Epoch 15/100\n",
      "11610/11610 [==============================] - 1s 70us/sample - loss: 0.5995 - val_loss: 0.6345\n",
      "Epoch 16/100\n",
      "11610/11610 [==============================] - 1s 85us/sample - loss: 0.5898 - val_loss: 0.6247\n",
      "Epoch 17/100\n",
      "11610/11610 [==============================] - 2s 138us/sample - loss: 0.5805 - val_loss: 0.6152\n",
      "Epoch 18/100\n",
      "11610/11610 [==============================] - 1s 93us/sample - loss: 0.5714 - val_loss: 0.6058\n",
      "Epoch 19/100\n",
      "11610/11610 [==============================] - 1s 101us/sample - loss: 0.5625 - val_loss: 0.5965\n",
      "Epoch 20/100\n",
      "11610/11610 [==============================] - 1s 110us/sample - loss: 0.5541 - val_loss: 0.5877\n",
      "Epoch 21/100\n",
      "11610/11610 [==============================] - 1s 89us/sample - loss: 0.5459 - val_loss: 0.5792\n",
      "Epoch 22/100\n",
      "11610/11610 [==============================] - 1s 104us/sample - loss: 0.5381 - val_loss: 0.5710\n",
      "Epoch 23/100\n",
      "11610/11610 [==============================] - 2s 133us/sample - loss: 0.5308 - val_loss: 0.5633\n",
      "Epoch 24/100\n",
      "11610/11610 [==============================] - 2s 133us/sample - loss: 0.5238 - val_loss: 0.5560\n",
      "Epoch 25/100\n",
      "11610/11610 [==============================] - 2s 146us/sample - loss: 0.5172 - val_loss: 0.5487\n",
      "Epoch 26/100\n",
      "11610/11610 [==============================] - 2s 177us/sample - loss: 0.5109 - val_loss: 0.5421\n",
      "Epoch 27/100\n",
      "11610/11610 [==============================] - 2s 176us/sample - loss: 0.5050 - val_loss: 0.5357\n",
      "Epoch 28/100\n",
      "11610/11610 [==============================] - 2s 173us/sample - loss: 0.4995 - val_loss: 0.5296\n",
      "Epoch 29/100\n",
      "11610/11610 [==============================] - 2s 165us/sample - loss: 0.4944 - val_loss: 0.5237\n",
      "Epoch 30/100\n",
      "11610/11610 [==============================] - 2s 179us/sample - loss: 0.4897 - val_loss: 0.5187\n",
      "Epoch 31/100\n",
      "11610/11610 [==============================] - 2s 154us/sample - loss: 0.4852 - val_loss: 0.5139\n",
      "Epoch 32/100\n",
      "11610/11610 [==============================] - 2s 174us/sample - loss: 0.4811 - val_loss: 0.5089\n",
      "Epoch 33/100\n",
      "11610/11610 [==============================] - 2s 142us/sample - loss: 0.4772 - val_loss: 0.5044\n",
      "Epoch 34/100\n",
      "11610/11610 [==============================] - 2s 137us/sample - loss: 0.4736 - val_loss: 0.5003\n",
      "Epoch 35/100\n",
      "11610/11610 [==============================] - 2s 142us/sample - loss: 0.4703 - val_loss: 0.4965\n",
      "Epoch 36/100\n",
      "11610/11610 [==============================] - 2s 160us/sample - loss: 0.4671 - val_loss: 0.4928\n",
      "Epoch 37/100\n",
      "11610/11610 [==============================] - 2s 169us/sample - loss: 0.4643 - val_loss: 0.4896\n",
      "Epoch 38/100\n",
      "11610/11610 [==============================] - 2s 155us/sample - loss: 0.4616 - val_loss: 0.4865\n",
      "Epoch 39/100\n",
      "11610/11610 [==============================] - 2s 148us/sample - loss: 0.4590 - val_loss: 0.4832\n",
      "Epoch 40/100\n",
      "11610/11610 [==============================] - 2s 144us/sample - loss: 0.4567 - val_loss: 0.4810\n",
      "Epoch 41/100\n",
      "11610/11610 [==============================] - 2s 138us/sample - loss: 0.4543 - val_loss: 0.4788\n",
      "Epoch 42/100\n",
      "11610/11610 [==============================] - 3s 218us/sample - loss: 0.4523 - val_loss: 0.4758\n",
      "Epoch 43/100\n",
      "11610/11610 [==============================] - 3s 227us/sample - loss: 0.4503 - val_loss: 0.4735\n",
      "Epoch 44/100\n",
      "11610/11610 [==============================] - 2s 148us/sample - loss: 0.4485 - val_loss: 0.4717\n",
      "Epoch 45/100\n",
      "11610/11610 [==============================] - 2s 145us/sample - loss: 0.4466 - val_loss: 0.4697\n",
      "Epoch 46/100\n",
      "11610/11610 [==============================] - 1s 127us/sample - loss: 0.4449 - val_loss: 0.4677\n",
      "Epoch 47/100\n",
      "11610/11610 [==============================] - 2s 153us/sample - loss: 0.4432 - val_loss: 0.4656\n",
      "Epoch 48/100\n",
      "11610/11610 [==============================] - 2s 132us/sample - loss: 0.4419 - val_loss: 0.4638\n",
      "Epoch 49/100\n",
      "11610/11610 [==============================] - 2s 159us/sample - loss: 0.4403 - val_loss: 0.4620\n",
      "Epoch 50/100\n",
      "11610/11610 [==============================] - 2s 142us/sample - loss: 0.4387 - val_loss: 0.4606\n",
      "Epoch 51/100\n",
      "11610/11610 [==============================] - 1s 119us/sample - loss: 0.4377 - val_loss: 0.4590\n",
      "Epoch 52/100\n",
      "11610/11610 [==============================] - 1s 92us/sample - loss: 0.4362 - val_loss: 0.4574\n"
     ]
    }
   ],
   "source": [
    "history = model.fit(x_train_scaled, y_train,\n",
    "                    validation_data = (x_valid_scaled, y_valid),\n",
    "                    epochs = 100,\n",
    "                    callbacks = callbacks)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAecAAAEzCAYAAAALosttAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvnQurowAAIABJREFUeJzt3Xl8VeWB//HPc5fkZt9DIAFCCIRdQARcgOCK2Iraxdraqq067XSxv844Y5fpYjsz7djf2E5/tmoXrY7aUrtoldaiJYIbCMi+EwiEJRtk35Pn98e5WUAgAe7NPSHf9+t1XmfNuc99Xnq/POc85znGWouIiIi4hyfSBRAREZETKZxFRERcRuEsIiLiMgpnERERl1E4i4iIuIzCWURExGX6DGdjzK+MMeXGmC2n2W+MMf9jjNljjNlkjJkZ+mKKiIgMHf1pOT8JLDrD/uuBccHpXuBn518sERGRoavPcLbWrgSOneGQJcBT1vEOkGyMGR6qAoqIiAw1objnnA0c7LVeGtwmIiIi58A3kB9mjLkX59I3gUDg4lGjRg3kx/cprqGEDm+AQ2TS0GoZlTh4+8t1dnbi8Qze8ruZ6ja8VL/ho7oNr77qd9euXZXW2oz+nCsU4XwIGNlrPSe47X2stY8DjwMUFBTYnTt3huDjQ+jZW6H6AL+e/hzfenErK792FcMSA5Eu1TkpKiqisLAw0sW4IKluw0v1Gz6q2/Dqq36NMSX9PVco/gn1IvCpYK/tuUCNtfZICM478DInQuUuxqVFAbCnvD7CBRIRkaGoz5azMeY5oBBIN8aUAt8C/ADW2keBZcBiYA/QCNwVrsKGXeYk6GxnvL8cgL0V9Vyenx7hQomIyFDTZzhba2/rY78FPh+yEkVS5kQA0hr2Eh8dr5aziIhExIB2CHO9tHFgvJiK7YzNXKhwFhHppa2tjdLSUpqbmyNdFFdKSkpi+/btBAIBcnJy8Pv953wuhXNv/gCkjYXy7eRnfJA39lREukQiIq5RWlpKQkICubm5GGMiXRzXqaurIz4+nqqqKkpLSxkzZsw5n0t96k+WOQnKtpKfGU9ZbQu1zW2RLpGIiCs0NzeTlpamYD4DYwxpaWnnfXVB4XyyzElwfD/jU5z/+Pbq0raISDcFc99CUUcK55NlTgQsBT7nabC9FQ2RLY+IiHSLj4+PdBEGhML5ZJmTABjeso8or0edwkREZMApnE+WOga80XgrtpObHqtwFhFxIWst999/P1OmTGHq1Kn89re/BeDIkSPMnz+f6dOnM2XKFFatWkVHRwd33nln97EPP/xwhEvfN/XWPpnHCxkFUL6dsRm3sONoXaRLJCIiJ/nDH/7Ahg0b2LhxI5WVlVxyySXMnz+fZ599luuuu46vf/3rdHR00NjYyIYNGzh06BBbtmwBoLq6OsKl75vC+VQyJ8G+leRPjedv28poae8g2ueNdKlERFzjO3/eyrbDtSE956QRiXzrg5P7dewbb7zBbbfdhtfrZdiwYSxYsIB3332XSy65hE9/+tO0tbVx0003MX36dPLy8iguLuaLX/wiN9xwA9dee21Iyx0Ouqx9KsMmQd1hJiZ30NFpKalqjHSJRESkH+bPn8/KlSvJzs7mzjvv5KmnniIlJYWNGzdSWFjIo48+yt133x3pYvZJLedTCXYKm+ApBZwXYIwflhDJEomIuEp/W7jhMm/ePB577DHuuOMOjh07xsqVK3nooYcoKSkhJyeHe+65h5aWFtavX8/ixYuJioriQx/6EAUFBdx+++0RLXt/KJxPJTjGdk7bfiBbzzqLiLjMzTffzNtvv81FF12EMYb/+q//Iisri1//+tc89NBD+P1+4uPjeeqppzh06BB33XUXnZ2dAPznf/5nhEvfN4XzqSRmQ3QiUcd2kp2cz54KhbOIiBvU1zu/x8YYHnroIR566KET9t9xxx3ccccd7/u79evX
D0j5QkX3nE/FGKf1XL6d/Ey9nUpERAaWwvl0MidC+TbGpsdRXNFAZ6eNdIlERGSIUDifTuYkaDrOlKRmmto6OFzTFOkSiYjIEKFwPp1gj+1J3oMAurQtIiIDRuF8OsEe26PaSwCFs4iIDByF8+nEpUNcJrHVu0iLi9IwniIiMmAUzmcS7BR28egU3t1/LNKlERGRIULhfCaZk6BiB3PGpFBS1cgRdQoTERlUzvT+5/379zNlypQBLE3/KZzPJHMitDUyL6MBgNXFaj2LiEj4KZzPJNhje6w9SGLAxzvFVREukIjI0PbAAw/wyCOPdK9/+9vf5nvf+x5XXXUVM2fOZOrUqbzwwgtnfd7m5mbuuusupk6dyowZM1ixYgUAW7duZfbs2UyfPp1p06axe/duGhoauOGGG7jooouYMmVK97ukQ0nDd55J5gQAvBXbmD1mIav3qeUsIgLAXx6Ao5tDe86sqXD99894yK233sqXv/xlPv/5zwOwdOlSXnnlFb70pS+RmJhIZWUlc+fO5cYbb8QY0++PfuSRRzDGsHnzZnbs2MG1117Lrl27ePTRR7nvvvv4xCc+QWtrKx0dHSxbtowRI0bw8ssvA1BTU3Pu3/k01HI+k+gESB4F5duZm5fKvsoGjtY0R7pUIiJD1owZMygvL+fw4cNs3LiRlJQUsrKy+NrXvsa0adO4+uqrOXToEGVlZWd13jfeeKP7bVUTJkxg9OjR7Nq1i0svvZT/+I//4Ac/+AElJSXExMQwdepUli9fzr/+67+yatUqkpKSQv491XLuS+YkJ5wvTQNg9b4qlkzPjnChREQirI8Wbjh95CMf4fnnn+fo0aPceuutPPPMM1RUVLBu3Tr8fj+5ubk0N4emIfXxj3+cOXPm8PLLL7N48WIee+wxrrzyStavX8+yZcv4xje+wVVXXcU3v/nNkHxeF7Wc+5I5ESp3MTEzQELAxzvqFCYiElG33norv/nNb3j++ef5yEc+Qk1NDZmZmfj9flasWEFJSclZn3PevHk888wzAOzatYsDBw5QUFBAcXExeXl5fOlLX2LJkiVs2rSJw4cPExsby+233879998fljdeqeXcl8xJ0NmO93gxs3NTWa1OYSIiETV58mTq6urIzs5m+PDhfOITn+CDH/wgU6dOZdasWUyYMOGsz/mP//iPfO5zn2Pq1Kn4fD6efPJJoqOjWbp0KU8//TR+v7/78vm7777L/fffj8fjwe/387Of/Szk31Hh3JfgMJ6Ub2NO3nRe21FOeW0zmYmByJZLRGQI27y5pzNaeno6b7/99imP63r/86nk5uayZcsWAAKBAE888cT7jnnggQd44IEHTth23XXXcd11151LsftNl7X7kj4ejBfKtjI3z7nv/I56bYuISBgpnPvii4YR02HPa0wankh8tE+XtkVEBpHNmzczffr0E6Y5c+ZEulhnpMva/TH5ZvjbN/DV7OeS3BQNRiIiMohMnTqVDRs2RLoYZ0Ut5/6YtMSZb/0jc/LS2FvRQEVdS2TLJCISAdbaSBfB9UJRRwrn/kgeBTmXwNY/dt93Xr1PrWcRGVoCgQBVVVUK6DOw1lJVVUUgcH6dhnVZu78m3wKvfJUp0eXERXlZXXyMD0wbEelSiYgMmJycHEpLS6moqIh0UVypubmZQCBAIBAgJyfnvM6lcO6vSUvgla/i2/ECs3IX6L6ziAw5fr+fMWPGRLoYrlVUVMSMGTNCci5d1u6vpGwYORe2OJe2d5fXU1mv+84iIhJ6CuezMflmKN/KglTnOec1et5ZRETCQOF8NiYtAQwFVa8RG+XVpW0REQkLhfPZSBwOoy/Du+1PXDw6hdV6CYaIiISBwvlsTb4ZKrazeFg1O8vqONbQGukSiYjIBUbhfLYm3ggYFrS/AcAaPe8sIiIhpnA+WwnDIPcKhh/8KzF+j97vLCIiIadwPheTb8ZU7WLJiBp1ChMRkZBTOJ+LiTeC8fCh6DXsOFrHcd13FhGR
EFI4n4v4DMidx9Tq1wDLaj3vLCIiIdSvcDbGLDLG7DTG7DHGPHCK/aOMMSuMMe8ZYzYZYxaHvqguM/lmArX7me4/qJdgiIhISPUZzsYYL/AIcD0wCbjNGDPppMO+ASy11s4APgb8NNQFdZ2JN4LxclfSe+oUJiIiIdWflvNsYI+1ttha2wr8Blhy0jEWSAwuJwGHQ1dEl4pLg7wFLGh/kx1Ha6hu1H1nEREJjf68lSobONhrvRSYc9Ix3wb+Zoz5IhAHXH2qExlj7gXuBcjIyKCoqOgsi+suWb5JTGj+O5PYz6/+vJKZw9zzkq/6+vpBX79upboNL9Vv+KhuwyuU9RuqNLkNeNJa+3+NMZcCTxtjplhrO3sfZK19HHgcoKCgwBYWFobo4yOkcRr2h49yk/8djsZdSWHhyVf7I6eoqIhBX78upboNL9Vv+KhuwyuU9dufy9qHgJG91nOC23r7DLAUwFr7NhAA0kNRQFeLTcXkFbLEv4Z39lZGujQiInKB6E84vwuMM8aMMcZE4XT4evGkYw4AVwEYYybihHNFKAvqWpNvIbOjDF/ZBmqa2iJdGhERuQD0Gc7W2nbgC8ArwHacXtlbjTEPGmNuDB72T8A9xpiNwHPAndZaG65Cu8qExXR6/Cz2vMMbu9V6FhGR89eve87W2mXAspO2fbPX8jbg8tAWbZCISYGxC1myZw33vr6HxVOzMMZEulQiIjKIaYSwEPBMvoUsW0HM4dUU7RwaV/NFRCR8FM6hMPED2IQR/DDwCx5bvpGhckVfRETCQ+EcCtEJmA//kmxbxm3l/82qXWo9i4jIuVM4h8roy+gs/CpLvG+x5eVH1HoWEZFzpnAOId/8f+JI2lzuqvkpG9a9FeniiIjIIKVwDiWPl9RPPkGjiSX9r5+F1oZIl0hERAYhhXOIRSePYO3M75PddpDy394X6eKIiMggpHAOgwXXf5QnvB8ic+/vYONvI10cEREZZBTOYRDwezGFD7C6cwIdf/4yVO6JdJFERGQQUTiHyW1z8/iO/ys0dvrgd3dCW3OkiyQiIoOEwjlMYqK83LzgEr7U/A9Qthn+9vVIF0lERAYJhXMYfWLuKDbGzGFZwkfg3V/AthciXSQRERkEFM5hFBvl4555edxX8UEaMmbAC1+A3csjXSwREXE5hXOYffLS0cTFxvDN6PsheTQ882F47UHoaI900URExKUUzmEWH+20nn+/B7Zc/3uY+SlY9X/h6Zug7mikiyciIi6kcB4An7p0NIkBHz96/SDc+BO46VE4tA4enQf7Vka6eCIi4jIK5wGQEPBz97w8Xt1extK1B2H6bXDP3yEmGZ5aAq8/BJ2dkS6miIi4hMJ5gHx2wVjmjUvngd9v4pWtRyFzItyzAqZ8CFZ8z7kX3VAV6WKKiIgLKJwHSJTPw6O3X8y0nGS++Nx7vL23CqLj4Zafwwcehv2r4LF5UKK3WYmIDHUK5wEUF+3jiTsvYXRqLPc8tZYth2rAGJj1afjMcvD64Ynr4bmPQ9nWSBdXREQiROE8wFLionj6M3NIivFzx6/WUFxR7+wYMR0++wYs/AbsfwN+djk8/2mNyy0iMgQpnCMgKynA05+ZDcAnf7mGIzVNzo7oBFhwP9y3Aa74P7DzL/DIbHjh81B9IIIlFhGRgaRwjpC8jHh+/enZ1DS18alfruF4Q2vPzthUuPpbcN8mmPMPsOl38D8z4eV/1rPRIiJDgMI5gqZkJ/HzT82i5Fgjdz35Lg0tJ40aFp8Bi/4TvvQezLgd1j0BP77ICenyHZEptIiIhJ3COcIuHZvG/7ttBptKq/ns/66jpb3j/QclZcMHfwRfeNd59Gr9r+Gnc+DJD8DWP0JH28AXXEREwkbh7ALXTs7i+x+axqrdlXz+mfXUNJ4mbFPz4Kafwle2w9XfgeoS513RD0+Gv/871Bwa0HKLiEh4KJxd4qOzRvLgkskU7azguh+t5I3dlac/OC4drvgyfGkDfPx3MHw6
rHwIfjQVfvMJ2LsCrEYcExEZrHyRLoD0+NSlucwYmcL/WbqB23+5mjsvy+VfF00gJsp76j/weGH8tc50fD+sfQLWPwU7XmJudDq03w7TPgrDJg/o9xARkfOjlrPLTM1J4qUvXsFdl+fy5Fv7+cBPVrGptLrvP0zJhWu+41zy/tAvaYgbDW/9BH52Gfz0MnjjYagpDXv5RUTk/CmcXSjg9/KtD07mfz8zh8bWDm756Vv8z2u7ae/ox6VqfwCmfpjN074J/7QTrn8IomLh1W/Dw1PgiRtg3ZPQdDzcX0NERM6RwtnFrhiXzl/vm88N04bz38t38eFH32ZfZUP/TxCfAXPuhbtfdR7HWvg1qC+DP98HPxwPz37MeYa6pS58X0JERM6awtnlkmL9/PhjM/jJbTPYV9nA4h+v4mdFe2lsbe/7j3tLzYMF/+I8jnXPCph9LxzdBH+4Gx7Kh99+Erb+CVobw/NFRESk39QhbJD44EUjuCQ3la//cTM/+OsOfvlGMf8wfyy3zx19+g5jp2IMZM90pmu+CwdXw9Y/OMG8/UXwx0HB9TDlFsi/GnzR4ftSIiJySgrnQSQrKcAv77yEdSXH+NGru/n3Zdt5bGUxn12Qx+1zRxPwn0VIA3g8MPpSZ1r0feeFG1v/ANtehC3PQ3SiE9STboKxVzr3s0VEJOwUzoPQxaNTefozc1i7/xgPv7qL773cFdJj+cScUWcf0uA8lpW3wJkW/xCKX4dtf4TtL8Gm30JUAhQscoI6/yrwx4T+i4mICKBwHtRm5abyzN1zWbPvGA8v38V3X9rGY6/v5XOFYxnRbs/9xF4/jLvamT7wI9j3unPZe8dLsPl3EBUP468LBvXVTm9wEREJGYXzBWD2mFSeu3cu7xRX8fDyXXznz9uI8cGtjVu5fe4o8jMTzv3kXr8TwPlXwwcehv2rgven/wxbfg++GKclPeEDTmDHpobui4mIDFEK5wvI3Lw0fvsPl7J2/zF++MIanlldwpNv7WduXiqfnJvLtZOH4feeRwd9r9+59zz2Srjhv52g3vFycHoJjBdGX+YE9YTFkDwqdF9ORGQIUThfgGblpvLZiwJMmXUpS9ce5NnVB/j8s+vJSIjmY5eM5LbZoxiRfJ73jL0+GLvQmRY/BIff6wnqv/6rM2VN6wnqYVOcnuIiItInhfMFLD0+mn8szOcf5o9l5a4Knn6nhP+3Yg+PrNjDlROG8eGLs1k4IZNo3zl0IOut9+NZV/0bVO3tCeqi/4Si/4CkkU7P74LrYfQV4IsKzZcUEbkAKZyHAK/HsHBCJgsnZHLwWCPPrTnA0rWlvLq9jMSAjxumjeCWmdnMGp2CCUXrNm0sXP4lZ6ovh12vwM6/wPqnYc3jTs/v/KugYDGMu0b3qUVETqJwHmJGpsbyL4sm8JVrxvPm3ir+uL6UP713iOfWHCAnJYabZ2Rz04xsxmbEh+YD4zNh5iedqa3JeURr5zLY9VfY9ifnPvWouTB+kdOhLH28Ln+LyJCncB6ifF4PC8ZnsGB8Bg0t7byy9Sh/fO8Qj6zYw0/+voeLcpJYMj2bxVOHk5UUosFH/DHOs9IFi6Cz07lPvXOZ06pe/m/OlJLrBPW4ayH3Co1QJiJDksJZiIv2ccvMHG6ZmUNZbTMvbjjMH947xIMvbePBl7Zx8egUFk8dzvVTss6/I1kXjwdyLnamq/4Nqg/C7ldg19+ct2atftQZSnTsQqdFPe5aSMgKzWeLiLicwllOMCwxwD3z87hnfh57K+pZtukIy7Yc5bsvbeO7L21jxqhkbpg6nEVTsshJCeHgI8kj4ZK7nam1EfatDIb1K85jWgDDpweD+joYMcMJeBGRC1C/wtkYswj4MeAFfmGt/f4pjvko8G3AAhuttR8PYTklAsZmxPPFq8bxxavGUVxRz1+2HGXZ5iN87+XtfO/l7Vw0Mpnrp2RxzaRhobtHDc6IY12Xv62Fsi1OSO/+G6x8CF7/AcRlQP41MP5a
57nrQFLoPl9EJML6DGdjjBd4BLgGKAXeNca8aK3d1uuYccBXgcuttceNMZnhKrBERl5GPJ9fmM/nF+ZTUtXAss1OUH//Lzv4/l92MDYjjmsnO0E9PScZjydEnbqMgaypzjT/n6GhCva+5nQo2/kybHwWPD4Ydalz6XvctZBRoE5lIjKo9aflPBvYY60tBjDG/AZYAmzrdcw9wCPW2uMA1tryUBdU3GN0WhyfKxzL5wrHcqi6iVe3lbF8Wxk/X1nMz4r2kpEQzTWThnHNpGFcNjbt/J+j7i0uDaZ91Jk62qF0TU+ruqtTWdJI5xGtcdfCmPkQFRe6zxcRGQD9Ceds4GCv9VJgzknHjAcwxryJc+n729bav4akhOJq2ckx3HFZLndclktNYxsrdpazfFsZL7x3iGdXHyAuysuCggyumjCMhRMySY0L4eAjXp8zXOjoy+Ca70BNKexeDntehU1LYe2vwBsFoy/vCeu0fLWqRcT1jLVnfnuRMebDwCJr7d3B9U8Cc6y1X+h1zEtAG/BRIAdYCUy11lafdK57gXsBMjIyLl66dGkIv4r0Vl9fT3x8CO8Dn6XWDsuOYx2sL+tgQ0UH1S0WA4xL8TA9w8v0TB/D40xoBj05BdPZRlLNdtKq1pF6bB1xjc6/L5sCmRxPmcGx1OlUJ0+j3X/2dRTpur3QqX7DR3UbXn3V78KFC9dZa2f151z9aTkfAkb2Ws8JbuutFFhtrW0D9hljdgHjgHd7H2StfRx4HKCgoMAWFhb2p4xyDoqKioh0/V4bnHd2WrYcruHV7eW8uq2MpbtqWbqrjdy0WK6emMlVE4cxKzfl/F7KcUrX9CxWH4Ddy4nZ+3di9q1kxJFXwHgge1bPyzyyL3Za431wQ91eyFS/4aO6Da9Q1m9/wvldYJwxZgxOKH8MOLkn9p+A24AnjDHpOJe5i0NSQhn0PB7DtJxkpuUk85VrxnO4uonXdjhB/dTbJfzijX0kRPu4Ylw6CwsyWVCQwbDEEA180iV5FFzyGWfqaIND62DPa7D377Dyv+D170N0EuTNh7xCGFPoDEOqS+AiEgF9hrO1tt0Y8wXgFZz7yb+y1m41xjwIrLXWvhjcd60xZhvQAdxvra0KZ8Fl8BqRHMMn547mk3NH09DSzqrdlby+q5wVOyr4y5ajAEwansjCCRksLMhk+shkfKFsVXv9zpCho+bClV+HxmOw73UnqPeucN5VDZCYHQzqBZC3QIOgiMiA6ddzztbaZcCyk7Z9s9eyBb4SnET6LS7ax6IpWSyakoW1lp1ldazYUcGKneU8+noxj6zYS2LAx/zgUKMLCjLITAhxqzo2FSbf7EzWwrFiKC5yAnvnMtjwjHNcxgQYs4C0xjRomg4xyaEth4hIkEYIE9cwxjAhK5EJWYl8rnAsNU1tvLmnkhU7ynl9VwUvbToCOK3qwgInrGeODvG9amOcy9lpY51L4J2dcHSTE9TFRbD+Kaa2N8HW7zvvqx4z35lGXQrR6mgjIqGhcBbXSorxs3jqcBZPHY61lm1Hanl9VwVFOyt4bGUxPy3aS0K0j8vz01kQDOuQjf3dxeOBEdOd6fL7oL2F9176BTOSamH/KnjnZ/DW/zgDoYyYGQzreZAz2xnpTETkHCicZVAwxjB5RBKTRyTxj4X51Da38daeyu6w/utW5151fmY888alM39cBnPyUomNCvF/4r5oapInQ2Eh8FVnHPCDq52xwPevgjcehlU/BI/fCfRRlwanuXpvtYj0m8JZBqXEgJ9FU4azaIrTqt5VVs+q3RWs3F3Js6sP8MSb+4nyepiVm8L88RnMG5fOxKzE0A0r2iUq1nlz1tiFznpzLRx4B0redOarH3Va1uDcs+4K69GXOiOZqTe4iJyCwlkGPWMMBVkJFGQlcPe8PJrbOnh3/zFW7qpg1e7K4PjfkB4fzeX5aVw+Np3L8tNC+1atLoFE52Uc44NPebc1Oe+tLnnLCestv4d1Tzj7EkbAyNkw
co4zZU0FXwhHUBORQUvhLBecgN/LvHEZzBuXAUBZbTOrdleyclcFb+6p4oUNhwEYnRbLZWPTuTw/jcvGpod2aNEu/pieIUYBOjugfBuUvO2MC35wNWz7k7PPF3DuW3cH9myISw99mUTE9RTOcsEblhjgwxfn8OGLc7ovgb+5p5K39lby542HeW7NAcDpBX55fhqXjk3jktxUEgL+0BfG4+15y9ace51ttUeCQb3GaV2//Qi8+SNnX+pYJ6hHBVvX6QV6j7XIEKBwliGl9yXwT18xhvaOTjYdquGtPZW8uaeKX79Vws9X7cNjYGp2EnPHpnFpnhPWcdFh+t8lcThMWuJM4FwKP7LRCeqDa5w3bm181tkXnQQjL+m5FJ59sR7hErkAKZxlSPN5PcwclcLMUSl84cpxNLd1sL7kOG8XV/FOcRW/emMfj71ejM9jmJaTxNy8NGLrO7ikpT18Ye2P6RnBDHoGRjm4Bg4GA3vFfwAWMM77q0fMhBEzIHsmDJsC/hAP1CIiA0rhLNJLwO/lsvx0Lst37vU2trazruQ4b++t4u3iKh5bWUxHp+Xh9X9jSnYSc8akMjs3lVm5KSTHhqkzV++BUabf5mxrqobStXBordPhbM+rPa1rjw8yJzlBPWKGM2VMVGczkUFE4SxyBrFRvhM6l9W3tPPkn4toThjJmn3HePLN/Ty+0nnHy4SsBGaPSWX2mFQuyU0N/cs7eotJhnFXOxM4revaQ05QH1oPh9fD1j/Cuied/d4oGDYZhgcHVBk+3QlwBbaIKymcRc5CfLSPKek+CgsLAGhu62DjwWrW7DvGmv3HeH5dKU+9XQJATkoMs0ancPHoFC4enUpBVgLeUD9n3cUYSMpxpokfdLZ1XQ4/sgEOb3DmW//Q8yiXxw/DJjlBPXwaZF3krEfFhaeMItJvCmeR8xDwe5mTl8acvDQA2jo62Xq4lrX7j7Gu5Dhv7q3iT8FHt+KjfcwYlRwM6xQuGplMYjh6hHfpfTl8yoecbdbC8X09YX14A2x7Adb/uuuPIC3f6U0+fFqwZ/k0iM8MXzlF5H0UziIh5Pd6mD4ymekjk7l7HlhrKT3exNoSJ6zX7j/Oj1/bjbVOdo7LjGf6yGRmjEph+shkxg8LY+uaG0LNAAAWVElEQVQanA9NzXOmKbc426yFmlLnBR9HNzvTobVOK7tL/DCno1nWFBg21ZmnjQOvfkJEwkH/Z4mEkTGGkamxjEyN5eYZOQDUNrex4UA17x2oZsPB4yzfVsbStaUAxEV5mZaTzPRRTsBflJNMVlKYe14bA8kjnWnCDT3bm47D0S09gV222XnRR0ers98bDZkTnLAeNtmZ0sc7773WsKQi50XhLDLAEgN+5o/PYP54p5OZtZaSqkY2HKzmvQPH2XCwmp+vLKa90wKQmRDNtJxkpuUkBafk8IxmdrKYFOcNW2Pm9WzraIPKXU5olwWn3X+DDf/bc0x0IqSPc4K695Q6BrxhvIwvcgFROItEmDGG3PQ4ctPjuGlGNuB0NNt6uJbNpdVsKq1hY2k1r+0owzp5TU5KDBflJDM1J4kpI5KYPCKRlIEIbK+/p5XMrT3b68udYUkrdzvhXbkLil+Hjc/1HOPxQcqY4H3w/F7zfEgYrta2SC8KZxEXCvi93R3HutQ1t7HlUC2begX2y5uPdO/PTo5h8ohEJo9IYkp2IlOyk8hMiMYMROjFZzpTXuGJ21vqgoG9Gyp3QtUeqNrrBHd7U89x/jhIy4O0fHLr/ZBa4bS+0/I1ApoMSQpnkUEiIeDn0rHO2N9djje0su1ILVsO1bD1cC1bDtewfHtPCzs9PppJIxKZmJXAxOGJTByeSF5GHH7vAI3PHZ3gDIaSPfPE7Z2dUHc4GNZ7oDI4P/weo48fgJKlPccmZp94mTwt3+nQlpTjjFUucgFSOIsMYilxUVyen87l+T1vr2poaWd7MLC3HK5l+5FannizitaOTgCivB7yM+ODYe2EdkFWAunx
0QNXcI+n57nsvMITdq38+3IWTMnpuTxeuceZb3gOWut6ncMPKbk9vc9Tx/QsJ43UACsyqCmcRS4wcdE+ZuWmMis3tXtbW0cnxRUNbD9Sy/ajtWw/UsfK3RX8fn1p9zFpcVGMH5bA+GHxjM9KoGBYAuOGJZAUM7CduKzHD5kTnemEHRbqjkLVbji2zxlg5Vixs7z/DWhr6DnWeJz3ZSePOvWUlKPOaeJqCmeRIcDv9XS/jesmsru3V9a3sONIHTvL6thd5syfX1dKQ2tH9zFZiQHGZyWQnxFPfmbPNCA9xnszxnmDV+JwGDP/xH3WOp3Sju/rCeyag3C8xAnuusNgO3udy+N0Qksa2dOCTx4ZXA9uCyQO7PcT6UXhLDKEpcdHc8W4aK4Y13NZ3FrL4Zpmdh11wnrX0Tp2ldfx3L5jNLX1hHZqXBT5GfGM7RXYY9LiyE6JCe9AKqdiDCQMc6aut3n11t7qjD1efSA4lTgDr9SUOgOubHsBOttO/JvopGBwZzvzxOyeIE/MdiZdOpcwUTiLyAmMMWQnx5CdHMPCCT3DdnZ2Wg7XNLGnvP6E6S9bjlDd2BNsfq9hVGosY9LjyE1zHhHLCz4qlpUYwDPQwQ1OiKaOcaZT6exwWt41pU6Lu+YgVB90Ar2m1HkDWNOxk/7IOD3UE7IgPsuZd0291+MyNZKanDX9FyMi/eLxGHJSYslJiaWwoCe0rbVUNbRSXNHA/soGiiud+f6qBlbtrqSlvedycrTPw+i0WEanxZEbnI9OiyU3LY4RyRFocXfxeHsumY+85NTHtDZC7WEnuLtCu6YU6sucy+aH34OGCpz3bPdmIC49GNjDTjMPTr4B7JQnrqZwFpHzYowhPT6a9PhoZo9JPWFfZ6flaG1zd2iXVDVQUtVISVUjK3dVnBDcfq9hZEossTSz/Pjm4D8EYhiZ6szT4qIG5pnt04mKhfR8ZzqdjjanBV5/1Om81jXVH4W6MifIy7Y6x9iO9/99TKpzL7x3K7xrPSbVGbUtJhkCyeCP0cAtFzCFs4iEjcdjGJEcw4jkGC7r9bgXOMFdXtfC/iontPdXNVJS1cD2kiaWbT7C8cYT7wHH+L3kpMSQkxJDdopzzuzk4HpyLBkJ0ZFreXfx+oP3qLPPfFxnBzRWBYO7rFeQH+kJ84odzvKpQhycsc1jkp3ADgTnXeHdvXziuq+tnu63roirKZxFJCI8HkNWUoCspABz83oGVikqKqKwsJD6lnYOHW/i4LFGSo83cvB4E6XHGyk93sR7B6tPuM8NTss7KylAdvAfAyOSYhieHOieD0+KITHgi2zru4vH2zOq2pl0dkJjpRPSTcecl5E0VUNzdc9y03FnvbbUaZU3HT/xefBergB4J9p5y1jX58dn9qzHZTqX4HuHuy61R4TCWURcKT7a1/3416nUt7RzuLqJQ8ebKK1u6l4+VN3E23urKKttpvOk279xUV6GJ8cwPCnA8KQAwxJ7T9EMSwyQFheFb6BGUOuLx9O/ED9ZR1tPcHeFd9Nx9mxaTX5WQvDSe5nT6a107WnulQf5Y9/fMg8kOVN0ovPI2fvmwf2BRIX7OVI4i8igFB/tCw6acurwbu/opLyuhSM1TRyubj5hfqSmmZ1H66isb3lfgHuM84jZsMQAmQnRZHbPoxmWEHDmbgvxk3n9EJ/hTL2UHs8iv7Dw/cd3tDuX2evLnHlXqHdPvYK+ai8010BLLbTW910WX+CkMO8V3N3LSc4/AE5YD27zh/mVqS6lcBaRC5LP6+m+333x6FMf097RSVVDK2W1zZTVtlBW20x5cPlobTOHa5rZWFpNVUNr93jlXYyBtLho0uOjgh3iokgLdoxLi48iIzhPj48mNS6KgN/F44B7fT3PiZ+Nzg4npJtrnXlLnbPcFd7N1T3r3VO185x5U7WzfvLz5e8rW7QT1N0t9uQT16MTnTHcA0nOvHtK7JkPwkfZBl+JRURCxOf1dF/WPpO2jk4q61sor22h
vC4Y4nUtVNQ1U1HXSlVDCyUHGqisaz1hoJbe4qK8pAWDOi0uirT4KFKD4Z4aF0VKXBSpsc5yalwUsVFed9wfPxOPt+dy97mwFtqbewV3bU+Adwd7MMS7wryxEo7t7Vk/XYe53vyxJwZ2oCu4ewV6VKzzdrSoWIiK61n2B9ej4iAq3jl2AF64onAWEemD3+theFIMw5Ni+jy2sbWdyrpWKhtaqKxroaqhlWMNrVTWt3AsuHy4ppkth2s41tBKW8ep7/VG+TykxgZDO85PcmwUKbF+kmOiSI511pNj/KTE+Unq2hbjd++l9lMxxnkkzB/jPC52tqyFtsaeFntLXbAF37Vc16tVX3vicXVHe9ZP04HutHwxzqtMo+KD8wSIjmdSdQPUPO9s7x3oXctn8xFnVyIRETmT2Cgfo9J8jEqL7fNYay21ze0cb2jlWGMrx+qdee/1442tVDW0cqS6luqmNqobW993n7y3xICPlLgoUoJhnhIM+JRYP+WlbdRsOERiwE9CwEdCwE9ijDOPGwwt9ZMZ0xN85xLuXTo7nfeLtzY6L1BpbXRCv7WhZ97a4Nxjb6l3wrylvtd6PdSXE19fAXv2BY+tO3E897OkcBYRiRBjDEkxfpJi/OTSv5ZVZ6elrqWdmsY2jje2dgd2dXD9eEMrx4PLFfUt7Cqrp7qxtftlJk9t23DK83oM3WGdGPA7U9dyTM96QnewO/vio53l+ICPaJ+L76uficfTq3Wb0efhp7Mm+BggELxk39IT6q318J0p/T6XwllEZBDxeHoCvT+t8y4t7R385dXXmTJzNnXNbdQ2t1PX3EZdczu1TcF5r/Xa5jb2VzZS29xGbVPbCW8qO50on4fEgC8Y2MEwj+5ppScEfCTGBNejfcQFp/hoH3HR3uDch38wXZo/HWOcnub+AMSl9X38SRTOIiJDQLTPS3LAQ35m/Dn9fXtHJ/Ut7dQEg7yuuZ36lp6Ar2/pCfe6XsFfUVdPbZOz3p+Ad8rq6Q7q+K4p4OvelhDwERflbIuN8gYnZzmma93vIybKS1y0lxj/4Ltkr3AWEZE++bwepxNa7Lm/JrOj01Lfq4Xe0OqEekNwqm/poP4U2+ua2ymva6a4wtle39JOc1v/7+caA7F+b3fYx0Z7iYvqabnHBcM9LvqkeZSX2OD+mCgv0T4v0T4P0X4P0T4vAb+HKK8nLMGvcBYRkQHh9RiSYv0kxfrP+1xdLfnG1g4aWztoau2gsbWdxrau5Q6aWp39Da0d3UHftVzf0k5ZbTONrR3Ut7TT1NpBQ2v7+55n749on4donwcfHaSsKyIu2mnFx0X5usM9Nurs4lbhLCIig05PSz5057TW0tzWSUNrO40tTlg3tjot+ua2DlraO2kJzrvX2ztpae+gpa2T4gOlJKQm0hj8R0BZXTONlc55Glr6d0m/+/uF7muJiIgMXsYYYoKXsDmHW/NFRRUUFs48/fkf7P+5LoAucSIiIhcWhbOIiIjLKJxFRERcRuEsIiLiMv0KZ2PMImPMTmPMHmPMA2c47kPGGGuMmRW6IoqIiAwtfYazMcYLPAJcD0wCbjPGTDrFcQnAfcDqUBdSRERkKOlPy3k2sMdaW2ytbQV+Ayw5xXHfBX4ANIewfCIiIkNOf8I5GzjYa700uK2bMWYmMNJa+3IIyyYiIjIknfcgJMYYD/DfwJ39OPZe4F6AjIwMioqKzvfj5TTq6+tVv2Giug0v1W/4qG7DK5T1259wPgSM7LWeE9zWJQGYAhQFB//OAl40xtxorV3b+0TW2seBxwEKCgps93svJeSKer9XVEJKdRteqt/wUd2GVyjrtz+Xtd8FxhljxhhjooCPAS927bTW1lhr0621udbaXOAd4H3BLCIiIv3TZzhba9uBLwCvANuBpdbarcaYB40xN4a7gCIiIkNNv+45W2uXActO2vbN0xxbeP7FEhERGbo0QpiIiIjLKJxFRERcRuEsIiLiMgpnERERl1E4i4iIuIzC
WURExGUUziIiIi6jcBYREXEZhbOIiIjLKJxFRERcRuEsIiLiMgpnERERl1E4i4iIuIzCWURExGUUziIiIi6jcBYREXEZhbOIiIjLKJxFRERcRuEsIiLiMgpnERERl1E4i4iIuIzCWURExGUUziIiIi6jcBYREXEZhbOIiIjLKJxFRERcRuEsIiLiMgpnERERl1E4i4iIuIzCWURExGUUziIiIi6jcBYREXEZhbOIiIjLKJxFRERcRuEsIiLiMgpnERERl1E4i4iIuIzCWURExGUUziIiIi6jcBYREXEZhbOIiIjLKJxFRERcRuEsIiLiMgpnERERl1E4i4iIuIzCWURExGX6Fc7GmEXGmJ3GmD3GmAdOsf8rxphtxphNxpjXjDGjQ19UERGRoaHPcDbGeIFHgOuBScBtxphJJx32HjDLWjsNeB74r1AXVEREZKjoT8t5NrDHWltsrW0FfgMs6X2AtXaFtbYxuPoOkBPaYoqIiAwdvn4ckw0c7LVeCsw5w/GfAf5yqh3GmHuBewEyMjIoKirqXynlrNXX16t+w0R1G16q3/BR3YZXKOu3P+Hcb8aY24FZwIJT7bfWPg48DlBQUGALCwtD+fHSS1FREarf8FDdhpfqN3xUt+EVyvrtTzgfAkb2Ws8JbjuBMeZq4OvAAmttS0hKJyIiMgT1557zu8A4Y8wYY0wU8DHgxd4HGGNmAI8BN1pry0NfTBERkaGjz3C21rYDXwBeAbYDS621W40xDxpjbgwe9hAQD/zOGLPBGPPiaU4nIiIifejXPWdr7TJg2Unbvtlr+eoQl0tERGTI0ghhIiIiLqNwFhERcRmFs4iIiMsonEVERFxG4SwiIuIyCmcRERGXUTiLiIi4jMJZRETEZRTOIiIiLqNwFhERcRmFs4iIiMsonEVERFxG4SwiIuIyCmcRERGXUTiLiIi4jMJZRETEZRTOIiIiLqNwFhERcRmFs4iIiMsonEVERFxG4SwiIuIyCmcRERGXUTiLiIi4jMJZRETEZRTOIiIiLqNwFhERcRmFs4iIiMsonEVERFxG4SwiIuIyCmcRERGXUTiLiIi4jMJZRETEZRTOIiIiLqNwFhERcRmFs4iIiMsonEVERFxG4SwiIuIyCmcRERGXUTiLiIi4jMJZRETEZRTOIiIiLqNwFhERcRmFs4iIiMsonEVERFxG4SwiIuIy/QpnY8wiY8xOY8weY8wDp9gfbYz5bXD/amNMbqgLKiIiMlT0Gc7GGC/wCHA9MAm4zRgz6aTDPgMct9bmAw8DPwh1QUVERIaK/rScZwN7rLXF1tpW4DfAkpOOWQL8Orj8PHCVMcaErpgiIiJDR3/CORs42Gu9NLjtlMdYa9uBGiAtFAUUEREZanwD+WHGmHuBe4OrLcaYLQP5+UNMOlAZ6UJcoFS34aX6DR/VbXj1Vb+j+3ui/oTzIWBkr/Wc4LZTHVNqjPEBSUDVySey1j4OPA5gjFlrrZ3V34LK2VH9ho/qNrxUv+Gjug2vUNZvfy5rvwuMM8aMMcZEAR8DXjzpmBeBO4LLHwb+bq21oSigiIjIUNNny9la226M+QLwCuAFfmWt3WqMeRBYa619Efgl8LQxZg9wDCfARURE5Bz0656ztXYZsOykbd/stdwMfOQsP/vxszxezo7qN3xUt+Gl+g0f1W14hax+ja4+i4iIuIuG7xQREXGZiIRzX8OBSv8ZY35ljCnv/ViaMSbVGLPcGLM7OE+JZBkHM2PMSGPMCmPMNmPMVmPMfcHtquPzZIwJGGPWGGM2Buv2O8HtY4LDAO8JDgscFemyDlbGGK8x5j1jzEvBddVtiBhj9htjNhtjNhhj1ga3hex3YcDDuZ/DgUr/PQksOmnbA8Br1tpxwGvBdTk37cA/WWsnAXOBzwf/e1Udn78W4Epr7UXAdGCRMWYuzvC/DweHAz6OMzywnJv7gO291lW3obXQWju91+NTIftdiETLuT/DgUo/WWtX4vSQ7633cKq/
Bm4a0EJdQKy1R6y164PLdTg/dNmojs+bddQHV/3ByQJX4gwDDKrbc2aMyQFuAH4RXDeobsMtZL8LkQjn/gwHKudnmLX2SHD5KDAskoW5UATftjYDWI3qOCSCl103AOXAcmAvUB0cBhj0+3A+fgT8C9AZXE9DdRtKFvibMWZdcPRLCOHvwoAO3ykDz1prjTHqkn+ejDHxwO+BL1tra3u/10V1fO6stR3AdGNMMvBHYEKEi3RBMMZ8ACi31q4zxhRGujwXqCustYeMMZnAcmPMjt47z/d3IRIt5/4MByrnp8wYMxwgOC+PcHkGNWOMHyeYn7HW/iG4WXUcQtbaamAFcCmQHBwGGPT7cK4uB240xuzHuXV4JfBjVLchY609FJyX4/zDcjYh/F2IRDj3ZzhQOT+9h1O9A3ghgmUZ1IL36X4JbLfW/nevXarj82SMyQi2mDHGxADX4NzTX4EzDDCobs+Jtfar1toca20uzm/s3621n0B1GxLGmDhjTELXMnAtsIUQ/i5EZBASY8xinPshXcOB/vuAF+ICYYx5DijEeRtKGfAt4E/AUmAUUAJ81Fp7cqcx6QdjzBXAKmAzPffuvoZz31l1fB6MMdNwOs14cRoKS621Dxpj8nBae6nAe8Dt1tqWyJV0cAte1v5na+0HVLehEazHPwZXfcCz1tp/N8akEaLfBY0QJiIi4jIaIUxERMRlFM4iIiIuo3AWERFxGYWziIiIyyicRUREXEbhLCIi4jIKZxEREZdROIuIiLjM/werOzXauusuhAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 576x360 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "def plot_learning_curves(history):\n",
    "    pd.DataFrame(history.history).plot(figsize=(8, 5))\n",
    "    plt.grid(True)\n",
    "    plt.gca().set_ylim(0, 1)\n",
    "    plt.show()\n",
    "plot_learning_curves(history)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "model.evaluate(x_test_scaled, y_test)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.9"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
