{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
    "source": [
     "# Core imports for the Boston-housing regression example (Keras 1-D\n",
     "# regression with MinMax-scaled features/targets).\n",
     "# NOTE(review): `sequence` and `multi_gpu_model` are not used in the\n",
     "# cells visible here, and `multi_gpu_model` was removed from modern\n",
     "# Keras/TF 2.x -- confirm they are needed before keeping them.\n",
     "from keras.preprocessing import sequence\n",
     "from keras.models import Sequential\n",
     "from keras.datasets import boston_housing\n",
     "from keras.layers import Dense, Dropout\n",
     "from keras.utils import multi_gpu_model\n",
     "from keras import regularizers  # regularization\n",
     "import matplotlib.pyplot as plt\n",
     "import numpy as np\n",
     "from sklearn.preprocessing import MinMaxScaler\n",
     "import pandas as pd"
    ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 加载数据"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "        0     1      2    3      4      5      6       7     8      9     10  \\\n",
      "0  1.23247   0.0   8.14  0.0  0.538  6.142   91.7  3.9769   4.0  307.0  21.0   \n",
      "1  0.02177  82.5   2.03  0.0  0.415  7.610   15.7  6.2700   2.0  348.0  14.7   \n",
      "2  4.89822   0.0  18.10  0.0  0.631  4.970  100.0  1.3325  24.0  666.0  20.2   \n",
      "3  0.03961   0.0   5.19  0.0  0.515  6.037   34.5  5.9853   5.0  224.0  20.2   \n",
      "4  3.69311   0.0  18.10  0.0  0.713  6.376   88.4  2.5671  24.0  666.0  20.2   \n",
      "\n",
      "       11     12  \n",
      "0  396.90  18.72  \n",
      "1  395.38   3.11  \n",
      "2  375.52   3.26  \n",
      "3  396.90   8.01  \n",
      "4  391.43  14.65  \n",
      "-------------------\n",
      "      0\n",
      "0  15.2\n",
      "1  42.3\n",
      "2  50.0\n",
      "3  21.1\n",
      "4  17.7\n"
     ]
    }
   ],
    "source": [
     "# Load the Boston housing dataset (pre-split into train/validation by\n",
     "# Keras) and wrap the arrays in DataFrames so they are easy to inspect\n",
     "# and to feed to the sklearn scaler in the next cell.\n",
     "(x_train, y_train), (x_valid, y_valid) = boston_housing.load_data()  # load data\n",
     "\n",
     "# Convert to DataFrames for convenient inspection/processing\n",
     "x_train_pd = pd.DataFrame(x_train)\n",
     "y_train_pd = pd.DataFrame(y_train)\n",
     "x_valid_pd = pd.DataFrame(x_valid)\n",
     "y_valid_pd = pd.DataFrame(y_valid)\n",
     "print(x_train_pd.head(5))\n",
     "print('-------------------')\n",
     "print(y_train_pd.head(5))"
    ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 数据归一化"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[1.37816304e-02 0.00000000e+00 2.81524927e-01 ... 8.93617021e-01\n",
      "  1.00000000e+00 4.68818985e-01]\n",
      " [1.73654275e-04 8.25000000e-01 5.75513196e-02 ... 2.23404255e-01\n",
      "  9.96167230e-01 3.80794702e-02]\n",
      " [5.49837765e-02 0.00000000e+00 6.46627566e-01 ... 8.08510638e-01\n",
      "  9.46089061e-01 4.22185430e-02]\n",
      " ...\n",
      " [3.18534767e-04 3.50000000e-01 2.05278592e-01 ... 4.57446809e-01\n",
      "  9.12627969e-01 1.68322296e-01]\n",
      " [2.40852297e-02 0.00000000e+00 7.00879765e-01 ... 2.23404255e-01\n",
      "  6.59715568e-01 3.87969095e-01]\n",
      " [9.07048543e-05 6.00000000e-01 9.05425220e-02 ... 3.19148936e-01\n",
      "  9.49064501e-01 7.31236203e-02]]\n",
      "[[0.72190697 0.         0.63650075 ... 0.87804878 0.00698455 0.90222813]\n",
      " [0.00440139 0.         0.33144796 ... 0.58536585 0.99476158 0.47522448]\n",
      " [0.00167219 0.         0.14969834 ... 0.87804878 1.         0.26005986]\n",
      " ...\n",
      " [0.07273042 0.         0.69230769 ... 0.20731707 0.98041639 0.        ]\n",
      " [0.01378101 0.         0.18778281 ... 0.53658537 0.98603089 0.25906219]\n",
      " [0.11628215 0.         0.69230769 ... 0.20731707 0.57893889 0.26238776]]\n",
      "[[0.03603604]\n",
      " [0.2972973 ]\n",
      " [0.3018018 ]\n",
      " [0.48198198]\n",
      " [0.37387387]\n",
      " [0.42567568]\n",
      " [0.57657658]\n",
      " [0.38963964]\n",
      " [0.33558559]\n",
      " [0.3963964 ]\n",
      " [0.29279279]\n",
      " [0.20045045]\n",
      " [0.27477477]\n",
      " [1.        ]\n",
      " [0.34234234]\n",
      " [0.42117117]\n",
      " [0.41891892]\n",
      " [0.31981982]\n",
      " [0.30405405]\n",
      " [0.38513514]\n",
      " [0.14414414]\n",
      " [0.1036036 ]\n",
      " [0.32432432]\n",
      " [0.29054054]\n",
      " [0.34459459]\n",
      " [0.39189189]\n",
      " [0.49324324]\n",
      " [0.5518018 ]\n",
      " [0.08783784]\n",
      " [0.36936937]\n",
      " [0.35135135]\n",
      " [0.19144144]\n",
      " [0.61936937]\n",
      " [0.4009009 ]\n",
      " [0.32657658]\n",
      " [0.04054054]\n",
      " [0.22072072]\n",
      " [0.40990991]\n",
      " [0.32657658]\n",
      " [0.42567568]\n",
      " [0.61711712]\n",
      " [0.51351351]\n",
      " [0.19144144]\n",
      " [0.92567568]\n",
      " [0.60585586]\n",
      " [0.54054054]\n",
      " [0.51351351]\n",
      " [0.31981982]\n",
      " [0.32882883]\n",
      " [0.43693694]\n",
      " [0.67117117]\n",
      " [0.33108108]\n",
      " [0.09234234]\n",
      " [0.20045045]\n",
      " [0.65990991]\n",
      " [0.47297297]\n",
      " [0.03603604]\n",
      " [1.        ]\n",
      " [0.6036036 ]\n",
      " [0.36036036]\n",
      " [0.54504505]\n",
      " [0.16891892]\n",
      " [0.49324324]\n",
      " [0.35135135]\n",
      " [0.39414414]\n",
      " [0.36711712]\n",
      " [0.16666667]\n",
      " [0.3963964 ]\n",
      " [0.05630631]\n",
      " [0.        ]\n",
      " [0.36261261]\n",
      " [0.54054054]\n",
      " [0.31531532]\n",
      " [0.03153153]\n",
      " [0.46846847]\n",
      " [0.29954955]\n",
      " [0.34459459]\n",
      " [0.50675676]\n",
      " [0.67117117]\n",
      " [0.1036036 ]\n",
      " [0.42117117]\n",
      " [0.84459459]\n",
      " [0.27027027]\n",
      " [0.22072072]\n",
      " [0.23873874]\n",
      " [0.48423423]\n",
      " [0.35585586]\n",
      " [0.35810811]\n",
      " [0.37837838]\n",
      " [0.43693694]\n",
      " [0.24774775]\n",
      " [0.29279279]\n",
      " [0.36936937]\n",
      " [0.83783784]\n",
      " [0.66441441]\n",
      " [0.35810811]\n",
      " [0.68468468]\n",
      " [0.36711712]\n",
      " [0.41666667]\n",
      " [1.        ]\n",
      " [0.47522523]\n",
      " [0.43693694]]\n",
      "[[0.22666667]\n",
      " [0.82888889]\n",
      " [1.        ]\n",
      " [0.35777778]\n",
      " [0.28222222]\n",
      " [0.3       ]\n",
      " [0.14      ]\n",
      " [0.23555556]\n",
      " [0.23555556]\n",
      " [0.20888889]\n",
      " [0.15777778]\n",
      " [0.28666667]\n",
      " [0.40222222]\n",
      " [0.33111111]\n",
      " [0.23777778]\n",
      " [0.08444444]\n",
      " [1.        ]\n",
      " [0.38888889]\n",
      " [0.42444444]\n",
      " [0.5       ]\n",
      " [0.13111111]\n",
      " [0.57333333]\n",
      " [0.62      ]\n",
      " [0.42222222]\n",
      " [0.3       ]\n",
      " [0.18444444]\n",
      " [0.39777778]\n",
      " [0.66      ]\n",
      " [0.25777778]\n",
      " [0.27777778]\n",
      " [0.38444444]\n",
      " [0.24666667]\n",
      " [0.22      ]\n",
      " [0.40222222]\n",
      " [0.66444444]\n",
      " [0.44444444]\n",
      " [0.19777778]\n",
      " [0.18      ]\n",
      " [0.34222222]\n",
      " [0.33333333]\n",
      " [0.22666667]\n",
      " [0.43777778]\n",
      " [0.38222222]\n",
      " [0.26      ]\n",
      " [0.17111111]\n",
      " [0.23555556]\n",
      " [0.29777778]\n",
      " [0.35555556]\n",
      " [0.55777778]\n",
      " [0.22444444]\n",
      " [0.30444444]\n",
      " [0.10222222]\n",
      " [0.58888889]\n",
      " [0.44      ]\n",
      " [0.31333333]\n",
      " [0.37777778]\n",
      " [0.21111111]\n",
      " [0.13333333]\n",
      " [0.6       ]\n",
      " [0.54222222]\n",
      " [0.34      ]\n",
      " [0.43111111]\n",
      " [0.21333333]\n",
      " [0.32222222]\n",
      " [0.20222222]\n",
      " [0.20666667]\n",
      " [0.23555556]\n",
      " [0.12222222]\n",
      " [0.02888889]\n",
      " [0.31777778]\n",
      " [0.31777778]\n",
      " [0.18666667]\n",
      " [0.69777778]\n",
      " [0.28444444]\n",
      " [0.18888889]\n",
      " [0.25555556]\n",
      " [0.07333333]\n",
      " [0.20666667]\n",
      " [0.24444444]\n",
      " [0.18666667]\n",
      " [0.52444444]\n",
      " [0.85555556]\n",
      " [0.33777778]\n",
      " [0.37777778]\n",
      " [0.4       ]\n",
      " [0.34888889]\n",
      " [0.16666667]\n",
      " [0.96666667]\n",
      " [0.21333333]\n",
      " [0.18666667]\n",
      " [0.41555556]\n",
      " [1.        ]\n",
      " [0.37111111]\n",
      " [0.77333333]\n",
      " [0.74888889]\n",
      " [0.38222222]\n",
      " [0.66444444]\n",
      " [0.38888889]\n",
      " [0.58      ]\n",
      " [0.52666667]\n",
      " [0.91111111]\n",
      " [0.81555556]\n",
      " [0.35555556]\n",
      " [0.48      ]\n",
      " [0.22222222]\n",
      " [0.43111111]\n",
      " [0.18444444]\n",
      " [0.36      ]\n",
      " [0.14888889]\n",
      " [0.37111111]\n",
      " [0.32      ]\n",
      " [1.        ]\n",
      " [0.39555556]\n",
      " [0.32666667]\n",
      " [0.43777778]\n",
      " [0.69333333]\n",
      " [0.20444444]\n",
      " [0.30888889]\n",
      " [0.29555556]\n",
      " [0.34666667]\n",
      " [0.43555556]\n",
      " [0.29333333]\n",
      " [0.08222222]\n",
      " [0.86666667]\n",
      " [0.12      ]\n",
      " [0.18222222]\n",
      " [0.36      ]\n",
      " [0.71111111]\n",
      " [0.57111111]\n",
      " [0.39777778]\n",
      " [0.33333333]\n",
      " [0.31777778]\n",
      " [0.59333333]\n",
      " [0.6       ]\n",
      " [0.40222222]\n",
      " [0.30666667]\n",
      " [0.13111111]\n",
      " [1.        ]\n",
      " [0.32444444]\n",
      " [0.        ]\n",
      " [0.20888889]\n",
      " [0.32888889]\n",
      " [0.19555556]\n",
      " [0.32444444]\n",
      " [0.42      ]\n",
      " [0.43333333]\n",
      " [0.44444444]\n",
      " [0.33111111]\n",
      " [0.27111111]\n",
      " [0.43555556]\n",
      " [0.18888889]\n",
      " [0.48      ]\n",
      " [0.36444444]\n",
      " [0.15333333]\n",
      " [0.39111111]\n",
      " [0.32444444]\n",
      " [0.07777778]\n",
      " [0.41555556]\n",
      " [0.40222222]\n",
      " [0.38666667]\n",
      " [0.34444444]\n",
      " [0.41333333]\n",
      " [0.29777778]\n",
      " [0.67111111]\n",
      " [0.40222222]\n",
      " [0.50888889]\n",
      " [0.34666667]\n",
      " [0.41555556]\n",
      " [0.51111111]\n",
      " [0.19111111]\n",
      " [0.49111111]\n",
      " [0.41333333]\n",
      " [0.34666667]\n",
      " [0.29333333]\n",
      " [0.37111111]\n",
      " [0.26888889]\n",
      " [0.07555556]\n",
      " [0.45111111]\n",
      " [0.19555556]\n",
      " [0.38222222]\n",
      " [0.29777778]\n",
      " [0.34888889]\n",
      " [0.59111111]\n",
      " [0.56666667]\n",
      " [0.34      ]\n",
      " [0.08444444]\n",
      " [0.31555556]\n",
      " [0.32      ]\n",
      " [0.40222222]\n",
      " [0.4       ]\n",
      " [0.21777778]\n",
      " [0.97333333]\n",
      " [0.39111111]\n",
      " [0.63111111]\n",
      " [0.35777778]\n",
      " [0.19111111]\n",
      " [0.60444444]\n",
      " [0.18      ]\n",
      " [0.40888889]\n",
      " [0.30888889]\n",
      " [0.42      ]\n",
      " [0.15111111]\n",
      " [0.40666667]\n",
      " [0.39555556]\n",
      " [0.32444444]\n",
      " [0.26      ]\n",
      " [0.18666667]\n",
      " [0.38222222]\n",
      " [0.34222222]\n",
      " [0.37333333]\n",
      " [0.47555556]\n",
      " [0.22      ]\n",
      " [0.42444444]\n",
      " [0.41777778]\n",
      " [0.16222222]\n",
      " [0.53555556]\n",
      " [0.35555556]\n",
      " [0.32222222]\n",
      " [0.40666667]\n",
      " [0.41777778]\n",
      " [0.28444444]\n",
      " [0.14444444]\n",
      " [0.37111111]\n",
      " [0.33111111]\n",
      " [0.44444444]\n",
      " [0.63111111]\n",
      " [0.52222222]\n",
      " [0.36444444]\n",
      " [0.42888889]\n",
      " [0.5       ]\n",
      " [0.62444444]\n",
      " [0.24888889]\n",
      " [0.40666667]\n",
      " [0.96222222]\n",
      " [0.39777778]\n",
      " [0.39555556]\n",
      " [0.18      ]\n",
      " [0.17111111]\n",
      " [0.39111111]\n",
      " [0.22222222]\n",
      " [0.22888889]\n",
      " [0.12222222]\n",
      " [0.42222222]\n",
      " [0.3       ]\n",
      " [0.37111111]\n",
      " [0.32222222]\n",
      " [0.62666667]\n",
      " [0.40444444]\n",
      " [0.        ]\n",
      " [0.31333333]\n",
      " [0.17111111]\n",
      " [0.38444444]\n",
      " [0.11555556]\n",
      " [0.19777778]\n",
      " [0.25111111]\n",
      " [0.26666667]\n",
      " [0.33555556]\n",
      " [0.55333333]\n",
      " [0.27111111]\n",
      " [0.71777778]\n",
      " [0.89777778]\n",
      " [0.28444444]\n",
      " [0.40444444]\n",
      " [0.53333333]\n",
      " [0.37777778]\n",
      " [0.28888889]\n",
      " [0.27555556]\n",
      " [0.65777778]\n",
      " [0.33555556]\n",
      " [0.44444444]\n",
      " [0.23555556]\n",
      " [0.44      ]\n",
      " [0.51555556]\n",
      " [0.36      ]\n",
      " [0.36444444]\n",
      " [0.41777778]\n",
      " [0.57777778]\n",
      " [0.47111111]\n",
      " [0.27555556]\n",
      " [0.73111111]\n",
      " [0.27777778]\n",
      " [0.33333333]\n",
      " [0.07333333]\n",
      " [0.42      ]\n",
      " [0.07555556]\n",
      " [0.19555556]\n",
      " [0.04888889]\n",
      " [0.14888889]\n",
      " [0.26888889]\n",
      " [0.36888889]\n",
      " [1.        ]\n",
      " [0.24666667]\n",
      " [0.34222222]\n",
      " [0.34666667]\n",
      " [0.36444444]\n",
      " [0.34666667]\n",
      " [0.7       ]\n",
      " [0.07777778]\n",
      " [0.44      ]\n",
      " [0.12888889]\n",
      " [0.37555556]\n",
      " [0.27333333]\n",
      " [0.30888889]\n",
      " [0.69333333]\n",
      " [0.22      ]\n",
      " [0.29333333]\n",
      " [0.62888889]\n",
      " [0.37333333]\n",
      " [0.32666667]\n",
      " [0.59111111]\n",
      " [0.44      ]\n",
      " [0.32      ]\n",
      " [0.39555556]\n",
      " [0.05555556]\n",
      " [0.88444444]\n",
      " [0.26222222]\n",
      " [0.30444444]\n",
      " [1.        ]\n",
      " [1.        ]\n",
      " [0.32222222]\n",
      " [0.33555556]\n",
      " [1.        ]\n",
      " [0.27111111]\n",
      " [0.35111111]\n",
      " [0.31777778]\n",
      " [0.80666667]\n",
      " [0.34222222]\n",
      " [0.34444444]\n",
      " [0.19555556]\n",
      " [0.25555556]\n",
      " [0.42      ]\n",
      " [0.34666667]\n",
      " [0.58888889]\n",
      " [0.40666667]\n",
      " [0.26222222]\n",
      " [0.2       ]\n",
      " [0.64      ]\n",
      " [0.69111111]\n",
      " [0.17333333]\n",
      " [0.29555556]\n",
      " [0.30444444]\n",
      " [0.31333333]\n",
      " [0.53333333]\n",
      " [0.55777778]\n",
      " [1.        ]\n",
      " [1.        ]\n",
      " [0.37777778]\n",
      " [0.15333333]\n",
      " [0.72444444]\n",
      " [1.        ]\n",
      " [0.39333333]\n",
      " [0.35111111]\n",
      " [0.41111111]\n",
      " [0.50888889]\n",
      " [1.        ]\n",
      " [0.31777778]\n",
      " [0.42      ]\n",
      " [0.39111111]\n",
      " [0.22666667]\n",
      " [0.37111111]\n",
      " [0.31555556]\n",
      " [0.86222222]\n",
      " [0.34      ]\n",
      " [0.62666667]\n",
      " [0.33111111]\n",
      " [0.38888889]\n",
      " [0.61555556]\n",
      " [0.37777778]\n",
      " [0.26888889]\n",
      " [0.31111111]\n",
      " [0.22222222]\n",
      " [0.24666667]\n",
      " [0.44666667]\n",
      " [0.41555556]\n",
      " [0.52666667]\n",
      " [0.71555556]\n",
      " [0.39111111]\n",
      " [0.25333333]\n",
      " [0.44444444]\n",
      " [0.55111111]\n",
      " [0.38      ]\n",
      " [0.27555556]\n",
      " [0.29111111]\n",
      " [0.56222222]\n",
      " [0.27777778]\n",
      " [0.43777778]\n",
      " [0.16888889]\n",
      " [0.47777778]\n",
      " [0.52666667]\n",
      " [0.18444444]\n",
      " [0.12      ]\n",
      " [0.43111111]\n",
      " [0.4       ]\n",
      " [0.33333333]\n",
      " [0.28444444]\n",
      " [0.04444444]\n",
      " [0.15111111]\n",
      " [0.43111111]\n",
      " [0.19555556]\n",
      " [0.32      ]\n",
      " [0.44888889]\n",
      " [0.32      ]\n",
      " [0.32      ]\n",
      " [0.53555556]]\n"
     ]
    }
   ],
   "source": [
    "# 训练集归一化\n",
    "min_max_scaler = MinMaxScaler()\n",
    "min_max_scaler.fit(x_train_pd)\n",
    "x_train = min_max_scaler.transform(x_train_pd)\n",
    "\n",
    "min_max_scaler.fit(y_train_pd)\n",
    "y_train = min_max_scaler.transform(y_train_pd)\n",
    "\n",
    "# 验证集归一化\n",
    "min_max_scaler.fit(x_valid_pd)\n",
    "x_valid = min_max_scaler.transform(x_valid_pd)\n",
    "\n",
    "min_max_scaler.fit(y_valid_pd)\n",
    "y_valid = min_max_scaler.transform(y_valid_pd)\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 训练"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "_________________________________________________________________\n",
      "Layer (type)                 Output Shape              Param #   \n",
      "=================================================================\n",
      "dense_23 (Dense)             (None, 10)                140       \n",
      "_________________________________________________________________\n",
      "dropout_8 (Dropout)          (None, 10)                0         \n",
      "_________________________________________________________________\n",
      "dense_24 (Dense)             (None, 15)                165       \n",
      "_________________________________________________________________\n",
      "dense_25 (Dense)             (None, 1)                 16        \n",
      "=================================================================\n",
      "Total params: 321\n",
      "Trainable params: 321\n",
      "Non-trainable params: 0\n",
      "_________________________________________________________________\n",
      "None\n",
      "Train on 404 samples, validate on 102 samples\n",
      "Epoch 1/200\n",
      " - 2s - loss: 0.3991 - val_loss: 0.2925\n",
      "Epoch 2/200\n",
      " - 0s - loss: 0.3243 - val_loss: 0.2459\n",
      "Epoch 3/200\n",
      " - 0s - loss: 0.2812 - val_loss: 0.2064\n",
      "Epoch 4/200\n",
      " - 0s - loss: 0.2337 - val_loss: 0.1732\n",
      "Epoch 5/200\n",
      " - 0s - loss: 0.2090 - val_loss: 0.1457\n",
      "Epoch 6/200\n",
      " - 0s - loss: 0.1693 - val_loss: 0.1244\n",
      "Epoch 7/200\n",
      " - 0s - loss: 0.1514 - val_loss: 0.1085\n",
      "Epoch 8/200\n",
      " - 0s - loss: 0.1355 - val_loss: 0.0963\n",
      "Epoch 9/200\n",
      " - 0s - loss: 0.1268 - val_loss: 0.0877\n",
      "Epoch 10/200\n",
      " - 0s - loss: 0.1044 - val_loss: 0.0819\n",
      "Epoch 11/200\n",
      " - 0s - loss: 0.1065 - val_loss: 0.0783\n",
      "Epoch 12/200\n",
      " - 0s - loss: 0.1059 - val_loss: 0.0762\n",
      "Epoch 13/200\n",
      " - 0s - loss: 0.1009 - val_loss: 0.0742\n",
      "Epoch 14/200\n",
      " - 0s - loss: 0.0942 - val_loss: 0.0724\n",
      "Epoch 15/200\n",
      " - 0s - loss: 0.0937 - val_loss: 0.0709\n",
      "Epoch 16/200\n",
      " - 0s - loss: 0.0883 - val_loss: 0.0696\n",
      "Epoch 17/200\n",
      " - 0s - loss: 0.0843 - val_loss: 0.0684\n",
      "Epoch 18/200\n",
      " - 0s - loss: 0.0902 - val_loss: 0.0671\n",
      "Epoch 19/200\n",
      " - 0s - loss: 0.0879 - val_loss: 0.0660\n",
      "Epoch 20/200\n",
      " - 0s - loss: 0.0820 - val_loss: 0.0651\n",
      "Epoch 21/200\n",
      " - 0s - loss: 0.0765 - val_loss: 0.0641\n",
      "Epoch 22/200\n",
      " - 0s - loss: 0.0822 - val_loss: 0.0634\n",
      "Epoch 23/200\n",
      " - 0s - loss: 0.0737 - val_loss: 0.0629\n",
      "Epoch 24/200\n",
      " - 0s - loss: 0.0719 - val_loss: 0.0622\n",
      "Epoch 25/200\n",
      " - 0s - loss: 0.0687 - val_loss: 0.0616\n",
      "Epoch 26/200\n",
      " - 0s - loss: 0.0722 - val_loss: 0.0609\n",
      "Epoch 27/200\n",
      " - 0s - loss: 0.0759 - val_loss: 0.0600\n",
      "Epoch 28/200\n",
      " - 0s - loss: 0.0732 - val_loss: 0.0593\n",
      "Epoch 29/200\n",
      " - 0s - loss: 0.0716 - val_loss: 0.0591\n",
      "Epoch 30/200\n",
      " - 0s - loss: 0.0744 - val_loss: 0.0589\n",
      "Epoch 31/200\n",
      " - 0s - loss: 0.0704 - val_loss: 0.0587\n",
      "Epoch 32/200\n",
      " - 0s - loss: 0.0675 - val_loss: 0.0579\n",
      "Epoch 33/200\n",
      " - 0s - loss: 0.0678 - val_loss: 0.0570\n",
      "Epoch 34/200\n",
      " - 0s - loss: 0.0609 - val_loss: 0.0560\n",
      "Epoch 35/200\n",
      " - 0s - loss: 0.0677 - val_loss: 0.0551\n",
      "Epoch 36/200\n",
      " - 0s - loss: 0.0656 - val_loss: 0.0548\n",
      "Epoch 37/200\n",
      " - 0s - loss: 0.0638 - val_loss: 0.0544\n",
      "Epoch 38/200\n",
      " - 0s - loss: 0.0596 - val_loss: 0.0540\n",
      "Epoch 39/200\n",
      " - 0s - loss: 0.0615 - val_loss: 0.0536\n",
      "Epoch 40/200\n",
      " - 0s - loss: 0.0626 - val_loss: 0.0530\n",
      "Epoch 41/200\n",
      " - 0s - loss: 0.0587 - val_loss: 0.0520\n",
      "Epoch 42/200\n",
      " - 0s - loss: 0.0628 - val_loss: 0.0512\n",
      "Epoch 43/200\n",
      " - 0s - loss: 0.0599 - val_loss: 0.0505\n",
      "Epoch 44/200\n",
      " - 0s - loss: 0.0596 - val_loss: 0.0498\n",
      "Epoch 45/200\n",
      " - 0s - loss: 0.0549 - val_loss: 0.0493\n",
      "Epoch 46/200\n",
      " - 0s - loss: 0.0543 - val_loss: 0.0488\n",
      "Epoch 47/200\n",
      " - 0s - loss: 0.0619 - val_loss: 0.0481\n",
      "Epoch 48/200\n",
      " - 0s - loss: 0.0536 - val_loss: 0.0475\n",
      "Epoch 49/200\n",
      " - 0s - loss: 0.0564 - val_loss: 0.0473\n",
      "Epoch 50/200\n",
      " - 0s - loss: 0.0506 - val_loss: 0.0473\n",
      "Epoch 51/200\n",
      " - 0s - loss: 0.0543 - val_loss: 0.0473\n",
      "Epoch 52/200\n",
      " - 0s - loss: 0.0526 - val_loss: 0.0471\n",
      "Epoch 53/200\n",
      " - 0s - loss: 0.0535 - val_loss: 0.0470\n",
      "Epoch 54/200\n",
      " - 0s - loss: 0.0517 - val_loss: 0.0468\n",
      "Epoch 55/200\n",
      " - 0s - loss: 0.0496 - val_loss: 0.0464\n",
      "Epoch 56/200\n",
      " - 0s - loss: 0.0542 - val_loss: 0.0461\n",
      "Epoch 57/200\n",
      " - 0s - loss: 0.0481 - val_loss: 0.0453\n",
      "Epoch 58/200\n",
      " - 0s - loss: 0.0479 - val_loss: 0.0441\n",
      "Epoch 59/200\n",
      " - 0s - loss: 0.0518 - val_loss: 0.0436\n",
      "Epoch 60/200\n",
      " - 0s - loss: 0.0511 - val_loss: 0.0432\n",
      "Epoch 61/200\n",
      " - 0s - loss: 0.0426 - val_loss: 0.0428\n",
      "Epoch 62/200\n",
      " - 0s - loss: 0.0493 - val_loss: 0.0425\n",
      "Epoch 63/200\n",
      " - 0s - loss: 0.0480 - val_loss: 0.0419\n",
      "Epoch 64/200\n",
      " - 0s - loss: 0.0476 - val_loss: 0.0413\n",
      "Epoch 65/200\n",
      " - 0s - loss: 0.0438 - val_loss: 0.0406\n",
      "Epoch 66/200\n",
      " - 0s - loss: 0.0463 - val_loss: 0.0402\n",
      "Epoch 67/200\n",
      " - 0s - loss: 0.0426 - val_loss: 0.0401\n",
      "Epoch 68/200\n",
      " - 0s - loss: 0.0448 - val_loss: 0.0401\n",
      "Epoch 69/200\n",
      " - 0s - loss: 0.0432 - val_loss: 0.0398\n",
      "Epoch 70/200\n",
      " - 0s - loss: 0.0428 - val_loss: 0.0396\n",
      "Epoch 71/200\n",
      " - 0s - loss: 0.0450 - val_loss: 0.0396\n",
      "Epoch 72/200\n",
      " - 0s - loss: 0.0424 - val_loss: 0.0397\n",
      "Epoch 73/200\n",
      " - 0s - loss: 0.0413 - val_loss: 0.0402\n",
      "Epoch 74/200\n",
      " - 0s - loss: 0.0365 - val_loss: 0.0408\n",
      "Epoch 75/200\n",
      " - 0s - loss: 0.0417 - val_loss: 0.0412\n",
      "Epoch 76/200\n",
      " - 0s - loss: 0.0407 - val_loss: 0.0413\n",
      "Epoch 77/200\n",
      " - 0s - loss: 0.0381 - val_loss: 0.0412\n",
      "Epoch 78/200\n",
      " - 0s - loss: 0.0400 - val_loss: 0.0412\n",
      "Epoch 79/200\n",
      " - 0s - loss: 0.0361 - val_loss: 0.0407\n",
      "Epoch 80/200\n",
      " - 0s - loss: 0.0419 - val_loss: 0.0403\n",
      "Epoch 81/200\n",
      " - 0s - loss: 0.0389 - val_loss: 0.0398\n",
      "Epoch 82/200\n",
      " - 0s - loss: 0.0376 - val_loss: 0.0391\n",
      "Epoch 83/200\n",
      " - 0s - loss: 0.0363 - val_loss: 0.0384\n",
      "Epoch 84/200\n",
      " - 0s - loss: 0.0388 - val_loss: 0.0379\n",
      "Epoch 85/200\n",
      " - 0s - loss: 0.0361 - val_loss: 0.0375\n",
      "Epoch 86/200\n",
      " - 0s - loss: 0.0347 - val_loss: 0.0370\n",
      "Epoch 87/200\n",
      " - 0s - loss: 0.0315 - val_loss: 0.0365\n",
      "Epoch 88/200\n",
      " - 0s - loss: 0.0358 - val_loss: 0.0361\n",
      "Epoch 89/200\n",
      " - 0s - loss: 0.0379 - val_loss: 0.0359\n",
      "Epoch 90/200\n",
      " - 0s - loss: 0.0349 - val_loss: 0.0360\n",
      "Epoch 91/200\n",
      " - 0s - loss: 0.0350 - val_loss: 0.0359\n",
      "Epoch 92/200\n",
      " - 0s - loss: 0.0375 - val_loss: 0.0357\n",
      "Epoch 93/200\n",
      " - 0s - loss: 0.0314 - val_loss: 0.0355\n",
      "Epoch 94/200\n",
      " - 0s - loss: 0.0341 - val_loss: 0.0356\n",
      "Epoch 95/200\n",
      " - 0s - loss: 0.0306 - val_loss: 0.0360\n",
      "Epoch 96/200\n",
      " - 0s - loss: 0.0342 - val_loss: 0.0361\n",
      "Epoch 97/200\n",
      " - 0s - loss: 0.0361 - val_loss: 0.0355\n",
      "Epoch 98/200\n",
      " - 0s - loss: 0.0328 - val_loss: 0.0344\n",
      "Epoch 99/200\n",
      " - 0s - loss: 0.0349 - val_loss: 0.0338\n",
      "Epoch 100/200\n",
      " - 0s - loss: 0.0356 - val_loss: 0.0338\n",
      "Epoch 101/200\n",
      " - 0s - loss: 0.0338 - val_loss: 0.0339\n",
      "Epoch 102/200\n",
      " - 0s - loss: 0.0324 - val_loss: 0.0340\n",
      "Epoch 103/200\n",
      " - 0s - loss: 0.0308 - val_loss: 0.0341\n",
      "Epoch 104/200\n",
      " - 0s - loss: 0.0342 - val_loss: 0.0340\n",
      "Epoch 105/200\n",
      " - 0s - loss: 0.0298 - val_loss: 0.0339\n",
      "Epoch 106/200\n",
      " - 0s - loss: 0.0314 - val_loss: 0.0339\n",
      "Epoch 107/200\n",
      " - 0s - loss: 0.0316 - val_loss: 0.0337\n",
      "Epoch 108/200\n",
      " - 0s - loss: 0.0334 - val_loss: 0.0334\n",
      "Epoch 109/200\n",
      " - 0s - loss: 0.0300 - val_loss: 0.0332\n",
      "Epoch 110/200\n",
      " - 0s - loss: 0.0316 - val_loss: 0.0332\n",
      "Epoch 111/200\n",
      " - 0s - loss: 0.0300 - val_loss: 0.0335\n",
      "Epoch 112/200\n",
      " - 0s - loss: 0.0292 - val_loss: 0.0340\n",
      "Epoch 113/200\n",
      " - 0s - loss: 0.0304 - val_loss: 0.0342\n",
      "Epoch 114/200\n",
      " - 0s - loss: 0.0348 - val_loss: 0.0343\n",
      "Epoch 115/200\n",
      " - 0s - loss: 0.0298 - val_loss: 0.0345\n",
      "Epoch 116/200\n",
      " - 0s - loss: 0.0301 - val_loss: 0.0346\n",
      "Epoch 117/200\n",
      " - 0s - loss: 0.0302 - val_loss: 0.0344\n",
      "Epoch 118/200\n",
      " - 0s - loss: 0.0318 - val_loss: 0.0341\n",
      "Epoch 119/200\n",
      " - 0s - loss: 0.0285 - val_loss: 0.0339\n",
      "Epoch 120/200\n",
      " - 0s - loss: 0.0343 - val_loss: 0.0336\n",
      "Epoch 121/200\n",
      " - 0s - loss: 0.0282 - val_loss: 0.0332\n",
      "Epoch 122/200\n",
      " - 0s - loss: 0.0302 - val_loss: 0.0327\n",
      "Epoch 123/200\n",
      " - 0s - loss: 0.0336 - val_loss: 0.0320\n",
      "Epoch 124/200\n",
      " - 0s - loss: 0.0311 - val_loss: 0.0316\n",
      "Epoch 125/200\n",
      " - 0s - loss: 0.0286 - val_loss: 0.0318\n",
      "Epoch 126/200\n",
      " - 0s - loss: 0.0297 - val_loss: 0.0327\n",
      "Epoch 127/200\n",
      " - 0s - loss: 0.0273 - val_loss: 0.0335\n",
      "Epoch 128/200\n",
      " - 0s - loss: 0.0267 - val_loss: 0.0340\n",
      "Epoch 129/200\n",
      " - 0s - loss: 0.0287 - val_loss: 0.0343\n",
      "Epoch 130/200\n",
      " - 0s - loss: 0.0299 - val_loss: 0.0342\n",
      "Epoch 131/200\n",
      " - 0s - loss: 0.0308 - val_loss: 0.0337\n",
      "Epoch 132/200\n",
      " - 0s - loss: 0.0282 - val_loss: 0.0331\n",
      "Epoch 133/200\n",
      " - 0s - loss: 0.0301 - val_loss: 0.0326\n",
      "Epoch 134/200\n",
      " - 0s - loss: 0.0323 - val_loss: 0.0322\n",
      "Epoch 135/200\n",
      " - 0s - loss: 0.0310 - val_loss: 0.0318\n",
      "Epoch 136/200\n",
      " - 0s - loss: 0.0296 - val_loss: 0.0315\n",
      "Epoch 137/200\n",
      " - 0s - loss: 0.0269 - val_loss: 0.0314\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Epoch 138/200\n",
      " - 0s - loss: 0.0294 - val_loss: 0.0313\n",
      "Epoch 139/200\n",
      " - 0s - loss: 0.0253 - val_loss: 0.0310\n",
      "Epoch 140/200\n",
      " - 0s - loss: 0.0276 - val_loss: 0.0306\n",
      "Epoch 141/200\n",
      " - 0s - loss: 0.0248 - val_loss: 0.0301\n",
      "Epoch 142/200\n",
      " - 0s - loss: 0.0239 - val_loss: 0.0295\n",
      "Epoch 143/200\n",
      " - 0s - loss: 0.0270 - val_loss: 0.0294\n",
      "Epoch 144/200\n",
      " - 0s - loss: 0.0278 - val_loss: 0.0297\n",
      "Epoch 145/200\n",
      " - 0s - loss: 0.0284 - val_loss: 0.0300\n",
      "Epoch 146/200\n",
      " - 0s - loss: 0.0254 - val_loss: 0.0301\n",
      "Epoch 147/200\n",
      " - 0s - loss: 0.0264 - val_loss: 0.0299\n",
      "Epoch 148/200\n",
      " - 0s - loss: 0.0302 - val_loss: 0.0302\n",
      "Epoch 149/200\n",
      " - 0s - loss: 0.0260 - val_loss: 0.0303\n",
      "Epoch 150/200\n",
      " - 0s - loss: 0.0276 - val_loss: 0.0300\n",
      "Epoch 151/200\n",
      " - 0s - loss: 0.0291 - val_loss: 0.0298\n",
      "Epoch 152/200\n",
      " - 0s - loss: 0.0264 - val_loss: 0.0297\n",
      "Epoch 153/200\n",
      " - 0s - loss: 0.0269 - val_loss: 0.0296\n",
      "Epoch 154/200\n",
      " - 0s - loss: 0.0280 - val_loss: 0.0293\n",
      "Epoch 155/200\n",
      " - 0s - loss: 0.0277 - val_loss: 0.0290\n",
      "Epoch 156/200\n",
      " - 0s - loss: 0.0262 - val_loss: 0.0290\n",
      "Epoch 157/200\n",
      " - 0s - loss: 0.0283 - val_loss: 0.0295\n",
      "Epoch 158/200\n",
      " - 0s - loss: 0.0249 - val_loss: 0.0299\n",
      "Epoch 159/200\n",
      " - 0s - loss: 0.0283 - val_loss: 0.0302\n",
      "Epoch 160/200\n",
      " - 0s - loss: 0.0260 - val_loss: 0.0301\n",
      "Epoch 161/200\n",
      " - 0s - loss: 0.0275 - val_loss: 0.0302\n",
      "Epoch 162/200\n",
      " - 0s - loss: 0.0243 - val_loss: 0.0309\n",
      "Epoch 163/200\n",
      " - 0s - loss: 0.0249 - val_loss: 0.0312\n",
      "Epoch 164/200\n",
      " - 0s - loss: 0.0256 - val_loss: 0.0313\n",
      "Epoch 165/200\n",
      " - 0s - loss: 0.0268 - val_loss: 0.0315\n",
      "Epoch 166/200\n",
      " - 0s - loss: 0.0276 - val_loss: 0.0313\n",
      "Epoch 167/200\n",
      " - 0s - loss: 0.0288 - val_loss: 0.0309\n",
      "Epoch 168/200\n",
      " - 0s - loss: 0.0263 - val_loss: 0.0306\n",
      "Epoch 169/200\n",
      " - 0s - loss: 0.0269 - val_loss: 0.0303\n",
      "Epoch 170/200\n",
      " - 0s - loss: 0.0232 - val_loss: 0.0297\n",
      "Epoch 171/200\n",
      " - 0s - loss: 0.0251 - val_loss: 0.0289\n",
      "Epoch 172/200\n",
      " - 0s - loss: 0.0271 - val_loss: 0.0284\n",
      "Epoch 173/200\n",
      " - 0s - loss: 0.0279 - val_loss: 0.0281\n",
      "Epoch 174/200\n",
      " - 0s - loss: 0.0268 - val_loss: 0.0279\n",
      "Epoch 175/200\n",
      " - 0s - loss: 0.0236 - val_loss: 0.0280\n",
      "Epoch 176/200\n",
      " - 0s - loss: 0.0252 - val_loss: 0.0284\n",
      "Epoch 177/200\n",
      " - 0s - loss: 0.0262 - val_loss: 0.0284\n",
      "Epoch 178/200\n",
      " - 0s - loss: 0.0277 - val_loss: 0.0281\n",
      "Epoch 179/200\n",
      " - 0s - loss: 0.0234 - val_loss: 0.0278\n",
      "Epoch 180/200\n",
      " - 0s - loss: 0.0236 - val_loss: 0.0276\n",
      "Epoch 181/200\n",
      " - 0s - loss: 0.0259 - val_loss: 0.0276\n",
      "Epoch 182/200\n",
      " - 0s - loss: 0.0227 - val_loss: 0.0279\n",
      "Epoch 183/200\n",
      " - 0s - loss: 0.0251 - val_loss: 0.0281\n",
      "Epoch 184/200\n",
      " - 0s - loss: 0.0258 - val_loss: 0.0283\n",
      "Epoch 185/200\n",
      " - 0s - loss: 0.0236 - val_loss: 0.0279\n",
      "Epoch 186/200\n",
      " - 0s - loss: 0.0245 - val_loss: 0.0271\n",
      "Epoch 187/200\n",
      " - 0s - loss: 0.0226 - val_loss: 0.0268\n",
      "Epoch 188/200\n",
      " - 0s - loss: 0.0257 - val_loss: 0.0265\n",
      "Epoch 189/200\n",
      " - 0s - loss: 0.0246 - val_loss: 0.0261\n",
      "Epoch 190/200\n",
      " - 0s - loss: 0.0253 - val_loss: 0.0257\n",
      "Epoch 191/200\n",
      " - 0s - loss: 0.0247 - val_loss: 0.0256\n",
      "Epoch 192/200\n",
      " - 0s - loss: 0.0251 - val_loss: 0.0257\n",
      "Epoch 193/200\n",
      " - 0s - loss: 0.0237 - val_loss: 0.0258\n",
      "Epoch 194/200\n",
      " - 0s - loss: 0.0243 - val_loss: 0.0259\n",
      "Epoch 195/200\n",
      " - 0s - loss: 0.0250 - val_loss: 0.0256\n",
      "Epoch 196/200\n",
      " - 0s - loss: 0.0252 - val_loss: 0.0253\n",
      "Epoch 197/200\n",
      " - 0s - loss: 0.0226 - val_loss: 0.0252\n",
      "Epoch 198/200\n",
      " - 0s - loss: 0.0235 - val_loss: 0.0253\n",
      "Epoch 199/200\n",
      " - 0s - loss: 0.0269 - val_loss: 0.0253\n",
      "Epoch 200/200\n",
      " - 0s - loss: 0.0242 - val_loss: 0.0254\n"
     ]
    }
   ],
   "source": [
    "\n",
    "# 单CPU or GPU版本，若有GPU则自动切换\n",
    "model = Sequential()  # 初始化，很重要！\n",
    "model.add(Dense(units = 10,   # 输出大小\n",
    "                activation='relu',  # 激励函数\n",
    "                input_shape=(x_train_pd.shape[1],)  # 输入大小, 也就是列的大小\n",
    "               )\n",
    "         )\n",
    "\n",
    "model.add(Dropout(0.2))  # 丢弃神经元链接概率\n",
    "\n",
    "model.add(Dense(units = 15,\n",
    "#                 kernel_regularizer=regularizers.l2(0.01),  # 施加在权重上的正则项\n",
    "#                 activity_regularizer=regularizers.l1(0.01),  # 施加在输出上的正则项\n",
    "                activation='relu' # 激励函数\n",
    "                # bias_regularizer=keras.regularizers.l1_l2(0.01)  # 施加在偏置向量上的正则项\n",
    "               )\n",
    "         )\n",
    "\n",
    "model.add(Dense(units = 1,   \n",
    "                activation='linear'  # 线性激励函数 回归一般在输出层用这个激励函数  \n",
    "               )\n",
    "         )\n",
    "\n",
    "print(model.summary())  # 打印网络层次结构\n",
    "\n",
    "model.compile(loss='mse',  # 损失均方误差\n",
    "              optimizer='adam',  # 优化器\n",
    "             )\n",
    "\n",
    "history = model.fit(x_train, y_train,\n",
    "          epochs=200,  # 迭代次数\n",
    "          batch_size=200,  # 每次用来梯度下降的批处理数据大小\n",
    "          verbose=2,  # verbose：日志冗长度，int：冗长度，0：不输出训练过程，1：输出训练进度，2：输出每一个epoch\n",
    "          validation_data = (x_valid, y_valid)  # 验证集\n",
    "        )\n",
    "\n",
    "# 多GPU版本\n",
    "# parallel_model = multi_gpu_model(model, gpus=4)\n",
    "# parallel_model.compile(loss='mse',  # 均方误差损失（回归）\n",
    "#                        optimizer='adam',\n",
    "#                       )\n",
    "\n",
    "# This `fit` call will be distributed on 4 GPUs.\n",
    "# Since the batch size is 512, each GPU will process 128 samples.\n",
    "# batch_size = 512\n",
    "# epochs = 2\n",
    "# history = parallel_model.fit(\n",
    "#           x_train, \n",
    "#           y_train,\n",
    "#           batch_size=batch_size,\n",
    "#           epochs=epochs,\n",
    "#           validation_split = 0.2  # 从训练集分割出20%的数据作为验证集\n",
    "#         )"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 训练过程可视化"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEWCAYAAACJ0YulAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4zLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvIxREBQAAIABJREFUeJzt3Xt8XHWd//HXZ2YyM7nf26ZN26Q32lKglIhSLoIitqjgBRDUlZt2dWXxsrqWn/5Qcf0t6G9VEH6LuBbRFRBE3LpbBEQE5dq0FHqnaeklbdLcm3vm9vn9cU7SSTpJ09KZpJnP8/GYx8ycOTPzyclk3vl+v+d8j6gqxhhjDIBnrAswxhgzflgoGGOMGWChYIwxZoCFgjHGmAEWCsYYYwZYKBhjjBlgoWDMKIhIhYioiPhGse51IvK3t/s6xowFCwUz4YjIbhEJiUjJkOUb3C/kirGpzJjxz0LBTFRvAdf03xGR04DMsSvHmJODhYKZqH4FfDru/rXAL+NXEJF8EfmliDSKyB4R+aaIeNzHvCLyf0WkSUR2AR9I8Nyfi0idiOwXkX8REe+xFikiU0VktYi0iEiNiHw27rGzRaRaRNpF5KCI/NBdHhSR/xSRZhFpE5G1IjL5WN/bmEQsFMxE9TKQJyIL3C/rjwP/OWSdnwD5wCzg3Tghcr372GeBDwJnAlXAFUOe+wAQAea461wCfOY46nwIqAWmuu/xf0Tkve5jdwJ3qmoeMBt4xF1+rVv3dKAY+BzQcxzvbcwRLBTMRNbfWngfsA3Y3/9AXFDcoqodqrob+Dfg79xVrgJ+rKr7VLUF+Ne4504GlgNfUtUuVW0AfgRcfSzFich04Dzg66raq6obgP+IqyEMzBGRElXtVNWX45YXA3NUNaqq61S1/Vje25jhWCiYiexXwCeA6xjSdQSUAH5gT9yyPcA09/ZUYN+Qx/rNBDKAOrf7pg34KTDpGOubCrSoascwNdwIzAO2uV1EH4z7uZ4EHhaRAyLyfRHJOMb3NiYhCwUzYanqHpwB50uB3w15uAnnP+6ZcctmcLg1UYfTPRP/WL99QB9QoqoF7iVPVU89xhIPAEUikpuoBlXdoarX4ITNHcBvRSRbVcOq+h1VXQgsxenm+jTGnAAWCmaiuxF4j6p2xS9U1ShOH/33RCRXRGYCX+HwuMMjwM0iUi4ihcDKuOfWAU8B/yYieSLiEZHZIvLuYylMVfcBLwL/6g4en+7W+2sAEfmUiJSqagxoc58WFZGLROQ0twusHSfcosfy3sYMx0LBTGiqulNVq4d5+B+BLmAX8DfgQWCV+9jPcLpoXgfWc2RL49M43U9bgFbgt0DZcZR4DVCB02p4HPiWqj7tPrYM2CwinTiDzlerai8wxX2/dmAr8BxHDqIbc1zETrJjjDGmn7UUjDHGDLBQMMYYM8BCwRhjzAALBWOMMQNOuul7S0pKtKKiYqzLMMaYk8q6deuaVLX0aOuddKFQUVFBdfVwexgaY4xJRET2HH0t6z4yxhgTx0LBGGPMAAsFY4wxA066MYVEwuEwtbW19Pb2jnUpKRMMBikvLycjwybHNMacOBMiFGpra8nNzaWiogIRGetykk5VaW5upra2lsrKyrEuxxgzgUyI7qPe3l6Ki4vTIhAARITi4uK0ahkZY1JjQoQCkDaB0C/dfl5jTGokNRREZJmIbHdPSL5ymHWuEpEtIrJZRB5MVi1dfRHqD/Vgs8IaY8zwkhYK7glA7sE5l+1C4BoRWThknbnALcC57lmrvpSserpDURo6+oglIRSam5tZvHgxixcvZsqUKUybNm3gfigUGtVrXH/99Wzfvv2E12aMMccimQPNZwM1qroLQEQeBi7HOSlJv88C96hqK4B7AvSk8Li9LTEF7wl+7eLiYjZs2ADAt7/9bXJycvjqV786aB1VRVXxeBLn8P3333+CqzLGmGOXzO6jaQw+
8Xkth09I3m8eME9EXhCRl0VkWaIXEpEVIlItItWNjY3HVYzHTYVYLHXdRzU1NSxatIjPfe5zLFmyhLq6OlasWEFVVRWnnnoqt91228C65513Hhs2bCASiVBQUMDKlSs544wzOOecc2hoSFpWGmPMIMlsKSQaCR36jewD5gIXAuXAX0Vkkaq2DXqS6n3AfQBVVVUjfqt/5w+b2XKg/Yjl0ZjSG46S6ffiOcZB2oVT8/jWh471nOyOLVu2cP/993PvvfcCcPvtt1NUVEQkEuGiiy7iiiuuYOHCQb1qHDp0iHe/+93cfvvtfOUrX2HVqlWsXJlwSMYYY06oZLYUaoHpcffLcc5DO3Sd/1LVsKq+BWzHCYkJY/bs2bzjHe8YuP/QQw+xZMkSlixZwtatW9myZcsRz8nMzGT58uUAnHXWWezevTtV5Rpj0lwyWwprgbkiUgnsB64GPjFknd/jnLj8FyJSgtOdtOvtvOlw/9F39UXY2dhJZUk2ucHUHQWcnZ09cHvHjh3ceeedvPrqqxQUFPCpT30q4bEGfr9/4LbX6yUSiaSkVmOMSVpLQVUjwE3Ak8BW4BFV3Swit4nIZe5qTwLNIrIFeBb4mqo2J6Oe/i6jZOx9NFrt7e3k5uaSl5dHXV0dTz755JjVYowxiSR1mgtVXQOsGbLs1rjbCnzFvSRV/04/sViy32l4S5YsYeHChSxatIhZs2Zx7rnnjl0xxhiTgJxsB3NVVVXp0JPsbN26lQULFoz4vHA0xta6dqYVZFKcE0hmiSkzmp/bGGMARGSdqlYdbb0JM83F0YyH7iNjjBnv0igUnOsUHqZgjDEnnbQJBRHBI2ItBWOMGUHahAI4XUipPKLZGGNONukVCh7rPjLGmJGkVyhY95Exxowo7UIhmoSmwomYOhtg1apV1NfXn/D6jDFmtCbEOZpHyyPJ6T4azdTZo7Fq1SqWLFnClClTTnSJxhgzKmkWCkIkxYc0P/DAA9xzzz2EQiGWLl3K3XffTSwW4/rrr2fDhg2oKitWrGDy5Mls2LCBj3/842RmZvLqq68OmgPJGGNSYeKFwhMroX5jwoemRKLO3kf+Y/yxp5wGy28/5lI2bdrE448/zosvvojP52PFihU8/PDDzJ49m6amJjZudOpsa2ujoKCAn/zkJ9x9990sXrz4mN/LGGNOhIkXCiMQjjyhQzL96U9/Yu3atVRVOUeW9/T0MH36dN7//vezfft2vvjFL3LppZdyySWXpLAqY4wZ3sQLhRH+o29u66G1K8Sp0/JTUoqqcsMNN/Dd7373iMfeeOMNnnjiCe666y4ee+wx7rvvvpTUZIwxI0m/vY/ccyWnwsUXX8wjjzxCU1MT4OyltHfvXhobG1FVrrzySr7zne+wfv16AHJzc+no6EhJbcYYk8jEaymMoH/6bFU4xjNyHpfTTjuNb33rW1x88cXEYjEyMjK499578Xq93HjjjagqIsIdd9wBwPXXX89nPvMZG2g2xoyZtJk6G6Cps48DbT0sLMvD5z35G0k2dbYxZrRs6uwEbPpsY4wZWZqFgnNt8x8ZY0xiEyYURtMN5nFTYSLMlHqydfsZY04OEyIUgsEgzc3NR/2inCjdR6pKc3MzwWBwrEsxxkwwE2Lvo/Lycmpra2lsbBxxvVAkRkNHH5EWP5kZ3hRVlxzBYJDy8vKxLsMYM8FMiFDIyMigsrLyqOvtbOzk8l8/x51XL+byBdNSUJkxxpxcJkT30Whl+Z3WQXcoOsaVGGPM+JRmoeA0jCwUjDEmsaSGgogsE5HtIlIjIisTPH6diDSKyAb38plk1jPQUuiLJPNtjDHmpJW0MQUR8QL3AO8DaoG1IrJaVbcMWfU3qnpTsuqIl+H14Pd66A5bS8EYYxJJZkvhbKBGVXepagh4GLg8ie83Kpl+r7UUjDFmGMkMhWnAvrj7te6yoT4mIm+IyG9FZHqiFxKRFSJSLSLVR9vt9GhyAj66bEzBGGMSSmYoJJqHdOhR
Y38AKlT1dOBPwAOJXkhV71PVKlWtKi0tfVtFZfm9dFlLwRhjEkpmKNQC8f/5lwMH4ldQ1WZV7XPv/gw4K4n1AJBtLQVjjBlWMkNhLTBXRCpFxA9cDayOX0FEyuLuXgZsTWI9AGQHrKVgjDHDSdreR6oaEZGbgCcBL7BKVTeLyG1AtaquBm4WkcuACNACXJesevpl+300d3Yn+22MMeaklNRpLlR1DbBmyLJb427fAtySzBoGiYbJ9nvpCllLwRhjEkmfI5r/9mP4bgn5/ijdfTamYIwxiaRPKGRkAVDgDdFpYwrGGJNQ+oRCIAeAAm8ffZEYkWhsjAsyxpjxJ31Cwe+EQp6nF8B2SzXGmATSJxQC/aHgHBZhu6UaY8yR0icU/LkA5IjTUui2PZCMMeYI6RMKbkshR3oA6LQ9kIwx5gjpEwrumEKmOqFgM6UaY8yR0icU3JZClva3FCwUjDFmqPQJBXdMIRBzpriwU3IaY8yR0icUvD7wBQnErKVgjDHDSZ9QAPDn4I92Abb3kTHGJJJeoRDIwRdxuo9s7yNjjDlSeoWCPxcJddrZ14wxZhjpFQqBHOjrIDvgs+4jY4xJIL1CwZ8DoU6y/V7rPjLGmATSKxQCOdDX6bQUrPvIGGOOkF6hMNBS8NkuqcYYk0B6hUIg120peO3gNWOMSSC9QsFtKdjeR8YYk1h6hUIgB1CKfGG6bO8jY4w5QnqFgjtTamFGiC7b+8gYY46QXqEQcCbFK/D20hWKoKpjXJAxxowv6RUK/edp9vahCj1hay0YY0y89AqF/vM0u6fktN1SjTFmsKSGgogsE5HtIlIjIitHWO8KEVERqUpmPf0thVyPGwq9FgrGGBMvaaEgIl7gHmA5sBC4RkQWJlgvF7gZeCVZtQxwxxRy6AOgw0LBGGMGSWZL4WygRlV3qWoIeBi4PMF63wW+D/QmsRaH21LIEedEOxYKxhgzWDJDYRqwL+5+rbtsgIicCUxX1f8e6YVEZIWIVItIdWNj4/FX5M8GDp+nuaM3fPyvZYwxE1AyQ0ESLBvYB1REPMCPgH862gup6n2qWqWqVaWlpcdfkdtSCKrTKLGWgjHGDJbMUKgFpsfdLwcOxN3PBRYBfxGR3cC7gNVJHWz2eCAjm2DMOSVnu7UUjDFmkGSGwlpgrohUiogfuBpY3f+gqh5S1RJVrVDVCuBl4DJVrU5iTRDMI8M9T7O1FIwxZrCkhYKqRoCbgCeBrcAjqrpZRG4TkcuS9b5HFcjF09funmjHQsEYY+L5kvniqroGWDNk2a3DrHthMmsZEMiD3nZygj4baDbGmCHS64hmgGAe9LWTG8yw7iNjjBki/UIhkAd9HeQGfRYKxhgzRPqFQtDpPnJaCtZ9ZIwx8dIvFAL93UfWUjDGmKHSLxSC+RDupiAA7RYKxhgzSPqFQiAPgBJfyLqPjDFmiPQLhaATCkXeHvoiMUKR2BgXZIwx40f6hYLbUij02ol2jDFmqPQLBbelkO+xmVKNMWao9AsFt6WQK92AzX9kjDHx0i8U3JZCLk4o2EypxhhzWPqFgttSyFZrKRhjzFBpGwpZatNnG2PMUOkXCj4/+IIEBs6pYN1HxhjTL/1CASCQRyDSCVhLwRhj4qVnKATz8ITaCWZ4rKVgjDFxRhUKIjJbRALu7QtF5GYRKUhuaUk0MH22nVPBGGPijbal8BgQFZE5wM+BSuDBpFWVbO702YVZGbR0hca6GmOMGTdGGwox95zLHwF+rKpfBsqSV1aSudNnT8oN0tjZN9bVGGPMuDHaUAiLyDXAtcB/u8syklNSCrgthdLcAI0dFgrGGNNvtKFwPXAO8D1VfUtEKoH/TF5ZSRbId1sKARo6+lDVsa7IGGPGBd9oVlLVLcDNACJSCOSq6u3JLCypgnkQ6mRSjo9QJEZ7b4T8zJO34WOMMSfKaPc++ouI5IlIEfA6cL+I/DC5pSWRe1RzWdDZHbWxo3csqzHGmHFjtN1H+araDnwU
uF9VzwIuTl5ZSRbMB2Cy3wmDBhtXMMYYYPSh4BORMuAqDg80H5WILBOR7SJSIyIrEzz+ORHZKCIbRORvIrJwtK/9tmQ6h1hM8jmhYIPNxhjjGG0o3AY8CexU1bUiMgvYMdITRMQL3AMsBxYC1yT40n9QVU9T1cXA94HUdEllFgJQ6HVmSrVQMMYYx2gHmh8FHo27vwv42FGedjZQ466LiDwMXA5siXud9rj1s4HU7AbkhkJ2tJ2AL9O6j4wxxjXageZyEXlcRBpE5KCIPCYi5Ud52jRgX9z9WnfZ0Nf+gojsxGkp3DzM+68QkWoRqW5sbBxNySNzQ0F6Wu1YBWOMiTPa7qP7gdXAVJwv9j+4y0YiCZYd0RJQ1XtUdTbwdeCbiV5IVe9T1SpVrSotLR1lySMIutM29bYxyULBGGMGjDYUSlX1flWNuJdfAEf7dq4FpsfdLwcOjLD+w8CHR1nP25MRBF8muC2FBtsl1RhjgNGHQpOIfEpEvO7lU0DzUZ6zFpgrIpUi4geuxmltDBCRuXF3P8BRBq9PqMxC6Gl15j+yloIxxgCjHGgGbgDuBn6E0wX0Is7UF8NS1YiI3ISz15IXWKWqm0XkNqBaVVcDN4nIxUAYaMWZWyk1Mgugp43S0gCt3WFCkRh+X3qeXsIYY/qNdu+jvcBl8ctE5EvAj4/yvDXAmiHLbo27/cVRV3qiZRY6oZAbAKCxs49pBZljVo4xxowHb+df46+csCrGgtt9VJrjhEKzTaFtjDFvKxQS7V108sgsgJ5WinL8ADTbyXaMMeZthcLJPd900AmF4mw3FDotFIwxZsQxBRHpIPGXvwAndwd8ZiFEeigOOj9eS5d1HxljzIihoKq5qSok5QamuujA7/NYS8EYY3h73UcnN3emVOltoyTbb2MKxhhDWoeC01Kgp42iHD8tFgrGGGOhQE8rRdkB2yXVGGOwUICeVus+MsYYV/qGQv9MqT2tFGX7baDZGGNI51AI5IF4oNcZU+gJR+kORca6KmOMGVPpGwoez8ABbCXZ/VNdWGvBGJPe0jcUwBlX6G6myD2q2fZAMsaku/QOhexS6GqiOMdCwRhjIO1DoQS6myl2u4+abLdUY0yas1DoahyYKdVaCsaYdJfeoZBVAt0tZGcIAZ/HjlUwxqS99A6F7BLQKNJ7iGI7VsEYY9I9FEqd665GSnIDNNqYgjEmzaV3KGQVO9ddTUzNz6SurWds6zHGmDGW3qEQ11IoKwhyoK0H1ZP7hHLGGPN2pHkolDjX3U1MK8ikKxSlvcemujDGpK/0DoWB7qNmphY4Zxfdb11Ixpg0lt6h4M1w5j/qahwIhQMWCsaYNJbUUBCRZSKyXURqRGRlgse/IiJbROQNEXlGRGYms56Eskugu4mpBUEA6g5ZKBhj0lfSQkFEvMA9wHJgIXCNiCwcstprQJWqng78Fvh+suoZljv/UUl2gAyvsL+tN+UlGGPMeJHMlsLZQI2q7lLVEPAwcHn8Cqr6rKp2u3dfBsqTWE9iWcXQ1YTHI5TlZ1r3kTEmrSUzFKYB++Lu17rLhnMj8ESiB0RkhYhUi0h1Y2PjCSwRp6XQ3QTAVHe3VGOMSVfJDAVJsCzhQQAi8imgCvhBosdV9T5VrVLVqtLS0hNYIgMzpRKLMbXAWgrGmPTmS+Jr1wLT4+6XAweGriQiFwPfAN6tqqmfZyK7FDQGPa1MK8ikvr2XSDSGz5veO2YZY9JTMr/51gJzRaRSRPzA1cDq+BVE5Ezgp8BlqtqQxFqG138AW1cDUwsyiSk0dNgcSMaY9JS0UFDVCHAT8CSwFXhEVTeLyG0icpm72g+AHOBREdkgIquHebnkyZniXHfUMc09VmFPc/cITzDGmIkrmd1HqOoaYM2QZbfG3b44me8/KnllznV7HfNnLwVgW30758wuHsOijDFmbFjHea4bCh0HKM0NUJztZ8uB9rGtyRhjxoiFQkYmZBZCex0iwsKpeWytt1AwxqQnCwWA
3KnQUQfAgrI83qzvJByNjXFRxhiTehYK4IwruKGwsCyPUDTGrsauMS7KGGNSz0IBnHGF9sMtBYCtddaFZIxJPxYK4IRCVwNEI8wqzcbv87DFQsEYk4YsFMDpPtIYdB4kw+th3uQcaykYY9KShQI4A80waFxhy4F2O1+zMSbtWChA3AFsztRMC8ryaO4K0WjTXRhj0oyFAiRsKQBsti4kY0yasVAA50Q7noyBUJhveyAZY9KUhQKAxzNot9T8zAzKCzNtugtjTNqxUOiXXw5tewfuLijLs5aCMSbtWCj0K5oFLTsH7i4sy+Otpi56QtExLMoYY1LLQqFf8SzoPAh9nYDTUogpbD/YMcaFGWNM6lgo9Cua5Vy37AIO74Fk4wrGmHRiodCvaLZz7YZCeWEmuQGfjSsYY9KKhUK/okrn2h1X8HiE+WW5NgeSMSatWCj0C+RCzuSBlgI4XUjb6tqJxWy6C2NMerBQiFc0C5oPh8KCsjy6QlH2tXaPYVHGGJM6FgrximYNbilMtcFmY0x6sVCIVzQLOusHdkudNzkXj9h0F8aY9GGhEG/IbqnBDC+zS3NssNkYkzYsFOJNXuRc178xsOi08nxe29tmg83GmLRgoRCveA4E8mD/uoFF58wqprkrNHBkczSm/PDpN9nXYoPPxpiJJ6mhICLLRGS7iNSIyMoEj18gIutFJCIiVySzllHxeGDqYti/fmDRuXNKAHihpgmAtbtbuOuZHfzL/2wZkxKNMSaZkhYKIuIF7gGWAwuBa0Rk4ZDV9gLXAQ8mq45jNnUJHNwMEeesa1MLMqksyealnc0APLu9AYAnNx9k0/5DY1amMcYkQzJbCmcDNaq6S1VDwMPA5fErqOpuVX0DiCWxjmMzbQnEwlC/aWDR0tnFvPJWC5FojL9sa+SM8nzygj5++PSbY1ioMcaceMkMhWnAvrj7te6yYyYiK0SkWkSqGxsbT0hxw5p2lnN9YHAXUmdfhEfX1bL9YAcfPH0q/3DRHP68rYE/bqpLbj3GGJNCyQwFSbDsuHbhUdX7VLVKVatKS0vfZllHkTcNsicNGmw+f24J5YWZ3PK7jQBcNL+UG8+rZNG0PL75+020dIWSW5MxxqRIMkOhFpged78cOJDE9zsxRGDmObDzzxBzTrCTG8zgd59fysKyPOZPyWV2aQ4ZXg8/uOIM2rrD3PXMDgAi0fHTC2aMMccjmaGwFpgrIpUi4geuBlYn8f1OnEUfc064s/uvA4sm5QX57388j9/9w1JEnEbQgrI8PrpkGg+9upe1u1s45/Y/87Pndw33qsYYM+4lLRRUNQLcBDwJbAUeUdXNInKbiFwGICLvEJFa4ErgpyKyOVn1HJO5l4A/FzY+OmixxyNk+X2Dlv3DhXMIR2Ncc9/LNHb08fO/vWUtBmPMSSupxymo6hpVnaeqs1X1e+6yW1V1tXt7raqWq2q2qhar6qnJrGfUMjJhwYdgyx8g3DviqhUl2Xx48TRiqly3tIL69l6ee/PwYPiz2xq48AfP8u3V4yPvjDFmJHZE83BOvxL6DsFLdx911f/z0dN46ssX8I0PLKAkJ8AvX9rD9voOvvDr9Vz/i7U0dvTxixd3s25PawoKN8aY4+c7+ippatZFztjCs9+D0vkw/wPOIHQCwQwvcyblAnBVVTn/7y87ee7NRvw+D//0vnn83TkzWfbjv/KNxzfyDxfNob0nTP2hXj6yZBqzS3NS+VMZY8yIRPXkmuitqqpKq6urU/NmoS74j/dBw2bInw65U8AbgIwglJwC08+GWRdCZsHAU3rDUf66o4m27hBnVxYxszgbgKc21/OFB9cTjh7e3h6Br71/Pp+/cHZqfh5jTNoSkXWqWnXU9SwUjqL3EGz+Pex8BnrbIRpywqJxG0R6weODUz8KF66E4pG/3Lv6Iuxv6yEzw0um38uXf7OBDXvbqP7fFxPweVP0Axlj0tFoQ8G6j44mmA9nXetc4kXDzsR5W34P1ffD5sfh
ov8FS28Gb+LNmh3wMW9y7sD9G86t5Poda3mxppknN9fzu/X7ycv0cfU7ZvD5C2eTHbBfjzEmtWyg+Xh5M2DGO2HZv8IXX4f5l8Iz34H7l0HzzlG9xNI5xeQGfNz15x08vHYfS+cUc9bMQu5+toZldz7P7qYuAFSVx9bVDpwB7mB7L82dfZxsrTxjzPhn3Ucniips/C2s+SdnhtVzvgDnfRkCuSM+7cu/2cDjr+2nICuD5//5IvKCGbz6Vgt//6tqvB4Pq66r4o3aQ3zz95sQgdmlOdQ0OKcLLcsPcsulC/jQ6WUDB9SNpKahg1+9tIeygkw+924bxzAmndiYwlhpr4Onb4WNj0DOZKdL6YxrwBdIuPrTWw7y2V9Wc8vy+fx93Bd1TUMn1656laZOZwrvsyuLWFiWxxu1h7hgXil+n4ffv7afjfsPcf7cEr57+SIqSrKHLWv93lauvPclojElwyu8fMt7Kc5JXJMxZuKxUBhrtdXwx1ug9lVngr2qG6DqemcPpjiqyrPbG7hgbik+7+DevObOPm568DV2N3ex+qbzKM0d/CUejSn/+fIefvDkdkLRGDddNIe/f/esgUHrvkiU1/a2cXZFETc8sJY3ag9xzyeWcM3PXuabH1jAZ86flbD0nY2dbDnQzofOmHoCN4gxZixZKIwHqs5eS6/cBzueBPHCzKVwyqVwynIoqhzFSyjhqOL3DT/8c7C9l9v+ewv/80Ydp5fnc88nlrB+byv/9tSb7G3p5vy5Jfx1RxNfe/8pfOGiOXzk/71Ae0+Y8+aU0NYT5v9eeQYZbiDtbOzkqntforkrxLNfvZDKEVofxpiTh4XCeNO8Ezb8GratgcatzrJJC51wmLfcOQ2oN+NtvcUfN9XzT49soCvkzO46b3IO58wq5oGX9pAb8PHCLe8hL5jBw6/uZeXvNiLi5NYN51Zy64cWcqCthyv+/UV6IzFau0P840Vz+Molp7zdn9wYMw5YKIxnLbtg+xPOZc+LoFHwBaHsDOckP9POgrLFUFgx7O6tw9lW385j62o5b24p580pwesRnthYRzDDy0XzJwHQE4pyxx+3sXzRFP64uZ77X9jNR8+cxobaNhrb+3hoxbu444/beKupi1XXvYPH1tfS3Rdspg1GAAAUYElEQVSloaOXmMIty+fTG47xxKY63rtgMounFwyqIRpTvJ6jD3wbY1LHQuFk0d0Cu551jnmorYa61yHS4zzmyXAOiCuZ51xKT4GSuVA8FwInZnqMcDTG7U9s46FX9xKNKb+68Z2cXVnE46/V8uXfvE6G1/lyzw74KMkJ0NjRRyQaoy8SIxJzPjtLZhTwd+fMZE5pLo+/tp9fvbyb3GAG75k/iX/58CKCGYMPzIvFFM8IobGzsZM1b9SxIm58xBjz9lgonKyiYWjYAgc3Q9Ob0PgmNG2HlrecFkW/rBLIK3POFJfrXueVQd5U57FwD4Q6nN1jgwVQMAPyy4edv+lQT5iO3jDlhVkAdIciXPD9vzC7NJufXHMmk/KCAOxv6+Hrv32DSXkBvnzxPP609SC/eHE3e5q7AeflP3LmNAThsfW1vGtWEV+95BTmTcmlsaOPbz6+ifr2XtbcfD59kSjb6zt456xi2rpDvPpWCz6v8LVH36C5K8RHzpzGD686AxGhsaMPn0cozPYnd/sbM0FZKEw0kZDT7dTkhsShWmf3144D0H4AupuP/hrZpTDzXKi8wJmzqWjWsCEBzjxOAZ/nqMdAxGLKG/sPUX+oh4qSbOZPyQPgvzbs56uPvj5ovqdsv5euUJQvvncuf93RyPq9bSw7dQqv7WvlYLuz+21ZfpBLFk7mgZf2cMO5lVxZVc4nfvYy4ajyj++Zw7VLK45ofRhjRmahkG7CvdBR51y6miAjy+li8gWgpw2aa5zuqbeed4IEIJAPk091LgXTIWcK5E52rrNLwZ/ljHWM4sC44bR0OS2AfS3diMAHTi/j26s38+TmgwB84LQynthUR0Vx
Nv/7QwsJRWIsmVFISY6fb6/ezAMv7cEjUJwT4LRp+fx5WwMlOQFml2az/WAHAZ+HuZNyueod05k7KQcR6A3HOH1aPh6PoKo8Ur2PB1/dR21LN3Mm5XDWzEL2tnQzOS/IexdMYunskhPxGzBmXLNQMImpOgGx+69Qv8nppmrYAn3twzxBnIDJyAR/NvhznOu8qUeOc/izRlXCrsZOlt/5Vz5y5jRu/9jp7GvppjQ3kPC//9+uq+XBV/Zw+8dOZ97kXF7Z1cy/P7eTlq4Qp07NIxpTXqhpZn9bz6DnffKdM/hfly7g5ode45ltDSyalsfCsjzW722jpqGT8sJMGjr6CEVifPKdM7jolElsq2/novmTOHVq/sDrdPZFqGnoZEFZ7qjHN3pCUYIZR29hHU04GiMcjR1xtj9jjoeFgjk2fR3QUe9cOg86rY1wtzM2Ee52LqFuCHU66x7aB627QeNOPerPhewS91IKWcXO9dD7mYW0RvwUFBQgwxzpfSyiMWX93laaOvqIKby4s4lfv7KXypJs9rZ0841LF3Dd0oqBlkP/cR89oSg/fuZNfvrc4PNql+YGmJofpLMvwp7mbiIxpSArgw8vnsYlCyfz3I5G6g/1cs6s4oHjR2YWZ3N6eT4b9x/i2lWvMqs0hy++dw61rT0cbO/lUE+Y9p4IGV4PZflBLls8ddDkiHB4AF5VWbOxnn99YivRmPI/N59P0ZCxlH0t3fzixd0sWzSFd1QUJdwu3aHIcQfK0XYGMCcfCwWTfJE+5/iLpjehZSd0NkJ3E3Q1Qlezc93dBLHI8K/h8R1ugQQLnAP6imZB8Rxnz6vsUqcLKyPLOY+FL/Oou+mGozE+8dMX2Lq/mR997FTed0qRM+V5NOQM5Is4R5b7nQPzqne30BOOMn9KHn/cVMcbtYc42NFHTsBLRXE28ybn8vTWgzy9+SChaAyvRyjMyqCpMzTofUty/PSGYxRkZdATitLc5Tzu9Qj5mRnkBn2EIzEaOvqIxJSzK4v40Oll7Gzs4uVdzexo6OSCuSV09kVYu7uVUybn8lZTF+fNLeHT58zkpV3NVO9upbU7xL6WbsJRJS/o4/dfOJf6Q708/tp+aho7uW5pBc+/2cTvN+znzqsX4/d6+N6arZw7p4RPvnMGp07NpzccpTsUpTArAxHh+Tcbue/5XXzg9DLK8oN8+TcbuOod01m5bP5Ai0dV+cv2RrL8Xs6uLEJEqGno5ImNdVzzzhmU2LQp45qFghkfVKG3bXBIdLe4LY+uw5dwl7O8eSe0vuV8gQ/H4wOv3zlCXDzOl7wIxKKHv/zjWzDDCea7e21NjduDa+rgS7BgYEylrTvECzXNLJlZwJS8IG81deERIarKm/Ud/O61/TS09/LvnzqLLL+X1/a1ccrkXMryg4O6kpo7+3h0XS2/fmUP+1qc82ucNbOQWaXZPLGpHlXlq5ecwpVV0/nlS7v5zh+2AJDhFU6blk9ZQSbTCjJ538LJfPaX1bT3hIkp5AR8lOT42d3sjN/MKMriQFsPMYWpBUEaO/roDcc4u6KILXXtdPZFyA368IhwqCc8sBMAQEFWBm3dYS46pZRNB9qZXphJdsDHX3c0AVBRnEVFSTYv1DQRjiqFWRlUVRTR0NHHVVXl5AUz+O26Wtq6Q5QXZvGJd87gxZ1N7DjYydSCTOZOzqEwy8/Ohk5ygz68HmHdnlYm5we5eMFkqmYW0tIVonpPKwvL8piUF6ClKzQQSufMKublt1rwinDxwkkDXXvtvWFaOkNk+r1MdveYC0djHOoJU5ztH/R76D+eZv3eVp7b3siiafm0doXYfOAQM4uzed/CyUwvykJVicZ00DQ0qjrotTYfOERNQyeXnTF10PIXapp4anM9X18+f1CrTVXZWtdBTWMnp0zO5ZQph1uNo93B41hZKJiTVyzqdE8174SeVqcLK9Ibd93t/Mev6nz591+8Ge7F7xzj0X/b6x98OxZxBuTbD7jX+53bnQ3A
kL8H8UBGtjNekpF5+LbX77xnLOq+v3sdiw25Hz3c/eb1O7sLF8yAwpnEsifRHM2ksLAEX3YhBPOJ+vOQjEw8Xh94vCjCE5sPUhqERaVeMrXH6cKLhMDjZdvBLp7e1sxZFUWcNbMYr8/D8zuaKS0qpmLmDG54ZBc5QT8/+dg8Yn2drH55M+u27mTelFymTJrM5t5Swt5MZk/K4aqq6TxavY83D3by9eXz+dZ/beYPrx/govml1DR0sr+th39+/3xygj6e2lzP3pZuTi8v4MqzyrnzmR3Ut/cS8HkHpnifUeQEx4a9rbT3RhCBypJsDh7qHQifeKW5AVq7QkRiytxJOexv66E7wXpDFWZl8LEl5XT2RXikeh/u4TNMyQsSicVo7gqhCufPLeHT51TwwIu72bj/EB29YcryM48YjwpmeOgNx8j2e/n8hbN5/LX97GzsojArg2WLygDlsXX7Oa08n/efOplQJMZdz9QQisa44qxysvxe3jzYwdxJuTz06l4iMeXiBZO4bmkl6/e2UpiVweOv7Wf93jYAfB7h8xfO5swZBTyxsZ7H1teS4fUwsziLhWV5FGT5KS/M5NLTyphakHnMf04DH2ULBWOOUTTsjKm0HzgcFD2th1s1/WMsoS5nXY/Hbam4LRZPf8vFM/h+/0B9NOwET9teaNszwuB+iuVPd4IqkOtcfAGIRtBYmFioF2+4A+1th74OpK/D/ZncLr1AHmQVQWYR+LNRj4997WGi+Jg5qQBPIJceTxYbG6NUTJ3CpOIiVDw0dEXpCCnlxbn09PYS6u1mUhb0RD38bU8Pf9jSypS8IB9cWEhdcxu9PV1keaIsmFZAdzjG1vou5pcV0BeDZ7Y2sOmtA5TIIS4t72OWtxFPTzPdvX34JEaG10PYm8n25iidMT/qz2ZKSRGeQDb1vT5KCgupmlvOgW4PgaxcZkwuoa7Hw3ef2sPa/SEmFRdy8RmV7GvtZc3GOlSVD502ic21zdQ2teMjyvmz81lQmslDr+zC6/UxtTifLQd7qJo9hXNmFfPTp18nT7rIo5tiaacis4f3z8mirKSIZ3Z18/yeXroI0ufJ4oJFlUggh+2t8Hp9iPa+GB19ThfsbZefyqfPqTiuX7OFgjHjXaTPOcVr7yH30uZch3sGtzQ05nwBx+/95fM7LaVYxLmoAnr4OaFO59iVLvf4FX+2cwnmQ2ahE1hdjdBU44wJte93diDo63C63zw+5+ILOF/8wTwnMPzukfT9OyD0HnKCs7vF6QKMRiAWdgJwaKsrFcQDeeWQM8lpHXp8zjYJdRHq6STS20EmfUi4e+QuykR8QTQWRWLh5NSekIA/h0hGNu2xAH3n/jNl533q+F7JTsdpzDjnC0BOqXOZiKKRw3ur9V/CXW4XW/RwoPV36/kCTpj07+nm8Tg7GfgC7g4GfkCP7LLTmNOtl1XstHh8iY9697uXw/WFD7cAB8a2Rrgd7kbEG9dF6XODJ+Nw12V/CPXv1BDpc2oO5LrhWgDZxU43YjDPCde+Tmf2gb5Od3sNvd+Br6+DolAnlCV/OvukhoKILAPuBLzAf6jq7UMeDwC/BM4CmoGPq+ruZNZkjEkRrw8yC5zLeOTNGPv6gvkw8skZUy5p52gWES9wD7AcWAhcIyILh6x2I9CqqnOAHwF3JKseY4wxR5e0UADOBmpUdZeqhoCHgcuHrHM58IB7+7fAe+VE74dljDFm1JIZCtOAfXH3a91lCddR1QhwCCge+kIiskJEqkWkurGxMUnlGmOMSWYoJPqPf+juCKNZB1W9T1WrVLWqtHSCDsoZY8w4kMxQqAWmx90vBw4Mt46I+IB8oCWJNRljjBlBMkNhLTBXRCpFxA9cDawess5q4Fr39hXAn/VkO3DCGGMmkKTtkqqqERG5CXgSZ5fUVaq6WURuA6pVdTXwc+BXIlKD00K4Oln1GGOMObqkHqegqmuANUOW3Rp3uxe4Mpk1GGOMGb2TbpoLEWkE9hzn00uA
phNYzok0Xmuzuo6N1XXsxmttE62umap61D11TrpQeDtEpHo0c3+MhfFam9V1bKyuYzdea0vXupI50GyMMeYkY6FgjDFmQLqFwn1jXcAIxmttVtexsbqO3XitLS3rSqsxBWOMMSNLt5aCMcaYEVgoGGOMGZA2oSAiy0Rku4jUiMjKMaxjuog8KyJbRWSziHzRXf5tEdkvIhvcy6VjUNtuEdnovn+1u6xIRJ4WkR3udWGKazolbptsEJF2EfnSWG0vEVklIg0isiluWcJtJI673M/cGyKyJMV1/UBEtrnv/biIFLjLK0SkJ27b3Zviuob93YnILe722i4i709WXSPU9pu4unaLyAZ3eUq22QjfD6n7jKnqhL/gTLOxE5iFc0a+14GFY1RLGbDEvZ0LvIlzEqJvA18d4+20GygZsuz7wEr39krgjjH+PdYDM8dqewEXAEuATUfbRsClwBM4swG/C3glxXVdAvjc23fE1VURv94YbK+Evzv37+B1IABUun+z3lTWNuTxfwNuTeU2G+H7IWWfsXRpKYzmhD8poap1qrrevd0BbOXI80yMJ/EnQnoA+PAY1vJeYKeqHu8R7W+bqj7PkTP5DreNLgd+qY6XgQIRKUtVXar6lDrnKQF4GWem4pQaZnsN53LgYVXtU9W3gBqcv92U1yYiAlwFPJSs9x+mpuG+H1L2GUuXUBjNCX9STkQqgDOBV9xFN7lNwFWp7qZxKfCUiKwTkRXussmqWgfOBxaYNAZ19buawX+kY729+g23jcbT5+4GnP8o+1WKyGsi8pyInD8G9ST63Y2n7XU+cFBVd8QtS+k2G/L9kLLPWLqEwqhO5pNKIpIDPAZ8SVXbgX8HZgOLgTqcpmuqnauqS3DOq/0FEblgDGpISJzp1y8DHnUXjYftdTTj4nMnIt8AIsCv3UV1wAxVPRP4CvCgiOSlsKThfnfjYnu5rmHwPyAp3WYJvh+GXTXBsre1zdIlFEZzwp+UEZEMnF/4r1X1dwCqelBVo6oaA35GEpvNw1HVA+51A/C4W8PB/uaoe92Q6rpcy4H1qnrQrXHMt1ec4bbRmH/uRORa4IPAJ9XthHa7Z5rd2+tw+u7npaqmEX53Y769YOCEXx8FftO/LJXbLNH3Ayn8jKVLKIzmhD8p4fZV/hzYqqo/jFse3w/4EWDT0Ocmua5sEcntv40zSLmJwSdCuhb4r1TWFWfQf25jvb2GGG4brQY+7e4h8i7gUH8XQCqIyDLg68Blqtodt7xURLzu7VnAXGBXCusa7ne3GrhaRAIiUunW9Wqq6opzMbBNVWv7F6Rqmw33/UAqP2PJHk0fLxecUfo3cRL+G2NYx3k4zbs3gA3u5VLgV8BGd/lqoCzFdc3C2fPjdWBz/zYCioFngB3uddEYbLMsoBnIj1s2JtsLJ5jqgDDOf2k3DreNcJr297ifuY1AVYrrqsHpb+7/nN3rrvsx93f8OrAe+FCK6xr2dwd8w91e24Hlqf5dust/AXxuyLop2WYjfD+k7DNm01wYY4wZkC7dR8YYY0bBQsEYY8wACwVjjDEDLBSMMcYMsFAwxhgzwELBmCFEJCqDZ2Y9YbPqurNtjuUxFcaMyDfWBRgzDvWo6uKxLsKYsWAtBWNGyZ1f/w4RedW9zHGXzxSRZ9wJ3p4RkRnu8sninMfgdfey1H0pr4j8zJ0v/ykRyRyzH8qYISwUjDlS5pDuo4/HPdauqmcDdwM/dpfdjTN98ek4k87d5S6/C3hOVc/Ambd/s7t8LnCPqp4KtOEcLWvMuGBHNBszhIh0qmpOguW7gfeo6i530rJ6VS0WkSacqRrC7vI6VS0RkUagXFX74l6jAnhaVee6978OZKjqvyT/JzPm6KylYMyx0WFuD7dOIn1xt6PY2J4ZRywUjDk2H4+7fsm9/SLOzLsAnwT+5t5+Bvg8gIh4U3zOAmOOi/2HYsyRMsU9Ybvrj6rav1tqQERewfmH6hp32c3AKhH5GtAIXO8u/yJw
n4jciNMi+DzOrJzGjFs2pmDMKLljClWq2jTWtRiTLNZ9ZIwxZoC1FIwxxgywloIxxpgBFgrGGGMGWCgYY4wZYKFgjDFmgIWCMcaYAf8fm1o4G/g6lp0AAAAASUVORK5CYII=\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "import matplotlib.pyplot as plt\n",
    "# 绘制训练 & 验证的损失值\n",
    "plt.plot(history.history['loss'])\n",
    "plt.plot(history.history['val_loss'])\n",
    "plt.title('Model loss')\n",
    "plt.ylabel('Loss')\n",
    "plt.xlabel('Epoch')\n",
    "plt.legend(['Train', 'Test'], loc='upper left')\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 保存模型 & 模型可视化 & 加载模型"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "from keras.utils import plot_model\n",
    "from keras.models import load_model\n",
    "# 保存模型\n",
    "model.save('model_MLP.h5')  # creates a HDF5 file 'model_MLP.h5'\n",
    "\n",
    "#模型可视化 pip install pydot\n",
    "plot_model(model, to_file='model_MLP.png', show_shapes=True)\n",
    "\n",
    "# 加载模型\n",
    "model = load_model('model_MLP.h5')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## 模型的预测功能"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 39,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[0.07493158]\n",
      " [0.2893982 ]\n",
      " [0.39078662]\n",
      " [0.53390676]\n",
      " [0.4262308 ]\n",
      " [0.33204186]\n",
      " [0.50946146]\n",
      " [0.4132849 ]\n",
      " [0.2729963 ]\n",
      " [0.27879465]\n",
      " [0.32446215]\n",
      " [0.3486992 ]\n",
      " [0.25756323]\n",
      " [0.6590541 ]\n",
      " [0.24239415]\n",
      " [0.33537012]\n",
      " [0.41441542]\n",
      " [0.3994282 ]\n",
      " [0.24715093]\n",
      " [0.2789744 ]\n",
      " [0.16952842]\n",
      " [0.23424083]\n",
      " [0.3418997 ]\n",
      " [0.31744128]\n",
      " [0.41457468]\n",
      " [0.31370088]\n",
      " [0.53618276]\n",
      " [0.5057087 ]\n",
      " [0.1993371 ]\n",
      " [0.38090742]\n",
      " [0.33472478]\n",
      " [0.23952556]\n",
      " [0.75942624]\n",
      " [0.39149237]\n",
      " [0.21550557]\n",
      " [0.05032485]\n",
      " [0.17014757]\n",
      " [0.28126177]\n",
      " [0.26160812]\n",
      " [0.48688784]\n",
      " [0.37928256]\n",
      " [0.49463415]\n",
      " [0.19800049]\n",
      " [0.64089465]\n",
      " [0.57231766]\n",
      " [0.40187317]\n",
      " [0.52594954]\n",
      " [0.30410153]\n",
      " [0.23205215]\n",
      " [0.3788426 ]\n",
      " [0.6003846 ]\n",
      " [0.3352767 ]\n",
      " [0.15056258]\n",
      " [0.2847675 ]\n",
      " [0.5697139 ]\n",
      " [0.47074068]\n",
      " [0.17245734]\n",
      " [0.7106333 ]\n",
      " [0.7865126 ]\n",
      " [0.39577943]\n",
      " [0.2971291 ]\n",
      " [0.2173678 ]\n",
      " [0.22693393]\n",
      " [0.29524282]\n",
      " [0.41086504]\n",
      " [0.4469202 ]\n",
      " [0.21003845]\n",
      " [0.39480698]\n",
      " [0.21039695]\n",
      " [0.08688372]\n",
      " [0.27861476]\n",
      " [0.4643402 ]\n",
      " [0.33623737]\n",
      " [0.24264556]\n",
      " [0.44484848]\n",
      " [0.31478217]\n",
      " [0.3557385 ]\n",
      " [0.40444526]\n",
      " [0.53992856]\n",
      " [0.11507987]\n",
      " [0.36255306]\n",
      " [0.6483772 ]\n",
      " [0.3241757 ]\n",
      " [0.18527794]\n",
      " [0.32055545]\n",
      " [0.2967106 ]\n",
      " [0.22943214]\n",
      " [0.41828594]\n",
      " [0.44124618]\n",
      " [0.42295867]\n",
      " [0.29182857]\n",
      " [0.41147852]\n",
      " [0.3989495 ]\n",
      " [0.56162333]\n",
      " [0.55789036]\n",
      " [0.20891178]\n",
      " [0.6080386 ]\n",
      " [0.45429915]\n",
      " [0.38438904]\n",
      " [0.69470245]\n",
      " [0.5454582 ]\n",
      " [0.26201004]]\n",
      "[[ 8.926963 ]\n",
      " [18.44928  ]\n",
      " [22.950926 ]\n",
      " [29.30546  ]\n",
      " [24.524647 ]\n",
      " [20.342659 ]\n",
      " [28.220089 ]\n",
      " [23.949848 ]\n",
      " [17.721037 ]\n",
      " [17.978483 ]\n",
      " [20.00612  ]\n",
      " [21.082245 ]\n",
      " [17.035809 ]\n",
      " [34.862    ]\n",
      " [16.3623   ]\n",
      " [20.490435 ]\n",
      " [24.000044 ]\n",
      " [23.33461  ]\n",
      " [16.573502 ]\n",
      " [17.986465 ]\n",
      " [13.127063 ]\n",
      " [16.000294 ]\n",
      " [20.780348 ]\n",
      " [19.694393 ]\n",
      " [24.007114 ]\n",
      " [19.52832  ]\n",
      " [29.406513 ]\n",
      " [28.053465 ]\n",
      " [14.450567 ]\n",
      " [22.51229  ]\n",
      " [20.46178  ]\n",
      " [16.234936 ]\n",
      " [39.318523 ]\n",
      " [22.98226  ]\n",
      " [15.1684475]\n",
      " [ 7.8344235]\n",
      " [13.154552 ]\n",
      " [18.088024 ]\n",
      " [17.2154   ]\n",
      " [27.217821 ]\n",
      " [22.440147 ]\n",
      " [27.561756 ]\n",
      " [14.391222 ]\n",
      " [34.05572  ]\n",
      " [31.010904 ]\n",
      " [23.443169 ]\n",
      " [28.952158 ]\n",
      " [19.102108 ]\n",
      " [15.903116 ]\n",
      " [22.42061  ]\n",
      " [32.257076 ]\n",
      " [20.486286 ]\n",
      " [12.28498  ]\n",
      " [18.243677 ]\n",
      " [30.895296 ]\n",
      " [26.500885 ]\n",
      " [13.257107 ]\n",
      " [37.15212  ]\n",
      " [40.52116  ]\n",
      " [23.172606 ]\n",
      " [18.792532 ]\n",
      " [15.251131 ]\n",
      " [15.675867 ]\n",
      " [18.708782 ]\n",
      " [23.84241  ]\n",
      " [25.443256 ]\n",
      " [14.925708 ]\n",
      " [23.129429 ]\n",
      " [14.941625 ]\n",
      " [ 9.457637 ]\n",
      " [17.970495 ]\n",
      " [26.216705 ]\n",
      " [20.52894  ]\n",
      " [16.373463 ]\n",
      " [25.351273 ]\n",
      " [19.576328 ]\n",
      " [21.394789 ]\n",
      " [23.557371 ]\n",
      " [29.572826 ]\n",
      " [10.709547 ]\n",
      " [21.697357 ]\n",
      " [34.387947 ]\n",
      " [19.9934   ]\n",
      " [13.826341 ]\n",
      " [19.832663 ]\n",
      " [18.773952 ]\n",
      " [15.786787 ]\n",
      " [24.171896 ]\n",
      " [25.191332 ]\n",
      " [24.379364 ]\n",
      " [18.55719  ]\n",
      " [23.869646 ]\n",
      " [23.313356 ]\n",
      " [30.536076 ]\n",
      " [30.37033  ]\n",
      " [14.875684 ]\n",
      " [32.596912 ]\n",
      " [25.770882 ]\n",
      " [22.666872 ]\n",
      " [36.444786 ]\n",
      " [29.818344 ]\n",
      " [17.233246 ]]\n"
     ]
    }
   ],
   "source": [
    "# 预测\n",
    "y_new = model.predict(x_valid)\n",
    "# 反归一化\n",
    "min_max_scaler.fit(y_valid_pd)\n",
    "y_new = min_max_scaler.inverse_transform(y_new)\n",
    "\n"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
