{
  "nbformat": 4,
  "nbformat_minor": 0,
  "metadata": {
    "kernelspec": {
      "display_name": "Python 3",
      "language": "python",
      "name": "python3"
    },
    "language_info": {
      "codemirror_mode": {
        "name": "ipython",
        "version": 3
      },
      "file_extension": ".py",
      "mimetype": "text/x-python",
      "name": "python",
      "nbconvert_exporter": "python",
      "pygments_lexer": "ipython3",
      "version": "3.6.9"
    },
    "colab": {
      "name": "LSTM simple RNN Bi-LSTM实现PE回归 regression analysis.ipynb",
      "provenance": [],
      "collapsed_sections": [],
      "toc_visible": true,
      "include_colab_link": true
    },
    "accelerator": "GPU"
  },
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "view-in-github",
        "colab_type": "text"
      },
      "source": [
        "<a href=\"https://colab.research.google.com/github/sunyingjian/AI-in-well-logging/blob/master/LSTM_simple_RNN_Bi_LSTM%E5%AE%9E%E7%8E%B0PE%E5%9B%9E%E5%BD%92_regression_analysis.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "N5hdYUYzEAwG",
        "outputId": "08f3fac3-c35b-4c9d-8e14-e79697bf595b"
      },
      "source": [
        "!git clone https://github.com/sunyingjian/numpy-.git"
      ],
      "execution_count": 1,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Cloning into 'numpy-'...\n",
            "remote: Enumerating objects: 12, done.\u001b[K\n",
            "remote: Counting objects: 100% (12/12), done.\u001b[K\n",
            "remote: Compressing objects: 100% (12/12), done.\u001b[K\n",
            "remote: Total 415 (delta 3), reused 0 (delta 0), pack-reused 403\u001b[K\n",
            "Receiving objects: 100% (415/415), 202.24 MiB | 34.58 MiB/s, done.\n",
            "Resolving deltas: 100% (117/117), done.\n",
            "Checking out files: 100% (206/206), done.\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "vxwqeGDWEGli",
        "outputId": "886f3dc5-eee0-4a18-f785-8f76d587de9d"
      },
      "source": [
        "!nvidia-smi"
      ],
      "execution_count": 2,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Mon Feb  1 08:22:16 2021       \n",
            "+-----------------------------------------------------------------------------+\n",
            "| NVIDIA-SMI 418.67       Driver Version: 418.67       CUDA Version: 10.1     |\n",
            "|-------------------------------+----------------------+----------------------+\n",
            "| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |\n",
            "| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |\n",
            "|===============================+======================+======================|\n",
            "|   0  Tesla T4            Off  | 00000000:00:04.0 Off |                    0 |\n",
            "| N/A   64C    P8    11W /  70W |      0MiB / 15079MiB |      0%      Default |\n",
            "+-------------------------------+----------------------+----------------------+\n",
            "                                                                               \n",
            "+-----------------------------------------------------------------------------+\n",
            "| Processes:                                                       GPU Memory |\n",
            "|  GPU       PID   Type   Process name                             Usage      |\n",
            "|=============================================================================|\n",
            "|  No running processes found                                                 |\n",
            "+-----------------------------------------------------------------------------+\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "bgwiHTAfDgRB"
      },
      "source": [
        "import tensorflow as tf\n",
        "import pandas as pd\n",
        "import numpy as np\n",
        "from matplotlib import pyplot as plt\n",
        "from tensorflow import keras\n",
        "import tensorflow_addons as tfa\n",
        "%matplotlib inline"
      ],
      "execution_count": 3,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 35
        },
        "id": "Oo01BGm0DgRH",
        "outputId": "3b1d77e5-8e8e-4f9f-d6d4-c8c7bf7851f6"
      },
      "source": [
        "tf.__version__"
      ],
      "execution_count": 4,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "application/vnd.google.colaboratory.intrinsic+json": {
              "type": "string"
            },
            "text/plain": [
              "'2.4.1'"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 4
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 419
        },
        "id": "vOzW0DaIDgRH",
        "outputId": "2b4f2019-9e5b-4066-ef8a-bf2480095f7f"
      },
      "source": [
        "data = pd.read_csv('/content/numpy-/TCN data.csv')\n",
        "data"
      ],
      "execution_count": 5,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>Unnamed: 0</th>\n",
              "      <th>Formation</th>\n",
              "      <th>Depth</th>\n",
              "      <th>GR</th>\n",
              "      <th>ILD_log10</th>\n",
              "      <th>DeltaPHI</th>\n",
              "      <th>PHIND</th>\n",
              "      <th>NM_M</th>\n",
              "      <th>RELPOS</th>\n",
              "      <th>Facies</th>\n",
              "      <th>Well Name</th>\n",
              "      <th>PE</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>0</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.399818</td>\n",
              "      <td>0.298966</td>\n",
              "      <td>0.458149</td>\n",
              "      <td>0.776042</td>\n",
              "      <td>0.219321</td>\n",
              "      <td>0</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.557385</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>1</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.400729</td>\n",
              "      <td>0.302738</td>\n",
              "      <td>0.456157</td>\n",
              "      <td>0.888021</td>\n",
              "      <td>0.231865</td>\n",
              "      <td>0</td>\n",
              "      <td>0.978788</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.494046</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>2</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.401639</td>\n",
              "      <td>0.306417</td>\n",
              "      <td>0.454165</td>\n",
              "      <td>0.903646</td>\n",
              "      <td>0.241224</td>\n",
              "      <td>0</td>\n",
              "      <td>0.956566</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.430707</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3</th>\n",
              "      <td>3</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.402550</td>\n",
              "      <td>0.339247</td>\n",
              "      <td>0.452173</td>\n",
              "      <td>0.880208</td>\n",
              "      <td>0.242479</td>\n",
              "      <td>0</td>\n",
              "      <td>0.935354</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.418039</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>4</th>\n",
              "      <td>4</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.403461</td>\n",
              "      <td>0.285601</td>\n",
              "      <td>0.446860</td>\n",
              "      <td>0.869792</td>\n",
              "      <td>0.246049</td>\n",
              "      <td>0</td>\n",
              "      <td>0.914141</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.405371</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>...</th>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3227</th>\n",
              "      <td>3227</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.996357</td>\n",
              "      <td>0.155858</td>\n",
              "      <td>0.646070</td>\n",
              "      <td>0.565833</td>\n",
              "      <td>0.129373</td>\n",
              "      <td>1</td>\n",
              "      <td>0.681818</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.432860</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3228</th>\n",
              "      <td>3228</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.997268</td>\n",
              "      <td>0.145818</td>\n",
              "      <td>0.650055</td>\n",
              "      <td>0.576589</td>\n",
              "      <td>0.144021</td>\n",
              "      <td>1</td>\n",
              "      <td>0.673737</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.398277</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3229</th>\n",
              "      <td>3229</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.998179</td>\n",
              "      <td>0.169829</td>\n",
              "      <td>0.657359</td>\n",
              "      <td>0.594401</td>\n",
              "      <td>0.144021</td>\n",
              "      <td>1</td>\n",
              "      <td>0.665657</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.378769</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3230</th>\n",
              "      <td>3230</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.999089</td>\n",
              "      <td>0.177978</td>\n",
              "      <td>0.658023</td>\n",
              "      <td>0.598516</td>\n",
              "      <td>0.138135</td>\n",
              "      <td>1</td>\n",
              "      <td>0.657576</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.373955</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3231</th>\n",
              "      <td>3231</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.171282</td>\n",
              "      <td>0.661343</td>\n",
              "      <td>0.586172</td>\n",
              "      <td>0.118065</td>\n",
              "      <td>1</td>\n",
              "      <td>0.649495</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.392070</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>3232 rows × 12 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "      Unnamed: 0  Formation     Depth  ...  Facies  Well Name        PE\n",
              "0              0   0.076923  0.399818  ...       3          7  0.557385\n",
              "1              1   0.076923  0.400729  ...       3          7  0.494046\n",
              "2              2   0.076923  0.401639  ...       3          7  0.430707\n",
              "3              3   0.076923  0.402550  ...       3          7  0.418039\n",
              "4              4   0.076923  0.403461  ...       3          7  0.405371\n",
              "...          ...        ...       ...  ...     ...        ...       ...\n",
              "3227        3227   0.923077  0.996357  ...       5          0  0.432860\n",
              "3228        3228   0.923077  0.997268  ...       5          0  0.398277\n",
              "3229        3229   0.923077  0.998179  ...       5          0  0.378769\n",
              "3230        3230   0.923077  0.999089  ...       5          0  0.373955\n",
              "3231        3231   0.923077  1.000000  ...       5          0  0.392070\n",
              "\n",
              "[3232 rows x 12 columns]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 5
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "2v_WkA58DgRH",
        "outputId": "1c526bef-d083-495f-b852-c995aad008ff"
      },
      "source": [
        "data['Well Name'].value_counts()"
      ],
      "execution_count": 6,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "1    501\n",
              "7    471\n",
              "3    463\n",
              "2    461\n",
              "6    449\n",
              "4    415\n",
              "0    404\n",
              "5     68\n",
              "Name: Well Name, dtype: int64"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 6
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "h6AeS6K5DgRI"
      },
      "source": [
        "# Split the dataset (分割数据集)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "3Rjwr3IbDgRI"
      },
      "source": [
        "test_data = data.loc[data['Well Name']==1]"
      ],
      "execution_count": 7,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 419
        },
        "id": "mkNVVBTUDgRI",
        "outputId": "bcbef2c9-8ad5-4f2e-ae98-4f4ef310bb73"
      },
      "source": [
        "test_data"
      ],
      "execution_count": 8,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>Unnamed: 0</th>\n",
              "      <th>Formation</th>\n",
              "      <th>Depth</th>\n",
              "      <th>GR</th>\n",
              "      <th>ILD_log10</th>\n",
              "      <th>DeltaPHI</th>\n",
              "      <th>PHIND</th>\n",
              "      <th>NM_M</th>\n",
              "      <th>RELPOS</th>\n",
              "      <th>Facies</th>\n",
              "      <th>Well Name</th>\n",
              "      <th>PE</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>1381</th>\n",
              "      <td>1381</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.000000</td>\n",
              "      <td>0.489844</td>\n",
              "      <td>0.465117</td>\n",
              "      <td>0.682292</td>\n",
              "      <td>0.181401</td>\n",
              "      <td>0</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>2</td>\n",
              "      <td>1</td>\n",
              "      <td>0.434634</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1382</th>\n",
              "      <td>1382</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.000911</td>\n",
              "      <td>0.369284</td>\n",
              "      <td>0.445682</td>\n",
              "      <td>0.776042</td>\n",
              "      <td>0.241224</td>\n",
              "      <td>0</td>\n",
              "      <td>0.986869</td>\n",
              "      <td>2</td>\n",
              "      <td>1</td>\n",
              "      <td>0.411705</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1383</th>\n",
              "      <td>1383</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.001821</td>\n",
              "      <td>0.414073</td>\n",
              "      <td>0.435586</td>\n",
              "      <td>0.830729</td>\n",
              "      <td>0.269206</td>\n",
              "      <td>0</td>\n",
              "      <td>0.973737</td>\n",
              "      <td>2</td>\n",
              "      <td>1</td>\n",
              "      <td>0.400811</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1384</th>\n",
              "      <td>1384</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.002732</td>\n",
              "      <td>0.352277</td>\n",
              "      <td>0.415863</td>\n",
              "      <td>0.911458</td>\n",
              "      <td>0.299118</td>\n",
              "      <td>0</td>\n",
              "      <td>0.960606</td>\n",
              "      <td>2</td>\n",
              "      <td>1</td>\n",
              "      <td>0.378389</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1385</th>\n",
              "      <td>1385</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.003643</td>\n",
              "      <td>0.336421</td>\n",
              "      <td>0.401467</td>\n",
              "      <td>0.872396</td>\n",
              "      <td>0.296223</td>\n",
              "      <td>0</td>\n",
              "      <td>0.946465</td>\n",
              "      <td>2</td>\n",
              "      <td>1</td>\n",
              "      <td>0.367241</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>...</th>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1877</th>\n",
              "      <td>1877</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.484517</td>\n",
              "      <td>0.180390</td>\n",
              "      <td>0.525660</td>\n",
              "      <td>0.419271</td>\n",
              "      <td>0.350258</td>\n",
              "      <td>1</td>\n",
              "      <td>0.064646</td>\n",
              "      <td>3</td>\n",
              "      <td>1</td>\n",
              "      <td>0.357487</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1878</th>\n",
              "      <td>1878</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.485428</td>\n",
              "      <td>0.129315</td>\n",
              "      <td>0.530370</td>\n",
              "      <td>0.546875</td>\n",
              "      <td>0.248943</td>\n",
              "      <td>1</td>\n",
              "      <td>0.049495</td>\n",
              "      <td>3</td>\n",
              "      <td>1</td>\n",
              "      <td>0.392450</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1879</th>\n",
              "      <td>1879</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.486339</td>\n",
              "      <td>0.098091</td>\n",
              "      <td>0.532214</td>\n",
              "      <td>0.598958</td>\n",
              "      <td>0.167892</td>\n",
              "      <td>1</td>\n",
              "      <td>0.034343</td>\n",
              "      <td>8</td>\n",
              "      <td>1</td>\n",
              "      <td>0.420699</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1880</th>\n",
              "      <td>1880</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.487250</td>\n",
              "      <td>0.110403</td>\n",
              "      <td>0.536914</td>\n",
              "      <td>0.648438</td>\n",
              "      <td>0.126402</td>\n",
              "      <td>1</td>\n",
              "      <td>0.019192</td>\n",
              "      <td>8</td>\n",
              "      <td>1</td>\n",
              "      <td>0.429440</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1881</th>\n",
              "      <td>1881</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.488160</td>\n",
              "      <td>0.127233</td>\n",
              "      <td>0.544241</td>\n",
              "      <td>0.695312</td>\n",
              "      <td>0.130261</td>\n",
              "      <td>1</td>\n",
              "      <td>0.005051</td>\n",
              "      <td>8</td>\n",
              "      <td>1</td>\n",
              "      <td>0.420319</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>501 rows × 12 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "      Unnamed: 0  Formation     Depth  ...  Facies  Well Name        PE\n",
              "1381        1381   0.076923  0.000000  ...       2          1  0.434634\n",
              "1382        1382   0.076923  0.000911  ...       2          1  0.411705\n",
              "1383        1383   0.076923  0.001821  ...       2          1  0.400811\n",
              "1384        1384   0.076923  0.002732  ...       2          1  0.378389\n",
              "1385        1385   0.076923  0.003643  ...       2          1  0.367241\n",
              "...          ...        ...       ...  ...     ...        ...       ...\n",
              "1877        1877   0.923077  0.484517  ...       3          1  0.357487\n",
              "1878        1878   0.923077  0.485428  ...       3          1  0.392450\n",
              "1879        1879   0.923077  0.486339  ...       8          1  0.420699\n",
              "1880        1880   0.923077  0.487250  ...       8          1  0.429440\n",
              "1881        1881   0.923077  0.488160  ...       8          1  0.420319\n",
              "\n",
              "[501 rows x 12 columns]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 8
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "_hcp9rmKDgRI"
      },
      "source": [
        "test_data = test_data.drop(columns=['Unnamed: 0','Well Name'])"
      ],
      "execution_count": 9,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 419
        },
        "id": "ZJYBE6X2DgRJ",
        "outputId": "ea5d1c54-76de-4905-d75f-3f51be43e388"
      },
      "source": [
        "test_data"
      ],
      "execution_count": 10,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>Formation</th>\n",
              "      <th>Depth</th>\n",
              "      <th>GR</th>\n",
              "      <th>ILD_log10</th>\n",
              "      <th>DeltaPHI</th>\n",
              "      <th>PHIND</th>\n",
              "      <th>NM_M</th>\n",
              "      <th>RELPOS</th>\n",
              "      <th>Facies</th>\n",
              "      <th>PE</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>1381</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.000000</td>\n",
              "      <td>0.489844</td>\n",
              "      <td>0.465117</td>\n",
              "      <td>0.682292</td>\n",
              "      <td>0.181401</td>\n",
              "      <td>0</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>2</td>\n",
              "      <td>0.434634</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1382</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.000911</td>\n",
              "      <td>0.369284</td>\n",
              "      <td>0.445682</td>\n",
              "      <td>0.776042</td>\n",
              "      <td>0.241224</td>\n",
              "      <td>0</td>\n",
              "      <td>0.986869</td>\n",
              "      <td>2</td>\n",
              "      <td>0.411705</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1383</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.001821</td>\n",
              "      <td>0.414073</td>\n",
              "      <td>0.435586</td>\n",
              "      <td>0.830729</td>\n",
              "      <td>0.269206</td>\n",
              "      <td>0</td>\n",
              "      <td>0.973737</td>\n",
              "      <td>2</td>\n",
              "      <td>0.400811</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1384</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.002732</td>\n",
              "      <td>0.352277</td>\n",
              "      <td>0.415863</td>\n",
              "      <td>0.911458</td>\n",
              "      <td>0.299118</td>\n",
              "      <td>0</td>\n",
              "      <td>0.960606</td>\n",
              "      <td>2</td>\n",
              "      <td>0.378389</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1385</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.003643</td>\n",
              "      <td>0.336421</td>\n",
              "      <td>0.401467</td>\n",
              "      <td>0.872396</td>\n",
              "      <td>0.296223</td>\n",
              "      <td>0</td>\n",
              "      <td>0.946465</td>\n",
              "      <td>2</td>\n",
              "      <td>0.367241</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>...</th>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1877</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.484517</td>\n",
              "      <td>0.180390</td>\n",
              "      <td>0.525660</td>\n",
              "      <td>0.419271</td>\n",
              "      <td>0.350258</td>\n",
              "      <td>1</td>\n",
              "      <td>0.064646</td>\n",
              "      <td>3</td>\n",
              "      <td>0.357487</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1878</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.485428</td>\n",
              "      <td>0.129315</td>\n",
              "      <td>0.530370</td>\n",
              "      <td>0.546875</td>\n",
              "      <td>0.248943</td>\n",
              "      <td>1</td>\n",
              "      <td>0.049495</td>\n",
              "      <td>3</td>\n",
              "      <td>0.392450</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1879</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.486339</td>\n",
              "      <td>0.098091</td>\n",
              "      <td>0.532214</td>\n",
              "      <td>0.598958</td>\n",
              "      <td>0.167892</td>\n",
              "      <td>1</td>\n",
              "      <td>0.034343</td>\n",
              "      <td>8</td>\n",
              "      <td>0.420699</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1880</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.487250</td>\n",
              "      <td>0.110403</td>\n",
              "      <td>0.536914</td>\n",
              "      <td>0.648438</td>\n",
              "      <td>0.126402</td>\n",
              "      <td>1</td>\n",
              "      <td>0.019192</td>\n",
              "      <td>8</td>\n",
              "      <td>0.429440</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1881</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.488160</td>\n",
              "      <td>0.127233</td>\n",
              "      <td>0.544241</td>\n",
              "      <td>0.695312</td>\n",
              "      <td>0.130261</td>\n",
              "      <td>1</td>\n",
              "      <td>0.005051</td>\n",
              "      <td>8</td>\n",
              "      <td>0.420319</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>501 rows × 10 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "      Formation     Depth        GR  ...    RELPOS  Facies        PE\n",
              "1381   0.076923  0.000000  0.489844  ...  1.000000       2  0.434634\n",
              "1382   0.076923  0.000911  0.369284  ...  0.986869       2  0.411705\n",
              "1383   0.076923  0.001821  0.414073  ...  0.973737       2  0.400811\n",
              "1384   0.076923  0.002732  0.352277  ...  0.960606       2  0.378389\n",
              "1385   0.076923  0.003643  0.336421  ...  0.946465       2  0.367241\n",
              "...         ...       ...       ...  ...       ...     ...       ...\n",
              "1877   0.923077  0.484517  0.180390  ...  0.064646       3  0.357487\n",
              "1878   0.923077  0.485428  0.129315  ...  0.049495       3  0.392450\n",
              "1879   0.923077  0.486339  0.098091  ...  0.034343       8  0.420699\n",
              "1880   0.923077  0.487250  0.110403  ...  0.019192       8  0.429440\n",
              "1881   0.923077  0.488160  0.127233  ...  0.005051       8  0.420319\n",
              "\n",
              "[501 rows x 10 columns]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 10
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "Agf5N4ndDgRJ"
      },
      "source": [
        "# Row label of the first sample belonging to the hold-out well ('Well Name' == 1)\n",
        "index=data[data['Well Name'].isin([1])].index[0]"
      ],
      "execution_count": 11,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "08Wj8B1yDgRJ",
        "outputId": "4f835d07-dd93-4e93-a018-b1b5d7e09922"
      },
      "source": [
        "# Inspect the boundary row label of the hold-out well (1381 in this run)\n",
        "index"
      ],
      "execution_count": 12,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "1381"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 12
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "sDknb38sDgRJ"
      },
      "source": [
        "# Hold out the test well by dropping its rows from the training frame.\n",
        "# Derive the rows from 'Well Name' itself instead of hard-coding the\n",
        "# range 1381-1882, so the split stays correct if the row layout changes.\n",
        "training_data = data.drop(index = data[data['Well Name'].isin([1])].index)"
      ],
      "execution_count": 13,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 419
        },
        "id": "Igmm1a_5DgRK",
        "outputId": "1c9ce09b-e27d-4ee5-9409-246d8235f4f2"
      },
      "source": [
        "# Preview the training frame with the hold-out well's rows removed\n",
        "training_data"
      ],
      "execution_count": 14,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>Unnamed: 0</th>\n",
              "      <th>Formation</th>\n",
              "      <th>Depth</th>\n",
              "      <th>GR</th>\n",
              "      <th>ILD_log10</th>\n",
              "      <th>DeltaPHI</th>\n",
              "      <th>PHIND</th>\n",
              "      <th>NM_M</th>\n",
              "      <th>RELPOS</th>\n",
              "      <th>Facies</th>\n",
              "      <th>Well Name</th>\n",
              "      <th>PE</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>0</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.399818</td>\n",
              "      <td>0.298966</td>\n",
              "      <td>0.458149</td>\n",
              "      <td>0.776042</td>\n",
              "      <td>0.219321</td>\n",
              "      <td>0</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.557385</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>1</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.400729</td>\n",
              "      <td>0.302738</td>\n",
              "      <td>0.456157</td>\n",
              "      <td>0.888021</td>\n",
              "      <td>0.231865</td>\n",
              "      <td>0</td>\n",
              "      <td>0.978788</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.494046</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>2</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.401639</td>\n",
              "      <td>0.306417</td>\n",
              "      <td>0.454165</td>\n",
              "      <td>0.903646</td>\n",
              "      <td>0.241224</td>\n",
              "      <td>0</td>\n",
              "      <td>0.956566</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.430707</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3</th>\n",
              "      <td>3</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.402550</td>\n",
              "      <td>0.339247</td>\n",
              "      <td>0.452173</td>\n",
              "      <td>0.880208</td>\n",
              "      <td>0.242479</td>\n",
              "      <td>0</td>\n",
              "      <td>0.935354</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.418039</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>4</th>\n",
              "      <td>4</td>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.403461</td>\n",
              "      <td>0.285601</td>\n",
              "      <td>0.446860</td>\n",
              "      <td>0.869792</td>\n",
              "      <td>0.246049</td>\n",
              "      <td>0</td>\n",
              "      <td>0.914141</td>\n",
              "      <td>3</td>\n",
              "      <td>7</td>\n",
              "      <td>0.405371</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>...</th>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3227</th>\n",
              "      <td>3227</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.996357</td>\n",
              "      <td>0.155858</td>\n",
              "      <td>0.646070</td>\n",
              "      <td>0.565833</td>\n",
              "      <td>0.129373</td>\n",
              "      <td>1</td>\n",
              "      <td>0.681818</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.432860</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3228</th>\n",
              "      <td>3228</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.997268</td>\n",
              "      <td>0.145818</td>\n",
              "      <td>0.650055</td>\n",
              "      <td>0.576589</td>\n",
              "      <td>0.144021</td>\n",
              "      <td>1</td>\n",
              "      <td>0.673737</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.398277</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3229</th>\n",
              "      <td>3229</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.998179</td>\n",
              "      <td>0.169829</td>\n",
              "      <td>0.657359</td>\n",
              "      <td>0.594401</td>\n",
              "      <td>0.144021</td>\n",
              "      <td>1</td>\n",
              "      <td>0.665657</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.378769</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3230</th>\n",
              "      <td>3230</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.999089</td>\n",
              "      <td>0.177978</td>\n",
              "      <td>0.658023</td>\n",
              "      <td>0.598516</td>\n",
              "      <td>0.138135</td>\n",
              "      <td>1</td>\n",
              "      <td>0.657576</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.373955</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3231</th>\n",
              "      <td>3231</td>\n",
              "      <td>0.923077</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.171282</td>\n",
              "      <td>0.661343</td>\n",
              "      <td>0.586172</td>\n",
              "      <td>0.118065</td>\n",
              "      <td>1</td>\n",
              "      <td>0.649495</td>\n",
              "      <td>5</td>\n",
              "      <td>0</td>\n",
              "      <td>0.392070</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>2731 rows × 12 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "      Unnamed: 0  Formation     Depth  ...  Facies  Well Name        PE\n",
              "0              0   0.076923  0.399818  ...       3          7  0.557385\n",
              "1              1   0.076923  0.400729  ...       3          7  0.494046\n",
              "2              2   0.076923  0.401639  ...       3          7  0.430707\n",
              "3              3   0.076923  0.402550  ...       3          7  0.418039\n",
              "4              4   0.076923  0.403461  ...       3          7  0.405371\n",
              "...          ...        ...       ...  ...     ...        ...       ...\n",
              "3227        3227   0.923077  0.996357  ...       5          0  0.432860\n",
              "3228        3228   0.923077  0.997268  ...       5          0  0.398277\n",
              "3229        3229   0.923077  0.998179  ...       5          0  0.378769\n",
              "3230        3230   0.923077  0.999089  ...       5          0  0.373955\n",
              "3231        3231   0.923077  1.000000  ...       5          0  0.392070\n",
              "\n",
              "[2731 rows x 12 columns]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 14
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "-TE-fgHaDgRK"
      },
      "source": [
        "# Drop identifier columns that must not leak into the feature set\n",
        "train_data = training_data.drop(columns=['Well Name','Unnamed: 0'])"
      ],
      "execution_count": 15,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 419
        },
        "id": "DoSNEGO3DgRK",
        "outputId": "bd368873-5b3e-4ca6-c7f6-4a80220cd28d"
      },
      "source": [
        "# Preview the modeling frame: 9 feature columns plus the PE target\n",
        "train_data"
      ],
      "execution_count": 16,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/html": [
              "<div>\n",
              "<style scoped>\n",
              "    .dataframe tbody tr th:only-of-type {\n",
              "        vertical-align: middle;\n",
              "    }\n",
              "\n",
              "    .dataframe tbody tr th {\n",
              "        vertical-align: top;\n",
              "    }\n",
              "\n",
              "    .dataframe thead th {\n",
              "        text-align: right;\n",
              "    }\n",
              "</style>\n",
              "<table border=\"1\" class=\"dataframe\">\n",
              "  <thead>\n",
              "    <tr style=\"text-align: right;\">\n",
              "      <th></th>\n",
              "      <th>Formation</th>\n",
              "      <th>Depth</th>\n",
              "      <th>GR</th>\n",
              "      <th>ILD_log10</th>\n",
              "      <th>DeltaPHI</th>\n",
              "      <th>PHIND</th>\n",
              "      <th>NM_M</th>\n",
              "      <th>RELPOS</th>\n",
              "      <th>Facies</th>\n",
              "      <th>PE</th>\n",
              "    </tr>\n",
              "  </thead>\n",
              "  <tbody>\n",
              "    <tr>\n",
              "      <th>0</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.399818</td>\n",
              "      <td>0.298966</td>\n",
              "      <td>0.458149</td>\n",
              "      <td>0.776042</td>\n",
              "      <td>0.219321</td>\n",
              "      <td>0</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>3</td>\n",
              "      <td>0.557385</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>1</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.400729</td>\n",
              "      <td>0.302738</td>\n",
              "      <td>0.456157</td>\n",
              "      <td>0.888021</td>\n",
              "      <td>0.231865</td>\n",
              "      <td>0</td>\n",
              "      <td>0.978788</td>\n",
              "      <td>3</td>\n",
              "      <td>0.494046</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>2</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.401639</td>\n",
              "      <td>0.306417</td>\n",
              "      <td>0.454165</td>\n",
              "      <td>0.903646</td>\n",
              "      <td>0.241224</td>\n",
              "      <td>0</td>\n",
              "      <td>0.956566</td>\n",
              "      <td>3</td>\n",
              "      <td>0.430707</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.402550</td>\n",
              "      <td>0.339247</td>\n",
              "      <td>0.452173</td>\n",
              "      <td>0.880208</td>\n",
              "      <td>0.242479</td>\n",
              "      <td>0</td>\n",
              "      <td>0.935354</td>\n",
              "      <td>3</td>\n",
              "      <td>0.418039</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>4</th>\n",
              "      <td>0.076923</td>\n",
              "      <td>0.403461</td>\n",
              "      <td>0.285601</td>\n",
              "      <td>0.446860</td>\n",
              "      <td>0.869792</td>\n",
              "      <td>0.246049</td>\n",
              "      <td>0</td>\n",
              "      <td>0.914141</td>\n",
              "      <td>3</td>\n",
              "      <td>0.405371</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>...</th>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "      <td>...</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3227</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.996357</td>\n",
              "      <td>0.155858</td>\n",
              "      <td>0.646070</td>\n",
              "      <td>0.565833</td>\n",
              "      <td>0.129373</td>\n",
              "      <td>1</td>\n",
              "      <td>0.681818</td>\n",
              "      <td>5</td>\n",
              "      <td>0.432860</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3228</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.997268</td>\n",
              "      <td>0.145818</td>\n",
              "      <td>0.650055</td>\n",
              "      <td>0.576589</td>\n",
              "      <td>0.144021</td>\n",
              "      <td>1</td>\n",
              "      <td>0.673737</td>\n",
              "      <td>5</td>\n",
              "      <td>0.398277</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3229</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.998179</td>\n",
              "      <td>0.169829</td>\n",
              "      <td>0.657359</td>\n",
              "      <td>0.594401</td>\n",
              "      <td>0.144021</td>\n",
              "      <td>1</td>\n",
              "      <td>0.665657</td>\n",
              "      <td>5</td>\n",
              "      <td>0.378769</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3230</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>0.999089</td>\n",
              "      <td>0.177978</td>\n",
              "      <td>0.658023</td>\n",
              "      <td>0.598516</td>\n",
              "      <td>0.138135</td>\n",
              "      <td>1</td>\n",
              "      <td>0.657576</td>\n",
              "      <td>5</td>\n",
              "      <td>0.373955</td>\n",
              "    </tr>\n",
              "    <tr>\n",
              "      <th>3231</th>\n",
              "      <td>0.923077</td>\n",
              "      <td>1.000000</td>\n",
              "      <td>0.171282</td>\n",
              "      <td>0.661343</td>\n",
              "      <td>0.586172</td>\n",
              "      <td>0.118065</td>\n",
              "      <td>1</td>\n",
              "      <td>0.649495</td>\n",
              "      <td>5</td>\n",
              "      <td>0.392070</td>\n",
              "    </tr>\n",
              "  </tbody>\n",
              "</table>\n",
              "<p>2731 rows × 10 columns</p>\n",
              "</div>"
            ],
            "text/plain": [
              "      Formation     Depth        GR  ...    RELPOS  Facies        PE\n",
              "0      0.076923  0.399818  0.298966  ...  1.000000       3  0.557385\n",
              "1      0.076923  0.400729  0.302738  ...  0.978788       3  0.494046\n",
              "2      0.076923  0.401639  0.306417  ...  0.956566       3  0.430707\n",
              "3      0.076923  0.402550  0.339247  ...  0.935354       3  0.418039\n",
              "4      0.076923  0.403461  0.285601  ...  0.914141       3  0.405371\n",
              "...         ...       ...       ...  ...       ...     ...       ...\n",
              "3227   0.923077  0.996357  0.155858  ...  0.681818       5  0.432860\n",
              "3228   0.923077  0.997268  0.145818  ...  0.673737       5  0.398277\n",
              "3229   0.923077  0.998179  0.169829  ...  0.665657       5  0.378769\n",
              "3230   0.923077  0.999089  0.177978  ...  0.657576       5  0.373955\n",
              "3231   0.923077  1.000000  0.171282  ...  0.649495       5  0.392070\n",
              "\n",
              "[2731 rows x 10 columns]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 16
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "NcWjLMPeDgRK"
      },
      "source": [
        "# 切割步长 (Slice the wells into fixed-length sequences)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "nePQSC6tDgRK"
      },
      "source": [
        "# Convert the training DataFrame to its raw NumPy array (12 columns,\n",
        "# still including 'Unnamed: 0' and 'Well Name'); the next cell indexes\n",
        "# column -2 ('Well Name') for the well-boundary check.\n",
        "# NOTE(review): reuses the same name, so this cell is not idempotent —\n",
        "# re-running it on the ndarray raises AttributeError.\n",
        "training_data = training_data.values"
      ],
      "execution_count": 17,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "NybD0X9tDgRL"
      },
      "source": [
        "# Slide a window of seq_length rows over the training frame to build\n",
        "# overlapping per-timestep training sequences.\n",
        "seq_length = 100\n",
        "data_ = []\n",
        "for i in range(len(train_data)-seq_length):\n",
        "  # training_data is the raw 12-column ndarray; column -2 is 'Well Name'.\n",
        "  # Skip windows whose start and end fall in different wells so that no\n",
        "  # sequence crosses a well boundary. This works because train_data and\n",
        "  # training_data are row-aligned — NOTE(review): mixing the two names is\n",
        "  # fragile; keep their row order in sync.\n",
        "  if training_data[i,-2]!=training_data[i+seq_length,-2]:\n",
        "    continue\n",
        "  data_.append(train_data.iloc[i:i+seq_length])"
      ],
      "execution_count": 19,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "5fNfntU-DgRL"
      },
      "source": [
        "# Stack the list of window DataFrames into one 3-D array of shape\n",
        "# (num_windows, seq_length, num_columns)\n",
        "data_ = np.stack([frame.values for frame in data_])"
      ],
      "execution_count": 20,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "AXw5OHEwDgRL",
        "outputId": "bdb914f3-12c9-4b6e-8697-68fdfc0bba85"
      },
      "source": [
        "# (num_windows, seq_length, num_columns)\n",
        "data_.shape"
      ],
      "execution_count": 21,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "(2063, 100, 10)"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 21
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "D0pv0-hBDgRL"
      },
      "source": [
        "# Features: first 9 columns (Formation ... Facies); excludes the PE target\n",
        "X = data_[:,:,:9]"
      ],
      "execution_count": 22,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "YF5LtcH2DgRL",
        "outputId": "23cb1c5c-68d5-46c2-fc24-53674f31cde0"
      },
      "source": [
        "# Expect (num_windows, 100, 9)\n",
        "X.shape"
      ],
      "execution_count": 23,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "(2063, 100, 9)"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 23
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "n4ZK9_uZDgRL"
      },
      "source": [
        "# Target: last column (normalized PE), one value per timestep\n",
        "Y = data_[:,:,-1]"
      ],
      "execution_count": 24,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "KoFj9nugDgRM",
        "outputId": "d8755da7-cdeb-4b33-fcf8-0065db7900bb"
      },
      "source": [
        "# Expect (num_windows, 100)\n",
        "Y.shape"
      ],
      "execution_count": 25,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "(2063, 100)"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 25
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "ITy4YGDYDgRM"
      },
      "source": [
        "# Build sliding windows over the hold-out data for evaluation.\n",
        "# No well-boundary check here — presumably test_data (prepared in an\n",
        "# earlier cell) holds a single well; NOTE(review): confirm upstream.\n",
        "data_test = []\n",
        "for i in range(len(test_data)-seq_length):\n",
        "    data_test.append(test_data.iloc[i:i+seq_length])"
      ],
      "execution_count": 26,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "O40hAcqUDgRM"
      },
      "source": [
        "# Stack the test window DataFrames into one 3-D array\n",
        "data_test = np.stack([frame.values for frame in data_test])"
      ],
      "execution_count": 27,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "Y4tHin07DgRM",
        "outputId": "fe3166b6-77c9-464a-9e4d-b906589f2551"
      },
      "source": [
        "# Expect (num_test_windows, 100, 10)\n",
        "data_test.shape"
      ],
      "execution_count": 28,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "(401, 100, 10)"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 28
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "PIxofvBVDgRM"
      },
      "source": [
        "# Test features: first 9 columns, mirroring the training split\n",
        "test_x = data_test[:,:,:9]"
      ],
      "execution_count": 29,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "l0UEuBpLDgRM"
      },
      "source": [
        "# Test targets: last column (normalized PE) per timestep\n",
        "test_y = data_test[:,:,-1]"
      ],
      "execution_count": 30,
      "outputs": []
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "CCWu0wlshbIC"
      },
      "source": [
        "# 定量函数 (Regression evaluation metrics)"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "X7APSNiqhfYU",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "0b7c4733-82fa-4a15-d0ee-5503ac39496c"
      },
      "source": [
        "import sklearn\r\n",
        "from sklearn import metrics\r\n",
        "from sklearn import linear_model\r\n",
        "import statsmodels.api as sm\r\n",
        "from sklearn.metrics import r2_score"
      ],
      "execution_count": 31,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "/usr/local/lib/python3.6/dist-packages/statsmodels/tools/_testing.py:19: FutureWarning: pandas.util.testing is deprecated. Use the functions in the public API at pandas.testing instead.\n",
            "  import pandas.util.testing as tm\n"
          ],
          "name": "stderr"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "mS2VPsEADgRN"
      },
      "source": [
        "# LSTM"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "OL38QZ1dDgRN"
      },
      "source": [
        "# Fixed-length model input: 100 timesteps x 9 features per sample.\n",
        "# NOTE(review): the name 'input' shadows the Python builtin; later cells\n",
        "# reference it, so it is kept unchanged here.\n",
        "input = tf.keras.Input(shape =(100,9))"
      ],
      "execution_count": 32,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "Ycnmg4r1DgRN"
      },
      "source": [
        "# Sequence-to-sequence regressor: the LSTM returns the full 100-step\n",
        "# sequence, an L2-regularized Dense layer follows, and a final Dense(1)\n",
        "# emits one PE prediction per timestep.\n",
        "x = tf.keras.layers.LSTM(64,return_sequences=True)(input)\n",
        "x = tf.keras.layers.Dense(64,activation='relu',kernel_regularizer='l2')(x)\n",
        "x = tf.keras.layers.Dense(1)(x)"
      ],
      "execution_count": 33,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "lCPtZ8beDgRN"
      },
      "source": [
        "# Assemble the functional model from the input tensor to the per-step output\n",
        "model = tf.keras.Model(inputs = input,outputs = x)"
      ],
      "execution_count": 34,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "XJkiDAGTE-8W",
        "outputId": "1f2945e4-5fb1-4006-db5e-807ed6801062"
      },
      "source": [
        "# Layer/parameter overview (23,169 trainable parameters)\n",
        "model.summary()"
      ],
      "execution_count": 35,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Model: \"model\"\n",
            "_________________________________________________________________\n",
            "Layer (type)                 Output Shape              Param #   \n",
            "=================================================================\n",
            "input_1 (InputLayer)         [(None, 100, 9)]          0         \n",
            "_________________________________________________________________\n",
            "lstm (LSTM)                  (None, 100, 64)           18944     \n",
            "_________________________________________________________________\n",
            "dense (Dense)                (None, 100, 64)           4160      \n",
            "_________________________________________________________________\n",
            "dense_1 (Dense)              (None, 100, 1)            65        \n",
            "=================================================================\n",
            "Total params: 23,169\n",
            "Trainable params: 23,169\n",
            "Non-trainable params: 0\n",
            "_________________________________________________________________\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "df85UXOcFeJ1"
      },
      "source": [
        "# Learning-rate schedule, checkpointing, and compilation.\r\n",
        "learning_rate=0.01\r\n",
        "\r\n",
        "# Halve the learning rate whenever val MAE fails to improve for 20 epochs\r\n",
        "Lr_change=tf.keras.callbacks.ReduceLROnPlateau('val_mae',patience = 20, factor = 0.5, min_lr=0.0001)\r\n",
        "# Save the weights with the lowest validation MAE seen so far.\r\n",
        "# Import from tensorflow.keras (not standalone keras) so the callback\r\n",
        "# matches the tf.keras model — mixing the two packages can break training.\r\n",
        "from tensorflow.keras.callbacks import ModelCheckpoint\r\n",
        "filepath=\"best_weight.h5\"\r\n",
        "checkpoint = ModelCheckpoint(filepath, monitor='val_mae', verbose=1, save_best_only=True,mode='min')\r\n",
        "Adam=tf.keras.optimizers.Adam(learning_rate=learning_rate)\r\n",
        "model.compile(optimizer=Adam,loss='mse',metrics=['mae','mse'])"
      ],
      "execution_count": 36,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "PLe-6jp9F1OM",
        "outputId": "49b84e67-42e5-418b-a81b-c70b3b498b07"
      },
      "source": [
        "# Train for up to 100 epochs with LR decay and best-weight checkpointing.\r\n",
        "# NOTE(review): the held-out well serves as validation_data, so it steers\r\n",
        "# LR decay and checkpoint selection — it is not a fully blind test set.\r\n",
        "history=model.fit( X,Y,batch_size=128,\r\n",
        "         epochs=100, \r\n",
        "         callbacks=[Lr_change,checkpoint],\r\n",
        "         validation_data=(test_x,test_y))"
      ],
      "execution_count": 37,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Epoch 1/100\n",
            "17/17 [==============================] - 9s 55ms/step - loss: 0.9747 - mae: 0.4790 - mse: 0.4017 - val_loss: 0.4233 - val_mae: 0.0855 - val_mse: 0.0139\n",
            "\n",
            "Epoch 00001: val_mae improved from inf to 0.08548, saving model to best_weight.h5\n",
            "Epoch 2/100\n",
            "17/17 [==============================] - 0s 14ms/step - loss: 0.3820 - mae: 0.0888 - mse: 0.0127 - val_loss: 0.2688 - val_mae: 0.0693 - val_mse: 0.0076\n",
            "\n",
            "Epoch 00002: val_mae improved from 0.08548 to 0.06927, saving model to best_weight.h5\n",
            "Epoch 3/100\n",
            "17/17 [==============================] - 0s 13ms/step - loss: 0.2421 - mae: 0.0672 - mse: 0.0077 - val_loss: 0.1709 - val_mae: 0.0568 - val_mse: 0.0060\n",
            "\n",
            "Epoch 00003: val_mae improved from 0.06927 to 0.05679, saving model to best_weight.h5\n",
            "Epoch 4/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.1557 - mae: 0.0627 - mse: 0.0069 - val_loss: 0.1123 - val_mae: 0.0513 - val_mse: 0.0052\n",
            "\n",
            "Epoch 00004: val_mae improved from 0.05679 to 0.05135, saving model to best_weight.h5\n",
            "Epoch 5/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.1038 - mae: 0.0609 - mse: 0.0066 - val_loss: 0.0762 - val_mae: 0.0508 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00005: val_mae improved from 0.05135 to 0.05081, saving model to best_weight.h5\n",
            "Epoch 6/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0716 - mae: 0.0605 - mse: 0.0065 - val_loss: 0.0535 - val_mae: 0.0487 - val_mse: 0.0049\n",
            "\n",
            "Epoch 00006: val_mae improved from 0.05081 to 0.04870, saving model to best_weight.h5\n",
            "Epoch 7/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0508 - mae: 0.0601 - mse: 0.0063 - val_loss: 0.0378 - val_mae: 0.0477 - val_mse: 0.0044\n",
            "\n",
            "Epoch 00007: val_mae improved from 0.04870 to 0.04774, saving model to best_weight.h5\n",
            "Epoch 8/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0366 - mae: 0.0583 - mse: 0.0060 - val_loss: 0.0274 - val_mae: 0.0460 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00008: val_mae improved from 0.04774 to 0.04603, saving model to best_weight.h5\n",
            "Epoch 9/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0271 - mae: 0.0575 - mse: 0.0059 - val_loss: 0.0200 - val_mae: 0.0444 - val_mse: 0.0038\n",
            "\n",
            "Epoch 00009: val_mae improved from 0.04603 to 0.04441, saving model to best_weight.h5\n",
            "Epoch 10/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0206 - mae: 0.0570 - mse: 0.0058 - val_loss: 0.0150 - val_mae: 0.0423 - val_mse: 0.0036\n",
            "\n",
            "Epoch 00010: val_mae improved from 0.04441 to 0.04227, saving model to best_weight.h5\n",
            "Epoch 11/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0161 - mae: 0.0563 - mse: 0.0057 - val_loss: 0.0116 - val_mae: 0.0415 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00011: val_mae improved from 0.04227 to 0.04152, saving model to best_weight.h5\n",
            "Epoch 12/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0129 - mae: 0.0556 - mse: 0.0055 - val_loss: 0.0092 - val_mae: 0.0408 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00012: val_mae improved from 0.04152 to 0.04078, saving model to best_weight.h5\n",
            "Epoch 13/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0108 - mae: 0.0557 - mse: 0.0055 - val_loss: 0.0082 - val_mae: 0.0439 - val_mse: 0.0041\n",
            "\n",
            "Epoch 00013: val_mae did not improve from 0.04078\n",
            "Epoch 14/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0092 - mae: 0.0551 - mse: 0.0054 - val_loss: 0.0062 - val_mae: 0.0432 - val_mse: 0.0033\n",
            "\n",
            "Epoch 00014: val_mae did not improve from 0.04078\n",
            "Epoch 15/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0086 - mae: 0.0576 - mse: 0.0059 - val_loss: 0.0054 - val_mae: 0.0392 - val_mse: 0.0033\n",
            "\n",
            "Epoch 00015: val_mae improved from 0.04078 to 0.03921, saving model to best_weight.h5\n",
            "Epoch 16/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0074 - mae: 0.0544 - mse: 0.0053 - val_loss: 0.0050 - val_mae: 0.0395 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00016: val_mae did not improve from 0.03921\n",
            "Epoch 17/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0068 - mae: 0.0536 - mse: 0.0052 - val_loss: 0.0046 - val_mae: 0.0422 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00017: val_mae did not improve from 0.03921\n",
            "Epoch 18/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0065 - mae: 0.0541 - mse: 0.0053 - val_loss: 0.0052 - val_mae: 0.0431 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00018: val_mae did not improve from 0.03921\n",
            "Epoch 19/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0061 - mae: 0.0534 - mse: 0.0052 - val_loss: 0.0050 - val_mae: 0.0425 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00019: val_mae did not improve from 0.03921\n",
            "Epoch 20/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0061 - mae: 0.0544 - mse: 0.0053 - val_loss: 0.0040 - val_mae: 0.0388 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00020: val_mae improved from 0.03921 to 0.03880, saving model to best_weight.h5\n",
            "Epoch 21/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0055 - mae: 0.0518 - mse: 0.0049 - val_loss: 0.0043 - val_mae: 0.0394 - val_mse: 0.0038\n",
            "\n",
            "Epoch 00021: val_mae did not improve from 0.03880\n",
            "Epoch 22/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0055 - mae: 0.0526 - mse: 0.0050 - val_loss: 0.0041 - val_mae: 0.0386 - val_mse: 0.0037\n",
            "\n",
            "Epoch 00022: val_mae improved from 0.03880 to 0.03862, saving model to best_weight.h5\n",
            "Epoch 23/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0054 - mae: 0.0524 - mse: 0.0050 - val_loss: 0.0040 - val_mae: 0.0392 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00023: val_mae did not improve from 0.03862\n",
            "Epoch 24/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0050 - mae: 0.0505 - mse: 0.0046 - val_loss: 0.0038 - val_mae: 0.0394 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00024: val_mae did not improve from 0.03862\n",
            "Epoch 25/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0052 - mae: 0.0519 - mse: 0.0048 - val_loss: 0.0045 - val_mae: 0.0408 - val_mse: 0.0041\n",
            "\n",
            "Epoch 00025: val_mae did not improve from 0.03862\n",
            "Epoch 26/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0054 - mae: 0.0535 - mse: 0.0051 - val_loss: 0.0038 - val_mae: 0.0396 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00026: val_mae did not improve from 0.03862\n",
            "Epoch 27/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0047 - mae: 0.0490 - mse: 0.0044 - val_loss: 0.0035 - val_mae: 0.0386 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00027: val_mae did not improve from 0.03862\n",
            "Epoch 28/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0046 - mae: 0.0486 - mse: 0.0043 - val_loss: 0.0048 - val_mae: 0.0424 - val_mse: 0.0044\n",
            "\n",
            "Epoch 00028: val_mae did not improve from 0.03862\n",
            "Epoch 29/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0051 - mae: 0.0518 - mse: 0.0048 - val_loss: 0.0037 - val_mae: 0.0405 - val_mse: 0.0033\n",
            "\n",
            "Epoch 00029: val_mae did not improve from 0.03862\n",
            "Epoch 30/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0047 - mae: 0.0489 - mse: 0.0043 - val_loss: 0.0037 - val_mae: 0.0411 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00030: val_mae did not improve from 0.03862\n",
            "Epoch 31/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0044 - mae: 0.0470 - mse: 0.0041 - val_loss: 0.0040 - val_mae: 0.0394 - val_mse: 0.0037\n",
            "\n",
            "Epoch 00031: val_mae did not improve from 0.03862\n",
            "Epoch 32/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0045 - mae: 0.0479 - mse: 0.0042 - val_loss: 0.0038 - val_mae: 0.0390 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00032: val_mae did not improve from 0.03862\n",
            "Epoch 33/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0043 - mae: 0.0463 - mse: 0.0040 - val_loss: 0.0038 - val_mae: 0.0431 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00033: val_mae did not improve from 0.03862\n",
            "Epoch 34/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0042 - mae: 0.0456 - mse: 0.0038 - val_loss: 0.0036 - val_mae: 0.0411 - val_mse: 0.0033\n",
            "\n",
            "Epoch 00034: val_mae did not improve from 0.03862\n",
            "Epoch 35/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0042 - mae: 0.0458 - mse: 0.0039 - val_loss: 0.0038 - val_mae: 0.0425 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00035: val_mae did not improve from 0.03862\n",
            "Epoch 36/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0042 - mae: 0.0458 - mse: 0.0039 - val_loss: 0.0035 - val_mae: 0.0435 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00036: val_mae did not improve from 0.03862\n",
            "Epoch 37/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0048 - mae: 0.0504 - mse: 0.0046 - val_loss: 0.0036 - val_mae: 0.0414 - val_mse: 0.0033\n",
            "\n",
            "Epoch 00037: val_mae did not improve from 0.03862\n",
            "Epoch 38/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0044 - mae: 0.0475 - mse: 0.0041 - val_loss: 0.0036 - val_mae: 0.0359 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00038: val_mae improved from 0.03862 to 0.03585, saving model to best_weight.h5\n",
            "Epoch 39/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0042 - mae: 0.0465 - mse: 0.0039 - val_loss: 0.0035 - val_mae: 0.0377 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00039: val_mae did not improve from 0.03585\n",
            "Epoch 40/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0038 - mae: 0.0432 - mse: 0.0035 - val_loss: 0.0034 - val_mae: 0.0373 - val_mse: 0.0031\n",
            "\n",
            "Epoch 00040: val_mae did not improve from 0.03585\n",
            "Epoch 41/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0038 - mae: 0.0431 - mse: 0.0035 - val_loss: 0.0035 - val_mae: 0.0402 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00041: val_mae did not improve from 0.03585\n",
            "Epoch 42/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0038 - mae: 0.0431 - mse: 0.0035 - val_loss: 0.0034 - val_mae: 0.0381 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00042: val_mae did not improve from 0.03585\n",
            "Epoch 43/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0036 - mae: 0.0422 - mse: 0.0033 - val_loss: 0.0034 - val_mae: 0.0388 - val_mse: 0.0031\n",
            "\n",
            "Epoch 00043: val_mae did not improve from 0.03585\n",
            "Epoch 44/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0036 - mae: 0.0420 - mse: 0.0033 - val_loss: 0.0035 - val_mae: 0.0402 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00044: val_mae did not improve from 0.03585\n",
            "Epoch 45/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0036 - mae: 0.0424 - mse: 0.0033 - val_loss: 0.0033 - val_mae: 0.0371 - val_mse: 0.0030\n",
            "\n",
            "Epoch 00045: val_mae did not improve from 0.03585\n",
            "Epoch 46/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0037 - mae: 0.0431 - mse: 0.0034 - val_loss: 0.0037 - val_mae: 0.0404 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00046: val_mae did not improve from 0.03585\n",
            "Epoch 47/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0036 - mae: 0.0426 - mse: 0.0033 - val_loss: 0.0035 - val_mae: 0.0400 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00047: val_mae did not improve from 0.03585\n",
            "Epoch 48/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0034 - mae: 0.0410 - mse: 0.0031 - val_loss: 0.0034 - val_mae: 0.0385 - val_mse: 0.0031\n",
            "\n",
            "Epoch 00048: val_mae did not improve from 0.03585\n",
            "Epoch 49/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0035 - mae: 0.0420 - mse: 0.0032 - val_loss: 0.0033 - val_mae: 0.0377 - val_mse: 0.0031\n",
            "\n",
            "Epoch 00049: val_mae did not improve from 0.03585\n",
            "Epoch 50/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0033 - mae: 0.0408 - mse: 0.0031 - val_loss: 0.0034 - val_mae: 0.0409 - val_mse: 0.0031\n",
            "\n",
            "Epoch 00050: val_mae did not improve from 0.03585\n",
            "Epoch 51/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0039 - mae: 0.0449 - mse: 0.0036 - val_loss: 0.0038 - val_mae: 0.0437 - val_mse: 0.0036\n",
            "\n",
            "Epoch 00051: val_mae did not improve from 0.03585\n",
            "Epoch 52/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0033 - mae: 0.0412 - mse: 0.0031 - val_loss: 0.0035 - val_mae: 0.0376 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00052: val_mae did not improve from 0.03585\n",
            "Epoch 53/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0033 - mae: 0.0412 - mse: 0.0031 - val_loss: 0.0036 - val_mae: 0.0394 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00053: val_mae did not improve from 0.03585\n",
            "Epoch 54/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0032 - mae: 0.0401 - mse: 0.0030 - val_loss: 0.0037 - val_mae: 0.0371 - val_mse: 0.0035\n",
            "\n",
            "Epoch 00054: val_mae did not improve from 0.03585\n",
            "Epoch 55/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0032 - mae: 0.0407 - mse: 0.0030 - val_loss: 0.0032 - val_mae: 0.0345 - val_mse: 0.0030\n",
            "\n",
            "Epoch 00055: val_mae improved from 0.03585 to 0.03446, saving model to best_weight.h5\n",
            "Epoch 56/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0031 - mae: 0.0396 - mse: 0.0029 - val_loss: 0.0036 - val_mae: 0.0399 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00056: val_mae did not improve from 0.03446\n",
            "Epoch 57/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0032 - mae: 0.0405 - mse: 0.0030 - val_loss: 0.0032 - val_mae: 0.0347 - val_mse: 0.0030\n",
            "\n",
            "Epoch 00057: val_mae did not improve from 0.03446\n",
            "Epoch 58/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0031 - mae: 0.0401 - mse: 0.0029 - val_loss: 0.0036 - val_mae: 0.0414 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00058: val_mae did not improve from 0.03446\n",
            "Epoch 59/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0031 - mae: 0.0398 - mse: 0.0029 - val_loss: 0.0036 - val_mae: 0.0381 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00059: val_mae did not improve from 0.03446\n",
            "Epoch 60/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0029 - mae: 0.0385 - mse: 0.0027 - val_loss: 0.0036 - val_mae: 0.0396 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00060: val_mae did not improve from 0.03446\n",
            "Epoch 61/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0030 - mae: 0.0388 - mse: 0.0028 - val_loss: 0.0036 - val_mae: 0.0367 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00061: val_mae did not improve from 0.03446\n",
            "Epoch 62/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0030 - mae: 0.0392 - mse: 0.0028 - val_loss: 0.0041 - val_mae: 0.0394 - val_mse: 0.0039\n",
            "\n",
            "Epoch 00062: val_mae did not improve from 0.03446\n",
            "Epoch 63/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0030 - mae: 0.0394 - mse: 0.0028 - val_loss: 0.0045 - val_mae: 0.0442 - val_mse: 0.0043\n",
            "\n",
            "Epoch 00063: val_mae did not improve from 0.03446\n",
            "Epoch 64/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0032 - mae: 0.0412 - mse: 0.0030 - val_loss: 0.0034 - val_mae: 0.0382 - val_mse: 0.0032\n",
            "\n",
            "Epoch 00064: val_mae did not improve from 0.03446\n",
            "Epoch 65/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0030 - mae: 0.0391 - mse: 0.0028 - val_loss: 0.0049 - val_mae: 0.0430 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00065: val_mae did not improve from 0.03446\n",
            "Epoch 66/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0032 - mae: 0.0412 - mse: 0.0030 - val_loss: 0.0040 - val_mae: 0.0387 - val_mse: 0.0038\n",
            "\n",
            "Epoch 00066: val_mae did not improve from 0.03446\n",
            "Epoch 67/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0029 - mae: 0.0386 - mse: 0.0027 - val_loss: 0.0035 - val_mae: 0.0374 - val_mse: 0.0033\n",
            "\n",
            "Epoch 00067: val_mae did not improve from 0.03446\n",
            "Epoch 68/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0028 - mae: 0.0378 - mse: 0.0026 - val_loss: 0.0046 - val_mae: 0.0419 - val_mse: 0.0044\n",
            "\n",
            "Epoch 00068: val_mae did not improve from 0.03446\n",
            "Epoch 69/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0029 - mae: 0.0385 - mse: 0.0027 - val_loss: 0.0042 - val_mae: 0.0424 - val_mse: 0.0039\n",
            "\n",
            "Epoch 00069: val_mae did not improve from 0.03446\n",
            "Epoch 70/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0028 - mae: 0.0379 - mse: 0.0026 - val_loss: 0.0041 - val_mae: 0.0424 - val_mse: 0.0039\n",
            "\n",
            "Epoch 00070: val_mae did not improve from 0.03446\n",
            "Epoch 71/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0027 - mae: 0.0370 - mse: 0.0025 - val_loss: 0.0039 - val_mae: 0.0430 - val_mse: 0.0037\n",
            "\n",
            "Epoch 00071: val_mae did not improve from 0.03446\n",
            "Epoch 72/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0027 - mae: 0.0368 - mse: 0.0025 - val_loss: 0.0048 - val_mae: 0.0461 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00072: val_mae did not improve from 0.03446\n",
            "Epoch 73/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0027 - mae: 0.0374 - mse: 0.0026 - val_loss: 0.0044 - val_mae: 0.0437 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00073: val_mae did not improve from 0.03446\n",
            "Epoch 74/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0027 - mae: 0.0367 - mse: 0.0025 - val_loss: 0.0036 - val_mae: 0.0391 - val_mse: 0.0034\n",
            "\n",
            "Epoch 00074: val_mae did not improve from 0.03446\n",
            "Epoch 75/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0027 - mae: 0.0371 - mse: 0.0025 - val_loss: 0.0039 - val_mae: 0.0408 - val_mse: 0.0037\n",
            "\n",
            "Epoch 00075: val_mae did not improve from 0.03446\n",
            "Epoch 76/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0026 - mae: 0.0359 - mse: 0.0024 - val_loss: 0.0040 - val_mae: 0.0418 - val_mse: 0.0038\n",
            "\n",
            "Epoch 00076: val_mae did not improve from 0.03446\n",
            "Epoch 77/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0025 - mae: 0.0356 - mse: 0.0023 - val_loss: 0.0041 - val_mae: 0.0419 - val_mse: 0.0039\n",
            "\n",
            "Epoch 00077: val_mae did not improve from 0.03446\n",
            "Epoch 78/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0025 - mae: 0.0354 - mse: 0.0023 - val_loss: 0.0040 - val_mae: 0.0419 - val_mse: 0.0039\n",
            "\n",
            "Epoch 00078: val_mae did not improve from 0.03446\n",
            "Epoch 79/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0025 - mae: 0.0353 - mse: 0.0023 - val_loss: 0.0042 - val_mae: 0.0412 - val_mse: 0.0040\n",
            "\n",
            "Epoch 00079: val_mae did not improve from 0.03446\n",
            "Epoch 80/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0025 - mae: 0.0353 - mse: 0.0023 - val_loss: 0.0040 - val_mae: 0.0414 - val_mse: 0.0038\n",
            "\n",
            "Epoch 00080: val_mae did not improve from 0.03446\n",
            "Epoch 81/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0025 - mae: 0.0353 - mse: 0.0023 - val_loss: 0.0041 - val_mae: 0.0424 - val_mse: 0.0039\n",
            "\n",
            "Epoch 00081: val_mae did not improve from 0.03446\n",
            "Epoch 82/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0025 - mae: 0.0350 - mse: 0.0023 - val_loss: 0.0043 - val_mae: 0.0433 - val_mse: 0.0041\n",
            "\n",
            "Epoch 00082: val_mae did not improve from 0.03446\n",
            "Epoch 83/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0024 - mae: 0.0345 - mse: 0.0022 - val_loss: 0.0049 - val_mae: 0.0457 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00083: val_mae did not improve from 0.03446\n",
            "Epoch 84/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0025 - mae: 0.0352 - mse: 0.0023 - val_loss: 0.0042 - val_mae: 0.0414 - val_mse: 0.0040\n",
            "\n",
            "Epoch 00084: val_mae did not improve from 0.03446\n",
            "Epoch 85/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0024 - mae: 0.0348 - mse: 0.0022 - val_loss: 0.0042 - val_mae: 0.0430 - val_mse: 0.0040\n",
            "\n",
            "Epoch 00085: val_mae did not improve from 0.03446\n",
            "Epoch 86/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0024 - mae: 0.0345 - mse: 0.0022 - val_loss: 0.0044 - val_mae: 0.0438 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00086: val_mae did not improve from 0.03446\n",
            "Epoch 87/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0024 - mae: 0.0344 - mse: 0.0022 - val_loss: 0.0043 - val_mae: 0.0449 - val_mse: 0.0041\n",
            "\n",
            "Epoch 00087: val_mae did not improve from 0.03446\n",
            "Epoch 88/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0024 - mae: 0.0344 - mse: 0.0022 - val_loss: 0.0042 - val_mae: 0.0430 - val_mse: 0.0040\n",
            "\n",
            "Epoch 00088: val_mae did not improve from 0.03446\n",
            "Epoch 89/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0024 - mae: 0.0346 - mse: 0.0022 - val_loss: 0.0049 - val_mae: 0.0461 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00089: val_mae did not improve from 0.03446\n",
            "Epoch 90/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0023 - mae: 0.0340 - mse: 0.0022 - val_loss: 0.0041 - val_mae: 0.0422 - val_mse: 0.0039\n",
            "\n",
            "Epoch 00090: val_mae did not improve from 0.03446\n",
            "Epoch 91/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0023 - mae: 0.0336 - mse: 0.0021 - val_loss: 0.0050 - val_mae: 0.0451 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00091: val_mae did not improve from 0.03446\n",
            "Epoch 92/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0024 - mae: 0.0344 - mse: 0.0022 - val_loss: 0.0044 - val_mae: 0.0459 - val_mse: 0.0043\n",
            "\n",
            "Epoch 00092: val_mae did not improve from 0.03446\n",
            "Epoch 93/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0023 - mae: 0.0338 - mse: 0.0022 - val_loss: 0.0045 - val_mae: 0.0429 - val_mse: 0.0043\n",
            "\n",
            "Epoch 00093: val_mae did not improve from 0.03446\n",
            "Epoch 94/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0023 - mae: 0.0334 - mse: 0.0021 - val_loss: 0.0047 - val_mae: 0.0455 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00094: val_mae did not improve from 0.03446\n",
            "Epoch 95/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0023 - mae: 0.0334 - mse: 0.0021 - val_loss: 0.0047 - val_mae: 0.0461 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00095: val_mae did not improve from 0.03446\n",
            "Epoch 96/100\n",
            "17/17 [==============================] - 0s 18ms/step - loss: 0.0022 - mae: 0.0325 - mse: 0.0020 - val_loss: 0.0048 - val_mae: 0.0457 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00096: val_mae did not improve from 0.03446\n",
            "Epoch 97/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0022 - mae: 0.0328 - mse: 0.0021 - val_loss: 0.0044 - val_mae: 0.0434 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00097: val_mae did not improve from 0.03446\n",
            "Epoch 98/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0022 - mae: 0.0325 - mse: 0.0020 - val_loss: 0.0047 - val_mae: 0.0455 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00098: val_mae did not improve from 0.03446\n",
            "Epoch 99/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0022 - mae: 0.0324 - mse: 0.0020 - val_loss: 0.0049 - val_mae: 0.0465 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00099: val_mae did not improve from 0.03446\n",
            "Epoch 100/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0021 - mae: 0.0319 - mse: 0.0019 - val_loss: 0.0045 - val_mae: 0.0440 - val_mse: 0.0043\n",
            "\n",
            "Epoch 00100: val_mae did not improve from 0.03446\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "TBu6uBWPF2n_",
        "outputId": "5ca75103-f418-43d7-b0e1-34af5f3687ea"
      },
      "source": [
        "# Inspect which series Keras recorded during training; 'lr' is\r\n",
        "# present because ReduceLROnPlateau logs the learning rate.\r\n",
        "history.history.keys()"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "dict_keys(['loss', 'mae', 'mse', 'val_loss', 'val_mae', 'val_mse', 'lr'])"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 52
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "gbWAsnhLGYN5",
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 282
        },
        "outputId": "c8bc71dd-7439-4a03-dcfd-5838a6bac3fc"
      },
      "source": [
        "# Training vs. validation MAE per epoch. plt.legend() is required\r\n",
        "# for the label= strings to actually render on the figure.\r\n",
        "plt.plot(history.epoch, history.history.get('mae'), label='mae')\r\n",
        "plt.plot(history.epoch, history.history.get('val_mae'), label='val_mae')\r\n",
        "plt.xlabel('epoch')\r\n",
        "plt.ylabel('MAE')\r\n",
        "plt.legend();"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "[<matplotlib.lines.Line2D at 0x7f7062e2f400>]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 53
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYAAAAD4CAYAAADlwTGnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nO3deXxU9bn48c8zSxIStmxsCZuAIIgiIOJed9wQq1ao191qq1Zv7WZ/erW1dvHetmpbr1fc97UuqFDqWhUVCYusImHfSUggIfvMPL8/vifJJCRkIIFozvN+vfLKzNnm+82ZnOd81yOqijHGGP8JtHcCjDHGtA8LAMYY41MWAIwxxqcsABhjjE9ZADDGGJ8KtXcC9kZWVpYOGDCgvZNhjDHfKnPnzi1U1ezGy79VAWDAgAHk5eW1dzKMMeZbRUTWNrXcqoCMMcanLAAYY4xPWQAwxhifsgBgjDE+ZQHAGGN8ygKAMcb4lAUAY4zxKV8EgFfnbeDZ2U12gzXGGN/yRQB488tNvPDF+vZOhjHGfKP4IgCEggFqorH2ToYxxnyj+CIAhINCJGZPPjPGmHg+CQBWAjDGmMZ8EQBCgQCRqJUAjDEmni8CQDgoVgIwxphGEgoAIjJBRJaLSL6I3NrE+ltEZKmILBSR90Skf9y6y0VkhfdzedzyMSKyyDvmX0VE2iZLuwtZG4AxxuymxQAgIkHgAeBMYDgwRUSGN9psPjBWVQ8DXgH+29s3A7gTOAoYB9wpIunePg8CPwCGeD8TWp2bZoQC1gZgjDGNJVICGAfkq+oqVa0GXgDOi99AVT9Q1XLv7edArvf6DOAdVS1S1WLgHWCCiPQGuqrq56qqwFPApDbIT5PCQbE2AGOMaSSRAJADxI+i2uAta87VwIwW9s3xXrd4TBG5VkTyRCSvoKAggeTuLhQMEIlZCcAYY+K1aSOwiPwHMBb4n7Y6pqpOVdWxqjo2O3u3R1omJBwQaqKKK2wYY4yBxALARqBv3Ptcb1kDInIqcBswUVWrWth3I/XVRM0es62Egy6b1hBsjDH1EgkAc4AhIjJQRJKAycC0+A1E5AjgIdzFf1vcqpnA6SKS7jX+ng7MVNXNQImIjPd6/1wGvNEG+WlSqDYAWDuAMcbUCbW0gapGRORG3MU8CDymqktE5C4gT1Wn4ap8OgMve70516nqRFUtEpHf4oIIwF2qWuS9vh54AuiEazOYwX4SDroepjWxGJ0I7q+PMcaYb5UWAwCAqk4Hpjdadkfc61P3sO9jwGNNLM8DDk04pa0QCrgAYCUAY4yp54uRwPVVQNYTyBhjavkiANRXAVkJwBhjavkiAIQCVgIwxpjG/BEAaksA1gZgjDF1fBEAkrw2AJsPyBhj6vkiANg4AGOM2Z1PAkD9OABjjDGOLwJAOGAlAGOMacwXAaC2BGC9gIwxpp4vAoCNAzDGmN35IgDYOABjjNmdPwKAjQMwxpjd+CIA2DgAY4zZnS8CQN04AOsGaowxdfwRAAJWBWSMMY35IgCEbSSwMcbsxhcBoG4cgFUBGWNMHV8EgNqRwFYFZIwx9XwRAGwksDHG7C6hACAiE0RkuYjki8itTaw/QUTmiUhERC6MW36SiCyI+6kUkUneuidEZHXculFtl62G6quArARgjDG1WnwovIgEgQeA04ANwBwRmaaqS+M2WwdcAfwsfl9V/QAY5R0nA8gH/hW3yc9V9ZXWZCARtVVA1RErARhjTK0WAwAwDshX1VUAIvICcB5QFwBUdY23bk9X2AuBGapavs+p3UeBgBAMiDUCG2NMnESqgHKA9XHvN3jL9tZk4PlGy34nIgtF5F4RSd6HYyYsFBDrBmqMMXEOSCOwiPQGRgIz4xb/ChgGHAlkAL9sZt9rRSRPRPIKCgr2OQ3hYMB6ARljTJxEAsBGoG/c+1xv2d74HvCaqtbULlDVzepUAY/jqp
p2o6pTVXWsqo7Nzs7ey4+tFwpaFZAxxsRLJADMAYaIyEARScJV5Uzby8+ZQqPqH69UgIgIMAlYvJfH3CuhgJUAjDEmXosBQFUjwI246ptlwEuqukRE7hKRiQAicqSIbAAuAh4SkSW1+4vIAFwJ4t+NDv2siCwCFgFZwN2tz07zwkGxcQDGGBMnkV5AqOp0YHqjZXfEvZ6Dqxpqat81NNForKon701CWyscDNg4AGOMieOLkcDg2gCqrQRgjDF1fBMAwoGAVQEZY0wc3wSAUNDGARhjTDwfBYAANdYGYIwxdXwTAMIB6wVkjDHxfBMArArIGGMa8k0ACAcD1NhIYGOMqeOrAGAlAGOMqeebABAKCDXWBmCMMXV8EwDcbKAWAIwxppZvAoCbDdSqgIwxppZ/AkDA2gCMMSaebwJAOGhtAMYYE883AcCqgIwxpiH/BICANQIbY0w83wSApJC1ARhjTDzfBAAbB2CMMQ35JwB4TwRTtVKAMcaAjwJAOCAA1hBsjDEe3wSAUNBl1doBjDHGSSgAiMgEEVkuIvkicmsT608QkXkiEhGRCxuti4rIAu9nWtzygSIy2zvmiyKS1PrsNC8cdCUAmxHUGGOcFgOAiASBB4AzgeHAFBEZ3mizdcAVwHNNHKJCVUd5PxPjlt8D3Kuqg4Fi4Op9SH/CQrVVQFYCMMYYILESwDggX1VXqWo18AJwXvwGqrpGVRcCCd1ei4gAJwOveIueBCYlnOp9UF8FZCUAY4yBxAJADrA+7v0Gb1miUkQkT0Q+F5Hai3wmsENVIy0dU0Su9fbPKygo2IuPbSjJCwD2XGBjjHFCB+Az+qvqRhE5CHhfRBYBOxPdWVWnAlMBxo4du89X71BtG0DESgDGGAOJlQA2An3j3ud6yxKiqhu936uAD4EjgO1AdxGpDUB7dcx9UVcFZI3AxhgDJBYA5gBDvF47ScBkYFoL+wAgIukikuy9zgKOBZaqG431AVDbY+hy4I29TfzeqB0HUGONwMYYAyQQALx6+huBmcAy4CVVXSIid4nIRAAROVJENgAXAQ+JyBJv90OAPBH5EnfB/6OqLvXW/RK4RUTycW0Cj7ZlxhqzcQDGGNNQQm0AqjodmN5o2R1xr+fgqnEa7/cpMLKZY67C9TA6IEI2DsAYYxrwzUjgcMBKAMYYE883AaC2BGDjAIwxxvFNAAjbOABjjGnARwHAxgEYY0w83wSAUMDGARhjTDzfBIC6EoA1AhtjDOCjAGAjgY0xpiH/BAAbCWyMMQ34JgCEbSSwMcY04JsAUDcOwKqAjDEG8FEAqBsHYCUAY4wBfBUAatsArARgjDHgowBQNw7AAoAxxgA+CgA2DsAYYxryTQAQEYIBsUZgY4zx+CYAgBsLYN1AjTHG8VUACAcDVgVkjDEenwUAqwIyxphavgoAISsBGGNMnYQCgIhMEJHlIpIvIrc2sf4EEZknIhERuTBu+SgR+UxElojIQhG5OG7dEyKyWkQWeD+j2iZLzQsHxMYBGGOMp8WHwotIEHgAOA3YAMwRkWmqujRus3XAFcDPGu1eDlymqitEpA8wV0RmquoOb/3PVfWV1mYiUaFgwMYBGGOMp8UAAIwD8lV1FYCIvACcB9QFAFVd461rcHVV1a/jXm8SkW1ANrCDdhAKij0S0hhjPIlUAeUA6+Peb/CW7RURGQckASvjFv/Oqxq6V0SSm9nvWhHJE5G8goKCvf3YBsIBKwEYY0ytA9IILCK9gaeBK1W19gr8K2AYcCSQAfyyqX1VdaqqjlXVsdnZ2a1KRyho4wCMMaZWIgFgI9A37n2utywhItIVeBu4TVU/r12uqpvVqQIex1U17VehYMCqgIwxxpNIAJgDDBGRgSKSBEwGpiVycG/714CnGjf2eqUCRESAScDivUn4vkgKilUBGWOMp8UAoKoR4EZgJrAMeElVl4jIXSIyEUBEjhSRDcBFwEMissTb/XvACcAVTXT3fFZEFgGLgCzg7jbNWR
NCgYBVARljjCeRXkCo6nRgeqNld8S9noOrGmq83zPAM80c8+S9SmkbCAWFiprogf5YY4z5RvLVSOBwMGBTQRhjjMdXAcBmAzXGmHq+CgBuNlArARhjDPgsAISCQsS6gRpjDOC3AGC9gIwxpo6vAkBSyGYDNcaYWr4KAKFAwKqAjDHG468AEBRqIlYCMMYY8FkACAcD1Ng4AGOMAXwWAGwcgDHG1PNXAAi6NgBVCwLGGOOrABAOCIA1BBtjDD4LAKGgy65VAxljjM8CQDjoSgDWEGyMMb4LAFYCMMaYWr4KAKHaEoCNBjbGGH8FgHDAZdcCgDHG+CwA1JYArArIGGN8FwC8NgBrBDbGmMQCgIhMEJHlIpIvIrc2sf4EEZknIhERubDRustFZIX3c3nc8jEissg75l9FRFqfnT2rHQdQYyUAY4xpOQCISBB4ADgTGA5MEZHhjTZbB1wBPNdo3wzgTuAoYBxwp4ike6sfBH4ADPF+JuxzLhJk4wCMMaZeIiWAcUC+qq5S1WrgBeC8+A1UdY2qLgQa162cAbyjqkWqWgy8A0wQkd5AV1X9XN28DE8Bk1qbmZbYOABjjKmXSADIAdbHvd/gLUtEc/vmeK9bPKaIXCsieSKSV1BQkODHNs3GARhjTL1vfCOwqk5V1bGqOjY7O7tVxwoFbByAMcbUSiQAbAT6xr3P9ZYlorl9N3qv9+WY+6y2DcACgDHGJBYA5gBDRGSgiCQBk4FpCR5/JnC6iKR7jb+nAzNVdTNQIiLjvd4/lwFv7EP690rYxgEYY0ydFgOAqkaAG3EX82XAS6q6RETuEpGJACJypIhsAC4CHhKRJd6+RcBvcUFkDnCXtwzgeuARIB9YCcxo05w1IRSwcQDGGFMrlMhGqjodmN5o2R1xr+fQsEonfrvHgMeaWJ4HHLo3iW2tul5AVgIwxphvfiNwW7KRwMYYU89XAcBKAMYYU89nAcDGARhjTC1fBQAbB2CMMfX8FQBsHIAxxtTxVQCoGwcQsyogY4zxVQCoGwdgJQBjjPFXALBeQMYYU89XAUBECAXExgEYYww+CwDgngts3UCNMcaHASAcCFgVkDHG4MMAEAqKdQM1xhh8GQAC1gZgjDH4MACEA2JVQMYYgw8DQCgYsHEAxhiDLwOAUGMjgY0xxn8BIMlKAMYYA/gwANg4AGOMcfwXAAIBqwIyxhgSDAAiMkFElotIvojc2sT6ZBF50Vs/W0QGeMsvEZEFcT8xERnlrfvQO2btuh5tmbHmhINCTcSqgIwxpsUAICJB4AHgTGA4MEVEhjfa7GqgWFUHA/cC9wCo6rOqOkpVRwGXAqtVdUHcfpfUrlfVbW2QnxaFAjYOwBhjILESwDggX1VXqWo18AJwXqNtzgOe9F6/ApwiItJomynevu3KjQS2KiBjjEkkAOQA6+Peb/CWNbmNqkaAnUBmo20uBp5vtOxxr/rnv5oIGACIyLUikicieQUFBQkkd8/CNhLYGGOAA9QILCJHAeWqujhu8SWqOhI43vu5tKl9VXWqqo5V1bHZ2dmtTksoYL2AjDEGEgsAG4G+ce9zvWVNbiMiIaAbsD1u/WQa3f2r6kbvdynwHK6qaf+Y9zR8/iAA4VDAJoMzxhgSCwBzgCEiMlBEknAX82mNtpkGXO69vhB4X1UVQEQCwPeIq/8XkZCIZHmvw8A5wGL2lxUzYc4jgBsIVl4dxUueMcb4VosBwKvTvxGYCSwDXlLVJSJyl4hM9DZ7FMgUkXzgFiC+q+gJwHpVXRW3LBmYKSILgQW4EsTDrc5NczKHQPEaiNYwun86m3dWsnRzyX77OGOM+TYIJbKRqk4Hpjdadkfc60rgomb2/RAY32hZGTBmL9O677IOhlgEitdwzsj+3PXmEl6bt5ERfbodsCQYY8w3jT9GAmcNcb8LV5CelsRJQ3vw+oJNNieQMcbX/BEAMge734VfA/Dd0bkU7qrik/zCdkyUMca0L38EgE7dIS0btq8A4K
Rh2XTrFOa1+Y07MxljjH/4IwCAawguzAcgORTk3MN7M3PJFnZVRdo5YcYY0z78EwCyBteVAADOPyKXypoYMxZtbsdEGWNM+/FPAMgcAuXbobwIgNH9ujMgM5UX5qy3xmBjjC/5JwBkHex+b3fVQCLClccOZO7aYi6e+jkbd1S0Y+KMMebA81EAqO8KWuvyYwZw/+RRfLW5hLPu/5jpizbbCGFjjG/4JwB07w+BcF1X0Frnjcrh7ZuOp29GJ65/dh6n3/sRz85eS3m1NQ4bYzo2/wSAYAgyBtZVAcUbkJXGqz86lj9ddDhJoQC3vbaYo37/Hj9/+Us++rrA2giMMR1SQlNBdBiZQxpUAcVLCgW4cEwuF4zOIW9tMc9/sY4Zi7fw8twNZKQl8Z2h2ZwyrCfHH5xF15TwAU64Mca0PX8FgKzBsOJfEI24EkETRIQjB2Rw5IAMKmuifLi8gOmLNvPesm28Om8jIpCb3omDsjozKLszxw7O5JhBWXRKCh7gzBhjTOv4LAAcDLEa2LEWMge1uHlKOMiEQ3sx4dBeRKIx5q/fwaz8QlYWlLGqYBezV2/nsVmrSQkHOHZQFqcN78kph/Qku0vyAciMMca0jr8CQKbXE2h7fkIBIF4oGKgrGdSqikT5YnUR7y3bxrvLtvLeV9sQWcTofumMG5jBoX26MaJPV/pnptLMEy+NMabd+CsA1HUF/RoOPqPVh0sOBTl+SDbHD8nmznOH89WWUt5ZupX3lm3lkY9X1T18Pj01zBH90hnTP50zRvRicI/Orf7spnyyopDfT1/G/ZNHMaRnl/3yGcaYjkO+Tf3ex44dq3l5ea07yD0D4ZBzYeJf2yZRzaiKRFmxdReLNu5k/rpi5q4tZmVBGSJw1sje3PCdwfTPTGXhhp3MX19Mt05hJo3KIS1532Ly6sIyzvv7J5RURjhyQDovXns0gYCVOowxICJzVXVs4+X+KgGAKwVsWwaqsB+rZZJDQQ7N6cahOd2YMq4fAAWlVTz56Rqe/HQNby/cTDAgRGP1AfieGV8xZVw/LhiTy4DMNJJCifXSLams4Zon5xAMCDefMoT731vBS3nrmex9rjHGNMV/JYB3fwOf/AX6jocz74E+o9omcXthZ0UNz3+xjvKqCEf0T+eIvt1ZWVDGY5+sZsbizcQUAgJ9M1Lp2TWFgEBAhNSkEIN6pDE4uzP9MlJJCgUIBwP8+V/L+XhFIU9ffRTjD8rg4qmfs3xLKe/99ESyOluDtDF+11wJwH8BIBaF+c/Ae3e5yeHG/wjO+P1+LQ3sjQ3F5XyxuojVhWWsKiyjoLQKFGKqlFZGWF1YRnUTA9PunnQo/zG+PwD520o58/6POfPQ3lx6dH8WrNvB8q2l9M9I5Yh+6RzWt1uLYxnmrSsmPTWJgVlpCaddVa2x25hvoFYFABGZANwPBIFHVPWPjdYnA0/hnvO7HbhYVdeIyADcg+SXe5t+rqo/9PYZAzwBdMI9b/hmbSExbRIAalXuhHfugLlPwAWPwsgL2+a4+1kkGmN9cQUbiyuoicWIRJWMtDBj+mc02O7P/1rO396vH/WcmZbE9rLquvcZaUlkd04mu0syRw/K5ILRufTqlsLmnRXc/fYy3l64mXBQ+OGJg7jhpMGkhJsf5xCNKbe9toiPvi7g1xNHcPqIXm2fcWPMPtvnACAiQeBr4DRgAzAHmKKqS+O2uR44TFV/KCKTgfNV9WIvALylqoc2cdwvgJuA2bgA8FdVnbGntLRpAABXGnj0dCheDTfMgbTMtjt2O6uKRHl+9jpy0lM5vG83enRJYWdFDQs37ODL9TvYtLOSwtIqNu6oYMmmEgIC4w/KZMH6HURjyg9PHMT6onJenb+R/pmp3HjSYI4amEnfjE4N7vJrojFueelL3vxyEzndO7FxRwXnHt6H30wcQUZaUjv+BYwxtVoTAI4Gfq2qZ3jvfwWgqn+I22amt81nIhICtgDZQH+aCAAi0hv4QFWHee+nAN
9R1ev2lJY2DwAAW5fCQyfAiPPhgofb9tjfEmsKy3hl7gbeWriJob26cPvZw+mbkQrAp/mF3P76YlYVlgGQ1TmZsf3TGTvAdWv9v3+vZOaSrfzqzGFcddxAHvxwJX97fwVdUsL8csJQLhrT13ojGdPOWhMALgQmqOo13vtLgaNU9ca4bRZ722zw3q8EjgI6A0twJYgS4HZV/VhExgJ/VNVTve2PB36pquc08fnXAtcC9OvXb8zatWv3OvMt+uAP8O8/wvdfhoNPb/vjf8tFY8qKbaXkrXHdWfPWFrG+qP75Cb8+dzhXHDuw7v3yLaXc/voi5qwp5vC+3bl1wjB6dUshKEJKUoDszsm7tRWUVtbQxeZYMma/aK9uoJuBfqq63avzf11ERuzNAVR1KjAVXAlgP6QRjr8Flr4O026ESf8Lg0/dLx/zbRUMCMN6dWVYr651Dc1bdlYyZ00RmWlJHDM4q8H2Q3t14aXrjub1BRv5/fSvmPLw5w3WZ3dJZnS/7gzp0YX8bbuYv76YrSVVjO2fztXHDeT0Eb0IWqnBmP0ukQCwEegb9z7XW9bUNhu8KqBuwHavUbcKQFXneiWDg73tc1s45oETSnYNwS9dBs9c4KqDzvgDdO3dbkn6puvVLYVzD+/T7HoR4fwjcjn1kJ58vKKQ6kiMSEwpraxh4YadzFtXzMwlW+mfmcrRB2XSNyOVNxZs4kfPziOneycOzelKRloyWZ2TmHBoL0b06XYAc2eMPyRSBRTCVeGcgrtIzwG+r6pL4ra5ARgZ1wj8XVX9nohkA0WqGhWRg4CPve2KmmgE/puqTt9TWvZLG0C8mkqYdT98/Gf3Pncs9D8GDjoJBhy7/z7Xp6oiUZJD9b2LojHlnaVbeXHOOjbuqKCorIbi8mqiMeXcw/vw09MOZkCjbqmVNVE+W7WdwtIqksNBkoKBugF2MVX6dO/E4bndrHuq8bXWdgM9C7gP1w30MVX9nYjcBeSp6jQRSQGeBo4AioDJqrpKRC4A7gJqgBhwp6q+6R1zLPXdQGcAPz6g3UD3ZPtKyHsM1s6CzV+CxmDcdTDhDxCwaZ8PpJ0VNTz80Soe/WQ11dEYh/bpSm56Kn26p5C/bRefrtxOVWTPD+wZ1qsLl4zvz6RRfXZrZ6iojlJUXk1O9077MxvGtCsbCLavqkrhwz/CZ3+Hg8+ECx+FpMQHR5m2sa20kkc/Wc3STSVsLK5gw44KendL4aShPTh5WA8GZKZRHY1SFYkRi0Eg4EZPL1i/g2c+X8uSTSWEg8LofukcOziL7C7JvLdsG8vzvyYzUkDOyOO5dcKwut5PvhX1HoXazPMyzLeTBYDW+uJhmPEL6HUYXPwMdO/b8j4Hwq4CWPIqHHmNr0onezPqWFX5csNOZizezKz8QpZt2sFpkselKR8zPraAIFHOj/6BJTqQSaP6UFIRYcW2UraWVHFEv+58Z2gPjh+SRUZaEuFggORQYLeBcVWRKOuLyunTvROpSQfg4lm1yz3caPh5bXfetyyGF6ZA1xy49HUIp7TNcU27swDQFr6eCa9c7W4vJ/4dhk9sv7SAm9Du2Ysg/x2Y8iIMndC+6fmWqJx+Oylf/A3t0hs57GKY8ygVA0/h9uAtvLVwEznpnRjSozOZnZOZvWo7KwvKdjtG5+QQPbomk9U5mW0llawrKiemkBIOcNLQHpw5sjfHD84ifX8MhlOFl69wPdfO/SuMubz1x1w+A/5xjesQUb4dDp8Ckx5seYoUVYhUQtiq0L7JLAC0laJV8MpVsGk+jLkShp0NaVnQuSd06d22cwrFYhCtbv5ObP6z8Mb1IAEYehZMfrbtPrujKiuEew+FYWfBdx92d8/v3AGf/g1+PBdNH7hbyWJ9kZufqbw6QnVUqayJUririq0llRSX7OL44GJOrJnFkB0f82nG+fy8aKKbwwnqejSlJYUoLq+mqLyGrikhDs/tzuF9uzOiT1d6d0tp+Jmq7iIcSoFwqrvhiDf3CX
jzZkjqDJ3S4cfzINREoNm8EN76CRx78+43K8v/CduWQKQKSrfAvKfcxIiTn4d5T8KHf4DTfgvH3uS2r9zpfqfE9caq2gWvXgurPoDT7oKxV++e1j2JxWDnOqgscd/zWAR6j/r2lTzKCl218Dc4CFoAaEuRanj/LnfRiNc1FwadBINOdmMJUrq27nOm/djdmV3+FvQY1nBdySZ4YDz0HAE5o2H2/8Ety6Bzj9Z95jdBLLr/qrPe/Q18ci/c8AVkH+yWlW6B+0bCEf8B59y7+z4VO1zp75BzISmujaBoNTx+JpRudhfGjINg03xi332EuV1PYd7aYhZvKmHJpp1UR2JkdgpyU9VDlEWEn5ZOoSbmLvqpSUEGZqXRs2sKOZUruKLoPgZVL6//nC594ISfsXP499m8chFDp50L/Y5Gxl8Pz10EZ//ZVQHGW/EuvHw5VO+CUCe46p/1M99+8TBM/1n9tqEUGD7J5T0p1V2YX7kSlr4BY690HSE2zYdgMhzzYxcUqkrhuYth62LofbhbP/AEOO8B6N5oGnJV2DQPitfCrm1QssE75pdQtbPhtukDYMI9bVeaXf5P16GjqsSlORCE/se5/9H+Rzdsz1N121UUu2qwYAsDE7cuhY//BItfdf93J/wcRl/edDCOePNwxa+LRV2Pw/Wz4agfweBT9tuklBYA9oeSTbBzA5QVwM6NsOZjWP1vd7cUTHJBYPgkGDHJFa3jVexw2yQ10+i4ZTH833HudZde7h84fYB7rwrPfQ9Wfww/muW+SA8c6e7Cjr153/NTusXdUTZO64FUsgkeOArO/gscdlHbHruiGO4dCUNOhYueaLjuzZthwfPwn4ugS0+3rLocvpjqAkblDhh0Ckx5wf0TV5fBI6e5i9n5D7l1AE+d5y52V/0T+hxRf/xYFF67Dha9DEDksCksOOJuvtpaxsqCXWzaWsCEwseZWDmNUunKk5xFWbWSm6acGF5G/7IvWRHLIUCMrlLGhfo/pGb04a8Vt9Ijto0Xxr9Bvx4ZHJTdmYPWvUJ4xk+hx3CY9AA8/31A4QcfuO/nqz9wJcYLHnEljKYuOtXlLrhtXQw5Y93FfXu+a29K6+EupFWlcOFjMOR0mPsE+q/biRFAr36HUM+4G5ZZf0F0YHEAABETSURBVIV3/qv+fTDJpS1ntAseqZkuCFXvcqPyC5fDwRNg7FXQa+S+l6zzHoe3b4FuudC9PyR3hepSWDcboq6ERjjVLQ8luzv5Gq+6L5gMPYe7EsmYyxuey8J8eO83sGyaK4WNvhw2L3C9Brv3g/E3uLaZrr3d3zHvUfjkPlfCOf4WGHetV3q6BlZ96G4eKne6v8XYq9zfI9wJUrNcm+PelKqaYQHgQIlGYGMeLJ3m6mhLNsKwc1zDce2XuLzIXdwDIbj0taafT/zsRe7OYMoL8PwUd2G+9FXYtAAWvghf/xMm/NFNZw3w6Bmu2uDGOe5z1nwCb9wIoy+FY25q+W5m3efw1CQ39uHS19uuF0ik2lU19B6V2D/xR/8D798NGYNcXtqyJPDhPfDh7+GHs6BXo/kJt6+Ev491AXTE+bDkNRcQdm1xF7jccfDB3S6gX/Ao/ONqdwG45OWGI8d3FcDU7wAKl70BmYNdwJ72Y1jwDJxyB0RrXBXLmCvg1F+7O/LP/9cFqDFXwql3UhnqylsLN/P0Z2tYsmkn1/ZYxjWVT5JeuZ53Rz/ApzKKtdvLyS74nHvKbufXNZfxcWwkt4We5eTgAuaGR/Puof/N4YNyOSi6isFvXkCsay7B4lUUZ43ljRH30yk1jX6ZqQzITCOrc3LdA4gi0Rgbd1SwemsRZRVVBJPTCHvjK7oXfcnA+ffQqWILwSnPEexzGKrKzCVbeOKt9/l7xa3slC48MGgqRw3rx9FJq+j7xgXIwRPg5NtdVWlKdwgEKK+O8NWWUob37lrfqB6tcaXZD//oAgK4C2Kvke471PtwdzFOH9Dw+xSpckE5pZurEv3oT+
58DTkdLnqy4Y1WTQWs/RQ2znOBvXKna8dI6+Eu2sld3WNjtyyEjfNd0Bh2Dhx9Ayx53V3QQykw/nr3/5ea4c7xyvfg/d+5GwAE+h7lJprctdWNJQoEIf9dV7qIRd1nn/UnOOxi9z/9yb1QtLLh97JbPzdT8ciLXEDaRxYA2kMsBrPudc8eOPd+9w+v6kYcL58ByV3cl/WSl93dUK01s+CJs9zF4bifwIY8eHJi/d1J514w6vtw8n/V3x3MfwbeuAGumunuHh4/2x27aif0HOkegRn/GfE2L4QnznFBorzQFWVPvr1+/drPXEApK3Drc8fB+B8mkP8ovHgpLH/b3XGe/Zc9j66OxeBvR7jSUeUO9487YlLzx5bAnoPK1/9yd7wDjnfVHw8cBf2Ohu+/0PT2L13ugjaABF113nG31A8C/PTv8K/boMcIF9RO/Q0c95+7H2fzly4gRyogqYsrwW1fASfeCif9yn0H3v+tK/4HwhCrcXe8J/wCcsfsdrhoTN3UGNEaV0JK799wgyfOQTfNh5pKIsEUZuVcydSqM8jbsItqb4zEhMAX/F/SfSyIHcQl1bdRxu711UnBAGnJQXZVReqeZ908pUtKmHEDMiitjPDFmiKG9uzCzw7ewilzruP9wFH8tPxK3k7+f4BwW8//pUv3LDLTkkhJCjJ/7Q7mry+mJqr07pbCT049mO+OzgHgi9VFfLRkDWxZSHrJcnpXrGBIbDWDWUcY1021NNidwm4j6ZKWSmb5KqRoFWjUy0gXd9E+bDKc9/eWb372pLIEPn/QdQOvKnHfudGXw0n/r/nq1oLlrvps2ZsuOJzwi/rv0OqPXemhcqcrPfUaWb9fLOqqFWvK3KDUopWw+B+w8gOXtx+8Dzm7fz8SYQGgvcRi8Mz5sP4LuO4jd6c97UZ38Rh2jltXth3OvQ8OmeiKoo+e5qqWfjyv/s5l7WfujnPY2e4i1vjOuGoX/Hmou+vYstDdoVw1092NvP0zKNvm7mBOuq1hY1VhPjw+wRV5r/on/PsemP80XPIPOOhEd6Gadb/bNrmbq/4o3w7Xfbz7XXQ8VXj7p+5uacR3Yfl09xkn/sKtL17t7nhP/119UFj9ETx5rut98tGfXIC89sPdL/Ir3oFpN7l/wDPvgX7jd//8SDXcfziUbmq4/Jr3XCmnKYX5roQw8ERX35+asfs279/tSikjvuv+gZsLQNtXuuCzbRkUfOWqiI69uX57VXecwhWuXr33YU0fJxHr58DTk9xd4km3QedswHVNXbqphO27qimprCG0dSHB7CHk9symX0Yqu6oirN1ezprtZRSXVbOrOkJZVYS05BCDsjozMDuN7p3C1ESVSCxGTdSNro7FlM07K5m9ejuzVxWxqyrCTacMYfKRfQkFA6664907qenSl+CuzTwy5EHeKcmlcFc123dVsasqwog+3ThmcCZDe3bhyc/W8uX6HfTLSKWksoYd5TUkhwIclN2ZHl3cMyuSQwGIVJNVsZJuxYvoWbKYg2uWEyTK6kA/NPsQsnv2pitlpMVKkYyBVIy6hnA4WNd1NykUIBwIoLgHLAm49CaivMhd1PuNhx6H7Pu5qrU3j6Tdtc39/xxx2T5XB1kAaE8lm+DBY1xdZvFadyd+2TR3Mku3wLMXwpZF7s6l75Gw8v1969437ceuN0dqprv4Zw1xy+MffpMxyAWbWNTdoSx5zd3VXDUTsga74vHDp7iGzYyDXHXWmCvh9LshubP7R/jbaFc3edkbzX+JP/qTCx7H3uzaJravdOlbO8utT+4GNeXuLvv7L7nj/OMad9f+s+Ww8CV48yZXHTXoJC8fJe4OfN5TkDXU1UGXbnIXvtN+27B08eULrs794mdduld+4OprT/z53v1NG1N1QTxndPu2lTS2n59xvVdqS7nLprmn7R19Q6PVDcdwuCqkrTw2azU53TtxxoienHBwdovjKcqqIszKL2TG4i28u3QrpVWRvU5qemqYHl1SyOzsxn
iEvEkIt5dVU7iriuKyajqnhMjqnFxXTRYUIRgQFKX28tkpHKRLSoiuncKkJYdISwqSmhQiHArUPdI1FBBSwkHvx40lSQkFSardJiAERBDc9lXRKJt2VLKhuJyNxRVcMr4/nZP3rWrWAkB7W/YmvPgfrv7zR59Ct5z6ddGIu1tc8prbrlsuXPvvva+H3/aVu8ie+cemi4qr/u3W7/Cm1A6nuvrrk/5fw7uawhWuHlsCrupoxPkNjzN7Ksz4uesyOOys+uWqrjEs73HXlXDk91wDae1dSyzm6lbTst3d9eyH4J+/dD1Hhp0Nfxrq9cT5i6vTve8wyB4Kk59zx/v0b64+9Zib4Du/co1qs+5zjYzpA+C6f7vSjaprY4lF4frPvjkXRj+pLof1n7u67wPw96+KRNlYXMGOihp2lFdTWhkhElVqojGqozGqIzGqIjFqojECIgQEaqJK4a4qtpVWUVRWTSQaI6ruol77xLzuqUmUVtZQuKuK7WXVVEdixFSJxlwQq81ZRU2UkooaSqsi7K9L6oybj+eQ3vvWs9ACwDdB7Z1rv6Oa3yYacXMPNdWVrC1Ul7n2gm65ritcc32XC/NdF7mm6uyjNa5EE4vA9bPd74UvuuqeLYtct8PDJ8OZ/73nfMRi8OQ5bp8jr3aNYNd+WN/jorb3SG0vif7Hwal3Qt9xDY+z8n14+nzXKDfhD+5u/+lJbrDe6Ev35a9kzD6JxZTKSJSyqijl1RFqojFi3jO9I94YksqamPsdca9rg4p6gUVx9zChoNC7Wydy0zuRk96pxed474kFANO2Vrzjqq4GneLaGSqKXYPWmCtclUz8gKE9KVoNDx7rGr56joQfflx/x1hVCo+cCukDXfe5xhf+eNN/7rpsXvaGCxxbFsFPFn+zqmmMaSft9UAY01ENOc11sct/1zVmj/+Ra5ze2+J+xkA47TduYNLoyxrun9wFbpid2HFO/Y2783/latdT6eTb7eJvTAssAJh9d9GTrq92a0cfH3mNa3AeeMK+HyMpFb77kBucFerkpiUwxuyRBQCz75JSmx/JvDdE3DD41soZ46brjkWb7sJpjGnAAoDpWBr3WDLGNKv1k0wYY4z5VrIAYIwxPmUBwBhjfCqhACAiE0RkuYjki8itTaxPFpEXvfWzRWSAt/w0EZkrIou83yfH7fOhd8wF3k8HmMjeGGO+PVpsBBaRIPAAcBqwAZgjItNUdWncZlcDxao6WEQmA/cAFwOFwLmquklEDgVmAnFzIHCJqtrILmOMaQeJlADGAfmqukpVq4EXgPMabXMe8KT3+hXgFBERVZ2vqrXTMS4BOomIjc4xxphvgEQCQA6wPu79BhrexTfYRlUjwE4gs9E2FwDzVLUqbtnjXvXPf0njB7F6RORaEckTkbyCgoIEkmuMMSYRB6QRWERG4KqFrotbfImqjgSO936anLVLVaeq6lhVHZudnb3/E2uMMT6RyECwjUDfuPe53rKmttkgIiGgG7AdQERygdeAy1S17nlnqrrR+10qIs/hqpqe2lNC5s6dWygiaxNIc1OycG0SfuPHfPsxz+DPfFueE9O/qYWJBIA5wBARGYi70E8Gvt9om2nA5cBnwIXA+6qqItIdeBu4VVVn1W7sBYnuqlooImHgHODdlhKiqvtcBBCRvKZmw+vo/JhvP+YZ/Jlvy3PrtFgF5NXp34jrwbMMeElVl4jIXSIy0dvsUSBTRPKBW4DarqI3AoOBOxp190wGZorIQmABLrA83BYZMsYYk5hv1fMAWsOPdwrgz3z7Mc/gz3xbnlvHTyOBp7Z3AtqJH/PtxzyDP/NteW4F35QAjDHGNOSnEoAxxpg4FgCMMcanfBEAWprMriMQkb4i8oGILBWRJSJys7c8Q0TeEZEV3u/09k5rWxORoIjMF5G3vPcDvUkJ871JCpPaO41tTUS6i8grIvKViCwTkaM7+rkWkZ943+3FIvK8iKR0xHMtIo+JyDYRWRy3rMlzK85fvf
wvFJHRe/NZHT4AxE1mdyYwHJgiIsPbN1X7RQT4qaoOB8YDN3j5vBV4T1WHAO9R30W3I7kZ10W51j3Avao6GCjGTVbY0dwP/FNVhwGH4/LfYc+1iOQANwFjVfVQIIgbk9QRz/UTwIRGy5o7t2cCQ7yfa4EH9+aDOnwAILHJ7L71VHWzqs7zXpfiLgg5NJyo70lgUvukcP/wRpqfDTzivRfgZNykhNAx89wNOAE3/gZVrVbVHXTwc40buNrJG0iaCmymA55rVf0IKGq0uLlzex7wlDqfA91FpHein+WHAJDIZHYdivc8hiOA2UBPVd3srdoC9GynZO0v9wG/AGLe+0xghzeAETrm+R4IFOAmU5wvIo+ISBod+Fx7U8f8CViHu/DvBObS8c91rebObauub34IAL4iIp2BfwD/qaol8evU9fntMP1+ReQcYJuqzm3vtBxgIWA08KCqHgGU0ai6pwOe63Tc3e5AoA+Qxu7VJL7QlufWDwEgkcnsOgRvXqV/AM+q6qve4q21RULv97b2St9+cCwwUUTW4Kr2TsbVjXf3qgmgY57vDcAGVZ3tvX8FFxA68rk+FVitqgWqWgO8ijv/Hf1c12ru3Lbq+uaHAFA3mZ3XQ2AybvK6DsWr+34UWKaqf4lbVTtRH97vNw502vYXVf2Vquaq6gDceX1fVS8BPsBNSggdLM8AqroFWC8iQ71FpwBL6cDnGlf1M15EUr3vem2eO/S5jtPcuZ0GXOb1BhoP7IyrKmqZqnb4H+As4GtgJXBbe6dnP+XxOFyxsHaCvQVevjNxvQZW4GZczWjvtO6n/H8HeMt7fRDwBZAPvAwkt3f69kN+RwF53vl+HUjv6Oca+A3wFbAYeBo3qWSHO9fA87h2jhpcae/q5s4tILhejiuBRbheUgl/lk0FYYwxPuWHKiBjjDFNsABgjDE+ZQHAGGN8ygKAMcb4lAUAY4zxKQsAxhjjUxYAjDHGp/4//p+DE+hKREUAAAAASUVORK5CYII=\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": [],
            "needs_background": "light"
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/",
          "height": 282
        },
        "id": "hAG5kF9VFHgA",
        "outputId": "a02cef4b-f566-4ae2-9279-c207315560fc"
      },
      "source": [
        "plt.plot(history.epoch,history.history.get('loss'),label='loss')\r\n",
        "plt.plot(history.epoch,history.history.get('val_loss'),label='val_loss')"
      ],
      "execution_count": null,
      "outputs": [
        {
          "output_type": "execute_result",
          "data": {
            "text/plain": [
              "[<matplotlib.lines.Line2D at 0x7f70fea8bc18>]"
            ]
          },
          "metadata": {
            "tags": []
          },
          "execution_count": 37
        },
        {
          "output_type": "display_data",
          "data": {
            "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAeCklEQVR4nO3dfZAcd33n8fe3u2dmHyXtSmvLlmRLYIEtwPhBGF/sS1wJ1NmQs0hBgp27OrhAXBT44C6pu7OLK9+dU7m7QEwugIuLeQgkBZjH45RERIB5COHBaG0T25L8sBgZSehhrcfV7s7OTPf3/uheafbJO5J2NeqZz6s8tds9rZlvb68/+51fP5m7IyIi+Rc0uwAREVkYCnQRkRahQBcRaREKdBGRFqFAFxFpEVGz3njFihW+du3aZr29iEguPfLIIy+4+8BszzUt0NeuXcvg4GCz3l5EJJfM7Pm5ntOQi4hIi1Cgi4i0CAW6iEiLUKCLiLQIBbqISItQoIuItAgFuohIi8hdoG/bdZg/3fo0tThpdikiIueV3AX6Y784wke/M0S5pkAXEamXu0AvhGnJVQW6iMgUuQv0YpSWXNGQi4jIFPkL9KxDr6hDFxGZIn+Brg5dRGRW+Qt0degiIrPKXaCf3CmqDl1EZIrcBfrJIRd16CIiU+Qu0Cc7dI2hi4hMlbtAV4cuIjK73AV6SYEuIjKr3AX6qZ2i3uRKRETOL7kL9FPHocdNrkRE5PySu0AvhAZAtaYOXUSkXu4CfbJDn9BRLiIiU+Qu0EthCGinqIjIdA0FupndbGZPm9mQmd01xzK/Y2Y7zGy7mX1uYcs8pRBlQy7q0EVEpojmW8DMQuB+4PXAHmCbmW129x11y6wH7gZucPcjZnbBYhWsa7mIiMyukQ79OmDI3Z9z9wrwILBp2jK/D9zv7kcA3P3gwpZ5ShgYZurQRUSmayTQVwG766b3ZPPqvQx4mZn9wMx+bGY3z/ZCZnaHmQ2a2eDw8PAZFWxmFMNAHbqIyDQLtVM0AtYDNwG3Ax83s2XTF3L3B9x9o7tvHBgYOOM3K0YBEwp0EZEpGgn0vcCauunV2bx6e4DN7l51958Dz5AG/KIohoGGXEREpmkk0LcB681snZkVgduAzdOW+Rppd46ZrSAdgnluAeucohhpyEVEZLp5A93da8CdwFZgJ/BFd99uZvea2a3ZYluBQ2a2A/gO8B/d/dBiFV1Qhy4iMsO8hy0CuPsWYMu0effUfe/AH2SPRVeMAl0PXURkmtydKQroKBcRkVnkMtALUUBFl88VEZkil4FeCgMqNV0+V0SkXi4DvRCZbnAhIjJNLgNdY+giIjPlM9B1HLqIyAy5DHQdhy4iMlMuA13XchERmSmfga4OXURkhnwGus4UFRGZIZ+BrqNcRERmyGWgFyINuYiITJfLQE/H0J0k0clFIiKT8hnoUVp2NVGXLiIyKZ+BHqZlaxxdROSUfAZ6pEAXEZkul4FeyDp0XaBLROSUXAa6OnQRkZlyGeiF0AB0cpGISJ1cBnpJHbqIyAy5DPSTQy7q0EVETmoo0M3sZjN72syGzOyuWZ5/u5kNm9lPs8c7F77UU07tFFWgi4hMiuZbwMxC4H7g9cAeYJuZbXb3HdMW/YK737kINc6g49BFRGZqpEO/Dhhy9+fcvQI8CGxa3LJeXEFDLiIiMzQS6KuA3XXTe7J5073ZzB43sy+b2ZrZXsjM7jCzQTMbHB4ePoNyU+rQRURmWqidon8DrHX3K4FvAp+ZbSF3f8DdN7r7xoGBgTN+Mx3lIiIyUyOBvheo77hXZ/NOcvdD7j6RTX4CuHZhypuddoqKiMzUSKBvA9ab2TozKwK3AZvrFzCzi+ombwV2LlyJM+lMURGRmeY9ysXda2Z2J7AVCIFPuft2M7sXGHT3zcB7zexWoAYcBt6+iDWrQxcRmcW8gQ7g7luALd
Pm3VP3/d3A3Qtb2twmO/QJdegiIifl8kzRkg5bFBGZIZeBfnLIpabL54qITMploIeBEQZGJY6bXYqIyHkjl4EO6SV0dYMLEZFTchvoxTDQYYsiInXyG+hRqKNcRETq5DfQQ9Nx6CIidfIb6JGGXERE6uU20AthoA5dRKRObgNdHbqIyFS5DfRCGOhMURGROrkNdHXoIiJT5TbQS5E6dBGRerkNdO0UFRGZKreBrjNFRUSmym2gFzSGLiIyRW4DvRgGujiXiEid/AZ6FOhaLiIidfIb6LqWi4jIFPkNdI2hi4hMkdtA15miIiJTNRToZnazmT1tZkNmdteLLPdmM3Mz27hwJc6uGAXEiRMn2jEqIgINBLqZhcD9wC3ABuB2M9swy3K9wPuAhxe6yNkUo+xG0erSRUSAxjr064Ahd3/O3SvAg8CmWZb7I+BPgPIC1jenYpiWrmEXEZFUI4G+CthdN70nm3eSmV0DrHH3v3uxFzKzO8xs0MwGh4eHT7vYepMdunaMioikznqnqJkFwIeAP5xvWXd/wN03uvvGgYGBs3rfQqhAFxGp10ig7wXW1E2vzuZN6gVeCXzXzHYB1wObF3vH6OSQi8bQRURSjQT6NmC9ma0zsyJwG7B58kl3P+buK9x9rbuvBX4M3Orug4tScUZDLiIiU80b6O5eA+4EtgI7gS+6+3Yzu9fMbl3sAudS0E5REZEpokYWcvctwJZp8+6ZY9mbzr6s+ZXUoYuITJHrM0VBgS4iMim3gX7qxCKdKSoiAi0Q6JU4bnIlIiLnh9wGeiE0ACo1degiIpDjQD+5U1RHuYiIADkOdO0UFRGZKreBrqstiohMld9AV4cuIjJFbgO9oA5dRGSK3Ab6ZIc+oQ5dRARogUDXkIuISCq3gR4ERhSYhlxERDK5DXRIj3RRhy4ikspfoO99BL5/H7hTCAN16CIimfwF+vM/gofuhYnjaYeuQBcRAfIY6F396dexQxTDQEe5iIhkchjoy9OvY0coRoEunysikslfoHdO7dArNV0+V0QE8hjok0Mu44cpRKYOXUQkk99AP9mhawxdRATyGOilpWAhjB2moEAXETmpoUA3s5vN7GkzGzKzu2Z5/l1m9oSZ/dTM/tHMNix8qZkggM6+tEPXYYsiIifNG+hmFgL3A7cAG4DbZwnsz7n7q9z9KuADwIcWvNJ6Xcth/DAlnSkqInJSIx36dcCQuz/n7hXgQWBT/QLufrxushtY3D2VXf0nh1x0pqiISKqRQF8F7K6b3pPNm8LM3mNmPyPt0N872wuZ2R1mNmhmg8PDw2dSb6prOYwd1pCLiEidBdsp6u73u/tLgf8M/Jc5lnnA3Te6+8aBgYEzf7NsDF07RUVETmkk0PcCa+qmV2fz5vIg8KazKWpe2Rh6MdTlc0VEJjUS6NuA9Wa2zsyKwG3A5voFzGx93eQbgWcXrsRZdPVDXKHHyrqWi4hIJppvAXevmdmdwFYgBD7l7tvN7F5g0N03A3ea2euAKnAEeNtiFj15PZeljKhDFxHJzBvoAO6+Bdgybd49dd+/b4HrenHZ9VyWJMep1Irn9K1FRM5X+TtTFE526D3JCIlDTV26iEheAz3t0HuTYwC6QJeICLkN9LRD747TQNehiyIieQ30jqWA0V3LAl1DLiIiOQ30IITOPrri9IoD5apuciEiks9AB+jqPzmG/sKJiSYXIyLSfDkO9OV0Zx36geMKdBGR/AZ6Zz+l6lEAhkfKTS5GRKT58hvoXf1EE0cIDA6OqEMXEcl1oNvYYZb3lDioIRcRkRwHemc/1Mqs6XEOashFRCTHgZ6dXLSua0JDLiIi5DrQ09P/L+koK9BFRMh1oKcd+qrSGIdOTBAnup6LiLS3/AZ6dgndC6NREodDOrlIRNpcfgM969CXB6OADl0UEclvoHf2AbDMRgB0pIuItL38BnoYQcdSliTp6f86Fl1E2l1+Ax2gs5/O7BK6GnIRkXaX70DvWk5YPkJfV4
EDxzXkIiLtLeeB3g9jh7igt0Mduoi0vYYC3cxuNrOnzWzIzO6a5fk/MLMdZva4mT1kZpcufKmz6FoOY0e4YElJgS4ibW/eQDezELgfuAXYANxuZhumLfYYsNHdrwS+DHxgoQudVWfaoQ/0lhjWkIuItLlGOvTrgCF3f87dK8CDwKb6Bdz9O+4+lk3+GFi9sGXOoasfqqNc1G0Mn5jAXWeLikj7aiTQVwG766b3ZPPm8g7g67M9YWZ3mNmgmQ0ODw83XuVcspOLLimNUY2dI2PVs39NEZGcWtCdomb2r4GNwAdne97dH3D3je6+cWBg4OzfcOkaANYELwA6uUhE2lsjgb4XWFM3vTqbN4WZvQ54P3Cru5+bPZT96wBYmewHdHKRiLS3RgJ9G7DezNaZWRG4Ddhcv4CZXQ38BWmYH1z4MuewdA1YQP/EHkAnF4lIe5s30N29BtwJbAV2Al909+1mdq+Z3Zot9kGgB/iSmf3UzDbP8XILKyrC0tX0jE0GuoZcRKR9RY0s5O5bgC3T5t1T9/3rFriuxvWtIzq6i95SpCEXEWlr+T5TFNJx9CM/Z2BJiWENuYhIG8t/oPethbFDXNpd05CLiLS1Fgj09EiXK0qHOaAhFxFpY/kP9OzQxZdEwxwcKetsURFpW/kP9KxDX8MBytWEkYlakwsSEWmO/Ad6xxLoWs6F8S8BOKiLdIlIm8p/oAP0rWN5JQ30nw2PNrkYEZHmaI1A719H9+huzOCpfSPNrkZEpClaI9D71hIc38Nl/UWe2n+82dWIiDRFiwT6OvCEX1kxxs59CnQRaU+tEejZoYtX9x7j+cNjjOpIFxFpQ60R6Nmhiy8vDOMOzxzQOLqItJ/WCPTelRB1spoDAOzUjlERaUOtEehm0LeWnrHd9JQi7RgVkbbUGoEO0L8OO7KLy1f26tBFEWlLrRPofWvhyC4uX9nDzv3HdU0XEWk7LRTo66A6xtX9FUbKNfYeHW92RSIi51TrBPqK9QBcWUhvR6dhFxFpN60T6KtfA0HEpSOPAegEIxFpO60T6KUeuPgairt/yCX9XTy1Xx26iLSX1gl0gLU3wC8f5dUXRuzUoYsi0mYaCnQzu9nMnjazITO7a5bnf9XMHjWzmpm9ZeHLbNDaGyGpcVPXLna9MMp4JW5aKSIi59q8gW5mIXA/cAuwAbjdzDZMW+wXwNuBzy10gadlzfVgIVfFT5A4OsFIRNpKIx36dcCQuz/n7hXgQWBT/QLuvsvdHweSRaixcaUeWHUNlxx/DDP43jPDTS1HRORcaiTQVwG766b3ZPNOm5ndYWaDZjY4PLxIYXvpDRT2P8avrOlk6/YDi/MeIiLnoXO6U9TdH3D3je6+cWBgYHHeZO0/h6TK7168n537jrP78NjivI+IyHmmkUDfC6ypm16dzTs/XfJasJAbo6cA2Lp9f5MLEhE5NxoJ9G3AejNbZ2ZF4DZg8+KWdRZKvXDxVSw9+BMuX9nLNzTsIiJtYt5Ad/cacCewFdgJfNHdt5vZvWZ2K4CZvcbM9gC/DfyFmW1fzKLntfZG2DPIGy5fxrbnD/PCiYmmliMici40NIbu7lvc/WXu/lJ3/+Ns3j3uvjn7fpu7r3b3bndf7u6vWMyi55WNo29a+izu8K0d6tJFpPW11pmik9b9GnRfwCU//wKr+zo1ji4ibaE1Az0qwrVvw575Bm+9LOEHQ4cYKVebXZWIyKJqzUAHuPbfggW82b9JJU74+hPq0kWktbVuoC9dBS+/hYt+9kVevbKD+787RC1u7omsIiKLqXUDHeC638fGD/PHL3uW5w+N8dXHzt/D50VEzlZrB/q6X4Pl63nF3i/xqlVL+ci3n6WqLl1EWlRrB7oZvOad2N5B/us1Y+w+PM5XHtnT7KpERBZFawc6wFW/C90XcO0Tf8Q1q3v4yLeHqNTUpYtI62n9QO9YAr/5IWz/49y36rvsPTrOx7//XLOrEhFZcK0f6ABX/Et4xW+x9smP8s
6Xl7nvG0/zw6EXml2ViMiCao9AB7jlg1ipl7srH+WyFZ38u88/xi+Pjje7KhGRBdM+gd4zALd8gHDfo3zhsm9Rrsa8+7OPMlHTfUdFpDW0T6ADvPLNcO3b6Xvsfr509eP8dPdR3vPZx3QzaRFpCe0V6Gbwhvvg5W9kwz/9Dz593R4eeuoAt3/8xxzSJXZFJOfaK9ABwgje8km45HpuevL9fOWmI+zcd5w3f+yHDB080ezqRETOWPsFOkChE27/PKx8Jdf86D38w5VbGR8f5w0f/j4feehZHacuIrnUnoEO0NkHv7cVXvsuLtzxl/zjiv/J2146yn3ffIY3fvj7fPfpg7h7s6sUEWmYNSu0Nm7c6IODg0157xme+jv42ruhfJSDq1/PXQf/Bd8+fjHrL+jh925cx5uuWkVnMWx2lSIimNkj7r5x1ucU6Jmxw/Dw/0kf5WMcWnYlf1O+is8eeyW7ozXceNkAv3HFhdx42QpW93ViZs2uWETakAL9dJSPwyOfhu3/F375KABHiiv5XnwlXx9/BY8m65no6OeKi5ax4eIlXLFyCVdctITLLuhRFy8ii+6sA93Mbgb+HAiBT7j7/5r2fAn4K+Ba4BDwVnff9WKved4Ger3j++CZv4ehb+HPfQ+rjABQswIv2HKeiVfyo9rlPJxczhP+Epb2dLFqWScXLe1kRW+R5d0llvcUWdpZYElHgZ6OiCgwwsCIgoDejoglnQV6SxFBoI5fROZ3VoFuZiHwDPB6YA+wDbjd3XfULfNu4Ep3f5eZ3Qb8lru/9cVeNxeBXi+uwu6fwIHtcHwPHNuLH3gSG34KAMc4ES3jiPVxMFnK/qSXfdVejnkXBYspUiUg4bh3c5Qejnk3J+jkhHcwRgdhGFKMjGIYEAVGFAZEAfQwTq+foNdPcCzpZHfcz/NxP2U6KEZGKQrojKCrYHRFMMARLo2fZ3X1eUJq7CpcxnPhSzkUXUBXUKM7qNJlE5SScUpeJkhq1KxA1YrUrEAQhgRBgIUFKtbBhJUoW4nxOGCiljBRiwnNWGrj9NtxLIhICt14oZM4KEIQEhgEZgRmhAaleJTO6hG6akcIceJCd/pvwg4sjLCwgAcBToBjOAaWfcVwS5+jbpgrMDv5xzEwqMZOJU6o1JLsOeiwKoGBWYAFRuCebikDD4u4hRgQBEZo6Wul2zLl7tTiGKuOE3hMFBWIChFRGBKREJJggeFRF0EYYgZmxmSVceJUYydOnDBIj0CILCEI0vpDwGtlrDqK1crEYYk46iGOunCMxJ3EIQqMUmR0+DhRGJBEXWBGkjiUjxGN7iOojGIBhGZ4WCTu6KNW6qMWdBInMR7XIKlSoEqU1AipETskCXiSUKodpat6mGL1BEmxh1ppGXGhB0tqWFwmiCeIS30kPRfiHX0EQZCukxkBEMTjhOUj6c826sSLXVhcI5g4SlA+irsTF5cRl5bgGMWJw4TjLxAkFazUA6VerNhNHES4FXA3wtoYQW2cMB4nIMl+MxwjAXfMk5O/EoaBAdk2Tv/z7LkAjPR3wROIK5BU09+3qAOPOqDQBYUuvNCJWwRxGeJquu0rxwgnRgiSCZLiErzYDVEngRlGgnkMEydg4jhUx6HYAx1LodSb1pLUIK7BxDEYPwrjR+DSG+DCDWcURS8W6FED//46YMjdn8te7EFgE7CjbplNwH/Lvv8y8FEzM2+lw0TCAqy9IX1kDGD0BXj+B9j+J+kdPUjviYNccuIAjO7CTwxjtfR6MUlQxM0I43lOYIqzRyP3tK5lj1mfCnGMwlwLnKYqERUrUqNAl4/O+boxATXSoaeQmIiFPQR0wiOqRMQERMRExIQkxATEhNQIKFGjZPP/AMe8xCglADqo0kGFgIQaIQkBAd7Q6wCUvUCZIjEBCQFx9u+NhIiEElVKVAht/v8lah4wRolxSox5iQ6r0s/IyVpqHnCCTkpU6bRKQ/UtpAmPmMjWNSagh3E6Gvw55d1CDapu23A3r/
mdMwv0F9NIoK8CdtdN7wFeO9cy7l4zs2PAcqD1L2nYvQI2bEof0xhArQJhgWCylaiWoZz9lZ4YSR/VsayzYEoXCqR/5Tv70r/45WNwbE/6qI5nyxpYAEEIFkL3crhgA1H/S9PnX3gG9v0TjOzLupDO9GuxO/0aFtOOpVaG2gR4kj6SWlpXZQyqYxSq4xSq4+lyncugewC6lkMSQ2UUqqNQqxDGFcKkmtYVRGldpV7oWpH+rIIw7WYqo3h1nCSu4kkt7SBxzD19fzz9mZzsuBIsiQmTKmFcgbhGEkQkFlG1kICEIjU6PcbDErWwlH5iwHB33JP0ezPcweIJqJygp3ICMOKwg/GwhFuAJXHadVlArdAJhQ48iIjjmCSukSROYiGJBSRJQlAbxyqj6R9vTyCJCXECC7Ag3TbjYYkTQYk4KGSfRDxdvagDL3SmHV88QVgdIZgYIayNUaiN0VcbJwlLHC72MVFYlnbs1RHC6ggnggLV7pXUulaSlJYQO+nPqzZBoXKUqHyYKB7DgwIWptsizj6JJRZm3bVDEDBRWMZ4oY9y2EtYG6VQOUahehwPCiRRB0lQIJw4RnH8AMWxYYJ4ArwGSczhsIuJ4jImCstwIIrHCWvjeBBRKSylUliCm9FRHaFYOwYO5WIf5WI/VSsS1sYIqyNEtTFCjwk9/V2ohZ1Ugk6qQYkk+0OZOCRmJz/NZW15dohxtn051bEHnvbpCeknEbeQ2NLfG8MpJGWiZCL7WqYQlzFPSMICiRWpBiXKYQ/jYQ9VjyglZYrJCQpxefKzAgkBlbCHiaiHWlCiGI9Rik9QrI2m9VhIYiHlsJdy2Mt4tIQbXnXZAobQKY0E+oIxszuAOwAuueSSc/nWzRMVp04XOqCwEnpXntnrXfTq01v+wlekj/OQsXAdz/TXDTjHv9wi54FGTizaC6ypm16dzZt1GTOLgKWkO0encPcH3H2ju28cGBg4s4pFRGRWjQT6NmC9ma0zsyJwG7B52jKbgbdl378F+HZLjZ+LiOTAvJ9KszHxO4GtpJ+QP+Xu283sXmDQ3TcDnwT+2syGgMOkoS8iIudQQ8OM7r4F2DJt3j1135eB317Y0kRE5HS078W5RERajAJdRKRFKNBFRFqEAl1EpEU07WqLZjYMPH+G/3wF7XAW6kztuN7tuM7QnuvdjusMp7/el7r7rCfyNC3Qz4aZDc51cZpW1o7r3Y7rDO253u24zrCw660hFxGRFqFAFxFpEXkN9AeaXUCTtON6t+M6Q3uudzuuMyzgeudyDF1ERGbKa4cuIiLTKNBFRFpE7gLdzG42s6fNbMjM7mp2PYvBzNaY2XfMbIeZbTez92Xz+83sm2b2bPa1r9m1LjQzC83sMTP722x6nZk9nG3vL2SXcG4pZrbMzL5sZk+Z2U4z+2dtsq3/Q/b7/aSZfd7MOlpte5vZp8zsoJk9WTdv1m1rqQ9n6/64mV1zuu+Xq0DPblh9P3ALsAG43cwW/sZ8zVcD/tDdNwDXA+/J1vMu4CF3Xw88lE23mvcBO+um/wT4M3e/DDgCvKMpVS2uPwf+3t0vB15Nuv4tva3NbBXwXmCju7+S9NLct9F62/vTwM3T5s21bW8B1mePO4CPne6b5SrQqbthtbtXgMkbVrcUd9/n7o9m34+Q/g++inRdP5Mt9hngTc2pcHGY2WrgjcAnsmkDfp30xuPQmuu8FPhV0nsK4O4Vdz9Ki2/rTAR0Znc56wL20WLb293/gfQeEfXm2rabgL/y1I+BZWZ20em8X94CfbYbVq9qUi3nhJmtBa4GHgYudPd92VP7gQubVNZi+d/AfyK9py+kNxo/6u61bLoVt/c6YBj4y2yo6RNm1k2Lb2t33wv8KfAL0iA/BjxC629vmHvbnnW+5S3Q24qZ9QBfAf69ux+vfy67xV/LHHNqZr8JHHT3R5pdyzkWAdcAH3P3q4FRpg2vtNq2BsjGjTeR/kG7GOhm5tBEy1vobZu3QG/khtUtwc
wKpGH+WXf/ajb7wORHsOzrwWbVtwhuAG41s12kQ2m/Tjq2vCz7SA6tub33AHvc/eFs+sukAd/K2xrgdcDP3X3Y3avAV0l/B1p9e8Pc2/as8y1vgd7IDatzLxs7/iSw090/VPdU/c243wb8v3Nd22Jx97vdfbW7ryXdrt92938FfIf0xuPQYusM4O77gd1m9vJs1m8AO2jhbZ35BXC9mXVlv++T693S2zsz17bdDPyb7GiX64FjdUMzjXH3XD2ANwDPAD8D3t/sehZpHW8k/Rj2OPDT7PEG0jHlh4BngW8B/c2udZHW/ybgb7PvXwL8BBgCvgSUml3fIqzvVcBgtr2/BvS1w7YG/jvwFPAk8NdAqdW2N/B50n0EVdJPY++Ya9sCRnoU38+AJ0iPADqt99Op/yIiLSJvQy4iIjIHBbqISItQoIuItAgFuohIi1Cgi4i0CAW6iEiLUKCLiLSI/w9BHrchqURmTgAAAABJRU5ErkJggg==\n",
            "text/plain": [
              "<Figure size 432x288 with 1 Axes>"
            ]
          },
          "metadata": {
            "tags": [],
            "needs_background": "light"
          }
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "i2LxebbZsEe_"
      },
      "source": [
        "test_true_y = test_data['PE'].values"
      ],
      "execution_count": 38,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "fyd7ny5osMyC"
      },
      "source": [
        "def help(model,test_x,test_true_y):\r\n",
        "  y_pred = model.predict(test_x)\r\n",
        "  y_pred1 = y_pred[1,:50,:]\r\n",
        "  y_pred_2 = y_pred[:,50,:]\r\n",
        "  y_pred_3 = y_pred[-1,50:,:]\r\n",
        "  y_pred_true = np.append(y_pred1,y_pred_2)\r\n",
        "  y_pred_true = np.append(y_pred_true,y_pred_3)\r\n",
        "  print(f'MAE={metrics.mean_absolute_error(test_true_y,y_pred_true)}')\r\n",
        "  print(f'可决系数R2:{r2_score(test_true_y,y_pred_true)}')"
      ],
      "execution_count": 39,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "U1NPzjucsP4j",
        "outputId": "ab5d0d77-1dd8-40f7-cb7a-f070afa4eb98"
      },
      "source": [
        "help(model,test_x,test_true_y)"
      ],
      "execution_count": 40,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "MAE=0.044495593430076426\n",
            "可决系数R2:0.428773764750247\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "q_M3v0eAWBjH"
      },
      "source": [
        "#双向LSTM"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "Mdit2ZE2WA8m"
      },
      "source": [
        "x = tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64,return_sequences=True))(input)\r\n",
        "x = tf.keras.layers.Dense(64,activation='relu',kernel_regularizer='l2')(x)\r\n",
        "x = tf.keras.layers.Dense(1)(x)"
      ],
      "execution_count": 41,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "XRhHhF1gGK1v"
      },
      "source": [
        "model2 = tf.keras.Model(inputs = input,outputs = x)"
      ],
      "execution_count": 42,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "jXApVa3DejuK",
        "outputId": "c639c51e-0a67-4ccc-963f-256a21daa914"
      },
      "source": [
        "model2.summary()"
      ],
      "execution_count": 43,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Model: \"model_1\"\n",
            "_________________________________________________________________\n",
            "Layer (type)                 Output Shape              Param #   \n",
            "=================================================================\n",
            "input_1 (InputLayer)         [(None, 100, 9)]          0         \n",
            "_________________________________________________________________\n",
            "bidirectional (Bidirectional (None, 100, 128)          37888     \n",
            "_________________________________________________________________\n",
            "dense_2 (Dense)              (None, 100, 64)           8256      \n",
            "_________________________________________________________________\n",
            "dense_3 (Dense)              (None, 100, 1)            65        \n",
            "=================================================================\n",
            "Total params: 46,209\n",
            "Trainable params: 46,209\n",
            "Non-trainable params: 0\n",
            "_________________________________________________________________\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "Fgk_wJ3SWVwO"
      },
      "source": [
        "#学习率衰减\r\n",
        "learning_rate=0.01\r\n",
        "\r\n",
        "Lr_change=tf.keras.callbacks.ReduceLROnPlateau('val_mae',patience = 20, factor = 0.5, min_lr=0.0001)\r\n",
        "#保存准确率最好的模型\r\n",
        "from keras.callbacks import ModelCheckpoint\r\n",
        "filepath=\"best_weight.h5\"\r\n",
        "checkpoint = ModelCheckpoint(filepath, monitor='val_mae', verbose=1, save_best_only=True,mode='min')\r\n",
        "Adam=tf.keras.optimizers.Adam(learning_rate=learning_rate)\r\n",
        "model2.compile(optimizer=Adam,loss='mse',metrics=['mae'])"
      ],
      "execution_count": 44,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "WHSpOyHZWaUb",
        "outputId": "660df4e8-9dca-4372-fc0e-6478a6277092"
      },
      "source": [
        "history=model2.fit( X,Y,batch_size=128,\r\n",
        "         epochs=100, \r\n",
        "         callbacks=[Lr_change,checkpoint],\r\n",
        "         validation_data=(test_x,test_y))"
      ],
      "execution_count": 45,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Epoch 1/100\n",
            "17/17 [==============================] - 3s 70ms/step - loss: 1.7534 - mae: 0.6679 - val_loss: 0.5274 - val_mae: 0.0977\n",
            "\n",
            "Epoch 00001: val_mae improved from inf to 0.09771, saving model to best_weight.h5\n",
            "Epoch 2/100\n",
            "17/17 [==============================] - 0s 18ms/step - loss: 0.4791 - mae: 0.0792 - val_loss: 0.3509 - val_mae: 0.0510\n",
            "\n",
            "Epoch 00002: val_mae improved from 0.09771 to 0.05100, saving model to best_weight.h5\n",
            "Epoch 3/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.3215 - mae: 0.0673 - val_loss: 0.2374 - val_mae: 0.0585\n",
            "\n",
            "Epoch 00003: val_mae did not improve from 0.05100\n",
            "Epoch 4/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.2194 - mae: 0.0638 - val_loss: 0.1668 - val_mae: 0.0517\n",
            "\n",
            "Epoch 00004: val_mae did not improve from 0.05100\n",
            "Epoch 5/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.1568 - mae: 0.0633 - val_loss: 0.1223 - val_mae: 0.0487\n",
            "\n",
            "Epoch 00005: val_mae improved from 0.05100 to 0.04870, saving model to best_weight.h5\n",
            "Epoch 6/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.1160 - mae: 0.0611 - val_loss: 0.0924 - val_mae: 0.0535\n",
            "\n",
            "Epoch 00006: val_mae did not improve from 0.04870\n",
            "Epoch 7/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0886 - mae: 0.0611 - val_loss: 0.0712 - val_mae: 0.0498\n",
            "\n",
            "Epoch 00007: val_mae did not improve from 0.04870\n",
            "Epoch 8/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0690 - mae: 0.0601 - val_loss: 0.0558 - val_mae: 0.0503\n",
            "\n",
            "Epoch 00008: val_mae did not improve from 0.04870\n",
            "Epoch 9/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0544 - mae: 0.0586 - val_loss: 0.0442 - val_mae: 0.0487\n",
            "\n",
            "Epoch 00009: val_mae improved from 0.04870 to 0.04868, saving model to best_weight.h5\n",
            "Epoch 10/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0434 - mae: 0.0576 - val_loss: 0.0350 - val_mae: 0.0480\n",
            "\n",
            "Epoch 00010: val_mae improved from 0.04868 to 0.04800, saving model to best_weight.h5\n",
            "Epoch 11/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0349 - mae: 0.0559 - val_loss: 0.0280 - val_mae: 0.0441\n",
            "\n",
            "Epoch 00011: val_mae improved from 0.04800 to 0.04406, saving model to best_weight.h5\n",
            "Epoch 12/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0287 - mae: 0.0573 - val_loss: 0.0227 - val_mae: 0.0493\n",
            "\n",
            "Epoch 00012: val_mae did not improve from 0.04406\n",
            "Epoch 13/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0235 - mae: 0.0559 - val_loss: 0.0190 - val_mae: 0.0438\n",
            "\n",
            "Epoch 00013: val_mae improved from 0.04406 to 0.04383, saving model to best_weight.h5\n",
            "Epoch 14/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0195 - mae: 0.0551 - val_loss: 0.0148 - val_mae: 0.0419\n",
            "\n",
            "Epoch 00014: val_mae improved from 0.04383 to 0.04185, saving model to best_weight.h5\n",
            "Epoch 15/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0161 - mae: 0.0529 - val_loss: 0.0121 - val_mae: 0.0379\n",
            "\n",
            "Epoch 00015: val_mae improved from 0.04185 to 0.03786, saving model to best_weight.h5\n",
            "Epoch 16/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0136 - mae: 0.0516 - val_loss: 0.0103 - val_mae: 0.0425\n",
            "\n",
            "Epoch 00016: val_mae did not improve from 0.03786\n",
            "Epoch 17/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0118 - mae: 0.0520 - val_loss: 0.0088 - val_mae: 0.0376\n",
            "\n",
            "Epoch 00017: val_mae improved from 0.03786 to 0.03762, saving model to best_weight.h5\n",
            "Epoch 18/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0102 - mae: 0.0513 - val_loss: 0.0080 - val_mae: 0.0404\n",
            "\n",
            "Epoch 00018: val_mae did not improve from 0.03762\n",
            "Epoch 19/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0093 - mae: 0.0526 - val_loss: 0.0061 - val_mae: 0.0358\n",
            "\n",
            "Epoch 00019: val_mae improved from 0.03762 to 0.03575, saving model to best_weight.h5\n",
            "Epoch 20/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0081 - mae: 0.0508 - val_loss: 0.0070 - val_mae: 0.0433\n",
            "\n",
            "Epoch 00020: val_mae did not improve from 0.03575\n",
            "Epoch 21/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0079 - mae: 0.0536 - val_loss: 0.0050 - val_mae: 0.0351\n",
            "\n",
            "Epoch 00021: val_mae improved from 0.03575 to 0.03513, saving model to best_weight.h5\n",
            "Epoch 22/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0068 - mae: 0.0497 - val_loss: 0.0046 - val_mae: 0.0395\n",
            "\n",
            "Epoch 00022: val_mae did not improve from 0.03513\n",
            "Epoch 23/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0066 - mae: 0.0513 - val_loss: 0.0042 - val_mae: 0.0382\n",
            "\n",
            "Epoch 00023: val_mae did not improve from 0.03513\n",
            "Epoch 24/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0061 - mae: 0.0500 - val_loss: 0.0039 - val_mae: 0.0365\n",
            "\n",
            "Epoch 00024: val_mae did not improve from 0.03513\n",
            "Epoch 25/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0056 - mae: 0.0491 - val_loss: 0.0039 - val_mae: 0.0356\n",
            "\n",
            "Epoch 00025: val_mae did not improve from 0.03513\n",
            "Epoch 26/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0054 - mae: 0.0492 - val_loss: 0.0034 - val_mae: 0.0357\n",
            "\n",
            "Epoch 00026: val_mae did not improve from 0.03513\n",
            "Epoch 27/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0053 - mae: 0.0495 - val_loss: 0.0032 - val_mae: 0.0360\n",
            "\n",
            "Epoch 00027: val_mae did not improve from 0.03513\n",
            "Epoch 28/100\n",
            "17/17 [==============================] - 0s 14ms/step - loss: 0.0057 - mae: 0.0530 - val_loss: 0.0035 - val_mae: 0.0413\n",
            "\n",
            "Epoch 00028: val_mae did not improve from 0.03513\n",
            "Epoch 29/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0053 - mae: 0.0502 - val_loss: 0.0034 - val_mae: 0.0347\n",
            "\n",
            "Epoch 00029: val_mae improved from 0.03513 to 0.03467, saving model to best_weight.h5\n",
            "Epoch 30/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0049 - mae: 0.0489 - val_loss: 0.0034 - val_mae: 0.0353\n",
            "\n",
            "Epoch 00030: val_mae did not improve from 0.03467\n",
            "Epoch 31/100\n",
            "17/17 [==============================] - 0s 22ms/step - loss: 0.0050 - mae: 0.0503 - val_loss: 0.0028 - val_mae: 0.0327\n",
            "\n",
            "Epoch 00031: val_mae improved from 0.03467 to 0.03266, saving model to best_weight.h5\n",
            "Epoch 32/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0045 - mae: 0.0472 - val_loss: 0.0030 - val_mae: 0.0384\n",
            "\n",
            "Epoch 00032: val_mae did not improve from 0.03266\n",
            "Epoch 33/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0055 - mae: 0.0533 - val_loss: 0.0031 - val_mae: 0.0333\n",
            "\n",
            "Epoch 00033: val_mae did not improve from 0.03266\n",
            "Epoch 34/100\n",
            "17/17 [==============================] - 0s 14ms/step - loss: 0.0045 - mae: 0.0474 - val_loss: 0.0027 - val_mae: 0.0343\n",
            "\n",
            "Epoch 00034: val_mae did not improve from 0.03266\n",
            "Epoch 35/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0044 - mae: 0.0465 - val_loss: 0.0034 - val_mae: 0.0427\n",
            "\n",
            "Epoch 00035: val_mae did not improve from 0.03266\n",
            "Epoch 36/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0045 - mae: 0.0480 - val_loss: 0.0028 - val_mae: 0.0338\n",
            "\n",
            "Epoch 00036: val_mae did not improve from 0.03266\n",
            "Epoch 37/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0043 - mae: 0.0466 - val_loss: 0.0029 - val_mae: 0.0363\n",
            "\n",
            "Epoch 00037: val_mae did not improve from 0.03266\n",
            "Epoch 38/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0039 - mae: 0.0442 - val_loss: 0.0031 - val_mae: 0.0344\n",
            "\n",
            "Epoch 00038: val_mae did not improve from 0.03266\n",
            "Epoch 39/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0041 - mae: 0.0454 - val_loss: 0.0027 - val_mae: 0.0329\n",
            "\n",
            "Epoch 00039: val_mae did not improve from 0.03266\n",
            "Epoch 40/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0038 - mae: 0.0437 - val_loss: 0.0027 - val_mae: 0.0342\n",
            "\n",
            "Epoch 00040: val_mae did not improve from 0.03266\n",
            "Epoch 41/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0037 - mae: 0.0428 - val_loss: 0.0029 - val_mae: 0.0381\n",
            "\n",
            "Epoch 00041: val_mae did not improve from 0.03266\n",
            "Epoch 42/100\n",
            "17/17 [==============================] - 0s 18ms/step - loss: 0.0038 - mae: 0.0440 - val_loss: 0.0030 - val_mae: 0.0341\n",
            "\n",
            "Epoch 00042: val_mae did not improve from 0.03266\n",
            "Epoch 43/100\n",
            "17/17 [==============================] - 0s 18ms/step - loss: 0.0037 - mae: 0.0434 - val_loss: 0.0026 - val_mae: 0.0328\n",
            "\n",
            "Epoch 00043: val_mae did not improve from 0.03266\n",
            "Epoch 44/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0034 - mae: 0.0412 - val_loss: 0.0027 - val_mae: 0.0356\n",
            "\n",
            "Epoch 00044: val_mae did not improve from 0.03266\n",
            "Epoch 45/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0035 - mae: 0.0418 - val_loss: 0.0029 - val_mae: 0.0329\n",
            "\n",
            "Epoch 00045: val_mae did not improve from 0.03266\n",
            "Epoch 46/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0036 - mae: 0.0429 - val_loss: 0.0025 - val_mae: 0.0317\n",
            "\n",
            "Epoch 00046: val_mae improved from 0.03266 to 0.03171, saving model to best_weight.h5\n",
            "Epoch 47/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0033 - mae: 0.0406 - val_loss: 0.0026 - val_mae: 0.0308\n",
            "\n",
            "Epoch 00047: val_mae improved from 0.03171 to 0.03081, saving model to best_weight.h5\n",
            "Epoch 48/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0033 - mae: 0.0408 - val_loss: 0.0026 - val_mae: 0.0346\n",
            "\n",
            "Epoch 00048: val_mae did not improve from 0.03081\n",
            "Epoch 49/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0034 - mae: 0.0413 - val_loss: 0.0028 - val_mae: 0.0331\n",
            "\n",
            "Epoch 00049: val_mae did not improve from 0.03081\n",
            "Epoch 50/100\n",
            "17/17 [==============================] - 0s 18ms/step - loss: 0.0033 - mae: 0.0410 - val_loss: 0.0030 - val_mae: 0.0397\n",
            "\n",
            "Epoch 00050: val_mae did not improve from 0.03081\n",
            "Epoch 51/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0034 - mae: 0.0417 - val_loss: 0.0027 - val_mae: 0.0323\n",
            "\n",
            "Epoch 00051: val_mae did not improve from 0.03081\n",
            "Epoch 52/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0031 - mae: 0.0394 - val_loss: 0.0029 - val_mae: 0.0387\n",
            "\n",
            "Epoch 00052: val_mae did not improve from 0.03081\n",
            "Epoch 53/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0036 - mae: 0.0432 - val_loss: 0.0024 - val_mae: 0.0298\n",
            "\n",
            "Epoch 00053: val_mae improved from 0.03081 to 0.02981, saving model to best_weight.h5\n",
            "Epoch 54/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0032 - mae: 0.0401 - val_loss: 0.0025 - val_mae: 0.0339\n",
            "\n",
            "Epoch 00054: val_mae did not improve from 0.02981\n",
            "Epoch 55/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0031 - mae: 0.0393 - val_loss: 0.0027 - val_mae: 0.0335\n",
            "\n",
            "Epoch 00055: val_mae did not improve from 0.02981\n",
            "Epoch 56/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0031 - mae: 0.0395 - val_loss: 0.0026 - val_mae: 0.0349\n",
            "\n",
            "Epoch 00056: val_mae did not improve from 0.02981\n",
            "Epoch 57/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0029 - mae: 0.0380 - val_loss: 0.0026 - val_mae: 0.0328\n",
            "\n",
            "Epoch 00057: val_mae did not improve from 0.02981\n",
            "Epoch 58/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0029 - mae: 0.0385 - val_loss: 0.0025 - val_mae: 0.0328\n",
            "\n",
            "Epoch 00058: val_mae did not improve from 0.02981\n",
            "Epoch 59/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0028 - mae: 0.0369 - val_loss: 0.0026 - val_mae: 0.0340\n",
            "\n",
            "Epoch 00059: val_mae did not improve from 0.02981\n",
            "Epoch 60/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0028 - mae: 0.0373 - val_loss: 0.0027 - val_mae: 0.0344\n",
            "\n",
            "Epoch 00060: val_mae did not improve from 0.02981\n",
            "Epoch 61/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0029 - mae: 0.0383 - val_loss: 0.0026 - val_mae: 0.0326\n",
            "\n",
            "Epoch 00061: val_mae did not improve from 0.02981\n",
            "Epoch 62/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0027 - mae: 0.0368 - val_loss: 0.0027 - val_mae: 0.0337\n",
            "\n",
            "Epoch 00062: val_mae did not improve from 0.02981\n",
            "Epoch 63/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0027 - mae: 0.0368 - val_loss: 0.0033 - val_mae: 0.0438\n",
            "\n",
            "Epoch 00063: val_mae did not improve from 0.02981\n",
            "Epoch 64/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0029 - mae: 0.0385 - val_loss: 0.0027 - val_mae: 0.0353\n",
            "\n",
            "Epoch 00064: val_mae did not improve from 0.02981\n",
            "Epoch 65/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0026 - mae: 0.0358 - val_loss: 0.0028 - val_mae: 0.0370\n",
            "\n",
            "Epoch 00065: val_mae did not improve from 0.02981\n",
            "Epoch 66/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0026 - mae: 0.0358 - val_loss: 0.0027 - val_mae: 0.0344\n",
            "\n",
            "Epoch 00066: val_mae did not improve from 0.02981\n",
            "Epoch 67/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0026 - mae: 0.0357 - val_loss: 0.0027 - val_mae: 0.0347\n",
            "\n",
            "Epoch 00067: val_mae did not improve from 0.02981\n",
            "Epoch 68/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0025 - mae: 0.0350 - val_loss: 0.0028 - val_mae: 0.0368\n",
            "\n",
            "Epoch 00068: val_mae did not improve from 0.02981\n",
            "Epoch 69/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0025 - mae: 0.0349 - val_loss: 0.0028 - val_mae: 0.0352\n",
            "\n",
            "Epoch 00069: val_mae did not improve from 0.02981\n",
            "Epoch 70/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0026 - mae: 0.0360 - val_loss: 0.0036 - val_mae: 0.0406\n",
            "\n",
            "Epoch 00070: val_mae did not improve from 0.02981\n",
            "Epoch 71/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0038 - mae: 0.0451 - val_loss: 0.0027 - val_mae: 0.0360\n",
            "\n",
            "Epoch 00071: val_mae did not improve from 0.02981\n",
            "Epoch 72/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0026 - mae: 0.0355 - val_loss: 0.0028 - val_mae: 0.0348\n",
            "\n",
            "Epoch 00072: val_mae did not improve from 0.02981\n",
            "Epoch 73/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0024 - mae: 0.0346 - val_loss: 0.0025 - val_mae: 0.0327\n",
            "\n",
            "Epoch 00073: val_mae did not improve from 0.02981\n",
            "Epoch 74/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0024 - mae: 0.0344 - val_loss: 0.0030 - val_mae: 0.0384\n",
            "\n",
            "Epoch 00074: val_mae did not improve from 0.02981\n",
            "Epoch 75/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0023 - mae: 0.0337 - val_loss: 0.0029 - val_mae: 0.0367\n",
            "\n",
            "Epoch 00075: val_mae did not improve from 0.02981\n",
            "Epoch 76/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0023 - mae: 0.0334 - val_loss: 0.0028 - val_mae: 0.0356\n",
            "\n",
            "Epoch 00076: val_mae did not improve from 0.02981\n",
            "Epoch 77/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0023 - mae: 0.0334 - val_loss: 0.0029 - val_mae: 0.0373\n",
            "\n",
            "Epoch 00077: val_mae did not improve from 0.02981\n",
            "Epoch 78/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0023 - mae: 0.0334 - val_loss: 0.0029 - val_mae: 0.0360\n",
            "\n",
            "Epoch 00078: val_mae did not improve from 0.02981\n",
            "Epoch 79/100\n",
            "17/17 [==============================] - 0s 24ms/step - loss: 0.0023 - mae: 0.0336 - val_loss: 0.0029 - val_mae: 0.0360\n",
            "\n",
            "Epoch 00079: val_mae did not improve from 0.02981\n",
            "Epoch 80/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0023 - mae: 0.0334 - val_loss: 0.0028 - val_mae: 0.0357\n",
            "\n",
            "Epoch 00080: val_mae did not improve from 0.02981\n",
            "Epoch 81/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0023 - mae: 0.0335 - val_loss: 0.0030 - val_mae: 0.0383\n",
            "\n",
            "Epoch 00081: val_mae did not improve from 0.02981\n",
            "Epoch 82/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0022 - mae: 0.0328 - val_loss: 0.0029 - val_mae: 0.0370\n",
            "\n",
            "Epoch 00082: val_mae did not improve from 0.02981\n",
            "Epoch 83/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0022 - mae: 0.0326 - val_loss: 0.0029 - val_mae: 0.0365\n",
            "\n",
            "Epoch 00083: val_mae did not improve from 0.02981\n",
            "Epoch 84/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0022 - mae: 0.0328 - val_loss: 0.0029 - val_mae: 0.0375\n",
            "\n",
            "Epoch 00084: val_mae did not improve from 0.02981\n",
            "Epoch 85/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0021 - mae: 0.0320 - val_loss: 0.0029 - val_mae: 0.0373\n",
            "\n",
            "Epoch 00085: val_mae did not improve from 0.02981\n",
            "Epoch 86/100\n",
            "17/17 [==============================] - 0s 18ms/step - loss: 0.0021 - mae: 0.0322 - val_loss: 0.0028 - val_mae: 0.0370\n",
            "\n",
            "Epoch 00086: val_mae did not improve from 0.02981\n",
            "Epoch 87/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0023 - mae: 0.0333 - val_loss: 0.0029 - val_mae: 0.0369\n",
            "\n",
            "Epoch 00087: val_mae did not improve from 0.02981\n",
            "Epoch 88/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0021 - mae: 0.0320 - val_loss: 0.0028 - val_mae: 0.0364\n",
            "\n",
            "Epoch 00088: val_mae did not improve from 0.02981\n",
            "Epoch 89/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0021 - mae: 0.0319 - val_loss: 0.0027 - val_mae: 0.0356\n",
            "\n",
            "Epoch 00089: val_mae did not improve from 0.02981\n",
            "Epoch 90/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0021 - mae: 0.0320 - val_loss: 0.0027 - val_mae: 0.0357\n",
            "\n",
            "Epoch 00090: val_mae did not improve from 0.02981\n",
            "Epoch 91/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0021 - mae: 0.0317 - val_loss: 0.0029 - val_mae: 0.0361\n",
            "\n",
            "Epoch 00091: val_mae did not improve from 0.02981\n",
            "Epoch 92/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0021 - mae: 0.0319 - val_loss: 0.0030 - val_mae: 0.0368\n",
            "\n",
            "Epoch 00092: val_mae did not improve from 0.02981\n",
            "Epoch 93/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0021 - mae: 0.0321 - val_loss: 0.0030 - val_mae: 0.0382\n",
            "\n",
            "Epoch 00093: val_mae did not improve from 0.02981\n",
            "Epoch 94/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0020 - mae: 0.0312 - val_loss: 0.0028 - val_mae: 0.0363\n",
            "\n",
            "Epoch 00094: val_mae did not improve from 0.02981\n",
            "Epoch 95/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0020 - mae: 0.0310 - val_loss: 0.0028 - val_mae: 0.0366\n",
            "\n",
            "Epoch 00095: val_mae did not improve from 0.02981\n",
            "Epoch 96/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0020 - mae: 0.0308 - val_loss: 0.0028 - val_mae: 0.0369\n",
            "\n",
            "Epoch 00096: val_mae did not improve from 0.02981\n",
            "Epoch 97/100\n",
            "17/17 [==============================] - 0s 15ms/step - loss: 0.0020 - mae: 0.0307 - val_loss: 0.0028 - val_mae: 0.0365\n",
            "\n",
            "Epoch 00097: val_mae did not improve from 0.02981\n",
            "Epoch 98/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0020 - mae: 0.0310 - val_loss: 0.0029 - val_mae: 0.0378\n",
            "\n",
            "Epoch 00098: val_mae did not improve from 0.02981\n",
            "Epoch 99/100\n",
            "17/17 [==============================] - 0s 17ms/step - loss: 0.0020 - mae: 0.0310 - val_loss: 0.0028 - val_mae: 0.0364\n",
            "\n",
            "Epoch 00099: val_mae did not improve from 0.02981\n",
            "Epoch 100/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0020 - mae: 0.0308 - val_loss: 0.0028 - val_mae: 0.0364\n",
            "\n",
            "Epoch 00100: val_mae did not improve from 0.02981\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "SujuvGHfkd0_",
        "outputId": "16eee1bb-9b3b-4697-e374-7adff6f902fb"
      },
      "source": [
        "help(model2,test_x,test_true_y)"
      ],
      "execution_count": 46,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "MAE=0.03772090406205945\n",
            "可决系数R2:0.6258629090865854\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "HUZy-MTyW0Wz"
      },
      "source": [
        "# simple RNN"
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "1nID2w-NWeJj"
      },
      "source": [
        "x = tf.keras.layers.SimpleRNN(64,return_sequences=True)(input)\r\n",
        "x = tf.keras.layers.Dense(64,activation='relu',kernel_regularizer='l2')(x)\r\n",
        "x = tf.keras.layers.Dense(1)(x)"
      ],
      "execution_count": 47,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "NVbMfoLFW-gq"
      },
      "source": [
        "model3 = tf.keras.Model(inputs = input,outputs = x)"
      ],
      "execution_count": 48,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "ZEw-tam8XIoO"
      },
      "source": [
        "#学习率衰减\r\n",
        "learning_rate=0.01\r\n",
        "\r\n",
        "Lr_change=tf.keras.callbacks.ReduceLROnPlateau('val_mae',patience = 20, factor = 0.5, min_lr=0.0001)\r\n",
        "#保存准确率最好的模型\r\n",
        "from keras.callbacks import ModelCheckpoint\r\n",
        "filepath=\"best_weight.h5\"\r\n",
        "checkpoint = ModelCheckpoint(filepath, monitor='val_mae', verbose=1, save_best_only=True,mode='min')\r\n",
        "Adam=tf.keras.optimizers.Adam(learning_rate=learning_rate)\r\n",
        "model3.compile(optimizer=Adam,loss='mse',metrics=['mae'])"
      ],
      "execution_count": 49,
      "outputs": []
    },
    {
      "cell_type": "code",
      "metadata": {
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "id": "SObzm9_5XUqY",
        "outputId": "165371c4-bffb-460f-a461-cbaf80515f7a"
      },
      "source": [
        "history = model.fit(X,Y,batch_size=128,\r\n",
        "                    callbacks=[Lr_change,checkpoint],epochs=100,\r\n",
        "                    validation_data=(test_x,test_y))"
      ],
      "execution_count": 50,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "Epoch 1/100\n",
            "17/17 [==============================] - 0s 16ms/step - loss: 0.0022 - mae: 0.0322 - mse: 0.0020 - val_loss: 0.0047 - val_mae: 0.0463 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00001: val_mae improved from inf to 0.04627, saving model to best_weight.h5\n",
            "Epoch 2/100\n",
            "17/17 [==============================] - 0s 13ms/step - loss: 0.0021 - mae: 0.0320 - mse: 0.0020 - val_loss: 0.0046 - val_mae: 0.0449 - val_mse: 0.0044\n",
            "\n",
            "Epoch 00002: val_mae improved from 0.04627 to 0.04485, saving model to best_weight.h5\n",
            "Epoch 3/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0021 - mae: 0.0319 - mse: 0.0020 - val_loss: 0.0047 - val_mae: 0.0453 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00003: val_mae did not improve from 0.04485\n",
            "Epoch 4/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0021 - mae: 0.0318 - mse: 0.0020 - val_loss: 0.0045 - val_mae: 0.0446 - val_mse: 0.0043\n",
            "\n",
            "Epoch 00004: val_mae improved from 0.04485 to 0.04460, saving model to best_weight.h5\n",
            "Epoch 5/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0317 - mse: 0.0019 - val_loss: 0.0046 - val_mae: 0.0439 - val_mse: 0.0044\n",
            "\n",
            "Epoch 00005: val_mae improved from 0.04460 to 0.04393, saving model to best_weight.h5\n",
            "Epoch 6/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0318 - mse: 0.0020 - val_loss: 0.0048 - val_mae: 0.0465 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00006: val_mae did not improve from 0.04393\n",
            "Epoch 7/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0319 - mse: 0.0020 - val_loss: 0.0043 - val_mae: 0.0439 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00007: val_mae improved from 0.04393 to 0.04390, saving model to best_weight.h5\n",
            "Epoch 8/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0318 - mse: 0.0020 - val_loss: 0.0048 - val_mae: 0.0470 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00008: val_mae did not improve from 0.04390\n",
            "Epoch 9/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0021 - mae: 0.0315 - mse: 0.0019 - val_loss: 0.0047 - val_mae: 0.0460 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00009: val_mae did not improve from 0.04390\n",
            "Epoch 10/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0315 - mse: 0.0019 - val_loss: 0.0050 - val_mae: 0.0476 - val_mse: 0.0049\n",
            "\n",
            "Epoch 00010: val_mae did not improve from 0.04390\n",
            "Epoch 11/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0312 - mse: 0.0019 - val_loss: 0.0047 - val_mae: 0.0464 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00011: val_mae did not improve from 0.04390\n",
            "Epoch 12/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0313 - mse: 0.0019 - val_loss: 0.0044 - val_mae: 0.0446 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00012: val_mae did not improve from 0.04390\n",
            "Epoch 13/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0311 - mse: 0.0019 - val_loss: 0.0050 - val_mae: 0.0477 - val_mse: 0.0049\n",
            "\n",
            "Epoch 00013: val_mae did not improve from 0.04390\n",
            "Epoch 14/100\n",
            "17/17 [==============================] - 0s 19ms/step - loss: 0.0020 - mae: 0.0310 - mse: 0.0019 - val_loss: 0.0045 - val_mae: 0.0464 - val_mse: 0.0043\n",
            "\n",
            "Epoch 00014: val_mae did not improve from 0.04390\n",
            "Epoch 15/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0021 - mae: 0.0312 - mse: 0.0019 - val_loss: 0.0046 - val_mae: 0.0471 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00015: val_mae did not improve from 0.04390\n",
            "Epoch 16/100\n",
            "17/17 [==============================] - 0s 13ms/step - loss: 0.0020 - mae: 0.0310 - mse: 0.0019 - val_loss: 0.0048 - val_mae: 0.0463 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00016: val_mae did not improve from 0.04390\n",
            "Epoch 17/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0020 - mae: 0.0306 - mse: 0.0018 - val_loss: 0.0047 - val_mae: 0.0475 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00017: val_mae did not improve from 0.04390\n",
            "Epoch 18/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0020 - mae: 0.0311 - mse: 0.0019 - val_loss: 0.0044 - val_mae: 0.0443 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00018: val_mae did not improve from 0.04390\n",
            "Epoch 19/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0020 - mae: 0.0307 - mse: 0.0019 - val_loss: 0.0046 - val_mae: 0.0464 - val_mse: 0.0044\n",
            "\n",
            "Epoch 00019: val_mae did not improve from 0.04390\n",
            "Epoch 20/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0020 - mae: 0.0304 - mse: 0.0018 - val_loss: 0.0045 - val_mae: 0.0468 - val_mse: 0.0043\n",
            "\n",
            "Epoch 00020: val_mae did not improve from 0.04390\n",
            "Epoch 21/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0020 - mae: 0.0304 - mse: 0.0018 - val_loss: 0.0044 - val_mae: 0.0450 - val_mse: 0.0042\n",
            "\n",
            "Epoch 00021: val_mae did not improve from 0.04390\n",
            "Epoch 22/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0020 - mae: 0.0302 - mse: 0.0018 - val_loss: 0.0051 - val_mae: 0.0490 - val_mse: 0.0049\n",
            "\n",
            "Epoch 00022: val_mae did not improve from 0.04390\n",
            "Epoch 23/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0020 - mae: 0.0303 - mse: 0.0018 - val_loss: 0.0048 - val_mae: 0.0452 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00023: val_mae did not improve from 0.04390\n",
            "Epoch 24/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0020 - mae: 0.0307 - mse: 0.0018 - val_loss: 0.0049 - val_mae: 0.0473 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00024: val_mae did not improve from 0.04390\n",
            "Epoch 25/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0020 - mae: 0.0300 - mse: 0.0018 - val_loss: 0.0046 - val_mae: 0.0460 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00025: val_mae did not improve from 0.04390\n",
            "Epoch 26/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0019 - mae: 0.0298 - mse: 0.0018 - val_loss: 0.0046 - val_mae: 0.0462 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00026: val_mae did not improve from 0.04390\n",
            "Epoch 27/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0019 - mae: 0.0297 - mse: 0.0018 - val_loss: 0.0049 - val_mae: 0.0470 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00027: val_mae did not improve from 0.04390\n",
            "Epoch 28/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0019 - mae: 0.0298 - mse: 0.0018 - val_loss: 0.0050 - val_mae: 0.0485 - val_mse: 0.0049\n",
            "\n",
            "Epoch 00028: val_mae did not improve from 0.04390\n",
            "Epoch 29/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0297 - mse: 0.0018 - val_loss: 0.0047 - val_mae: 0.0465 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00029: val_mae did not improve from 0.04390\n",
            "Epoch 30/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0297 - mse: 0.0018 - val_loss: 0.0050 - val_mae: 0.0483 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00030: val_mae did not improve from 0.04390\n",
            "Epoch 31/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0019 - mae: 0.0297 - mse: 0.0018 - val_loss: 0.0049 - val_mae: 0.0479 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00031: val_mae did not improve from 0.04390\n",
            "Epoch 32/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0296 - mse: 0.0018 - val_loss: 0.0047 - val_mae: 0.0467 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00032: val_mae did not improve from 0.04390\n",
            "Epoch 33/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0295 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0477 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00033: val_mae did not improve from 0.04390\n",
            "Epoch 34/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0295 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0474 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00034: val_mae did not improve from 0.04390\n",
            "Epoch 35/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0295 - mse: 0.0017 - val_loss: 0.0050 - val_mae: 0.0485 - val_mse: 0.0049\n",
            "\n",
            "Epoch 00035: val_mae did not improve from 0.04390\n",
            "Epoch 36/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0019 - mae: 0.0295 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0478 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00036: val_mae did not improve from 0.04390\n",
            "Epoch 37/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0019 - mae: 0.0295 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0486 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00037: val_mae did not improve from 0.04390\n",
            "Epoch 38/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0294 - mse: 0.0017 - val_loss: 0.0045 - val_mae: 0.0471 - val_mse: 0.0044\n",
            "\n",
            "Epoch 00038: val_mae did not improve from 0.04390\n",
            "Epoch 39/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0295 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0478 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00039: val_mae did not improve from 0.04390\n",
            "Epoch 40/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0293 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0473 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00040: val_mae did not improve from 0.04390\n",
            "Epoch 41/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0293 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0471 - val_mse: 0.0045\n",
            "\n",
            "Epoch 00041: val_mae did not improve from 0.04390\n",
            "Epoch 42/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0293 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0477 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00042: val_mae did not improve from 0.04390\n",
            "Epoch 43/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0293 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0475 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00043: val_mae did not improve from 0.04390\n",
            "Epoch 44/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0294 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0472 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00044: val_mae did not improve from 0.04390\n",
            "Epoch 45/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0019 - mae: 0.0292 - mse: 0.0017 - val_loss: 0.0050 - val_mae: 0.0483 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00045: val_mae did not improve from 0.04390\n",
            "Epoch 46/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0019 - mae: 0.0291 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0477 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00046: val_mae did not improve from 0.04390\n",
            "Epoch 47/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0019 - mae: 0.0290 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0473 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00047: val_mae did not improve from 0.04390\n",
            "Epoch 48/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0290 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0480 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00048: val_mae did not improve from 0.04390\n",
            "Epoch 49/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0289 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0477 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00049: val_mae did not improve from 0.04390\n",
            "Epoch 50/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0290 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0478 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00050: val_mae did not improve from 0.04390\n",
            "Epoch 51/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0289 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0479 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00051: val_mae did not improve from 0.04390\n",
            "Epoch 52/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0289 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0486 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00052: val_mae did not improve from 0.04390\n",
            "Epoch 53/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0289 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0475 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00053: val_mae did not improve from 0.04390\n",
            "Epoch 54/100\n",
            "17/17 [==============================] - 0s 18ms/step - loss: 0.0018 - mae: 0.0290 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0482 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00054: val_mae did not improve from 0.04390\n",
            "Epoch 55/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0289 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0481 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00055: val_mae did not improve from 0.04390\n",
            "Epoch 56/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0289 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0482 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00056: val_mae did not improve from 0.04390\n",
            "Epoch 57/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0288 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0477 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00057: val_mae did not improve from 0.04390\n",
            "Epoch 58/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0288 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0476 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00058: val_mae did not improve from 0.04390\n",
            "Epoch 59/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0288 - mse: 0.0017 - val_loss: 0.0050 - val_mae: 0.0486 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00059: val_mae did not improve from 0.04390\n",
            "Epoch 60/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0289 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0485 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00060: val_mae did not improve from 0.04390\n",
            "Epoch 61/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0287 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00061: val_mae did not improve from 0.04390\n",
            "Epoch 62/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0287 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0477 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00062: val_mae did not improve from 0.04390\n",
            "Epoch 63/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0287 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00063: val_mae did not improve from 0.04390\n",
            "Epoch 64/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0287 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0486 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00064: val_mae did not improve from 0.04390\n",
            "Epoch 65/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0287 - mse: 0.0017 - val_loss: 0.0050 - val_mae: 0.0490 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00065: val_mae did not improve from 0.04390\n",
            "Epoch 66/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0286 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0479 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00066: val_mae did not improve from 0.04390\n",
            "Epoch 67/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0286 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00067: val_mae did not improve from 0.04390\n",
            "Epoch 68/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0286 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0482 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00068: val_mae did not improve from 0.04390\n",
            "Epoch 69/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0286 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0484 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00069: val_mae did not improve from 0.04390\n",
            "Epoch 70/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0286 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0480 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00070: val_mae did not improve from 0.04390\n",
            "Epoch 71/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0286 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0486 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00071: val_mae did not improve from 0.04390\n",
            "Epoch 72/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0286 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0476 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00072: val_mae did not improve from 0.04390\n",
            "Epoch 73/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0286 - mse: 0.0017 - val_loss: 0.0049 - val_mae: 0.0484 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00073: val_mae did not improve from 0.04390\n",
            "Epoch 74/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0478 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00074: val_mae did not improve from 0.04390\n",
            "Epoch 75/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0483 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00075: val_mae did not improve from 0.04390\n",
            "Epoch 76/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0017 - val_loss: 0.0047 - val_mae: 0.0477 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00076: val_mae did not improve from 0.04390\n",
            "Epoch 77/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0017 - val_loss: 0.0050 - val_mae: 0.0491 - val_mse: 0.0048\n",
            "\n",
            "Epoch 00077: val_mae did not improve from 0.04390\n",
            "Epoch 78/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0480 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00078: val_mae did not improve from 0.04390\n",
            "Epoch 79/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0017 - val_loss: 0.0048 - val_mae: 0.0484 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00079: val_mae did not improve from 0.04390\n",
            "Epoch 80/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0016 - val_loss: 0.0049 - val_mae: 0.0483 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00080: val_mae did not improve from 0.04390\n",
            "Epoch 81/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0016 - val_loss: 0.0047 - val_mae: 0.0479 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00081: val_mae did not improve from 0.04390\n",
            "Epoch 82/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0285 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0482 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00082: val_mae did not improve from 0.04390\n",
            "Epoch 83/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00083: val_mae did not improve from 0.04390\n",
            "Epoch 84/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0480 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00084: val_mae did not improve from 0.04390\n",
            "Epoch 85/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0049 - val_mae: 0.0483 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00085: val_mae did not improve from 0.04390\n",
            "Epoch 86/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0484 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00086: val_mae did not improve from 0.04390\n",
            "Epoch 87/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00087: val_mae did not improve from 0.04390\n",
            "Epoch 88/100\n",
            "17/17 [==============================] - 0s 18ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0049 - val_mae: 0.0482 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00088: val_mae did not improve from 0.04390\n",
            "Epoch 89/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0483 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00089: val_mae did not improve from 0.04390\n",
            "Epoch 90/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0483 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00090: val_mae did not improve from 0.04390\n",
            "Epoch 91/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0049 - val_mae: 0.0484 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00091: val_mae did not improve from 0.04390\n",
            "Epoch 92/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00092: val_mae did not improve from 0.04390\n",
            "Epoch 93/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0284 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00093: val_mae did not improve from 0.04390\n",
            "Epoch 94/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0283 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0479 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00094: val_mae did not improve from 0.04390\n",
            "Epoch 95/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0283 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00095: val_mae did not improve from 0.04390\n",
            "Epoch 96/100\n",
            "17/17 [==============================] - 0s 10ms/step - loss: 0.0018 - mae: 0.0283 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0481 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00096: val_mae did not improve from 0.04390\n",
            "Epoch 97/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0283 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0483 - val_mse: 0.0047\n",
            "\n",
            "Epoch 00097: val_mae did not improve from 0.04390\n",
            "Epoch 98/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0283 - mse: 0.0016 - val_loss: 0.0047 - val_mae: 0.0478 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00098: val_mae did not improve from 0.04390\n",
            "Epoch 99/100\n",
            "17/17 [==============================] - 0s 12ms/step - loss: 0.0018 - mae: 0.0283 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0480 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00099: val_mae did not improve from 0.04390\n",
            "Epoch 100/100\n",
            "17/17 [==============================] - 0s 11ms/step - loss: 0.0018 - mae: 0.0283 - mse: 0.0016 - val_loss: 0.0048 - val_mae: 0.0477 - val_mse: 0.0046\n",
            "\n",
            "Epoch 00100: val_mae did not improve from 0.04390\n"
          ],
          "name": "stdout"
        }
      ]
    },
    {
      "cell_type": "code",
      "metadata": {
        "id": "HPwnvgZ9Xvin",
        "colab": {
          "base_uri": "https://localhost:8080/"
        },
        "outputId": "f30b7155-5ab2-4754-e022-4202e491f4c2"
      },
      "source": [
        "help(model3,test_x,test_true_y)"
      ],
      "execution_count": 51,
      "outputs": [
        {
          "output_type": "stream",
          "text": [
            "MAE=0.9731696362186457\n",
            "可决系数R2:-142.56246343244626\n"
          ],
          "name": "stdout"
        }
      ]
    }
  ]
}