{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 129,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>CRIM</th>\n",
       "      <th>ZN</th>\n",
       "      <th>INDUS</th>\n",
       "      <th>CHAS</th>\n",
       "      <th>NOX</th>\n",
       "      <th>RM</th>\n",
       "      <th>AGE</th>\n",
       "      <th>DIS</th>\n",
       "      <th>RAD</th>\n",
       "      <th>TAX</th>\n",
       "      <th>PIRATIO</th>\n",
       "      <th>B</th>\n",
       "      <th>LSTAT</th>\n",
       "      <th>MEDV</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>0.00632</td>\n",
       "      <td>18.0</td>\n",
       "      <td>2.31</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.538</td>\n",
       "      <td>6.575</td>\n",
       "      <td>65.2</td>\n",
       "      <td>4.0900</td>\n",
       "      <td>1</td>\n",
       "      <td>296.0</td>\n",
       "      <td>15.3</td>\n",
       "      <td>396.90</td>\n",
       "      <td>4.98</td>\n",
       "      <td>24.0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.02731</td>\n",
       "      <td>0.0</td>\n",
       "      <td>7.07</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.469</td>\n",
       "      <td>6.421</td>\n",
       "      <td>78.9</td>\n",
       "      <td>4.9671</td>\n",
       "      <td>2</td>\n",
       "      <td>242.0</td>\n",
       "      <td>17.8</td>\n",
       "      <td>396.90</td>\n",
       "      <td>9.14</td>\n",
       "      <td>21.6</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.02729</td>\n",
       "      <td>0.0</td>\n",
       "      <td>7.07</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.469</td>\n",
       "      <td>7.185</td>\n",
       "      <td>61.1</td>\n",
       "      <td>4.9671</td>\n",
       "      <td>2</td>\n",
       "      <td>242.0</td>\n",
       "      <td>17.8</td>\n",
       "      <td>392.83</td>\n",
       "      <td>4.03</td>\n",
       "      <td>34.7</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.03237</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.18</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.458</td>\n",
       "      <td>6.998</td>\n",
       "      <td>45.8</td>\n",
       "      <td>6.0622</td>\n",
       "      <td>3</td>\n",
       "      <td>222.0</td>\n",
       "      <td>18.7</td>\n",
       "      <td>394.63</td>\n",
       "      <td>2.94</td>\n",
       "      <td>33.4</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.06905</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.18</td>\n",
       "      <td>0.0</td>\n",
       "      <td>0.458</td>\n",
       "      <td>7.147</td>\n",
       "      <td>54.2</td>\n",
       "      <td>6.0622</td>\n",
       "      <td>3</td>\n",
       "      <td>222.0</td>\n",
       "      <td>18.7</td>\n",
       "      <td>396.90</td>\n",
       "      <td>5.33</td>\n",
       "      <td>36.2</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "      CRIM    ZN  INDUS  CHAS    NOX     RM   AGE     DIS  RAD    TAX  \\\n",
       "0  0.00632  18.0   2.31   0.0  0.538  6.575  65.2  4.0900    1  296.0   \n",
       "1  0.02731   0.0   7.07   0.0  0.469  6.421  78.9  4.9671    2  242.0   \n",
       "2  0.02729   0.0   7.07   0.0  0.469  7.185  61.1  4.9671    2  242.0   \n",
       "3  0.03237   0.0   2.18   0.0  0.458  6.998  45.8  6.0622    3  222.0   \n",
       "4  0.06905   0.0   2.18   0.0  0.458  7.147  54.2  6.0622    3  222.0   \n",
       "\n",
       "   PIRATIO       B  LSTAT  MEDV  \n",
       "0     15.3  396.90   4.98  24.0  \n",
       "1     17.8  396.90   9.14  21.6  \n",
       "2     17.8  392.83   4.03  34.7  \n",
       "3     18.7  394.63   2.94  33.4  \n",
       "4     18.7  396.90   5.33  36.2  "
      ]
     },
     "execution_count": 129,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import tensorflow as tf\n",
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import pandas as pd \n",
    "from sklearn.utils import shuffle\n",
    "from sklearn.preprocessing import scale\n",
    "\n",
    "Housing_url = \"data/boston.csv\"\n",
    "df = pd.read_csv(Housing_url,header=0)\n",
    "df.head()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 130,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>CRIM</th>\n",
       "      <th>ZN</th>\n",
       "      <th>INDUS</th>\n",
       "      <th>CHAS</th>\n",
       "      <th>NOX</th>\n",
       "      <th>RM</th>\n",
       "      <th>AGE</th>\n",
       "      <th>DIS</th>\n",
       "      <th>RAD</th>\n",
       "      <th>TAX</th>\n",
       "      <th>PIRATIO</th>\n",
       "      <th>B</th>\n",
       "      <th>LSTAT</th>\n",
       "      <th>MEDV</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>count</th>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>452.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>mean</th>\n",
       "      <td>1.269195</td>\n",
       "      <td>13.295257</td>\n",
       "      <td>9.205158</td>\n",
       "      <td>0.140765</td>\n",
       "      <td>1.101175</td>\n",
       "      <td>15.679800</td>\n",
       "      <td>58.744660</td>\n",
       "      <td>6.173308</td>\n",
       "      <td>78.063241</td>\n",
       "      <td>339.317787</td>\n",
       "      <td>42.614980</td>\n",
       "      <td>332.791107</td>\n",
       "      <td>11.537806</td>\n",
       "      <td>23.750442</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>std</th>\n",
       "      <td>2.399207</td>\n",
       "      <td>23.048697</td>\n",
       "      <td>7.169630</td>\n",
       "      <td>0.312765</td>\n",
       "      <td>1.646991</td>\n",
       "      <td>27.220206</td>\n",
       "      <td>33.104049</td>\n",
       "      <td>6.476435</td>\n",
       "      <td>203.542157</td>\n",
       "      <td>180.670077</td>\n",
       "      <td>87.585243</td>\n",
       "      <td>125.322456</td>\n",
       "      <td>6.064932</td>\n",
       "      <td>8.808602</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>min</th>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.385000</td>\n",
       "      <td>3.561000</td>\n",
       "      <td>1.137000</td>\n",
       "      <td>1.129600</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>20.200000</td>\n",
       "      <td>2.600000</td>\n",
       "      <td>0.320000</td>\n",
       "      <td>1.730000</td>\n",
       "      <td>6.300000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25%</th>\n",
       "      <td>0.049442</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>3.440000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.449000</td>\n",
       "      <td>5.961500</td>\n",
       "      <td>32.000000</td>\n",
       "      <td>2.430575</td>\n",
       "      <td>4.000000</td>\n",
       "      <td>254.000000</td>\n",
       "      <td>17.000000</td>\n",
       "      <td>364.995000</td>\n",
       "      <td>6.877500</td>\n",
       "      <td>18.500000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>50%</th>\n",
       "      <td>0.144655</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>6.960000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.538000</td>\n",
       "      <td>6.322500</td>\n",
       "      <td>65.250000</td>\n",
       "      <td>3.925850</td>\n",
       "      <td>5.000000</td>\n",
       "      <td>307.000000</td>\n",
       "      <td>18.900000</td>\n",
       "      <td>390.660000</td>\n",
       "      <td>10.380000</td>\n",
       "      <td>21.950000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>75%</th>\n",
       "      <td>0.819622</td>\n",
       "      <td>18.100000</td>\n",
       "      <td>18.100000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.647000</td>\n",
       "      <td>6.949000</td>\n",
       "      <td>89.975000</td>\n",
       "      <td>6.332075</td>\n",
       "      <td>24.000000</td>\n",
       "      <td>403.000000</td>\n",
       "      <td>20.200000</td>\n",
       "      <td>395.615000</td>\n",
       "      <td>15.015000</td>\n",
       "      <td>26.600000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>max</th>\n",
       "      <td>9.966540</td>\n",
       "      <td>100.000000</td>\n",
       "      <td>27.740000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>7.313000</td>\n",
       "      <td>100.000000</td>\n",
       "      <td>100.000000</td>\n",
       "      <td>24.000000</td>\n",
       "      <td>666.000000</td>\n",
       "      <td>711.000000</td>\n",
       "      <td>396.900000</td>\n",
       "      <td>396.900000</td>\n",
       "      <td>34.410000</td>\n",
       "      <td>50.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "             CRIM          ZN       INDUS        CHAS         NOX          RM  \\\n",
       "count  506.000000  506.000000  506.000000  506.000000  506.000000  506.000000   \n",
       "mean     1.269195   13.295257    9.205158    0.140765    1.101175   15.679800   \n",
       "std      2.399207   23.048697    7.169630    0.312765    1.646991   27.220206   \n",
       "min      0.000000    0.000000    0.000000    0.000000    0.385000    3.561000   \n",
       "25%      0.049442    0.000000    3.440000    0.000000    0.449000    5.961500   \n",
       "50%      0.144655    0.000000    6.960000    0.000000    0.538000    6.322500   \n",
       "75%      0.819622   18.100000   18.100000    0.000000    0.647000    6.949000   \n",
       "max      9.966540  100.000000   27.740000    1.000000    7.313000  100.000000   \n",
       "\n",
       "              AGE         DIS         RAD         TAX     PIRATIO           B  \\\n",
       "count  506.000000  506.000000  506.000000  506.000000  506.000000  506.000000   \n",
       "mean    58.744660    6.173308   78.063241  339.317787   42.614980  332.791107   \n",
       "std     33.104049    6.476435  203.542157  180.670077   87.585243  125.322456   \n",
       "min      1.137000    1.129600    1.000000   20.200000    2.600000    0.320000   \n",
       "25%     32.000000    2.430575    4.000000  254.000000   17.000000  364.995000   \n",
       "50%     65.250000    3.925850    5.000000  307.000000   18.900000  390.660000   \n",
       "75%     89.975000    6.332075   24.000000  403.000000   20.200000  395.615000   \n",
       "max    100.000000   24.000000  666.000000  711.000000  396.900000  396.900000   \n",
       "\n",
       "            LSTAT        MEDV  \n",
       "count  506.000000  452.000000  \n",
       "mean    11.537806   23.750442  \n",
       "std      6.064932    8.808602  \n",
       "min      1.730000    6.300000  \n",
       "25%      6.877500   18.500000  \n",
       "50%     10.380000   21.950000  \n",
       "75%     15.015000   26.600000  \n",
       "max     34.410000   50.000000  "
      ]
     },
     "execution_count": 130,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df.describe()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 131,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.impute import SimpleImputer\n",
    "imputer = SimpleImputer(strategy=\"median\")\n",
    "temp = imputer.fit_transform(df)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 132,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>0</th>\n",
       "      <th>1</th>\n",
       "      <th>2</th>\n",
       "      <th>3</th>\n",
       "      <th>4</th>\n",
       "      <th>5</th>\n",
       "      <th>6</th>\n",
       "      <th>7</th>\n",
       "      <th>8</th>\n",
       "      <th>9</th>\n",
       "      <th>10</th>\n",
       "      <th>11</th>\n",
       "      <th>12</th>\n",
       "      <th>13</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>count</th>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "      <td>506.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>mean</th>\n",
       "      <td>1.269195</td>\n",
       "      <td>13.295257</td>\n",
       "      <td>9.205158</td>\n",
       "      <td>0.140765</td>\n",
       "      <td>1.101175</td>\n",
       "      <td>15.679800</td>\n",
       "      <td>58.744660</td>\n",
       "      <td>6.173308</td>\n",
       "      <td>78.063241</td>\n",
       "      <td>339.317787</td>\n",
       "      <td>42.614980</td>\n",
       "      <td>332.791107</td>\n",
       "      <td>11.537806</td>\n",
       "      <td>23.558300</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>std</th>\n",
       "      <td>2.399207</td>\n",
       "      <td>23.048697</td>\n",
       "      <td>7.169630</td>\n",
       "      <td>0.312765</td>\n",
       "      <td>1.646991</td>\n",
       "      <td>27.220206</td>\n",
       "      <td>33.104049</td>\n",
       "      <td>6.476435</td>\n",
       "      <td>203.542157</td>\n",
       "      <td>180.670077</td>\n",
       "      <td>87.585243</td>\n",
       "      <td>125.322456</td>\n",
       "      <td>6.064932</td>\n",
       "      <td>8.342912</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>min</th>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.385000</td>\n",
       "      <td>3.561000</td>\n",
       "      <td>1.137000</td>\n",
       "      <td>1.129600</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>20.200000</td>\n",
       "      <td>2.600000</td>\n",
       "      <td>0.320000</td>\n",
       "      <td>1.730000</td>\n",
       "      <td>6.300000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25%</th>\n",
       "      <td>0.049442</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>3.440000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.449000</td>\n",
       "      <td>5.961500</td>\n",
       "      <td>32.000000</td>\n",
       "      <td>2.430575</td>\n",
       "      <td>4.000000</td>\n",
       "      <td>254.000000</td>\n",
       "      <td>17.000000</td>\n",
       "      <td>364.995000</td>\n",
       "      <td>6.877500</td>\n",
       "      <td>19.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>50%</th>\n",
       "      <td>0.144655</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>6.960000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.538000</td>\n",
       "      <td>6.322500</td>\n",
       "      <td>65.250000</td>\n",
       "      <td>3.925850</td>\n",
       "      <td>5.000000</td>\n",
       "      <td>307.000000</td>\n",
       "      <td>18.900000</td>\n",
       "      <td>390.660000</td>\n",
       "      <td>10.380000</td>\n",
       "      <td>21.950000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>75%</th>\n",
       "      <td>0.819622</td>\n",
       "      <td>18.100000</td>\n",
       "      <td>18.100000</td>\n",
       "      <td>0.000000</td>\n",
       "      <td>0.647000</td>\n",
       "      <td>6.949000</td>\n",
       "      <td>89.975000</td>\n",
       "      <td>6.332075</td>\n",
       "      <td>24.000000</td>\n",
       "      <td>403.000000</td>\n",
       "      <td>20.200000</td>\n",
       "      <td>395.615000</td>\n",
       "      <td>15.015000</td>\n",
       "      <td>25.000000</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>max</th>\n",
       "      <td>9.966540</td>\n",
       "      <td>100.000000</td>\n",
       "      <td>27.740000</td>\n",
       "      <td>1.000000</td>\n",
       "      <td>7.313000</td>\n",
       "      <td>100.000000</td>\n",
       "      <td>100.000000</td>\n",
       "      <td>24.000000</td>\n",
       "      <td>666.000000</td>\n",
       "      <td>711.000000</td>\n",
       "      <td>396.900000</td>\n",
       "      <td>396.900000</td>\n",
       "      <td>34.410000</td>\n",
       "      <td>50.000000</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "               0           1           2           3           4           5   \\\n",
       "count  506.000000  506.000000  506.000000  506.000000  506.000000  506.000000   \n",
       "mean     1.269195   13.295257    9.205158    0.140765    1.101175   15.679800   \n",
       "std      2.399207   23.048697    7.169630    0.312765    1.646991   27.220206   \n",
       "min      0.000000    0.000000    0.000000    0.000000    0.385000    3.561000   \n",
       "25%      0.049442    0.000000    3.440000    0.000000    0.449000    5.961500   \n",
       "50%      0.144655    0.000000    6.960000    0.000000    0.538000    6.322500   \n",
       "75%      0.819622   18.100000   18.100000    0.000000    0.647000    6.949000   \n",
       "max      9.966540  100.000000   27.740000    1.000000    7.313000  100.000000   \n",
       "\n",
       "               6           7           8           9           10          11  \\\n",
       "count  506.000000  506.000000  506.000000  506.000000  506.000000  506.000000   \n",
       "mean    58.744660    6.173308   78.063241  339.317787   42.614980  332.791107   \n",
       "std     33.104049    6.476435  203.542157  180.670077   87.585243  125.322456   \n",
       "min      1.137000    1.129600    1.000000   20.200000    2.600000    0.320000   \n",
       "25%     32.000000    2.430575    4.000000  254.000000   17.000000  364.995000   \n",
       "50%     65.250000    3.925850    5.000000  307.000000   18.900000  390.660000   \n",
       "75%     89.975000    6.332075   24.000000  403.000000   20.200000  395.615000   \n",
       "max    100.000000   24.000000  666.000000  711.000000  396.900000  396.900000   \n",
       "\n",
       "               12          13  \n",
       "count  506.000000  506.000000  \n",
       "mean    11.537806   23.558300  \n",
       "std      6.064932    8.342912  \n",
       "min      1.730000    6.300000  \n",
       "25%      6.877500   19.000000  \n",
       "50%     10.380000   21.950000  \n",
       "75%     15.015000   25.000000  \n",
       "max     34.410000   50.000000  "
      ]
     },
     "execution_count": 132,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "temp2 = pd.DataFrame(temp)\n",
    "temp2.describe()\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 133,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(506, 14)"
      ]
     },
     "execution_count": 133,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "ds = temp2.values\n",
    "ds.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 134,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([[6.3200e-03, 1.8000e+01, 2.3100e+00, ..., 3.9690e+02, 4.9800e+00,\n",
       "        2.4000e+01],\n",
       "       [2.7310e-02, 0.0000e+00, 7.0700e+00, ..., 3.9690e+02, 9.1400e+00,\n",
       "        2.1600e+01],\n",
       "       [2.7290e-02, 0.0000e+00, 7.0700e+00, ..., 3.9283e+02, 4.0300e+00,\n",
       "        3.4700e+01],\n",
       "       ...,\n",
       "       [6.0760e-02, 0.0000e+00, 1.1930e+01, ..., 3.9690e+02, 5.6400e+00,\n",
       "        2.3900e+01],\n",
       "       [1.0959e-01, 0.0000e+00, 1.1930e+01, ..., 3.9345e+02, 6.4800e+00,\n",
       "        2.2000e+01],\n",
       "       [4.7410e-02, 0.0000e+00, 1.1930e+01, ..., 3.9690e+02, 7.8800e+00,\n",
       "        1.1900e+01]])"
      ]
     },
     "execution_count": 134,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "ds"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 135,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[6.3200e-03 1.8000e+01 2.3100e+00 ... 3.9690e+02 4.9800e+00 2.4000e+01]\n",
      " [2.7310e-02 0.0000e+00 7.0700e+00 ... 3.9690e+02 9.1400e+00 2.1600e+01]\n",
      " [2.7290e-02 0.0000e+00 7.0700e+00 ... 3.9283e+02 4.0300e+00 3.4700e+01]\n",
      " ...\n",
      " [6.0760e-02 0.0000e+00 1.1930e+01 ... 3.9690e+02 5.6400e+00 2.3900e+01]\n",
      " [1.0959e-01 0.0000e+00 1.1930e+01 ... 3.9345e+02 6.4800e+00 2.2000e+01]\n",
      " [4.7410e-02 0.0000e+00 1.1930e+01 ... 3.9690e+02 7.8800e+00 1.1900e+01]]\n"
     ]
    }
   ],
   "source": [
    "print(ds)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 136,
   "metadata": {},
   "outputs": [],
   "source": [
    "x_data = ds[:,:13]\n",
    "y_data = ds[:,13]\n",
    "train_num = 300\n",
    "valid_num = 100\n",
    "test_num = len(x_data) - train_num - valid_num\n",
    "\n",
    "x_train = x_data[:train_num]\n",
    "x_valid = x_data[train_num:train_num+valid_num]\n",
    "x_test = x_data[train_num+valid_num:train_num+valid_num+test_num]\n",
    "\n",
    "y_train = y_data[:train_num]\n",
    "y_valid = y_data[train_num:train_num+valid_num]\n",
    "y_test = y_data[train_num+valid_num:train_num+valid_num+test_num]\n",
    "\n",
    "x_train = tf.cast(scale(x_train),dtype=tf.float32)\n",
    "x_valid = tf.cast(scale(x_valid),dtype=tf.float32)\n",
    "x_test = tf.cast(scale(x_test),dtype=tf.float32)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 137,
   "metadata": {},
   "outputs": [],
   "source": [
    "def model(w,x,b):\n",
    "    return tf.matmul(x,w)+b\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 138,
   "metadata": {},
   "outputs": [],
   "source": [
    "W = tf.Variable(tf.random.normal([13,1],mean=0.0,stddev=1.0,dtype=tf.float32))\n",
    "B = tf.Variable(tf.zeros(1),dtype=tf.float32)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 139,
   "metadata": {},
   "outputs": [],
   "source": [
    "def loss(w,x,b,y):\n",
    "    err = model(w,x,b) - y\n",
    "    squared_err = tf.square(err)\n",
    "    return tf.reduce_mean(squared_err)\n",
    "\n",
    "def grad(w,x,b,y):\n",
    "    with tf.GradientTape() as tape:\n",
    "        loss_ = loss(w,x,b,y)\n",
    "    return tape.gradient(loss_,[w,b])    "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 140,
   "metadata": {},
   "outputs": [],
   "source": [
    "training_epochs = 500\n",
    "learning_rate = 0.001\n",
    "batch_size = 10\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 141,
   "metadata": {},
   "outputs": [],
   "source": [
    "optimizer = tf.keras.optimizers.SGD(learning_rate)\n",
    "\n",
    "loss_list_train = []\n",
    "loss_list_valid = []\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 142,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 1 train loss: 664.7934 valid loss: 518.39166\n",
      "epoch: 2 train loss: 600.0547 valid loss: 460.1923\n",
      "epoch: 3 train loss: 542.8513 valid loss: 409.21457\n",
      "epoch: 4 train loss: 492.2506 valid loss: 364.54266\n",
      "epoch: 5 train loss: 447.46533 valid loss: 325.397\n",
      "epoch: 6 train loss: 407.81693 valid loss: 291.10483\n",
      "epoch: 7 train loss: 372.713 valid loss: 261.0809\n",
      "epoch: 8 train loss: 341.63272 valid loss: 234.81262\n",
      "epoch: 9 train loss: 314.11578 valid loss: 211.84966\n",
      "epoch: 10 train loss: 289.75467 valid loss: 191.79567\n",
      "epoch: 11 train loss: 268.18857 valid loss: 174.3012\n",
      "epoch: 12 train loss: 249.09747 valid loss: 159.05812\n",
      "epoch: 13 train loss: 232.1976 valid loss: 145.79448\n",
      "epoch: 14 train loss: 217.23778 valid loss: 134.27042\n",
      "epoch: 15 train loss: 203.99538 valid loss: 124.27434\n",
      "epoch: 16 train loss: 192.27316 valid loss: 115.619576\n",
      "epoch: 17 train loss: 181.8966 valid loss: 108.14155\n",
      "epoch: 18 train loss: 172.71103 valid loss: 101.69514\n",
      "epoch: 19 train loss: 164.57962 valid loss: 96.15243\n",
      "epoch: 20 train loss: 157.38147 valid loss: 91.400894\n",
      "epoch: 21 train loss: 151.0091 valid loss: 87.3411\n",
      "epoch: 22 train loss: 145.36775 valid loss: 83.885826\n",
      "epoch: 23 train loss: 140.37358 valid loss: 80.95822\n",
      "epoch: 24 train loss: 135.95222 valid loss: 78.49065\n",
      "epoch: 25 train loss: 132.03793 valid loss: 76.42368\n",
      "epoch: 26 train loss: 128.57262 valid loss: 74.70515\n",
      "epoch: 27 train loss: 125.504776 valid loss: 73.289185\n",
      "epoch: 28 train loss: 122.78881 valid loss: 72.13554\n",
      "epoch: 29 train loss: 120.38435 valid loss: 71.208885\n",
      "epoch: 30 train loss: 118.25589 valid loss: 70.47836\n",
      "epoch: 31 train loss: 116.371735 valid loss: 69.916885\n",
      "epoch: 32 train loss: 114.70391 valid loss: 69.50069\n",
      "epoch: 33 train loss: 113.22773 valid loss: 69.209114\n",
      "epoch: 34 train loss: 111.9212 valid loss: 69.023994\n",
      "epoch: 35 train loss: 110.76498 valid loss: 68.9296\n",
      "epoch: 36 train loss: 109.74189 valid loss: 68.912125\n",
      "epoch: 37 train loss: 108.836685 valid loss: 68.95961\n",
      "epoch: 38 train loss: 108.03598 valid loss: 69.06166\n",
      "epoch: 39 train loss: 107.3278 valid loss: 69.20923\n",
      "epoch: 40 train loss: 106.7016 valid loss: 69.39448\n",
      "epoch: 41 train loss: 106.14806 valid loss: 69.61065\n",
      "epoch: 42 train loss: 105.65887 valid loss: 69.85191\n",
      "epoch: 43 train loss: 105.22667 valid loss: 70.11323\n",
      "epoch: 44 train loss: 104.845024 valid loss: 70.390274\n",
      "epoch: 45 train loss: 104.50818 valid loss: 70.67934\n",
      "epoch: 46 train loss: 104.21099 valid loss: 70.97731\n",
      "epoch: 47 train loss: 103.949 valid loss: 71.28145\n",
      "epoch: 48 train loss: 103.7182 valid loss: 71.589516\n",
      "epoch: 49 train loss: 103.515 valid loss: 71.89963\n",
      "epoch: 50 train loss: 103.33629 valid loss: 72.21016\n",
      "epoch: 51 train loss: 103.17931 valid loss: 72.51975\n",
      "epoch: 52 train loss: 103.0416 valid loss: 72.82734\n",
      "epoch: 53 train loss: 102.92094 valid loss: 73.13203\n",
      "epoch: 54 train loss: 102.81542 valid loss: 73.43306\n",
      "epoch: 55 train loss: 102.72332 valid loss: 73.72991\n",
      "epoch: 56 train loss: 102.64312 valid loss: 74.02211\n",
      "epoch: 57 train loss: 102.57348 valid loss: 74.309326\n",
      "epoch: 58 train loss: 102.513176 valid loss: 74.591324\n",
      "epoch: 59 train loss: 102.4612 valid loss: 74.86793\n",
      "epoch: 60 train loss: 102.41658 valid loss: 75.139084\n",
      "epoch: 61 train loss: 102.37849 valid loss: 75.40466\n",
      "epoch: 62 train loss: 102.34617 valid loss: 75.66476\n",
      "epoch: 63 train loss: 102.31902 valid loss: 75.91939\n",
      "epoch: 64 train loss: 102.29641 valid loss: 76.16865\n",
      "epoch: 65 train loss: 102.277855 valid loss: 76.41259\n",
      "epoch: 66 train loss: 102.262886 valid loss: 76.6514\n",
      "epoch: 67 train loss: 102.25109 valid loss: 76.88518\n",
      "epoch: 68 train loss: 102.2421 valid loss: 77.1141\n",
      "epoch: 69 train loss: 102.23562 valid loss: 77.33834\n",
      "epoch: 70 train loss: 102.23137 valid loss: 77.55803\n",
      "epoch: 71 train loss: 102.229065 valid loss: 77.773384\n",
      "epoch: 72 train loss: 102.22853 valid loss: 77.98453\n",
      "epoch: 73 train loss: 102.22953 valid loss: 78.19169\n",
      "epoch: 74 train loss: 102.23189 valid loss: 78.394966\n",
      "epoch: 75 train loss: 102.235466 valid loss: 78.59461\n",
      "epoch: 76 train loss: 102.24011 valid loss: 78.79076\n",
      "epoch: 77 train loss: 102.24572 valid loss: 78.983536\n",
      "epoch: 78 train loss: 102.252144 valid loss: 79.17317\n",
      "epoch: 79 train loss: 102.25932 valid loss: 79.35978\n",
      "epoch: 80 train loss: 102.26714 valid loss: 79.543465\n",
      "epoch: 81 train loss: 102.27556 valid loss: 79.72447\n",
      "epoch: 82 train loss: 102.28447 valid loss: 79.90284\n",
      "epoch: 83 train loss: 102.29382 valid loss: 80.07879\n",
      "epoch: 84 train loss: 102.303566 valid loss: 80.25236\n",
      "epoch: 85 train loss: 102.31367 valid loss: 80.42374\n",
      "epoch: 86 train loss: 102.32404 valid loss: 80.59299\n",
      "epoch: 87 train loss: 102.33471 valid loss: 80.76026\n",
      "epoch: 88 train loss: 102.34558 valid loss: 80.92565\n",
      "epoch: 89 train loss: 102.35664 valid loss: 81.089226\n",
      "epoch: 90 train loss: 102.36789 valid loss: 81.25109\n",
      "epoch: 91 train loss: 102.379265 valid loss: 81.41131\n",
      "epoch: 92 train loss: 102.39078 valid loss: 81.57002\n",
      "epoch: 93 train loss: 102.40237 valid loss: 81.727325\n",
      "epoch: 94 train loss: 102.41407 valid loss: 81.88321\n",
      "epoch: 95 train loss: 102.42582 valid loss: 82.0378\n",
      "epoch: 96 train loss: 102.43763 valid loss: 82.19114\n",
      "epoch: 97 train loss: 102.449486 valid loss: 82.34324\n",
      "epoch: 98 train loss: 102.46136 valid loss: 82.49422\n",
      "epoch: 99 train loss: 102.47326 valid loss: 82.644104\n",
      "epoch: 100 train loss: 102.485176 valid loss: 82.79295\n",
      "epoch: 101 train loss: 102.4971 valid loss: 82.94084\n",
      "epoch: 102 train loss: 102.50901 valid loss: 83.08782\n",
      "epoch: 103 train loss: 102.52091 valid loss: 83.233864\n",
      "epoch: 104 train loss: 102.5328 valid loss: 83.37903\n",
      "epoch: 105 train loss: 102.54467 valid loss: 83.52336\n",
      "epoch: 106 train loss: 102.55651 valid loss: 83.66687\n",
      "epoch: 107 train loss: 102.56834 valid loss: 83.80962\n",
      "epoch: 108 train loss: 102.58011 valid loss: 83.95162\n",
      "epoch: 109 train loss: 102.59184 valid loss: 84.09293\n",
      "epoch: 110 train loss: 102.603546 valid loss: 84.23353\n",
      "epoch: 111 train loss: 102.61521 valid loss: 84.373436\n",
      "epoch: 112 train loss: 102.62682 valid loss: 84.51269\n",
      "epoch: 113 train loss: 102.63839 valid loss: 84.65132\n",
      "epoch: 114 train loss: 102.64989 valid loss: 84.789314\n",
      "epoch: 115 train loss: 102.661354 valid loss: 84.926735\n",
      "epoch: 116 train loss: 102.672775 valid loss: 85.06356\n",
      "epoch: 117 train loss: 102.684135 valid loss: 85.199875\n",
      "epoch: 118 train loss: 102.69544 valid loss: 85.33554\n",
      "epoch: 119 train loss: 102.70669 valid loss: 85.47069\n",
      "epoch: 120 train loss: 102.71788 valid loss: 85.60523\n",
      "epoch: 121 train loss: 102.729 valid loss: 85.73928\n",
      "epoch: 122 train loss: 102.74007 valid loss: 85.87284\n",
      "epoch: 123 train loss: 102.75109 valid loss: 86.00586\n",
      "epoch: 124 train loss: 102.76205 valid loss: 86.13831\n",
      "epoch: 125 train loss: 102.772934 valid loss: 86.2703\n",
      "epoch: 126 train loss: 102.78375 valid loss: 86.401825\n",
      "epoch: 127 train loss: 102.794525 valid loss: 86.53282\n",
      "epoch: 128 train loss: 102.805244 valid loss: 86.663315\n",
      "epoch: 129 train loss: 102.81588 valid loss: 86.79338\n",
      "epoch: 130 train loss: 102.82645 valid loss: 86.92292\n",
      "epoch: 131 train loss: 102.83697 valid loss: 87.052\n",
      "epoch: 132 train loss: 102.84742 valid loss: 87.180565\n",
      "epoch: 133 train loss: 102.85782 valid loss: 87.30866\n",
      "epoch: 134 train loss: 102.86814 valid loss: 87.43628\n",
      "epoch: 135 train loss: 102.878426 valid loss: 87.56341\n",
      "epoch: 136 train loss: 102.88861 valid loss: 87.69008\n",
      "epoch: 137 train loss: 102.898766 valid loss: 87.816315\n",
      "epoch: 138 train loss: 102.908844 valid loss: 87.942\n",
      "epoch: 139 train loss: 102.91887 valid loss: 88.06725\n",
      "epoch: 140 train loss: 102.928825 valid loss: 88.19201\n",
      "epoch: 141 train loss: 102.93871 valid loss: 88.31632\n",
      "epoch: 142 train loss: 102.94853 valid loss: 88.440186\n",
      "epoch: 143 train loss: 102.95831 valid loss: 88.56354\n",
      "epoch: 144 train loss: 102.968025 valid loss: 88.68646\n",
      "epoch: 145 train loss: 102.977646 valid loss: 88.80888\n",
      "epoch: 146 train loss: 102.987236 valid loss: 88.93081\n",
      "epoch: 147 train loss: 102.99676 valid loss: 89.05228\n",
      "epoch: 148 train loss: 103.00621 valid loss: 89.17331\n",
      "epoch: 149 train loss: 103.015625 valid loss: 89.29386\n",
      "epoch: 150 train loss: 103.024956 valid loss: 89.4139\n",
      "epoch: 151 train loss: 103.03423 valid loss: 89.5335\n",
      "epoch: 152 train loss: 103.04344 valid loss: 89.65265\n",
      "epoch: 153 train loss: 103.0526 valid loss: 89.77131\n",
      "epoch: 154 train loss: 103.06169 valid loss: 89.88949\n",
      "epoch: 155 train loss: 103.07073 valid loss: 90.007225\n",
      "epoch: 156 train loss: 103.07971 valid loss: 90.124435\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 157 train loss: 103.08863 valid loss: 90.241135\n",
      "epoch: 158 train loss: 103.09749 valid loss: 90.35738\n",
      "epoch: 159 train loss: 103.106285 valid loss: 90.47319\n",
      "epoch: 160 train loss: 103.115036 valid loss: 90.58848\n",
      "epoch: 161 train loss: 103.123726 valid loss: 90.70331\n",
      "epoch: 162 train loss: 103.132355 valid loss: 90.817635\n",
      "epoch: 163 train loss: 103.14093 valid loss: 90.9315\n",
      "epoch: 164 train loss: 103.14945 valid loss: 91.04482\n",
      "epoch: 165 train loss: 103.15791 valid loss: 91.157715\n",
      "epoch: 166 train loss: 103.16632 valid loss: 91.2701\n",
      "epoch: 167 train loss: 103.17467 valid loss: 91.38199\n",
      "epoch: 168 train loss: 103.18295 valid loss: 91.49343\n",
      "epoch: 169 train loss: 103.1912 valid loss: 91.60439\n",
      "epoch: 170 train loss: 103.19938 valid loss: 91.71485\n",
      "epoch: 171 train loss: 103.2075 valid loss: 91.8248\n",
      "epoch: 172 train loss: 103.215576 valid loss: 91.93427\n",
      "epoch: 173 train loss: 103.2236 valid loss: 92.043274\n",
      "epoch: 174 train loss: 103.23157 valid loss: 92.1518\n",
      "epoch: 175 train loss: 103.23946 valid loss: 92.25982\n",
      "epoch: 176 train loss: 103.24732 valid loss: 92.36738\n",
      "epoch: 177 train loss: 103.255135 valid loss: 92.47445\n",
      "epoch: 178 train loss: 103.262886 valid loss: 92.58104\n",
      "epoch: 179 train loss: 103.2706 valid loss: 92.6871\n",
      "epoch: 180 train loss: 103.278244 valid loss: 92.79268\n",
      "epoch: 181 train loss: 103.285866 valid loss: 92.897766\n",
      "epoch: 182 train loss: 103.29341 valid loss: 93.00239\n",
      "epoch: 183 train loss: 103.30091 valid loss: 93.10652\n",
      "epoch: 184 train loss: 103.30834 valid loss: 93.21016\n",
      "epoch: 185 train loss: 103.315735 valid loss: 93.31326\n",
      "epoch: 186 train loss: 103.32309 valid loss: 93.415855\n",
      "epoch: 187 train loss: 103.3304 valid loss: 93.518036\n",
      "epoch: 188 train loss: 103.33763 valid loss: 93.619736\n",
      "epoch: 189 train loss: 103.34484 valid loss: 93.72089\n",
      "epoch: 190 train loss: 103.351974 valid loss: 93.82162\n",
      "epoch: 191 train loss: 103.359085 valid loss: 93.92184\n",
      "epoch: 192 train loss: 103.36616 valid loss: 94.02155\n",
      "epoch: 193 train loss: 103.37317 valid loss: 94.1208\n",
      "epoch: 194 train loss: 103.38012 valid loss: 94.21951\n",
      "epoch: 195 train loss: 103.38705 valid loss: 94.31779\n",
      "epoch: 196 train loss: 103.39391 valid loss: 94.41559\n",
      "epoch: 197 train loss: 103.400734 valid loss: 94.51293\n",
      "epoch: 198 train loss: 103.4075 valid loss: 94.609764\n",
      "epoch: 199 train loss: 103.41422 valid loss: 94.7061\n",
      "epoch: 200 train loss: 103.42091 valid loss: 94.80197\n",
      "epoch: 201 train loss: 103.42756 valid loss: 94.89738\n",
      "epoch: 202 train loss: 103.434135 valid loss: 94.992325\n",
      "epoch: 203 train loss: 103.44069 valid loss: 95.08676\n",
      "epoch: 204 train loss: 103.4472 valid loss: 95.18071\n",
      "epoch: 205 train loss: 103.45367 valid loss: 95.27418\n",
      "epoch: 206 train loss: 103.46007 valid loss: 95.3672\n",
      "epoch: 207 train loss: 103.466446 valid loss: 95.4597\n",
      "epoch: 208 train loss: 103.47278 valid loss: 95.551765\n",
      "epoch: 209 train loss: 103.47906 valid loss: 95.6433\n",
      "epoch: 210 train loss: 103.48531 valid loss: 95.73441\n",
      "epoch: 211 train loss: 103.49151 valid loss: 95.82508\n",
      "epoch: 212 train loss: 103.497665 valid loss: 95.91519\n",
      "epoch: 213 train loss: 103.5038 valid loss: 96.00489\n",
      "epoch: 214 train loss: 103.509865 valid loss: 96.09415\n",
      "epoch: 215 train loss: 103.515915 valid loss: 96.182884\n",
      "epoch: 216 train loss: 103.52191 valid loss: 96.27119\n",
      "epoch: 217 train loss: 103.52787 valid loss: 96.35902\n",
      "epoch: 218 train loss: 103.533775 valid loss: 96.446365\n",
      "epoch: 219 train loss: 103.539665 valid loss: 96.53329\n",
      "epoch: 220 train loss: 103.54549 valid loss: 96.61969\n",
      "epoch: 221 train loss: 103.55129 valid loss: 96.70567\n",
      "epoch: 222 train loss: 103.557045 valid loss: 96.791214\n",
      "epoch: 223 train loss: 103.56277 valid loss: 96.87626\n",
      "epoch: 224 train loss: 103.56844 valid loss: 96.96089\n",
      "epoch: 225 train loss: 103.57409 valid loss: 97.04505\n",
      "epoch: 226 train loss: 103.5797 valid loss: 97.12874\n",
      "epoch: 227 train loss: 103.58526 valid loss: 97.21199\n",
      "epoch: 228 train loss: 103.59079 valid loss: 97.29478\n",
      "epoch: 229 train loss: 103.596275 valid loss: 97.3771\n",
      "epoch: 230 train loss: 103.60173 valid loss: 97.458984\n",
      "epoch: 231 train loss: 103.60715 valid loss: 97.54041\n",
      "epoch: 232 train loss: 103.612526 valid loss: 97.62142\n",
      "epoch: 233 train loss: 103.61787 valid loss: 97.702\n",
      "epoch: 234 train loss: 103.62318 valid loss: 97.78208\n",
      "epoch: 235 train loss: 103.62844 valid loss: 97.8618\n",
      "epoch: 236 train loss: 103.63367 valid loss: 97.941025\n",
      "epoch: 237 train loss: 103.63888 valid loss: 98.019775\n",
      "epoch: 238 train loss: 103.64404 valid loss: 98.09817\n",
      "epoch: 239 train loss: 103.649155 valid loss: 98.176125\n",
      "epoch: 240 train loss: 103.65426 valid loss: 98.253624\n",
      "epoch: 241 train loss: 103.659325 valid loss: 98.3307\n",
      "epoch: 242 train loss: 103.66433 valid loss: 98.40737\n",
      "epoch: 243 train loss: 103.66932 valid loss: 98.48363\n",
      "epoch: 244 train loss: 103.674286 valid loss: 98.55943\n",
      "epoch: 245 train loss: 103.6792 valid loss: 98.6348\n",
      "epoch: 246 train loss: 103.6841 valid loss: 98.70975\n",
      "epoch: 247 train loss: 103.68896 valid loss: 98.78426\n",
      "epoch: 248 train loss: 103.69378 valid loss: 98.85835\n",
      "epoch: 249 train loss: 103.69858 valid loss: 98.93205\n",
      "epoch: 250 train loss: 103.70333 valid loss: 99.00534\n",
      "epoch: 251 train loss: 103.70805 valid loss: 99.0782\n",
      "epoch: 252 train loss: 103.71275 valid loss: 99.15061\n",
      "epoch: 253 train loss: 103.71742 valid loss: 99.22265\n",
      "epoch: 254 train loss: 103.722046 valid loss: 99.294266\n",
      "epoch: 255 train loss: 103.72663 valid loss: 99.36544\n",
      "epoch: 256 train loss: 103.7312 valid loss: 99.436264\n",
      "epoch: 257 train loss: 103.73573 valid loss: 99.50666\n",
      "epoch: 258 train loss: 103.740234 valid loss: 99.576645\n",
      "epoch: 259 train loss: 103.74471 valid loss: 99.64624\n",
      "epoch: 260 train loss: 103.74917 valid loss: 99.71544\n",
      "epoch: 261 train loss: 103.75358 valid loss: 99.7842\n",
      "epoch: 262 train loss: 103.75796 valid loss: 99.852615\n",
      "epoch: 263 train loss: 103.762314 valid loss: 99.920616\n",
      "epoch: 264 train loss: 103.76665 valid loss: 99.98823\n",
      "epoch: 265 train loss: 103.770935 valid loss: 100.05544\n",
      "epoch: 266 train loss: 103.7752 valid loss: 100.12232\n",
      "epoch: 267 train loss: 103.77944 valid loss: 100.18873\n",
      "epoch: 268 train loss: 103.783646 valid loss: 100.254776\n",
      "epoch: 269 train loss: 103.78782 valid loss: 100.320435\n",
      "epoch: 270 train loss: 103.79197 valid loss: 100.385735\n",
      "epoch: 271 train loss: 103.79609 valid loss: 100.4506\n",
      "epoch: 272 train loss: 103.80018 valid loss: 100.515076\n",
      "epoch: 273 train loss: 103.80427 valid loss: 100.57921\n",
      "epoch: 274 train loss: 103.8083 valid loss: 100.642944\n",
      "epoch: 275 train loss: 103.81231 valid loss: 100.70633\n",
      "epoch: 276 train loss: 103.81629 valid loss: 100.76934\n",
      "epoch: 277 train loss: 103.820244 valid loss: 100.831955\n",
      "epoch: 278 train loss: 103.82418 valid loss: 100.89426\n",
      "epoch: 279 train loss: 103.82809 valid loss: 100.95616\n",
      "epoch: 280 train loss: 103.831955 valid loss: 101.01765\n",
      "epoch: 281 train loss: 103.83581 valid loss: 101.07879\n",
      "epoch: 282 train loss: 103.83962 valid loss: 101.13961\n",
      "epoch: 283 train loss: 103.843445 valid loss: 101.200066\n",
      "epoch: 284 train loss: 103.84721 valid loss: 101.26014\n",
      "epoch: 285 train loss: 103.85095 valid loss: 101.31988\n",
      "epoch: 286 train loss: 103.85469 valid loss: 101.379234\n",
      "epoch: 287 train loss: 103.858376 valid loss: 101.43824\n",
      "epoch: 288 train loss: 103.86203 valid loss: 101.496925\n",
      "epoch: 289 train loss: 103.86569 valid loss: 101.55523\n",
      "epoch: 290 train loss: 103.86931 valid loss: 101.61316\n",
      "epoch: 291 train loss: 103.8729 valid loss: 101.6708\n",
      "epoch: 292 train loss: 103.876465 valid loss: 101.72805\n",
      "epoch: 293 train loss: 103.88002 valid loss: 101.784966\n",
      "epoch: 294 train loss: 103.88353 valid loss: 101.841515\n",
      "epoch: 295 train loss: 103.887024 valid loss: 101.89773\n",
      "epoch: 296 train loss: 103.890526 valid loss: 101.95363\n",
      "epoch: 297 train loss: 103.89396 valid loss: 102.00916\n",
      "epoch: 298 train loss: 103.89739 valid loss: 102.06437\n",
      "epoch: 299 train loss: 103.90078 valid loss: 102.119255\n",
      "epoch: 300 train loss: 103.904175 valid loss: 102.17379\n",
      "epoch: 301 train loss: 103.90753 valid loss: 102.22797\n",
      "epoch: 302 train loss: 103.910866 valid loss: 102.281876\n",
      "epoch: 303 train loss: 103.91418 valid loss: 102.335434\n",
      "epoch: 304 train loss: 103.917465 valid loss: 102.38866\n",
      "epoch: 305 train loss: 103.92073 valid loss: 102.441574\n",
      "epoch: 306 train loss: 103.923965 valid loss: 102.49414\n",
      "epoch: 307 train loss: 103.92719 valid loss: 102.546425\n",
      "epoch: 308 train loss: 103.930374 valid loss: 102.598366\n",
      "epoch: 309 train loss: 103.93356 valid loss: 102.64997\n",
      "epoch: 310 train loss: 103.93674 valid loss: 102.70128\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 311 train loss: 103.93986 valid loss: 102.752266\n",
      "epoch: 312 train loss: 103.94298 valid loss: 102.80298\n",
      "epoch: 313 train loss: 103.94607 valid loss: 102.85333\n",
      "epoch: 314 train loss: 103.949135 valid loss: 102.9034\n",
      "epoch: 315 train loss: 103.95218 valid loss: 102.95311\n",
      "epoch: 316 train loss: 103.95521 valid loss: 103.002556\n",
      "epoch: 317 train loss: 103.95822 valid loss: 103.0517\n",
      "epoch: 318 train loss: 103.96121 valid loss: 103.10052\n",
      "epoch: 319 train loss: 103.96418 valid loss: 103.14904\n",
      "epoch: 320 train loss: 103.96715 valid loss: 103.197266\n",
      "epoch: 321 train loss: 103.97007 valid loss: 103.24518\n",
      "epoch: 322 train loss: 103.97295 valid loss: 103.2928\n",
      "epoch: 323 train loss: 103.975845 valid loss: 103.340126\n",
      "epoch: 324 train loss: 103.978714 valid loss: 103.38712\n",
      "epoch: 325 train loss: 103.98157 valid loss: 103.43389\n",
      "epoch: 326 train loss: 103.9844 valid loss: 103.480385\n",
      "epoch: 327 train loss: 103.98719 valid loss: 103.52656\n",
      "epoch: 328 train loss: 103.99 valid loss: 103.572426\n",
      "epoch: 329 train loss: 103.99277 valid loss: 103.61803\n",
      "epoch: 330 train loss: 103.995514 valid loss: 103.663315\n",
      "epoch: 331 train loss: 103.998245 valid loss: 103.70835\n",
      "epoch: 332 train loss: 104.00095 valid loss: 103.753075\n",
      "epoch: 333 train loss: 104.00365 valid loss: 103.79752\n",
      "epoch: 334 train loss: 104.006325 valid loss: 103.84172\n",
      "epoch: 335 train loss: 104.00898 valid loss: 103.885605\n",
      "epoch: 336 train loss: 104.01161 valid loss: 103.92925\n",
      "epoch: 337 train loss: 104.01424 valid loss: 103.97257\n",
      "epoch: 338 train loss: 104.01682 valid loss: 104.0157\n",
      "epoch: 339 train loss: 104.019424 valid loss: 104.05854\n",
      "epoch: 340 train loss: 104.02199 valid loss: 104.1011\n",
      "epoch: 341 train loss: 104.02454 valid loss: 104.14339\n",
      "epoch: 342 train loss: 104.02708 valid loss: 104.1854\n",
      "epoch: 343 train loss: 104.02959 valid loss: 104.227135\n",
      "epoch: 344 train loss: 104.032074 valid loss: 104.268585\n",
      "epoch: 345 train loss: 104.03455 valid loss: 104.30982\n",
      "epoch: 346 train loss: 104.037025 valid loss: 104.35074\n",
      "epoch: 347 train loss: 104.03946 valid loss: 104.39143\n",
      "epoch: 348 train loss: 104.041885 valid loss: 104.431885\n",
      "epoch: 349 train loss: 104.04429 valid loss: 104.472084\n",
      "epoch: 350 train loss: 104.0467 valid loss: 104.511955\n",
      "epoch: 351 train loss: 104.04908 valid loss: 104.55162\n",
      "epoch: 352 train loss: 104.051445 valid loss: 104.59101\n",
      "epoch: 353 train loss: 104.05379 valid loss: 104.630165\n",
      "epoch: 354 train loss: 104.056114 valid loss: 104.669075\n",
      "epoch: 355 train loss: 104.05843 valid loss: 104.70775\n",
      "epoch: 356 train loss: 104.06072 valid loss: 104.74618\n",
      "epoch: 357 train loss: 104.06301 valid loss: 104.784355\n",
      "epoch: 358 train loss: 104.06527 valid loss: 104.8223\n",
      "epoch: 359 train loss: 104.06751 valid loss: 104.86002\n",
      "epoch: 360 train loss: 104.069756 valid loss: 104.89746\n",
      "epoch: 361 train loss: 104.071976 valid loss: 104.93465\n",
      "epoch: 362 train loss: 104.07418 valid loss: 104.97159\n",
      "epoch: 363 train loss: 104.07638 valid loss: 105.00832\n",
      "epoch: 364 train loss: 104.07854 valid loss: 105.04478\n",
      "epoch: 365 train loss: 104.08069 valid loss: 105.08105\n",
      "epoch: 366 train loss: 104.08285 valid loss: 105.11709\n",
      "epoch: 367 train loss: 104.08497 valid loss: 105.152916\n",
      "epoch: 368 train loss: 104.08709 valid loss: 105.1885\n",
      "epoch: 369 train loss: 104.08918 valid loss: 105.22382\n",
      "epoch: 370 train loss: 104.09127 valid loss: 105.25893\n",
      "epoch: 371 train loss: 104.09333 valid loss: 105.293816\n",
      "epoch: 372 train loss: 104.09539 valid loss: 105.32845\n",
      "epoch: 373 train loss: 104.09742 valid loss: 105.36292\n",
      "epoch: 374 train loss: 104.099464 valid loss: 105.397125\n",
      "epoch: 375 train loss: 104.10146 valid loss: 105.431114\n",
      "epoch: 376 train loss: 104.10348 valid loss: 105.4649\n",
      "epoch: 377 train loss: 104.10545 valid loss: 105.49843\n",
      "epoch: 378 train loss: 104.10742 valid loss: 105.53175\n",
      "epoch: 379 train loss: 104.10939 valid loss: 105.56491\n",
      "epoch: 380 train loss: 104.111336 valid loss: 105.59776\n",
      "epoch: 381 train loss: 104.113266 valid loss: 105.63048\n",
      "epoch: 382 train loss: 104.1152 valid loss: 105.66295\n",
      "epoch: 383 train loss: 104.11709 valid loss: 105.69523\n",
      "epoch: 384 train loss: 104.11899 valid loss: 105.7273\n",
      "epoch: 385 train loss: 104.120865 valid loss: 105.75914\n",
      "epoch: 386 train loss: 104.122734 valid loss: 105.790764\n",
      "epoch: 387 train loss: 104.12459 valid loss: 105.822174\n",
      "epoch: 388 train loss: 104.12642 valid loss: 105.85344\n",
      "epoch: 389 train loss: 104.12824 valid loss: 105.88444\n",
      "epoch: 390 train loss: 104.130066 valid loss: 105.91526\n",
      "epoch: 391 train loss: 104.13187 valid loss: 105.945885\n",
      "epoch: 392 train loss: 104.133644 valid loss: 105.9763\n",
      "epoch: 393 train loss: 104.13542 valid loss: 106.006516\n",
      "epoch: 394 train loss: 104.13719 valid loss: 106.036514\n",
      "epoch: 395 train loss: 104.13895 valid loss: 106.066284\n",
      "epoch: 396 train loss: 104.140686 valid loss: 106.095924\n",
      "epoch: 397 train loss: 104.1424 valid loss: 106.125336\n",
      "epoch: 398 train loss: 104.14412 valid loss: 106.154625\n",
      "epoch: 399 train loss: 104.14582 valid loss: 106.18372\n",
      "epoch: 400 train loss: 104.14752 valid loss: 106.2126\n",
      "epoch: 401 train loss: 104.1492 valid loss: 106.24125\n",
      "epoch: 402 train loss: 104.15086 valid loss: 106.26975\n",
      "epoch: 403 train loss: 104.15252 valid loss: 106.2981\n",
      "epoch: 404 train loss: 104.15417 valid loss: 106.32617\n",
      "epoch: 405 train loss: 104.1558 valid loss: 106.35407\n",
      "epoch: 406 train loss: 104.157425 valid loss: 106.38183\n",
      "epoch: 407 train loss: 104.159035 valid loss: 106.409386\n",
      "epoch: 408 train loss: 104.160645 valid loss: 106.436775\n",
      "epoch: 409 train loss: 104.162224 valid loss: 106.46396\n",
      "epoch: 410 train loss: 104.1638 valid loss: 106.49103\n",
      "epoch: 411 train loss: 104.16537 valid loss: 106.5179\n",
      "epoch: 412 train loss: 104.16691 valid loss: 106.54458\n",
      "epoch: 413 train loss: 104.168465 valid loss: 106.571075\n",
      "epoch: 414 train loss: 104.17 valid loss: 106.597435\n",
      "epoch: 415 train loss: 104.171524 valid loss: 106.623566\n",
      "epoch: 416 train loss: 104.17302 valid loss: 106.64951\n",
      "epoch: 417 train loss: 104.17453 valid loss: 106.6753\n",
      "epoch: 418 train loss: 104.17601 valid loss: 106.70095\n",
      "epoch: 419 train loss: 104.17749 valid loss: 106.7264\n",
      "epoch: 420 train loss: 104.17898 valid loss: 106.751686\n",
      "epoch: 421 train loss: 104.180435 valid loss: 106.776825\n",
      "epoch: 422 train loss: 104.18187 valid loss: 106.8018\n",
      "epoch: 423 train loss: 104.18331 valid loss: 106.8266\n",
      "epoch: 424 train loss: 104.18475 valid loss: 106.85123\n",
      "epoch: 425 train loss: 104.186165 valid loss: 106.87569\n",
      "epoch: 426 train loss: 104.18758 valid loss: 106.89995\n",
      "epoch: 427 train loss: 104.18898 valid loss: 106.92407\n",
      "epoch: 428 train loss: 104.19037 valid loss: 106.948\n",
      "epoch: 429 train loss: 104.19174 valid loss: 106.97184\n",
      "epoch: 430 train loss: 104.19312 valid loss: 106.99552\n",
      "epoch: 431 train loss: 104.19449 valid loss: 107.019035\n",
      "epoch: 432 train loss: 104.19585 valid loss: 107.04237\n",
      "epoch: 433 train loss: 104.197174 valid loss: 107.065575\n",
      "epoch: 434 train loss: 104.19851 valid loss: 107.08862\n",
      "epoch: 435 train loss: 104.199844 valid loss: 107.11153\n",
      "epoch: 436 train loss: 104.20116 valid loss: 107.13426\n",
      "epoch: 437 train loss: 104.20247 valid loss: 107.15682\n",
      "epoch: 438 train loss: 104.20376 valid loss: 107.17922\n",
      "epoch: 439 train loss: 104.20505 valid loss: 107.20152\n",
      "epoch: 440 train loss: 104.20632 valid loss: 107.22365\n",
      "epoch: 441 train loss: 104.20759 valid loss: 107.2457\n",
      "epoch: 442 train loss: 104.208855 valid loss: 107.267525\n",
      "epoch: 443 train loss: 104.21011 valid loss: 107.28922\n",
      "epoch: 444 train loss: 104.21136 valid loss: 107.31074\n",
      "epoch: 445 train loss: 104.21258 valid loss: 107.33214\n",
      "epoch: 446 train loss: 104.2138 valid loss: 107.353386\n",
      "epoch: 447 train loss: 104.21502 valid loss: 107.3745\n",
      "epoch: 448 train loss: 104.21623 valid loss: 107.39546\n",
      "epoch: 449 train loss: 104.21742 valid loss: 107.41629\n",
      "epoch: 450 train loss: 104.21862 valid loss: 107.43697\n",
      "epoch: 451 train loss: 104.2198 valid loss: 107.45755\n",
      "epoch: 452 train loss: 104.22098 valid loss: 107.477936\n",
      "epoch: 453 train loss: 104.22213 valid loss: 107.49822\n",
      "epoch: 454 train loss: 104.2233 valid loss: 107.51835\n",
      "epoch: 455 train loss: 104.22444 valid loss: 107.53836\n",
      "epoch: 456 train loss: 104.22558 valid loss: 107.55825\n",
      "epoch: 457 train loss: 104.22672 valid loss: 107.57797\n",
      "epoch: 458 train loss: 104.227844 valid loss: 107.597565\n",
      "epoch: 459 train loss: 104.22896 valid loss: 107.61705\n",
      "epoch: 460 train loss: 104.230064 valid loss: 107.63639\n",
      "epoch: 461 train loss: 104.23116 valid loss: 107.655624\n",
      "epoch: 462 train loss: 104.23227 valid loss: 107.674736\n",
      "epoch: 463 train loss: 104.23335 valid loss: 107.69369\n",
      "epoch: 464 train loss: 104.23442 valid loss: 107.71251\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 465 train loss: 104.23551 valid loss: 107.731186\n",
      "epoch: 466 train loss: 104.23656 valid loss: 107.74978\n",
      "epoch: 467 train loss: 104.23763 valid loss: 107.76821\n",
      "epoch: 468 train loss: 104.238686 valid loss: 107.78654\n",
      "epoch: 469 train loss: 104.23973 valid loss: 107.804726\n",
      "epoch: 470 train loss: 104.24075 valid loss: 107.82278\n",
      "epoch: 471 train loss: 104.241776 valid loss: 107.84073\n",
      "epoch: 472 train loss: 104.2428 valid loss: 107.85855\n",
      "epoch: 473 train loss: 104.24382 valid loss: 107.87626\n",
      "epoch: 474 train loss: 104.24482 valid loss: 107.89386\n",
      "epoch: 475 train loss: 104.24582 valid loss: 107.91136\n",
      "epoch: 476 train loss: 104.24681 valid loss: 107.92871\n",
      "epoch: 477 train loss: 104.24779 valid loss: 107.94595\n",
      "epoch: 478 train loss: 104.248764 valid loss: 107.96307\n",
      "epoch: 479 train loss: 104.24973 valid loss: 107.9801\n",
      "epoch: 480 train loss: 104.25071 valid loss: 107.997\n",
      "epoch: 481 train loss: 104.251656 valid loss: 108.01375\n",
      "epoch: 482 train loss: 104.252625 valid loss: 108.03045\n",
      "epoch: 483 train loss: 104.253555 valid loss: 108.046974\n",
      "epoch: 484 train loss: 104.2545 valid loss: 108.06342\n",
      "epoch: 485 train loss: 104.255424 valid loss: 108.07979\n",
      "epoch: 486 train loss: 104.25636 valid loss: 108.09598\n",
      "epoch: 487 train loss: 104.25726 valid loss: 108.112076\n",
      "epoch: 488 train loss: 104.25819 valid loss: 108.1281\n",
      "epoch: 489 train loss: 104.25909 valid loss: 108.14395\n",
      "epoch: 490 train loss: 104.25998 valid loss: 108.15972\n",
      "epoch: 491 train loss: 104.26088 valid loss: 108.1754\n",
      "epoch: 492 train loss: 104.26176 valid loss: 108.19096\n",
      "epoch: 493 train loss: 104.26264 valid loss: 108.206436\n",
      "epoch: 494 train loss: 104.26351 valid loss: 108.2218\n",
      "epoch: 495 train loss: 104.26438 valid loss: 108.23702\n",
      "epoch: 496 train loss: 104.26522 valid loss: 108.25215\n",
      "epoch: 497 train loss: 104.266075 valid loss: 108.26719\n",
      "epoch: 498 train loss: 104.26693 valid loss: 108.282135\n",
      "epoch: 499 train loss: 104.26779 valid loss: 108.29699\n",
      "epoch: 500 train loss: 104.26862 valid loss: 108.31174\n"
     ]
    }
   ],
   "source": [
    "# Mini-batch gradient-descent training loop.\n",
    "# NOTE(review): relies on globals from earlier cells — W, B (trainable\n",
    "# variables), grad()/loss() helpers, optimizer, training_epochs, batch_size,\n",
    "# train_num, the x/y train/valid splits, and the loss_list_* accumulators.\n",
    "# The grad(...)/loss(...).numpy() call pattern suggests TensorFlow eager\n",
    "# mode, but the definitions are outside this cell — confirm against the\n",
    "# cells above.\n",
    "for epoch in range(training_epochs):\n",
    "    # Full batches only: the trailing partial batch of\n",
    "    # train_num % batch_size samples is skipped every epoch.\n",
    "    for step in range(int(train_num/batch_size)):\n",
    "        # Slice out the current mini-batch.\n",
    "        rx = x_train[step*batch_size:(step+1)*batch_size]\n",
    "        ry = y_train[step*batch_size:(step+1)*batch_size]\n",
    "        # Gradients of the loss w.r.t. [W, B] on this batch, then one\n",
    "        # optimizer step (grad_ must be ordered to match [W, B]).\n",
    "        grad_ = grad(W,rx,B,ry)\n",
    "        optimizer.apply_gradients(zip(grad_,[W,B]))\n",
    "    # Once per epoch: evaluate loss on the *full* train and valid sets\n",
    "    # and record it for the learning-curve plot in a later cell.\n",
    "    loss_train = loss(W,x_train,B,y_train).numpy()\n",
    "    loss_valid = loss(W,x_valid,B,y_valid).numpy()\n",
    "    loss_list_train.append(loss_train)\n",
    "    loss_list_valid.append(loss_valid)\n",
    "    print(\"epoch:\",(epoch+1),\"train loss:\",loss_train,\"valid loss:\",loss_valid)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 143,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<matplotlib.legend.Legend at 0x1cf88bfd188>"
      ]
     },
     "execution_count": 143,
     "metadata": {},
     "output_type": "execute_result"
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEGCAYAAACKB4k+AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8vihELAAAACXBIWXMAAAsTAAALEwEAmpwYAAAlKklEQVR4nO3de5QV1Z328e+vL/TpG93cRC6SbiIqd8QWMARjRB1Fx0siYhIVDRMyGd9MfDNxJBlXJpNl1pg179Lo0sHR1zjqGC/BMDrqxBjUGNcoDhglKPqCDgZQ5KI0NN2NdPN7/6h9Tp++YQNdfbq7ns9atapqV51zdjXNeXrvqtpl7o6IiAhAXq4rICIivYdCQUREMhQKIiKSoVAQEZEMhYKIiGQU5LoCR2Lo0KFeVVWV62qIiPQpq1ev3uHuwzra1qdDoaqqilWrVuW6GiIifYqZvdfZNnUfiYhIhkJBREQyFAoiIpLRp88piEj/t3//fjZv3kxjY2Ouq9LnpFIpRo8eTWFhYZdfo1AQkV5t8+bNlJeXU1VVhZnlujp9hruzc+dONm/eTHV1dZdfp+4jEenVGhsbGTJkiALhEJkZQ4YMOeQWlkJBRHo9BcLhOZyfWyJD4cUX4frroakp1zUREeldEhkKL78MP/kJNDTkuiYi0hfs2rWLf/7nfz7k182bN49du3YddJ8rr7ySZcuWHWbNul8iQyGViub79uW2HiLSN3QWCk2f0t3w1FNPUVlZGVOt4pHIUCgqiua6wk1EumLJkiW88847TJs2jZNPPpk5c+Zw/vnnM2HCBAAuvPBCTjrpJCZOnMidd96ZeV1VVRU7duxg48aNjB8/nm984xtMnDiRs846i4YOuipWrFjBiSeeyOTJk/n617/OvvCX65IlS5gwYQJTpkzhe9/7HgC//OUvmTRpElOnTuXUU0/ttmNN5CWp6ZaCQkGkb7nmGnjtte59z2nT4Gc/O/g+N954I2vXruW1117j+eef59xzz2Xt2rWZSz1//vOfM3jwYBoaGjj55JP58pe/zJAhQ1q9x/r163nwwQe56667uOSSS3j00Ue57LLLMtsbGxu58sorWbFiBccddxxXXHEFS5cu5fLLL2f58uW89dZbmFmmO+rHP/4xTz/9NKNGjfrULqpDkciWgrqPRORIzJgxo9W1/7feeitTp05l1qxZbNq0ifXr17d7TXV1NdOmTQPgpJNOYuPGja22v/3221RXV3PccccBsHDhQl544QUqKipIpVIsWrSIX/3qV5SUlAAwe/ZsrrzySu666y6am5u77djUUhCRPuPT/qLvKaWlpZnl559/nt/+9re89NJLlJSUcNppp3V4b0BRut8ayM/P77D7qCMFBQW88sorrFixgmXLlnHbbbfx7LPPcscdd7By5UqefPJJTjrpJFavXt2udXI4EhkKOqcgIoeivLycPXv2dLittraWQYMGUVJSwltvvcXLL798WJ9x/PHHs3HjRjZs2MCxxx7L/fffzxe+8AXq6uqor69n3rx5zJ49m7FjxwLwzjvvMHPmTGbOnMl//ud/smnTJoXC4VJLQUQOxZAhQ5g9ezaTJk2iuLiY4cOHZ7adffbZ3HHHHYwfP57jjz+eWbNmHdZnpFIp7rnnHubPn09TUxMnn3wyf/mXf8lHH33EBRdcQGNjI+7OTTfdBMC1117L+vXrcXfmzp3L1KlTu+VYzd275Y1yoaamxg/nITuvvAIzZ8KTT8K8eTFUTES6zbp16xg/fnyuq9FndfTzM7PV7l7T0f6JPNGs7iMRkY4lMhTUfSQi0rFEh4IuSRURaS3RoaCWgohIa4kMBZ1TEBHpWCJDQS0FEZGOJTIU0i0FnVMQkTiUlZUB8P7773PxxRd3uM9pp51G+pL69MB5vUEiQ8EMBgxQS0FE4jVy5Mhe9ayErkhkKEDUhaRQEJGuWLJkCbfffntm/Uc/+hE33HADc+fOZfr06Uye
PJnHHnus3es2btzIpEmTAGhoaODSSy9l/PjxXHTRRZ2OfXTTTTcxadIkJk2axM/CYE979+7l3HPPZerUqUyaNImHH344U6+2Q2ofqUQOcwEKBZE+KUdjZy9YsIBrrrmGq6++GoBHHnmEp59+mr/+679m4MCB7Nixg1mzZnH++ed3+lzkpUuXUlJSwrp161izZg3Tp09vt8/q1au55557WLlyJe7OzJkz+cIXvsC7777LyJEjefLJJ4FovKWdO3d2OKT2kYq1pWBmlWa2zMzeMrN1ZnaKmQ02s2fMbH2YDwr7mpndamYbzGyNmbX/iXWjVErnFESka0488US2bdvG+++/z+uvv86gQYM4+uij+cEPfsCUKVM444wz2LJlCx9++GGn7/HCCy9knp8wZcoUpkyZ0m6fF198kYsuuojS0lLKysr40pe+xO9//3smT57MM888w3XXXcfvf/97KioqOh1S+0jF3VK4Bfi1u19sZgOAEuAHwAp3v9HMlgBLgOuAc4BxYZoJLA3zWBQVqaUg0ufkcOzs+fPns2zZMrZu3cqCBQt44IEH2L59O6tXr6awsJCqqqoOh8zuDscddxyvvvoqTz31FNdffz1z587lhz/8YYdDah+p2FoKZlYBnArcDeDun7j7LuAC4N6w273AhWH5AuA+j7wMVJrZiLjqp+4jETkUCxYs4KGHHmLZsmXMnz+f2tpajjrqKAoLC3nuued47733Dvr6U089lV/84hcArF27ljVr1rTbZ86cOfz7v/879fX17N27l+XLlzNnzhzef/99SkpKuOyyy7j22mt59dVXqauro7a2lnnz5nHzzTfz+uuvd8txxtlSqAa2A/eY2VRgNfAdYLi7fxD22Qqkx6AdBWzKev3mUPZBVhlmthhYDDBmzJjDrpy6j0TkUEycOJE9e/YwatQoRowYwde+9jX+/M//nMmTJ1NTU8MJJ5xw0Nd/61vf4qqrrmL8+PGMHz+ek046qd0+06dP58orr2TGjBkA/MVf/AUnnngiTz/9NNdeey15eXkUFhaydOlS9uzZ0+GQ2kcqtqGzzawGeBmY7e4rzewWYDfwbXevzNrvY3cfZGZPADe6+4uhfAVwnbt3Ojb24Q6dDTBnDhQWQje0tkQkRho6+8j0pqGzNwOb3X1lWF8GTAc+THcLhfm2sH0LcEzW60eHslio+0hEpL3YQsHdtwKbzOz4UDQXeBN4HFgYyhYC6Yt7HweuCFchzQJqs7qZup1CQUSkvbivPvo28EC48uhd4CqiIHrEzBYB7wGXhH2fAuYBG4D6sG9sdE5BpO9w906v/5fOHc7pgVhDwd1fAzrqt5rbwb4OXB1nfbLpklSRviGVSrFz506GDBmiYDgE7s7OnTtJpUcA7SLd0Swivdro0aPZvHkz27dvz3VV+pxUKsXo0aMP6TWJDgV1H4n0foWFhVRXV+e6GomR2AHx1H0kItJeYkNB3UciIu0lOhSam6GpKdc1ERHpPRIdCqDzCiIi2RIbCulHcqoLSUSkRWJDId1SUCiIiLRIfCio+0hEpEViQ0HdRyIi7SU2FNR9JCLSnkJBoSAikqFQUCiIiGQkNhSKi6N5Q0Nu6yEi0pskNhRKSqJ5fX1u6yEi0pskPhTUUhARaZH4UFBLQUSkhUJBoSAikpHYUEifaFYoiIi0SGwoFBZCQYFCQUQkWzJD4ZZboLycQcWNCgURkSzJDAV3qKtTKIiItJHMUAgnFAalGhQKIiJZEh8Kuk9BRKRFrKFgZhvN7I9m9pqZrQplg83sGTNbH+aDQrmZ2a1mtsHM1pjZ9NgqFgY+qihS95GISLaeaCl80d2nuXtNWF8CrHD3ccCKsA5wDjAuTIuBpbHVKLQUKgao+0hEJFsuuo8uAO4Ny/cCF2aV3+eRl4FKMxsRSw1CS2FgoUJBRCRb3KHgwG/MbLWZLQ5lw939g7C8FRgelkcBm7JeuzmUtWJmi81slZmt2r59++HVKrQUygvVfSQi
kq0g5vf/vLtvMbOjgGfM7K3sje7uZuaH8obufidwJ0BNTc0hvTYjhEJZvloKIiLZYm0puPuWMN8GLAdmAB+mu4XCfFvYfQtwTNbLR4ey7he6j8oK1FIQEckWWyiYWamZlaeXgbOAtcDjwMKw20LgsbD8OHBFuAppFlCb1c3UvbJaCrokVUSkRZzdR8OB5WaW/pxfuPuvzey/gUfMbBHwHnBJ2P8pYB6wAagHroqtZqGlUJoXdR+5Q1RNEZFkiy0U3P1dYGoH5TuBuR2UO3B1XPVpJbQUSvIaaW6G/fthwIAe+WQRkV4t0Xc0FxP1Hem8gohIJJmhUFQEKBRERNpKZiiYQSpFikZAoSAikpbMUABIpShytRRERLIlNxSKiylqjkJBl6WKiEQSHQoDDqj7SEQkW3JDIZWisFndRyIi2ZIbCsXFFDappSAiki25oZBKUbBfLQURkWzJDYXiYvIVCiIirSQ8FNR9JCKSLbmhkEqRty9qKezdm+O6iIj0EskNheJirKGBVArq6nJdGRGR3iG5oZBKQWMj5eWwZ0+uKyMi0jskNxSKi6GhgfJytRRERNKSHQqNjZSVqaUgIpKW3FBIpeCTT6goa1ZLQUQkSG4ohAftDCpuVEtBRCRIbiiE5zQPLlEoiIikJTcU0i2FVIO6j0REAoVCqkEtBRGRILmhELqPKoqi7iP3HNdHRKQXSG4ohJbCwMIGmpth374c10dEpBdIbiiElsLAwmj8I3UhiYj0QCiYWb6Z/cHMngjr1Wa20sw2mNnDZjYglBeF9Q1he1WsFSspAaA8PxoiVSebRUR6pqXwHWBd1vpPgZvd/VjgY2BRKF8EfBzKbw77xae0FIDyvGiIVLUURERiDgUzGw2cC/zfsG7A6cCysMu9wIVh+YKwTtg+N+wfj7IyAEpRKIiIpMXdUvgZ8LfAgbA+BNjl7k1hfTMwKiyPAjYBhO21Yf9WzGyxma0ys1Xbt28//JqFlkI6FNR9JCISYyiY2XnANndf3Z3v6+53unuNu9cMGzbs8N8ohELxAbUURETSCmJ879nA+WY2D0gBA4FbgEozKwitgdHAlrD/FuAYYLOZFQAVwM7YahdONKeaoiaCQkFEJMaWgrt/391Hu3sVcCnwrLt/DXgOuDjsthB4LCw/HtYJ2591j/GWsrw8KCmhqFndRyIiabm4T+E64LtmtoHonMHdofxuYEgo/y6wJPaalJYyYL+6j0RE0rrUfWRm3wHuAfYQXUl0IrDE3X/Tlde7+/PA82H5XWBGB/s0AvO78n7dprSU/Ma9FBSopSAiAl1vKXzd3XcDZwGDgMuBG2OrVU8pLcXq6vScZhGRoKuhkL5fYB5wv7u/kVXWd5WVwd69eiSniEjQ1VBYbWa/IQqFp82snJZ7D/qu0lLYu5fycnUfiYhA1y9JXQRMA95193ozGwxcFVutekppKXz0EQMHQm1trisjIpJ7XW0pnAK87e67zOwy4HqiO477ttBSqKxUKIiIQNdDYSlQb2ZTgb8B3gHui61WPaWsDOrqqKyEXbtyXRkRkdzraig0hRvJLgBuc/fbgfL4qtVDsloKCgURka6fU9hjZt8nuhR1jpnlAYXxVauHpEOhwtm1y3CHGMdlFRHp9braUlgA7CO6X2Er0ZhF/xRbrXpKaSk0NzOk/BP274eGhlxXSEQkt7oUCiEIHgAqwuinje7eP84pAENT0fWo6kISkaTrUiiY2SXAK0TDUFwCrDSziw/+qj4gDJ89JBWNf6RQEJGk6+o5hb8DTnb3bQBmNgz4LS1PUOubQigMGhCFwscf57IyIiK519VzCnnpQAh2HsJre68QCpWFaimIiEDXWwq/NrOngQfD+gLgqXiq1IPCOYWKAoWCiAh0MRTc/Voz+zLR09QA7nT35fFVq4eElkK56USziAgcwuM43f1R4NEY69LzQiiUopaCiAh8SiiY2R6go0diGuDuPjCWWvWUEAqFn+yluFihICJy0FBw974/
lMXBhHMKGupCRCTS968gOhKhpaBB8UREIskOhVQK8vNhzx6FgogISQ8FM6iogNpahYKICEkPBVAoiIhkUSgMHAi7d1NZqWEuREQUCqGlMHQofPQRNDfnukIiIrkTWyiYWcrMXjGz183sDTP7h1BebWYrzWyDmT1sZgNCeVFY3xC2V8VVt1YGDoTaWoYNA3e1FkQk2eJsKewDTnf3qcA04GwzmwX8FLjZ3Y8FPgYWhf0XAR+H8pvDfvGrqIDduxk6NFrdvr1HPlVEpFeKLRQ8UhdWC8PkwOm0DLl9L3BhWL4grBO2zzXrgYdjZnUfAezYEfsnioj0WrGeUzCzfDN7DdgGPAO8A+xy96awy2ZgVFgeBWwCCNtrgSEdvOdiM1tlZqu2d8ef9enuo6HRaB5qKYhIksUaCu7e7O7TiJ7pPAM4oRve8053r3H3mmHDhh3p20UthaYmhpY1AmopiEiy9cjVR+6+C3gOOAWoNLP0mEujgS1heQtwDEDYXkH0MJ94VVQAMLSwFlBLQUSSLc6rj4aZWWVYLgbOBNYRhUP6+c4LgcfC8uNhnbD9WXfvaITW7jUwGug1ta+WsjK1FEQk2br8PIXDMAK418zyicLnEXd/wszeBB4ysxuAPwB3h/3vBu43sw3AR8ClMdatRWgpsHs3w4appSAiyRZbKLj7GuDEDsrfJTq/0La8EZgfV306FVoK6SuQ1FIQkSTTHc3plkK4gU0tBRFJMoVCVveRWgoiknQKhazuI7UURCTpFAptzik0NEB9fW6rJCKSKwqFggIoKclcfQRqLYhIcikUIDP+0dFHR6tbt+a2OiIiuaJQgEwojBwZrW7ZcvDdRUT6K4UCkH4WZzoU3n8/p7UREckZhQLAkCGwcyfDhkF+vkJBRJJLoQBRKOzYQV4ejBihUBCR5FIoQKalADBqlEJBRJJLoQBRKNTXQ2MjI0cqFEQkuRQKEIUCwM6djBypq49EJLkUCtAuFHbt0l3NIpJMCgWAoUOjeQgFgA8+yF11RERyRaEA7VoKoPMKIpJMCgVoFQqjRkWLOq8gIkmkUIBWoTBmTLS4cWPOaiMikjMKBYBUKhopdccOysujUwz/8z+5rpSISM9TKKRl3cBWXa1QEJFkUiiktQmFd9/NcX1ERHJAoZA2dGgmFMaOhT/9CZqbc1wnEZEeplBIa9NS2L9fVyCJSPIoFNKGDs08h3Ps2KhIXUgikjSxhYKZHWNmz5nZm2b2hpl9J5QPNrNnzGx9mA8K5WZmt5rZBjNbY2bT46pbh44+OhrforGR6uqoSCebRSRp4mwpNAF/4+4TgFnA1WY2AVgCrHD3ccCKsA5wDjAuTIuBpTHWrb0RI6L51q2MGQN5eWopiEjyxBYK7v6Bu78alvcA64BRwAXAvWG3e4ELw/IFwH0eeRmoNLMRcdWvnXQofPABhYVQVQXr1/fYp4uI9Ao9ck7BzKqAE4GVwHB3Tw83txUYHpZHAZuyXrY5lLV9r8VmtsrMVm0P5wC6RVYoAEyYAG+80X1vLyLSF8QeCmZWBjwKXOPuu7O3ubsDfijv5+53unuNu9cMGzas+yraJhQmToS334ampu77CBGR3i7WUDCzQqJAeMDdfxWKP0x3C4X5tlC+BTgm6+WjQ1nPGDYsOpGQ1VLYvx82bOixGoiI5FycVx8ZcDewzt1vytr0OLAwLC8EHssqvyJchTQLqM3qZopffj4MH96qpQDqQhKRZImzpTAbuBw43cxeC9M84EbgTDNbD5wR1gGeAt4FNgB3AX8VY906NmJEJhTGjwczePPNHq+FiEjOFMT1xu7+ImCdbJ7bwf4OXB1XfbpkxIjMbcwlJdEVSGopiEiS6I7mbFktBYBJk2DNmhzWR0SkhykUso0YAdu2ZS45OvlkeOst2L37U14nItJPKBSyjRwJ7lEwADNmRKurV+e4XiIiPUShkG306Gj+pz8BUUsBYOXKHNVHRKSHKRSytRkJb/BgGDcOXnklh3US
EelBCoVsVVXRPGt41BkzFAoikhwKhWylpXDUUa1C4ZRToqtU33knh/USEekhCoW2qqtbhcIZZ0TzZ57JUX1ERHqQQqGtNqFw3HEwZoxCQUSSQaHQVnV1dPVRczMQDXVx5pmwYoVGTBWR/k+h0FZ1dfTtv3lzpujMM6G2Fl5+OYf1EhHpAQqFtjp4QPM550BREfzylzmqk4hID1EotPXZz0bzrAcpDBwI554LjzyS6VUSEemXFAptfeYzUFzcbnjUSy+FrVvh+edzUy0RkZ6gUGgrLy96ws7ata2KzzsPKirgrrtyVC8RkR6gUOjIpEntWgrFxfCNb8CyZZmhkURE+h2FQkcmToyeq7BzZ6vib387mt96aw7qJCLSAxQKHZk0KZq3aS2MGQNf/Srcdhu8914O6iUiEjOFQkc6CQWAn/wkOu1w3XU9XCcRkR6gUOjIqFFQWQmvv95u0zHHRIHw8MO6b0FE+h+FQkfMojGzX3qpw80/+EG0efHi6HGdIiL9hUKhM5/7HPzxjx0+oLmwEB56KLrL+ayzWt38LCLSpykUOjN7dvSA5k4GPKquhl//GurqYOZMeOGFHq6fiEgMFAqdmTEjOqP8X//V6S7TpkU9TBUVcNpp8Fd/1WocPRGRPqcgrjc2s58D5wHb3H1SKBsMPAxUARuBS9z9YzMz4BZgHlAPXOnur8ZVty4ZOBAmT4YXXzzobscfD3/4Q3SeYelSuPtumD8fFiyA00+PHuYmIofIPRqtOD3t39/x8sG2dbZfc3Pn04EDh7+9O7cdOND5lN7+05/CwoXd/qOPLRSAfwVuA+7LKlsCrHD3G81sSVi/DjgHGBemmcDSMM+tuXOjmxL27IHy8k53KyuLbmj77nfhn/4JHnwQHngA8vNhyhSYPh3Gjo26nEaPjloW6WngwKhBkkvuLdOBA4e/fiSv7Q2flf5Z5Goe23sfcPKb9pG/v5H8/Y3kNX2SmfKbW+bW9An5HZR3Om+O5vlN+8hvV9ZmvfkT8g40RVPz/sxy/oH9HZd77xl5stny8bx8Dlg+bnnRPLOez4EubEtvb11WlFlPvzbzuvT++Xl4QR4HLA8nL7xHNJXXVTM5huONLRTc/QUzq2pTfAFwWli+F3ieKBQuAO5zdwdeNrNKMxvh7h/EVb8uOf98uOkm+M1v4Mtf/tTdq6rg9tvh5pvhueeiRsZLL8F//Ads29b56/LzoaCg9VRYGIVF9hdXZxN0vu3TvgzTr5f4GQcopoES6illLyXUt5pSNHZpKu7ifi3Tvm4/liby+YQBXZzK+YQB7KeQJgpoooD9FNIcltPr7ZatZbm5zT7R9g5e06a8yQpoopAmK8h8XjP5memA5bdabyb6Ms5ehuiCxI7/TYEDbco62LfT1x/BvjcV07dCoRPDs77otwLDw/IoYFPWfptDWW5DYfZsGDwYHn+8S6GQNmAA/NmfRVNafT1s3Ajvvx89sKe2Fnbtii5u6qwl3Nwc/SJ0ZYL2ZXl5uVvvk5+FY40N5NfviaaGOvL2RnOr30v+vnqsYS/5jfVYQz15jS2TNewlr6Eey6yHbfV7sfT6vsbD/lX0AQPwohRelIKiFJ5qvUxRBZ4aHq0XpaC4OLNPQyq9Tyq6ZK6oKPol7cJkRR2sFxZiBfnkAyUGJVm/e+nfw4PNpXfr6VDIcHc3s0P+O9XMFgOLAcaMGdPt9WqloCB6kMITT8Ann0T/MQ5TSQlMmBBN0k3cobGxJWX37Imm2rqW5bq61vODldXVtfQjdUVhYfQPW1oazdNTRQmMGNS+PD11VF5cHE2pVPupqAjLy0PfqdITejoUPkx3C5nZCCDdqbIFOCZrv9GhrB13vxO4E6Cmpib+zo+vfhXuvx+WL4/OHkv32bevpcmU/mJvO3W2LV2+f3/XPqukJDr5U14eTWVlMGxYdKInuyx7nr1cWtr6y7y4OAoFkX6mp0PhcWAhcGOYP5ZV/r/M7CGiE8y1OT+fkHbWWdHT2G6/
XaHQkcZG+Phj+OijaEovH2ye/mLf14W+7vLy1mfmjzoKxo2LhiHJLk+fte/oi720NDpxIyKfKs5LUh8kOqk81Mw2A39PFAaPmNki4D3gkrD7U0SXo24guiT1qrjqdcjy8uBb34LvfQ9WrYKamlzXKB719bBjR8uU/pL/tC/4hobO3zMvL/ryHjwYBg2K5tXVHX+hZ0/p7eXl+jIX6WHmffjyk5qaGl+1alX8H1RbG/11esIJ8Lvf9f4zZvv3R8+CSH/Bb9/e+gu/o/KDfbmXlrZ8qafn2cudzXvD9bYi0o6ZrXb3Dv/CzdmJ5j6logJuuAG++U34t3+Dyy/PTT1274YtW6KHRaenDz5ov75jR+fvMXBg1Jc+dCiMHBndSDF0aEvZ0KEwZEg0pb/gj+AEu4j0LQqFrlq0KDrh/M1vRk9mmz69+z9j1y54++3o2tX33oue+5me/+lP0fa2BgyAESPg6KOjO+Q+97lo+aijOv6y1xe8iByEQqGr8vOjBzTX1MAZZ0TLp59+6O9z4ABs2hSNuZ2e1q2L5h9+2HrfysrocW+f+QzMmRPNR41qCYGjj4726e3dWSLSZygUDsXw4dE5hfPOi4bA+MpXoocqzJoVXU+ebdcuWL++ZUoHwNtvt+6/r6yE8eNh3rzonMUJJ0R/8Y8ZE3X1iIj0IIXCoRo7FlauhH/8x2jAowcfjG5yGzYsugxy794oEOrrW15jFv2VP348fPGLLV/+J5wQvU5/6YtIL6Grj47E3r3RuEirVkWDG9XVRVfqVFZGrYpx46Lps59t35IQEckRXX0Ul9JSuOiiaBIR6Qd0EbmIiGQoFEREJEOhICIiGQoFERHJUCiIiEiGQkFERDIUCiIikqFQEBGRjD59R7OZbSd6WM/hGAocZIzpfknHnAw65mQ4kmP+jLsP62hDnw6FI2Fmqzq7zbu/0jEng445GeI6ZnUfiYhIhkJBREQykhwKd+a6AjmgY04GHXMyxHLMiT2nICIi7SW5pSAiIm0oFEREJCORoWBmZ5vZ22a2wcyW5Lo+3cXMfm5m28xsbVbZYDN7xszWh/mgUG5mdmv4Gawxs+m5q/nhM7NjzOw5M3vTzN4ws++E8n573GaWMrNXzOz1cMz/EMqrzWxlOLaHzWxAKC8K6xvC9qqcHsBhMrN8M/uDmT0R1vv18QKY2UYz+6OZvWZmq0JZrL/biQsFM8sHbgfOASYAXzGzCbmtVbf5V+DsNmVLgBXuPg5YEdYhOv5xYVoMLO2hOna3JuBv3H0CMAu4Ovx79ufj3gec7u5TgWnA2WY2C/gpcLO7Hwt8DCwK+y8CPg7lN4f9+qLvAOuy1vv78aZ90d2nZd2TEO/vtrsnagJOAZ7OWv8+8P1c16sbj68KWJu1/jYwIiyPAN4Oy/8CfKWj/fryBDwGnJmU4wZKgFeBmUR3txaE8szvOfA0cEpYLgj7Wa7rfojHOTp8AZ4OPAFYfz7erOPeCAxtUxbr73biWgrAKGBT1vrmUNZfDXf3D8LyVmB4WO53P4fQTXAisJJ+ftyhK+U1YBvwDPAOsMvdm8Iu2ceVOeawvRYY0qMVPnI/A/4WOBDWh9C/jzfNgd+Y2WozWxzKYv3dLjjcmkrf4+5uZv3yGmQzKwMeBa5x991mltnWH4/b3ZuBaWZWCSwHTshtjeJjZucB29x9tZmdluPq9LTPu/sWMzsKeMbM3sreGMfvdhJbCluAY7LWR4ey/upDMxsBEObbQnm/+TmYWSFRIDzg7r8Kxf3+uAHcfRfwHFH3SaWZpf/Qyz6uzDGH7RXAzp6t6RGZDZxvZhuBh4i6kG6h/x5vhrtvCfNtROE/g5h/t5MYCv8NjAtXLgwALgUez3Gd4vQ4sDAsLyTqc0+XXxGuWJgF1GY1SfsMi5oEdwPr3P2mrE399rjNbFhoIWBmxUTnUNYRhcPFYbe2x5z+WVwMPOuh07kvcPfvu/tod68i+v/6rLt/jX56vGlmVmpm5ell4Cxg
LXH/buf6REqOTt7MA/4fUT/s3+W6Pt14XA8CHwD7ifoTFxH1pa4A1gO/BQaHfY3oKqx3gD8CNbmu/2Ee8+eJ+l3XAK+FaV5/Pm5gCvCHcMxrgR+G8rHAK8AG4JdAUShPhfUNYfvYXB/DERz7acATSTjecHyvh+mN9HdV3L/bGuZCREQykth9JCIinVAoiIhIhkJBREQyFAoiIpKhUBARkQyFgkgPMrPT0qN8ivRGCgUREclQKIh0wMwuC88seM3M/iUMQFdnZjeHZxisMLNhYd9pZvZyGMN+edb49sea2W/Dcw9eNbPPhrcvM7NlZvaWmT0Q7srGzG606LkQa8zs/+To0CXhFAoibZjZeGABMNvdpwHNwNeAUmCVu08Efgf8fXjJfcB17j6F6E7SdPkDwO0ePffgc0R3m0M0kus1RM/zGAvMNrMhwEXAxPA+N8R5jCKdUSiItDcXOAn47zA89VyiL+8DwMNhn38DPm9mFUClu/8ulN8LnBrGrBnl7ssB3L3R3evDPq+4+2Z3P0A0LEcV0fDOjcDdZvYlIL2vSI9SKIi0Z8C9Hj3tapq7H+/uP+pgv8MdI2Zf1nIz0YNimohGwFwGnAf8+jDfW+SIKBRE2lsBXBzGsE8/E/czRP9f0qNyfhV40d1rgY/NbE4ovxz4nbvvATab2YXhPYrMrKSzDwzPg6hw96eA/w1MjeG4RD6VHrIj0oa7v2lm1xM98SqPaNTZq4G9wIywbRvReQeIhi++I3zpvwtcFcovB/7FzH4c3mP+QT62HHjMzFJELZXvdvNhiXSJRkkV6SIzq3P3slzXQyRO6j4SEZEMtRRERCRDLQUREclQKIiISIZCQUREMhQKIiKSoVAQEZGM/w/VUPOmtB3XXAAAAABJRU5ErkJggg==\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {
      "needs_background": "light"
     },
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Plot training vs. validation loss per epoch.\n",
    "plt.xlabel(\"epochs\")\n",
    "plt.ylabel(\"loss\")\n",
    "# Pass color via the explicit keyword rather than a positional fmt string.\n",
    "plt.plot(loss_list_train, color=\"blue\", label=\"trainloss\")\n",
    "plt.plot(loss_list_valid, color=\"red\", label=\"validloss\")\n",
    "plt.legend(loc=\"upper right\")  # same as loc=1, but self-documenting\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
