{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# tf 自定义求导"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "2.0.0\n",
      "sys.version_info(major=3, minor=6, micro=10, releaselevel='final', serial=0)\n",
      "matplotlib 3.1.2\n",
      "numpy 1.18.1\n",
      "pandas 0.25.3\n",
      "sklearn 0.22.1\n",
      "tensorflow 2.0.0\n",
      "tensorflow_core.keras 2.2.4-tf\n"
     ]
    }
   ],
   "source": [
    "# 导入\n",
    "import matplotlib as mpl\n",
    "import matplotlib.pyplot as plt\n",
    "%matplotlib inline\n",
    "import numpy as np\n",
    "import sklearn\n",
    "import pandas as pd\n",
    "import os\n",
    "import sys\n",
    "import time\n",
    "import tensorflow as tf\n",
    "from tensorflow import keras\n",
    "\n",
    "print(tf.__version__)\n",
    "print(sys.version_info)\n",
    "for module in mpl,np,pd,sklearn,tf,keras:\n",
    "    print(module.__name__,module.__version__)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 求导回顾--近似求导"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "7.999999999999119\n"
     ]
    }
   ],
   "source": [
    "# 一元函数求导\n",
    "def f(x):\n",
    "    return 3. * x ** 2 + 2. * x - 1\n",
    "\n",
    "def approximate_derivative(f, x, eps=1e-3):\n",
    "    return (f(x + eps) - f(x - eps)) / (2. * eps)\n",
    "\n",
    "print(approximate_derivative(f, 1.))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(8.999999999993236, 41.999999999994486)\n"
     ]
    }
   ],
   "source": [
    "# 多元函数求导\n",
    "def g(x1, x2):\n",
    "    return (x1 + 5) * (x2 ** 2)\n",
    "\n",
    "def approximate_gradient(g, x1, x2, eps=1e-3):\n",
    "    dg_x1 = approximate_derivative(lambda x: g(x, x2), x1, eps)\n",
    "    dg_x2 = approximate_derivative(lambda x: g(x1, x), x2, eps)\n",
    "    return dg_x1,dg_x2\n",
    "\n",
    "print(approximate_gradient(g, 2., 3.))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 在TensorFlow中自定义导数--tf.GradientTape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor(9.0, shape=(), dtype=float32)\n",
      "GradientTape.gradient can only be called once on non-persistent tapes.\n"
     ]
    }
   ],
   "source": [
    "# tape只能使用一次\n",
    "x1 = tf.Variable(2.0)\n",
    "x2 = tf.Variable(3.0)\n",
    "with tf.GradientTape() as tape:\n",
    "    z = g(x1, x2)\n",
    "    \n",
    "dz_x1 = tape.gradient(z, x1)\n",
    "print(dz_x1)\n",
    "\n",
    "try:\n",
    "    dz_x2 = tape.gradient(z, x2)\n",
    "except RuntimeError as ex:\n",
    "    print(ex)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor(9.0, shape=(), dtype=float32) tf.Tensor(42.0, shape=(), dtype=float32)\n"
     ]
    }
   ],
   "source": [
    "# 解决上述问题：将tape保存，设置persistent=True，用完后自己进行删除\n",
    "x1 = tf.Variable(2.0)\n",
    "x2 = tf.Variable(3.0)\n",
    "with tf.GradientTape(persistent=True) as tape:\n",
    "    z = g(x1, x2)\n",
    "    \n",
    "dz_x1 = tape.gradient(z, x1)\n",
    "dz_x2 = tape.gradient(z, x2)\n",
    "print(dz_x1, dz_x2)\n",
    "\n",
    "del tape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor: id=89, shape=(), dtype=float32, numpy=9.0>, <tf.Tensor: id=95, shape=(), dtype=float32, numpy=42.0>]\n"
     ]
    }
   ],
   "source": [
    "# 一次性求多个梯度\n",
    "x1 = tf.Variable(2.0)\n",
    "x2 = tf.Variable(3.0)\n",
    "with tf.GradientTape() as tape:\n",
    "    z = g(x1, x2)\n",
    "    \n",
    "dz_x1x2 = tape.gradient(z, [x1, x2])\n",
    "print(dz_x1x2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[None, None]\n"
     ]
    }
   ],
   "source": [
    "# 对常量求导\n",
    "x1 = tf.constant(2.0)\n",
    "x2 = tf.constant(3.0)\n",
    "with tf.GradientTape() as tape:\n",
    "    z = g(x1, x2)\n",
    "    \n",
    "dz_x1x2 = tape.gradient(z, [x1, x2])\n",
    "print(dz_x1x2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[<tf.Tensor: id=111, shape=(), dtype=float32, numpy=9.0>, <tf.Tensor: id=117, shape=(), dtype=float32, numpy=42.0>]\n"
     ]
    }
   ],
   "source": [
    "# 解决上述问题：tape设置关注常量\n",
    "x1 = tf.constant(2.0)\n",
    "x2 = tf.constant(3.0)\n",
    "with tf.GradientTape() as tape:\n",
    "    tape.watch(x1)\n",
    "    tape.watch(x2)\n",
    "    z = g(x1, x2)\n",
    "    \n",
    "dz_x1x2 = tape.gradient(z, [x1, x2])\n",
    "print(dz_x1x2)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "<tf.Tensor: id=140, shape=(), dtype=float32, numpy=13.0>"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# 两个目标函数对一个变量求导\n",
    "x = tf.Variable(5.0)\n",
    "with tf.GradientTape() as tape:\n",
    "    z1 = 3 * x\n",
    "    z2 = x ** 2\n",
    "    \n",
    "tape.gradient([z1, z2], x)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[[None, <tf.Tensor: id=177, shape=(), dtype=float32, numpy=6.0>], [<tf.Tensor: id=188, shape=(), dtype=float32, numpy=6.0>, <tf.Tensor: id=186, shape=(), dtype=float32, numpy=14.0>]]\n"
     ]
    }
   ],
   "source": [
    "# 求二阶导数\n",
    "x1 = tf.Variable(2.0)\n",
    "x2 = tf.Variable(3.0)\n",
    "with tf.GradientTape(persistent=True) as outer_tape:\n",
    "    with tf.GradientTape(persistent=True) as inner_tape:\n",
    "        z = g(x1, x2)\n",
    "    inner_grads = inner_tape.gradient(z, [x1, x2])\n",
    "outer_grads = [outer_tape.gradient(inner_grad, [x1, x2]) \n",
    "               for inner_grad in inner_grads]\n",
    "print(outer_grads)\n",
    "\n",
    "del inner_tape\n",
    "del outer_tape"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 模拟实现梯度下降算法"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<tf.Variable 'Variable:0' shape=() dtype=float32, numpy=-0.3333333>\n"
     ]
    }
   ],
   "source": [
    "# 简单的梯度下降模拟\n",
    "learning_rate = 0.1\n",
    "x = tf.Variable(0.0)\n",
    "\n",
    "for _ in range(100):\n",
    "    with tf.GradientTape() as tape:\n",
    "        z = f(x)\n",
    "    dz_dx = tape.gradient(z, x)\n",
    "    x.assign_sub(learning_rate * dz_dx)\n",
    "    \n",
    "print(x)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### GradientTape和optimizer结合使用"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<tf.Variable 'Variable:0' shape=() dtype=float32, numpy=-0.3333333>\n"
     ]
    }
   ],
   "source": [
    "learning_rate = 0.1\n",
    "x = tf.Variable(0.0)\n",
    "optimizer = keras.optimizers.SGD(learning_rate=learning_rate)\n",
    "\n",
    "for _ in range(100):\n",
    "    with tf.GradientTape() as tape:\n",
    "        z = f(x)\n",
    "    dz_dx = tape.gradient(z, x)\n",
    "    optimizer.apply_gradients([(dz_dx, x)])\n",
    "    \n",
    "print(x)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 利用手工求导方式解决回归问题"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      ".. _california_housing_dataset:\n",
      "\n",
      "California Housing dataset\n",
      "--------------------------\n",
      "\n",
      "**Data Set Characteristics:**\n",
      "\n",
      "    :Number of Instances: 20640\n",
      "\n",
      "    :Number of Attributes: 8 numeric, predictive attributes and the target\n",
      "\n",
      "    :Attribute Information:\n",
      "        - MedInc        median income in block\n",
      "        - HouseAge      median house age in block\n",
      "        - AveRooms      average number of rooms\n",
      "        - AveBedrms     average number of bedrooms\n",
      "        - Population    block population\n",
      "        - AveOccup      average house occupancy\n",
      "        - Latitude      house block latitude\n",
      "        - Longitude     house block longitude\n",
      "\n",
      "    :Missing Attribute Values: None\n",
      "\n",
      "This dataset was obtained from the StatLib repository.\n",
      "http://lib.stat.cmu.edu/datasets/\n",
      "\n",
      "The target variable is the median house value for California districts.\n",
      "\n",
      "This dataset was derived from the 1990 U.S. census, using one row per census\n",
      "block group. A block group is the smallest geographical unit for which the U.S.\n",
      "Census Bureau publishes sample data (a block group typically has a population\n",
      "of 600 to 3,000 people).\n",
      "\n",
      "It can be downloaded/loaded using the\n",
      ":func:`sklearn.datasets.fetch_california_housing` function.\n",
      "\n",
      ".. topic:: References\n",
      "\n",
      "    - Pace, R. Kelley and Ronald Barry, Sparse Spatial Autoregressions,\n",
      "      Statistics and Probability Letters, 33 (1997) 291-297\n",
      "\n",
      "(20640, 8)\n",
      "(20640,)\n"
     ]
    }
   ],
   "source": [
    "# 导入数据集 房价预测\n",
    "from sklearn.datasets import fetch_california_housing\n",
    "\n",
    "housing = fetch_california_housing()\n",
    "print(housing.DESCR)\n",
    "print(housing.data.shape)\n",
    "print(housing.target.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(11610, 8) (11610,)\n",
      "(3870, 8) (3870,)\n",
      "(5160, 8) (5160,)\n"
     ]
    }
   ],
   "source": [
    "# 切分数据集\n",
    "from sklearn.model_selection import train_test_split\n",
    "\n",
    "x_train_all,x_test,y_train_all,y_test = train_test_split(\n",
    "    housing.data,housing.target,random_state=7)\n",
    "x_train,x_valid,y_train,y_valid = train_test_split(\n",
    "    x_train_all,y_train_all,random_state=11)\n",
    "\n",
    "print(x_train.shape,y_train.shape)\n",
    "print(x_valid.shape,y_valid.shape)\n",
    "print(x_test.shape,y_test.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 对数据进行归一化\n",
    "from sklearn.preprocessing import StandardScaler\n",
    "\n",
    "scaler = StandardScaler()\n",
    "x_train_scaled = scaler.fit_transform(x_train)\n",
    "x_valid_scaled = scaler.transform(x_valid)\n",
    "x_test_scaled = scaler.transform(x_test)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "metric 使用"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "tf.Tensor(9.0, shape=(), dtype=float32)\n",
      "tf.Tensor(5.0, shape=(), dtype=float32)\n",
      "tf.Tensor(5.0, shape=(), dtype=float32)\n",
      "tf.Tensor(0.0, shape=(), dtype=float32)\n",
      "tf.Tensor(4.0, shape=(), dtype=float32)\n"
     ]
    }
   ],
   "source": [
    "# metric 使用\n",
    "# mse均方差的使用\n",
    "metric = keras.metrics.MeanSquaredError()\n",
    "print(metric([5.], [2.]))\n",
    "print(metric([0.], [1.]))\n",
    "# metric会自动累加\n",
    "print(metric.result())\n",
    "\n",
    "# 如果不想累加，就进行清空\n",
    "metric.reset_states()\n",
    "print(metric.result())\n",
    "metric([1.], [3.])\n",
    "print(metric.result())"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "手动的进行求导，并且在训练集上训练\n",
    "\n",
    "model.fit的步骤\n",
    "1. batch遍历训练集，得到metric\n",
    "   - 这部分要进行自动求导\n",
    "2. 一个epoch结束后，在验证集上进行验证，得到metric"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 准备工作，定义一些用到的变量\n",
    "epochs = 100\n",
    "batch_size = 32\n",
    "steps_per_epoch = len(x_train_scaled) // batch_size\n",
    "optimizer = keras.optimizers.Adam()\n",
    "metric = keras.metrics.MeanSquaredError()\n",
    "\n",
    "# 取数据\n",
    "def random_batch(x, y, batch_size=32):\n",
    "    idx = np.random.randint(0, len(x), size=batch_size)\n",
    "    return x[idx], y[idx]\n",
    "\n",
    "# 构建模型\n",
    "model = keras.models.Sequential([\n",
    "    keras.layers.Dense(30, activation='relu',input_shape=x_train.shape[1:]),\n",
    "    keras.layers.Dense(1)\n",
    "])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "WARNING:tensorflow:Layer dense is casting an input tensor from dtype float64 to the layer's dtype of float32, which is new behavior in TensorFlow 2.  The layer has dtype float32 because it's dtype defaults to floatx.\n",
      "\n",
      "If you intended to run this layer in float32, you can safely ignore this warning. If in doubt, this warning is likely only an issue if you are porting a TensorFlow 1.X model to TensorFlow 2.\n",
      "\n",
      "To change all layers to have dtype float64 by default, call `tf.keras.backend.set_floatx('float64')`. To change just this layer, pass dtype='float64' to the layer constructor. If you are the author of this layer, you can disable autocasting by passing autocast=False to the base Layer constructor.\n",
      "\n",
      "Epoch:  0  train mse:  2.4945946 0  train mse:  3.9841251 0  train mse:  3.5711834 0  train mse:  4.3275003 0  train mse:  3.364888 0  train mse:  3.2155404 0  train mse:  3.0330994 0  train mse:  2.81172 0  train mse:  2.707118 0  train mse:  2.7559512 0  train mse:  2.6659937 0  train mse:  2.58327412.5138779 0  train mse:  2.5741804 0  train mse:  2.5352063 0  train mse:  2.4917145\t valid mse:  1.7801257581802976\n",
      "Epoch:  1  train mse:  1.6175961  train mse:  2.821956 1  train mse:  2.585198 1  train mse:  2.19634  train mse:  2.1405828 1  train mse:  2.0106177 1  train mse:  1.9385847 1  train mse:  1.8852016 1  train mse:  1.84603741.8070872 1  train mse:  1.7687284 1  train mse:  1.7334266 1  train mse:  1.7105762 1  train mse:  1.7090036 1  train mse:  1.6783631 1  train mse:  1.6844273 1  train mse:  1.6599125 1  train mse:  1.639331 1  train mse:  1.6248261 1  train mse:  1.6168331\t valid mse:  1.5155293387635753\n",
      "Epoch:  2  train mse:  1.3421401\t valid mse:  1.446557853217292ain mse:  1.3465428 2  train mse:  1.3630481 2  train mse:  1.3738527 2  train mse:  1.3861898 train mse:  1.3728752 2  train mse:  1.3581455 2  train mse:  1.3664944 train mse:  1.3733693 2  train mse:  1.3778218 2  train mse:  1.3811827 2  train mse:  1.3709284 2  train mse:  1.3727236 2  train mse:  1.3711599 2  train mse:  1.3683205 2  train mse:  1.3643721 2  train mse:  1.3509961 2  train mse:  1.3497355\n",
      "Epoch:  3  train mse:  1.2105167\t valid mse:  1.4218091925230516in mse:  1.2398283 3  train mse:  1.2140273 3  train mse:  1.1951274 3  train mse:  1.1817536 3  train mse:  1.20116423  train mse:  1.2043585 3  train mse:  1.2182587 3  train mse:  1.2064062 3  train mse:  1.2033112 3  train mse:  1.1967642 3  train mse:  1.20654051.208337 3  train mse:  1.2125566 3  train mse:  1.2117215 3  train mse:  1.2128443 3  train mse:  1.2109097\n",
      "Epoch:  4  train mse:  1.26370784  train mse:  1.2760104 4  train mse:  1.2769953 4  train mse:  1.2875564 4  train mse:  1.2823137 4  train mse:  1.2775046 4  train mse:  1.2690676 4  train mse:  1.276482 train mse:  1.2699745 4  train mse:  1.2661325 4  train mse:  1.2689545 4  train mse:  1.2572541 4  train mse:  1.2577678 4  train mse:  1.2590303 4  train mse:  1.2592474\t valid mse:  1.4040955700875322\n",
      "Epoch:  5  train mse:  1.2733024\t valid mse:  1.3989033902391774 mse:  1.2935114  train mse:  1.2624011 5  train mse:  1.2657211 5  train mse:  1.2575312 5  train mse:  1.2746476 5  train mse:  1.2712286 5  train mse:  1.2761316 5  train mse:  1.2708665 5  train mse:  1.2701505 5  train mse:  1.2731861 5  train mse:  1.27375361.2709306 5  train mse:  1.2664531 5  train mse:  1.2704142\n",
      "Epoch:  6  train mse:  1.2480615\t valid mse:  1.395570963549151ain mse:  1.2385768 6  train mse:  1.2585703 6  train mse:  1.257078 6  train mse:  1.2866504 6  train mse:  1.272185 6  train mse:  1.2640966 6  train mse:  1.2620264 6  train mse:  1.25886751.2530452 6  train mse:  1.2494822 6  train mse:  1.2508178 6  train mse:  1.247314 6  train mse:  1.2538939 6  train mse:  1.2533658 6  train mse:  1.2536757 train mse:  1.2518255 6  train mse:  1.246466\n",
      "Epoch:  7  train mse:  1.2644758\t valid mse:  1.3962542881023563in mse:  1.20659077  train mse:  1.2615633 7  train mse:  1.2433447 7  train mse:  1.2276022 7  train mse:  1.2311294 7  train mse:  1.2209784 7  train mse:  1.2323617 7  train mse:  1.2467691 7  train mse:  1.2643714 7  train mse:  1.2646602 7  train mse:  1.2667073 7  train mse:  1.2692174 7  train mse:  1.2662324 7  train mse:  1.2706382 7  train mse:  1.2693839 7  train mse:  1.2700326 7  train mse:  1.2730222 7  train mse:  1.2745286\n",
      "Epoch:  8  train mse:  1.2374368\t valid mse:  1.3910620316823206in mse:  1.1903088  train mse:  1.205654 8  train mse:  1.216156 8  train mse:  1.2125479 8  train mse:  1.2218305 train mse:  1.2187122 8  train mse:  1.2184741 8  train mse:  1.2284068 8  train mse:  1.235452 8  train mse:  1.2323244 8  train mse:  1.2368815\n",
      "Epoch:  9  train mse:  1.2484138\t valid mse:  1.3909726239871263in mse:  1.219291 9  train mse:  1.2240188 9  train mse:  1.2664557 1.248319 9  train mse:  1.2350887 train mse:  1.2325171 9  train mse:  1.2415783 9  train mse:  1.2280924 9  train mse:  1.2476841 9  train mse:  1.2411126 9  train mse:  1.2461957 9  train mse:  1.241361 9  train mse:  1.249165 9  train mse:  1.246889 9  train mse:  1.2470623\n",
      "Epoch:  10  train mse:  1.2403822\t valid mse:  1.3981048690063855rain mse:  1.2649089 10  train mse:  1.2818758 10  train mse:  1.2585775 10  train mse:  1.2294741 10  train mse:  1.2141149 10  train mse:  1.2197225 10  train mse:  1.2374362 10  train mse:  1.2496443 10  train mse:  1.2452406 10  train mse:  1.2444823 10  train mse:  1.2426215 10  train mse:  1.2427176 10  train mse:  1.2381551\n",
      "Epoch:  11  train mse:  1.2714667\t valid mse:  1.3934706731790236rain mse:  1.23586 11  train mse:  1.2175745 11  train mse:  1.2317097 11  train mse:  1.2404904 11  train mse:  1.246846 11  train mse:  1.2496034 11  train mse:  1.2355615 11  train mse:  1.233559 11  train mse:  1.2455868 11  train mse:  1.254241 train mse:  1.2626303 11  train mse:  1.2614541 11  train mse:  1.2785499 11  train mse:  1.2715236\n",
      "Epoch:  12  train mse:  1.2621348\t valid mse:  1.3885524589983431rain mse:  1.2276126 12  train mse:  1.2388405 12  train mse:  1.2366142 train mse:  1.2300511.2294518 12  train mse:  1.2245879 12  train mse:  1.2314062 12  train mse:  1.2575313 12  train mse:  1.2635632 12  train mse:  1.2491311 12  train mse:  1.2442726 train mse:  1.258523 12  train mse:  1.2566569 12  train mse:  1.2547548 12  train mse:  1.2512888 12  train mse:  1.2503442\n",
      "Epoch:  13  train mse:  1.2988993\t valid mse:  1.3874706620198822rain mse:  1.2002951  train mse:  1.2426362 13  train mse:  1.3002318 train mse:  1.2847455 13  train mse:  1.2678169 13  train mse:  1.2683928 13  train mse:  1.2669269  train mse:  1.2725852 13  train mse:  1.3048844 13  train mse:  1.3024799 13  train mse:  1.3095868 13  train mse:  1.3012902\n",
      "Epoch:  14  train mse:  1.24318097 14  train mse:  1.2968658 14  train mse:  1.3185694 14  train mse:  1.2895154 14  train mse:  1.2957064 train mse:  1.2679815 14  train mse:  1.26410261.2474678 14  train mse:  1.2592316 1.2547193 14  train mse:  1.2426674 14  train mse:  1.2389696 14  train mse:  1.2470905 14  train mse:  1.2463316 14  train mse:  1.2521983 14  train mse:  1.2432116\t valid mse:  1.3923625145686096\n",
      "Epoch:  15  train mse:  1.2749435\t valid mse:  1.394332687777801train mse:  1.1693984 15  train mse:  1.1947963 15  train mse:  1.2505845 15  train mse:  1.2514606 15  train mse:  1.2790136 15  train mse:  1.2774113 15  train mse:  1.2760469 train mse:  1.2808044 15  train mse:  1.2833942 1.2782868 15  train mse:  1.270808\n",
      "Epoch:  16  train mse:  1.2729672\t valid mse:  1.4011509477997386n mse:  1.247627 16  train mse:  1.2909676 16  train mse:  1.2584476 16  train mse:  1.2572991 16  train mse:  1.2569556 16  train mse:  1.2716585 16  train mse:  1.2687969 16  train mse:  1.2636542 16  train mse:  1.2701914 16  train mse:  1.2622328\n",
      "Epoch:  17  train mse:  1.254804\t valid mse:  1.3908074211921788train mse:  1.2305644 17  train mse:  1.2399062 17  train mse:  1.2511842 17  train mse:  1.2502162 17  train mse:  1.2542849 17  train mse:  1.2547748 17  train mse:  1.2579359 17  train mse:  1.2415942 17  train mse:  1.2550348 17  train mse:  1.258901517  train mse:  1.2611566 17  train mse:  1.2594616 17  train mse:  1.2537642 1.2571962\n",
      "Epoch:  18  train mse:  1.24764\t valid mse:  1.3882098206753544  train mse:  1.2046518 18  train mse:  1.2002915 18  train mse:  1.2229301 18  train mse:  1.2583191 18  train mse:  1.2502317 18  train mse:  1.246181 18  train mse:  1.2707473 18  train mse:  1.2618325 18  train mse:  1.2619392 18  train mse:  1.2668824 18  train mse:  1.2500758 18  train mse:  1.2454567 train mse:  1.250386  train mse:  1.2453306 18  train mse:  1.2429904\n",
      "Epoch:  19  train mse:  1.2807807\t valid mse:  1.3862938870246984rain mse:  1.2221737 19  train mse:  1.2509665 19  train mse:  1.2463511.2554103 19  train mse:  1.2347275 19  train mse:  1.2231086 19  train mse:  1.2342302 19  train mse:  1.2399385 19  train mse:  1.2583327 19  train mse:  1.2617463 19  train mse:  1.266016 19  train mse:  1.2771978 19  train mse:  1.2797014 19  train mse:  1.2785367\n",
      "Epoch:  20  train mse:  1.2518442\t valid mse:  1.3867045883433955rain mse:  1.2742996 20  train mse:  1.2588638 20  train mse:  1.2806613 20  train mse:  1.2983497 20  train mse:  1.2789645 20  train mse:  1.2701628  train mse:  1.253442220  train mse:  1.2646577 20  train mse:  1.2557137 20  train mse:  1.258235620  train mse:  1.2569194 20  train mse:  1.2585084 20  train mse:  1.2508417 20  train mse:  1.2494782\n",
      "Epoch:  21  train mse:  1.23036\t valid mse:  1.3938352966163188 train mse:  1.2368621 21  train mse:  1.2251741 21  train mse:  1.2136295 21  train mse:  1.1956388 train mse:  1.2249297 21  train mse:  1.2403454  train mse:  1.2471412  train mse:  1.2438568 21  train mse:  1.2335374 21  train mse:  1.2375355 21  train mse:  1.2349763 21  train mse:  1.2358996 21  train mse:  1.23708751.2299486 21  train mse:  1.230265\n",
      "Epoch:  22  train mse:  1.278905\t valid mse:  1.394707458053273 train mse:  1.287322 22  train mse:  1.2872769 22  train mse:  1.2750088 22  train mse:  1.2800325 22  train mse:  1.28590761.2870591 22  train mse:  1.2897935 22  train mse:  1.2894592 22  train mse:  1.2879654 22  train mse:  1.2856047\n",
      "Epoch:  23  train mse:  1.2862709\t valid mse:  1.386412598775141454 23  train mse:  1.2554336 23  train mse:  1.248838  train mse:  1.2559929 23  train mse:  1.2546746 23  train mse:  1.2605201 23  train mse:  1.2708433 23  train mse:  1.2813442 23  train mse:  1.2927109\n",
      "Epoch:  24  train mse:  1.2784739\t valid mse:  1.386114831855061train mse:  1.2733145 24  train mse:  1.2518603 24  train mse:  1.2716072 24  train mse:  1.2732437 1.2742788 24  train mse:  1.2882376 24  train mse:  1.290835 24  train mse:  1.2766774  train mse:  1.2817125\n",
      "Epoch:  25  train mse:  1.2656764\t valid mse:  1.38486221646940954644 25  train mse:  1.3507922 25  train mse:  1.3039627 25  train mse:  1.302138 25  train mse:  1.2954346 25  train mse:  1.2833031 train mse:  1.2737825 25  train mse:  1.2729161  train mse:  1.2774323  train mse:  1.2690485 25  train mse:  1.2796961 25  train mse:  1.2649754 25  train mse:  1.2632985 train mse:  1.2695259 25  train mse:  1.2627195\n",
      "Epoch:  26  train mse:  1.232347\t valid mse:  1.390633892485799 train mse:  1.339455 26  train mse:  1.2912936 26  train mse:  1.2703711 26  train mse:  1.2596064 26  train mse:  1.2424624 26  train mse:  1.2392342 26  train mse:  1.2316213 26  train mse:  1.2317986 26  train mse:  1.2337103 26  train mse:  1.2323022 26  train mse:  1.2290221\n",
      "Epoch:  27  train mse:  1.2807308\t valid mse:  1.3889443520523868782 27  train mse:  1.2541611 27  train mse:  1.235234 27  train mse:  1.26808 27  train mse:  1.2746036 27  train mse:  1.2940183 27  train mse:  1.2959421 27  train mse:  1.3082906 27  train mse:  1.2942411 27  train mse:  1.2993673 27  train mse:  1.2887061 27  train mse:  1.2883288\n",
      "Epoch:  28  train mse:  1.261562\t valid mse:  1.393665808750632 train mse:  1.2369066 28  train mse:  1.236308 28  train mse:  1.2226479 28  train mse:  1.2583894 1.2616947 28  train mse:  1.2616621 28  train mse:  1.2670659 28  train mse:  1.271217 28  train mse:  1.2638081 28  train mse:  1.2605586 28  train mse:  1.2542121 28  train mse:  1.2607186\n",
      "Epoch:  29  train mse:  1.2777542\t valid mse:  1.394192467087762train mse:  1.305354 29  train mse:  1.3028747 29  train mse:  1.2843775 29  train mse:  1.3019772 29  train mse:  1.3090457 29  train mse:  1.2943447 29  train mse:  1.2898717 29  train mse:  1.2755182 29  train mse:  1.2825019 29  train mse:  1.2898879 29  train mse:  1.2792033 29  train mse:  1.2628281 29  train mse:  1.2676581 29  train mse:  1.2741089 29  train mse:  1.2790145 29  train mse:  1.2811062\n",
      "Epoch:  30  train mse:  1.2849946\t valid mse:  1.3855838259696558rain mse:  1.2898095 30  train mse:  1.2838789  train mse:  1.2806804 30  train mse:  1.281363 30  train mse:  1.2928518 30  train mse:  1.3025893 30  train mse:  1.2828352 30  train mse:  1.2796886\n",
      "Epoch:  31  train mse:  1.2736295\t valid mse:  1.3851486384190557rain mse:  1.3453559 31  train mse:  1.255799 31  train mse:  1.2441057 train mse:  1.2685586 31  train mse:  1.2752838 31  train mse:  1.2667987 train mse:  1.2764097 31  train mse:  1.2715682 31  train mse:  1.2731756 31  train mse:  1.2561364 1.2685654 31  train mse:  1.2760642  train mse:  1.2740626 31  train mse:  1.2737803 31  train mse:  1.2740152\n",
      "Epoch:  32  train mse:  1.2405376\t valid mse:  1.3892600764219722rain mse:  1.2787799 32  train mse:  1.250758 32  train mse:  1.2579637 32  train mse:  1.2425209 32  train mse:  1.2337486 32  train mse:  1.237176 32  train mse:  1.2442199  train mse:  1.2462744 32  train mse:  1.2351495 32  train mse:  1.2370017 32  train mse:  1.230454 32  train mse:  1.2352486 1.2392751\n",
      "Epoch:  33  train mse:  1.2664438\t valid mse:  1.3878990671899105train mse:  1.167199433  train mse:  1.195475 33  train mse:  1.2499545 33  train mse:  1.2627423 33  train mse:  1.248618 33  train mse:  1.2626518 33  train mse:  1.2613653 33  train mse:  1.251148  train mse:  1.262957\n",
      "Epoch:  34  train mse:  1.2682027\t valid mse:  1.3877841648029703rain mse:  1.260386 34  train mse:  1.26865434  train mse:  1.2588456 34  train mse:  1.2564191 34  train mse:  1.24607 34  train mse:  1.2506559 34  train mse:  1.2547474 34  train mse:  1.2576752 34  train mse:  1.2600456 34  train mse:  1.2596135 34  train mse:  1.2687979 34  train mse:  1.2691464\n",
      "Epoch:  35  train mse:  1.2666296\t valid mse:  1.389497721356931 train mse:  1.2255183 35  train mse:  1.2146554 35  train mse:  1.2025172 35  train mse:  1.2063977  train mse:  1.2029679 35  train mse:  1.2198596 35  train mse:  1.2192618 35  train mse:  1.2395883 train mse:  1.2446473 35  train mse:  1.2547772 35  train mse:  1.2650508 35  train mse:  1.264142\n",
      "Epoch:  36  train mse:  1.2643406\t valid mse:  1.388287187594681train mse:  1.2791607 train mse:  1.2790644 36  train mse:  1.2778451 36  train mse:  1.264174 36  train mse:  1.270521 36  train mse:  1.2819695 36  train mse:  1.2829897 1.2715697 1.2704865 36  train mse:  1.263192 1.25917891.2589226\n",
      "Epoch:  37  train mse:  1.2803274\t valid mse: e:  1.2450917 37  train mse:  1.226157 37  train mse:  1.2440394 37  train mse:  1.2597948 37  train mse:  1.2571859 37  train mse:  1.2535115 37  train mse:  1.2613082 37  train mse:  1.2609081 37  train mse:  1.2614807 37  train mse:  1.2593553 37  train mse:  1.2637354 37  train mse:  1.2674557 37  train mse:  1.2763485 37  train mse:  1.2801906 1.3864790406240577\n",
      "Epoch:  38  train mse:  1.2701461\t valid mse:  1.38494856513134028196 38  train mse:  1.2836297 38  train mse:  1.2920352 38  train mse:  1.3043369 38  train mse:  1.2643316 38  train mse:  1.2847654 38  train mse:  1.281334 38  train mse:  1.2655915 38  train mse:  1.2640835 38  train mse:  1.262839 38  train mse:  1.2627362  train mse:  1.2678328 38  train mse:  1.2519672 train mse:  1.2657757 38  train mse:  1.2649919 1.2686698 1.2711617\n",
      "Epoch:  39  train mse:  1.2712039\t valid mse:  1.3953606469484017train mse:  1.2366375 39  train mse:  1.2541915 39  train mse:  1.243589 39  train mse:  1.2318367 39  train mse:  1.2393574 39  train mse:  1.2394615 39  train mse:  1.2546111 39  train mse:  1.2653137 39  train mse:  1.2599345 39  train mse:  1.2753626 39  train mse:  1.2694329 39  train mse:  1.2737572 39  train mse:  1.272095 39  train mse:  1.2744755 39  train mse:  1.2739229\n",
      "Epoch:  40  train mse:  1.2720176\t valid mse: e:  1.4068953 40  train mse:  1.3644952 40  train mse:  1.3089349 40  train mse:  1.2633525 40  train mse:  1.2357014 40  train mse:  1.241603 40  train mse:  1.2566956 40  train mse:  1.2456592 40  train mse:  1.2506845 40  train mse:  1.2652367 40  train mse:  1.2603658 40  train mse:  1.2557403 40  train mse:  1.2692742 40  train mse:  1.2711084 40  train mse:  1.2777038 40  train mse:  1.2752984 1.3973742251856522\n",
      "Epoch:  41  train mse:  1.2455987\t valid mse:  1.4018222668424623rain mse:  1.2843701 41  train mse:  1.2742958  train mse:  1.2562171 1.2684287 41  train mse:  1.2649386  train mse:  1.2610583 41  train mse:  1.2594955 train mse:  1.264244 41  train mse:  1.2685714 41  train mse:  1.2642162 41  train mse:  1.2632827 41  train mse:  1.2530935\n",
      "Epoch:  42  train mse:  1.2542225\t valid mse:  1.3919992620933999rain mse:  1.2397337  train mse:  1.2578391 42  train mse:  1.261342  train mse:  1.2548511 42  train mse:  1.2508681 42  train mse:  1.2540418 1.2591101 42  train mse:  1.2583222 42  train mse:  1.2627724 42  train mse:  1.2650269 42  train mse:  1.2585934 42  train mse:  1.2632662 42  train mse:  1.2592683 42  train mse:  1.2608969 42  train mse:  1.2558436\n",
      "Epoch:  43  train mse:  1.2528429\t valid mse:  1.3868657763075922rain mse:  1.2947932 43  train mse:  1.3060356 43  train mse:  1.288166 43  train mse:  1.3023807 43  train mse:  1.2940137 43  train mse:  1.2794096 43  train mse:  1.2689217 43  train mse:  1.2583922 train mse:  1.2636515 43  train mse:  1.2662202 43  train mse:  1.2654554 43  train mse:  1.2636912\n",
      "Epoch:  44  train mse:  1.2520688\t valid mse:  1.4005758532937547rain mse:  1.2467989 44  train mse:  1.2934328 44  train mse:  1.25138651.2547607 44  train mse:  1.2671958 44  train mse:  1.2764808 44  train mse:  1.2710493 44  train mse:  1.2655901 44  train mse:  1.2741244 44  train mse:  1.2758968 44  train mse:  1.266561 44  train mse:  1.2690899 44  train mse:  1.2564375 44  train mse:  1.2544392\n",
      "Epoch:  45  train mse:  1.2485548\t valid mse:  1.389326603811487train mse:  1.3849559 45  train mse:  1.3275806 45  train mse:  1.2852068 45  train mse:  1.2749149  train mse:  1.2471657 45  train mse:  1.2548562 45  train mse:  1.2602471.2565061 45  train mse:  1.2543156 45  train mse:  1.2470813 45  train mse:  1.2418722 45  train mse:  1.2420001 45  train mse:  1.2404261 45  train mse:  1.2407736 45  train mse:  1.2464477 1.25315441.2476054\n",
      "Epoch:  46  train mse:  1.247657\t valid mse:  1.3924964347270974train mse:  1.262154746  train mse:  1.2591455 46  train mse:  1.2543013 46  train mse:  1.2758272 46  train mse:  1.2670496 46  train mse:  1.2542269 46  train mse:  1.2523876 46  train mse:  1.2521896\n",
      "Epoch:  47  train mse:  1.2696811\t valid mse:  1.3912669525855803train mse:  1.2718228 47  train mse:  1.2602929 1.2758646 47  train mse:  1.2706 47  train mse:  1.2610747 47  train mse:  1.26507 47  train mse:  1.2768072 47  train mse:  1.2672892 47  train mse:  1.2731115 47  train mse:  1.2697767 47  train mse:  1.268225 47  train mse:  1.2716022 47  train mse:  1.2686564  train mse:  1.2692715 47  train mse:  1.2713042\n",
      "Epoch:  48  train mse:  1.277597\t valid mse:  1.3860167355659718train mse:  1.3071208 48  train mse:  1.3111573 48  train mse:  1.2952198 48  train mse:  1.2887096 48  train mse:  1.2786968 48  train mse:  1.2608773 48  train mse:  1.2578077 48  train mse:  1.269141 48  train mse:  1.263033 48  train mse:  1.2604787 48  train mse:  1.2727292 48  train mse:  1.2732202 48  train mse:  1.274962 48  train mse:  1.2772322\n",
      "Epoch:  49  train mse:  1.315394\t valid mse:  1.3907855245094647 train mse:  1.3080093 49  train mse:  1.2872818 49  train mse:  1.2928541 49  train mse:  1.3075758 49  train mse:  1.3077774 1.3121158 49  train mse:  1.3114209 49  train mse:  1.3045408 1.3072585 49  train mse:  1.3022109 49  train mse:  1.3022838 1.3138486\n",
      "Epoch:  50  train mse:  1.277739\t valid mse:  1.385116571284634 train mse:  1.3278204 50  train mse:  1.32408671.2953056 50  train mse:  1.2923834 50  train mse:  1.295921 50  train mse:  1.2776299 50  train mse:  1.276821750  train mse:  1.2669767 50  train mse:  1.2681893 50  train mse:  1.2615297 50  train mse:  1.2746208 50  train mse:  1.2707558 50  train mse:  1.2705803 50  train mse:  1.2791837\n",
      "Epoch:  51  train mse:  1.2725255\t valid mse:  1.3840383092595232rain mse:  1.3074164 train mse:  1.2676363 51  train mse:  1.2767053 51  train mse:  1.2696465 51  train mse:  1.2762089 51  train mse:  1.2737627 51  train mse:  1.2799397 51  train mse:  1.2720808 51  train mse:  1.272138 51  train mse:  1.2637624 51  train mse:  1.2674594 51  train mse:  1.2716111 51  train mse:  1.273432\n",
      "Epoch:  52  train mse:  1.2673597\t valid mse:  1.3851315639239898in mse:  1.1787839  train mse:  1.192782 52  train mse:  1.2227051 52  train mse:  1.2394094 52  train mse:  1.2506167 52  train mse:  1.2658492 train mse:  1.2596841 52  train mse:  1.26137291.26585871.2684861 52  train mse:  1.2655661 52  train mse:  1.262751 52  train mse:  1.2654896 52  train mse:  1.2634665 52  train mse:  1.2673078\n",
      "Epoch:  53  train mse:  1.2314934\t valid mse:  1.389153782233598 train mse:  1.1053163 53  train mse:  1.2075067 53  train mse:  1.228102 53  train mse:  1.21462 53  train mse:  1.2266719 53  train mse:  1.2343926 53  train mse:  1.2323148  train mse:  1.22389331.2183446 53  train mse:  1.2261102 53  train mse:  1.2338157 53  train mse:  1.223789 53  train mse:  1.2342186 53  train mse:  1.2355682\n",
      "Epoch:  54  train mse:  1.23971741.4136816 54  train mse:  1.3016272 54  train mse:  1.2874017 54  train mse:  1.2666783 54  train mse:  1.273056 54  train mse:  1.2808503 54  train mse:  1.2805331 54  train mse:  1.2608278 54  train mse:  1.2511598  train mse:  1.2358195 54  train mse:  1.2312667 54  train mse:  1.2184874 54  train mse:  1.2231764 54  train mse:  1.2307082 54  train mse:  1.2349492 54  train mse:  1.2381991 54  train mse:  1.2315294 54  train mse:  1.2371457 54  train mse:  1.2407221\t valid mse:  1.3840440465190191\n",
      "Epoch:  55  train mse:  1.2590227\t valid mse:  1.3855458795787983rain mse:  1.3158865 55  train mse:  1.2721351 55  train mse:  1.266219 55  train mse:  1.2358211 55  train mse:  1.2505888 55  train mse:  1.2380357 55  train mse:  1.242159 55  train mse:  1.2642651 55  train mse:  1.2631644 55  train mse:  1.2635918 55  train mse:  1.256895 55  train mse:  1.2606862\n",
      "Epoch:  56  train mse:  1.2354854\t valid mse:  1.3926787231197586rain mse:  1.2455356 56  train mse:  1.2215493 56  train mse:  1.2138245 56  train mse:  1.1933135 56  train mse:  1.1942937 56  train mse:  1.1845233 56  train mse:  1.1925955 56  train mse:  1.1885165 56  train mse:  1.2006155 56  train mse:  1.24091951.2377307\n",
      "Epoch:  57  train mse:  1.2421851 57  train mse:  1.3164233 57  train mse:  1.2679714 57  train mse:  1.2730229 57  train mse:  1.2710624 57  train mse:  1.246934 57  train mse:  1.2640842 57  train mse:  1.2630314 57  train mse:  1.2721505 57  train mse:  1.2661105 57  train mse:  1.2578362 57  train mse:  1.2507492 57  train mse:  1.2487504 57  train mse:  1.247793457  train mse:  1.2431763 57  train mse:  1.249791\t valid mse:  1.3884061545301345\n",
      "Epoch:  58  train mse:  1.2306287\t valid mse:  1.389323240955813train mse:  1.2487333 58  train mse:  1.1906028 58  train mse:  1.2137368 58  train mse:  1.2372688 58  train mse:  1.2246587 58  train mse:  1.2021719 58  train mse:  1.2133174 58  train mse:  1.224522 58  train mse:  1.21631 58  train mse:  1.217531.2165804 58  train mse:  1.2218163 58  train mse:  1.2214292 train mse:  1.225515 58  train mse:  1.230024 58  train mse:  1.2284086 58  train mse:  1.2301644 58  train mse:  1.2317061 58  train mse:  1.2313757 58  train mse:  1.2316103\n",
      "Epoch:  59  train mse:  1.2890894\t valid mse:  1.3870446366401816in mse:  1.3748416 59  train mse:  1.3230393 59  train mse:  1.3543624 59  train mse:  1.3322296 59  train mse:  1.3374566 59  train mse:  1.323582 59  train mse:  1.3305984 59  train mse:  1.321382 59  train mse:  1.3077189 59  train mse:  1.3134621 59  train mse:  1.31202331.3076378 59  train mse:  1.2969981 59  train mse:  1.2884461\n",
      "Epoch:  60  train mse:  1.2773739\t valid mse:  1.3918747706200745rain mse:  1.2693278 60  train mse:  1.3090669 60  train mse:  1.3045286 60  train mse:  1.3027918 60  train mse:  1.2906618 60  train mse:  1.2800876 60  train mse:  1.2950875 60  train mse:  1.2970772 60  train mse:  1.3054659 60  train mse:  1.2943672  train mse:  1.2839712 60  train mse:  1.290098 60  train mse:  1.2811135\n",
      "Epoch:  61  train mse:  1.2608835\t valid mse:  1.3965170763137098rain mse:  1.2926557 61  train mse:  1.3238937 61  train mse:  1.3096519 61  train mse:  1.3013575 61  train mse:  1.3068471 61  train mse:  1.312663 61  train mse:  1.2972262 train mse:  1.2939086 1.2903702 61  train mse:  1.2915897 61  train mse:  1.2816594 61  train mse:  1.2789266 61  train mse:  1.2698632 61  train mse:  1.2719053  train mse:  1.2695496 61  train mse:  1.2677327\n",
      "Epoch:  62  train mse:  1.2663777\t valid mse:  1.3845330613139666n mse:  1.2534153 62  train mse:  1.2417682 62  train mse:  1.2318419 62  train mse:  1.2265197 62  train mse:  1.2319608 62  train mse:  1.2466788 1.2380931 62  train mse:  1.2558346 62  train mse:  1.2653333\n",
      "Epoch:  63  train mse:  1.2584889\t valid mse:  1.3857243752233208rain mse:  1.3272687 63  train mse:  1.3186722 63  train mse:  1.2874442 63  train mse:  1.262834 63  train mse:  1.2569151 63  train mse:  1.242881 63  train mse:  1.24440821.2460206 63  train mse:  1.2508823 63  train mse:  1.2492864 63  train mse:  1.2558334 63  train mse:  1.261935\n",
      "Epoch:  64  train mse:  1.2568212\t valid mse:  1.3858744295034124ain mse:  1.2261915 64  train mse:  1.2498868 64  train mse:  1.2241378 1.2305102 64  train mse:  1.2439929 64  train mse:  1.2311441 64  train mse:  1.2410369  train mse:  1.2456225 64  train mse:  1.2475994 64  train mse:  1.2431233 64  train mse:  1.2343382 64  train mse:  1.2520624 64  train mse:  1.25393 64  train mse:  1.2597032 64  train mse:  1.2603612\n",
      "Epoch:  65  train mse:  1.2761077\t valid mse:  1.39000840430869893332 65  train mse:  1.3246185 65  train mse:  1.3134761 65  train mse:  1.283395 65  train mse:  1.2815427 65  train mse:  1.2940527 65  train mse:  1.3057337 65  train mse:  1.2945914 65  train mse:  1.2934431 65  train mse:  1.2961028 65  train mse:  1.2926931 65  train mse:  1.2937064 65  train mse:  1.2967496 65  train mse:  1.2888286 65  train mse:  1.2777413 65  train mse:  1.2756077\n",
      "Epoch:  66  train mse:  1.28221326 1.0612139 66  train mse:  1.2256787 66  train mse:  1.2192631 66  train mse:  1.2285676 66  train mse:  1.2456416 66  train mse:  1.2600236 66  train mse:  1.2554313 66  train mse:  1.2525241 66  train mse:  1.2454509 66  train mse:  1.2535374 66  train mse:  1.2552425 66  train mse:  1.25706721.2555239 66  train mse:  1.2576703 66  train mse:  1.2675087 66  train mse:  1.2683753 66  train mse:  1.2750784 66  train mse:  1.2755266 66  train mse:  1.2807834\t valid mse:  1.3828852001557679\n",
      "Epoch:  67  train mse:  1.263731\t valid mse:  1.3841789081972715train mse:  1.2997357 67  train mse:  1.2935661.3071097 67  train mse:  1.2878741 67  train mse:  1.2858714 67  train mse:  1.2947297  train mse:  1.3077105 67  train mse:  1.2854534 67  train mse:  1.2853516 67  train mse:  1.2781106 67  train mse:  1.270549 67  train mse:  1.2650462 67  train mse:  1.263658\n",
      "Epoch:  68  train mse:  1.2497543\t valid mse:  1.398062348415596in mse:  1.2265517 68  train mse:  1.2340173 68  train mse:  1.2505964 68  train mse:  1.2481502 68  train mse:  1.2328227 68  train mse:  1.23863 68  train mse:  1.2369255 68  train mse:  1.2441536 68  train mse:  1.2421117 68  train mse:  1.2404637 1.242731 68  train mse:  1.250189468  train mse:  1.2549887 68  train mse:  1.2548736 68  train mse:  1.2533808\n",
      "Epoch:  69  train mse:  1.2669847\t valid mse:  1.3845766822946999rain mse:  1.2831025 69  train mse:  1.2654955 69  train mse:  1.261907 69  train mse:  1.2619945 69  train mse:  1.2601942 69  train mse:  1.2440144  train mse:  1.2459468 69  train mse:  1.2490875 69  train mse:  1.2615874 69  train mse:  1.268319\n",
      "Epoch:  70  train mse:  1.2578108\t valid mse:  1.3840532639098833ain mse:  1.2304096 70  train mse:  1.2252408 70  train mse:  1.2442461 70  train mse:  1.2536352 70  train mse:  1.2561744 70  train mse:  1.2674816 70  train mse:  1.2618492 70  train mse:  1.2663089 70  train mse:  1.2543966 70  train mse:  1.2534589 70  train mse:  1.2548814 70  train mse:  1.2563071 70  train mse:  1.2524674\n",
      "Epoch:  71  train mse:  1.2807814\t valid mse:  1.3914300146261276rain mse:  1.2596126 71  train mse:  1.2963166 71  train mse:  1.3009048 71  train mse:  1.2988453 71  train mse:  1.3033162 71  train mse:  1.2872528 71  train mse:  1.277696 71  train mse:  1.2688185  train mse:  1.2596713 71  train mse:  1.2704353 train mse:  1.279409\n",
      "Epoch:  72  train mse:  1.2496319\t valid mse:   train mse:  1.229895 72  train mse:  1.2048416  train mse:  1.2242669 72  train mse:  1.2349561 72  train mse:  1.2515122 72  train mse:  1.2671511 72  train mse:  1.2653944 72  train mse:  1.2457652 72  train mse:  1.2561903 72  train mse:  1.2564135 72  train mse:  1.2564886 72  train mse:  1.2576984 72  train mse:  1.2539597 1.3864715604736986\n",
      "Epoch:  73  train mse:  1.2609525\t valid mse:  1.3840321306255274in mse:  1.3554609 73  train mse:  1.2759622 73  train mse:  1.2563714 73  train mse:  1.2527983 73  train mse:  1.2594925 73  train mse:  1.2576311 73  train mse:  1.2446516 73  train mse:  1.24171 73  train mse:  1.2540623 73  train mse:  1.2497042 73  train mse:  1.2517132 73  train mse:  1.2529867 73  train mse:  1.2509059 73  train mse:  1.2581412\n",
      "Epoch:  74  train mse:  1.284272\t valid mse:  1.3883838202478136train mse:  1.2956865 1.293445 74  train mse:  1.2972494 74  train mse:  1.2996938 74  train mse:  1.2990241 74  train mse:  1.2915609 74  train mse:  1.2810402 1.2857238 74  train mse:  1.2826132 74  train mse:  1.2880508 74  train mse:  1.285036 74  train mse:  1.2786944 74  train mse:  1.2796644\n",
      "Epoch:  75  train mse:  1.2930036 75  train mse:  1.440375 75  train mse:  1.3010393 75  train mse:  1.2819961 75  train mse:  1.3002161 1.2781899 75  train mse:  1.3019884 75  train mse:  1.3035514 75  train mse:  1.2973006 75  train mse:  1.2923244 75  train mse:  1.2984896  train mse:  1.2921733 75  train mse:  1.2979039 75  train mse:  1.2922641 75  train mse:  1.2947176 75  train mse:  1.2945739 75  train mse:  1.2927879\t valid mse:  1.3890405018989913\n",
      "Epoch:  76  train mse:  1.2545391\t valid mse:  1.3862094962711973rain mse:  1.2902962 76  train mse:  1.2885088 76  train mse:  1.3091271 76  train mse:  1.2854501 76  train mse:  1.2832515 76  train mse:  1.2555962 76  train mse:  1.2565423 76  train mse:  1.2668347 76  train mse:  1.2431171 76  train mse:  1.2434065 76  train mse:  1.246525 76  train mse:  1.2537885 76  train mse:  1.2495917 76  train mse:  1.251414\n",
      "Epoch:  77  train mse:  1.2594986 77  train mse:  1.3053193 77  train mse:  1.2716442 77  train mse:  1.2629433 77  train mse:  1.2733122 77  train mse:  1.2681195 77  train mse:  1.2695261 77  train mse:  1.2670869 77  train mse:  1.261426 77  train mse:  1.2609868 77  train mse:  1.2583883 77  train mse:  1.2584878 77  train mse:  1.2593523\t valid mse:  1.3888911291073167\n",
      "Epoch:  78  train mse:  1.2439928\t valid mse:  1.3867572556617491n mse:  1.1113632 78  train mse:  1.205204 78  train mse:  1.1937834 78  train mse:  1.20367 78  train mse:  1.2129977 78  train mse:  1.2396302 78  train mse:  1.2431465 78  train mse:  1.2542223 78  train mse:  1.2542976 78  train mse:  1.2552255 78  train mse:  1.2537454 78  train mse:  1.2508074 1.2408267\n",
      "Epoch:  79  train mse:  1.2620304\t valid mse:  1.3865228693290061rain mse:  1.4062387 79  train mse:  1.273469 1.2752303 79  train mse:  1.2859459 79  train mse:  1.2544243 79  train mse:  1.255844 79  train mse:  1.2548766 79  train mse:  1.2510684 79  train mse:  1.248852 79  train mse:  1.2396464 79  train mse:  1.245217 79  train mse:  1.2469327 79  train mse:  1.2486339 79  train mse:  1.2482244 79  train mse:  1.2443109 79  train mse:  1.2494037 79  train mse:  1.26059721.2600709\n",
      "Epoch:  80  train mse:  1.2648418\t valid mse:  1.385681612326904train mse:  1.1744938 train mse:  1.2088088 80  train mse:  1.2085767 80  train mse:  1.2267591 80  train mse:  1.248037 80  train mse:  1.2538272 80  train mse:  1.2612051 80  train mse:  1.2634225 train mse:  1.2655177  train mse:  1.2656262 80  train mse:  1.2586904 80  train mse:  1.264048 80  train mse:  1.2664802 80  train mse:  1.2582284 train mse:  1.2560421\n",
      "Epoch:  81  train mse:  1.2825264\t valid mse:  1.3844279048945165rain mse:  1.2666645 81  train mse:  1.2668037 81  train mse:  1.2581261 81  train mse:  1.27419951.277812 81  train mse:  1.2733377 81  train mse:  1.2804308 81  train mse:  1.2735696 81  train mse:  1.2771254 81  train mse:  1.2790625 81  train mse:  1.2813137 81  train mse:  1.2844608 81  train mse:  1.2831556\n",
      "Epoch:  82  train mse:  1.2743794\t valid mse: :  1.200115 82  train mse:  1.234659 82  train mse:  1.2623338 82  train mse:  1.2627207 82  train mse:  1.2663515 82  train mse:  1.27795521.266382 82  train mse:  1.2811526 82  train mse:  1.2812188 82  train mse:  1.2810972 82  train mse:  1.2699726 82  train mse:  1.2681136 82  train mse:  1.2691458 82  train mse:  1.2669708 82  train mse:  1.27066 1.3832499152397837\n",
      "Epoch:  83  train mse:  1.2564595\t valid mse:  1.3928828205571293rain mse:  1.255546 83  train mse:  1.2379992 83  train mse:  1.2483695 83  train mse:  1.2461464 83  train mse:  1.244107 83  train mse:  1.2504632 83  train mse:  1.25174061.2589862 83  train mse:  1.2586114 83  train mse:  1.26178291.2578522 83  train mse:  1.2601625\n",
      "Epoch:  84  train mse:  1.2714021\t valid mse:  1.3940999359453587rain mse:  1.3655813 84  train mse:  1.303229 84  train mse:  1.2920457 84  train mse:  1.2823128 84  train mse:  1.2796062 84  train mse:  1.2765235 84  train mse:  1.2570342 84  train mse:  1.2620695 84  train mse:  1.2580752  train mse:  1.2639074 84  train mse:  1.2715539 84  train mse:  1.275425384  train mse:  1.2691797 84  train mse:  1.2779378 84  train mse:  1.2784988 84  train mse:  1.2810781 84  train mse:  1.2717307\n",
      "Epoch:  85  train mse:  1.2149528 85  train mse:  1.299056 85  train mse:  1.2174809 85  train mse:  1.2229352 85  train mse:  1.2126898 85  train mse:  1.23504191.2306504 85  train mse:  1.2260746 85  train mse:  1.2231576  train mse:  1.2183852 85  train mse:  1.2114367 85  train mse:  1.2087221 85  train mse:  1.211037 85  train mse:  1.2134465 85  train mse:  1.2130077 85  train mse:  1.2156051\t valid mse:  1.3855894789795709\n",
      "Epoch:  86  train mse:  1.2754855\t valid mse:  1.3857081723858073rain mse:  1.1792606 86  train mse:  1.1597147 86  train mse:  1.184906 86  train mse:  1.2132235 86  train mse:  1.2265806 86  train mse:  1.2291728 86  train mse:  1.2581848 86  train mse:  1.2518663 86  train mse:  1.2577472 86  train mse:  1.2589967 86  train mse:  1.2641222 86  train mse:  1.27792541.2736176 86  train mse:  1.2677312 86  train mse:  1.276167\n",
      "Epoch:  87  train mse:  1.2938083\t valid mse:  1.3876383983248828train mse:  1.2890385 87  train mse:  1.3125887 87  train mse:  1.3150816 87  train mse:  1.3164331 87  train mse:  1.346321 87  train mse:  1.3368071 87  train mse:  1.3136276 87  train mse:  1.301964 87  train mse:  1.2921369 87  train mse:  1.28885 87  train mse:  1.2827842 87  train mse:  1.290471 87  train mse:  1.2980576 87  train mse:  1.2980416 87  train mse:  1.2974141 87  train mse:  1.2950997 87  train mse:  1.291962987  train mse:  1.2935758\n",
      "Epoch:  88  train mse:  1.2615207\t valid mse:  1.384858650727616n mse:  1.293645 88  train mse:  1.2585541 88  train mse:  1.2325612 88  train mse:  1.2199111 88  train mse:  1.2178736 88  train mse:  1.2416362 88  train mse:  1.2428281 88  train mse:  1.2387234 88  train mse:  1.241306488  train mse:  1.254453 88  train mse:  1.2576739 88  train mse:  1.261796 88  train mse:  1.254486 88  train mse:  1.2566838\n",
      "Epoch:  89  train mse:  1.2563976\t valid mse:  1.386542056285168train mse:  1.3050382 89  train mse:  1.2798206 89  train mse:  1.2894988 89  train mse:  1.2931471 89  train mse:  1.2853609 89  train mse:  1.2703724 89  train mse:  1.2629675 89  train mse:  1.26434 89  train mse:  1.2656883 89  train mse:  1.2626146 89  train mse:  1.2585413 89  train mse:  1.2664349 89  train mse:  1.2648363  train mse:  1.2587337 train mse:  1.2575818\n",
      "Epoch:  90  train mse:  1.2519182\t valid mse:  1.392867933614601train mse:  1.3379272 90  train mse:  1.2825477 90  train mse:  1.2455953 90  train mse:  1.2398877 90  train mse:  1.2397226 90  train mse:  1.2415414 90  train mse:  1.247689890  train mse:  1.262876 90  train mse:  1.2585524 90  train mse:  1.2612942 90  train mse:  1.265127  train mse:  1.2525172\n",
      "Epoch:  91  train mse:  1.2309684\t valid mse: se:  1.2210596 91  train mse:  1.245893 91  train mse:  1.2569417 91  train mse:  1.2412893 91  train mse:  1.2431504 91  train mse:  1.245124 91  train mse:  1.2494836 91  train mse:  1.2543893 91  train mse:  1.250855691  train mse:  1.2475778 91  train mse:  1.2415874 91  train mse:  1.2267289 91  train mse:  1.224774 91  train mse:  1.2339653 91  train mse:  1.2324145 1.3845948404773034\n",
      "Epoch:  92  train mse:  1.2762161\t valid mse:  1.384831676451073 train mse:  1.2769233 92  train mse:  1.2938082 train mse:  1.3001761 92  train mse:  1.2825977 92  train mse:  1.2832537 92  train mse:  1.2685013 92  train mse:  1.2630858 92  train mse:  1.2623003 92  train mse:  1.2761616 92  train mse:  1.2710594 92  train mse:  1.2708404\n",
      "Epoch:  93  train mse:  1.2748473\t valid mse:  1.39151467716367733601 93  train mse:  1.2565867 93  train mse:  1.2441798 train mse:  1.2692171 93  train mse:  1.2747113 93  train mse:  1.2834796 93  train mse:  1.2829664 93  train mse:  1.27516421.2830805 93  train mse:  1.2833927 93  train mse:  1.2763301\n",
      "Epoch:  94  train mse:  1.2704363\t valid mse:  1.3843736148721708rain mse:  1.3320642 94  train mse:  1.3464698 94  train mse:  1.3200327 94  train mse:  1.3166851.2946087 94  train mse:  1.3177783 94  train mse:  1.3014063 94  train mse:  1.2886713 94  train mse:  1.2878019 94  train mse:  1.29350394  train mse:  1.2780977 94  train mse:  1.2682399 94  train mse:  1.26997511.2768344 94  train mse:  1.2676798 94  train mse:  1.2640617 94  train mse:  1.269569\n",
      "Epoch:  95  train mse:  1.2633982\t valid mse:  1.38457909768464937932 95  train mse:  1.2426333 95  train mse:  1.2644686 95  train mse:  1.2689179 95  train mse:  1.275800595  train mse:  1.2674503 95  train mse:  1.2587265 95  train mse:  1.2422538 95  train mse:  1.2551943 95  train mse:  1.2528874 95  train mse:  1.248601 95  train mse:  1.2494571 95  train mse:  1.2546077 95  train mse:  1.2581043 95  train mse:  1.2597144 95  train mse:  1.2556328 95  train mse:  1.2615184\n",
      "Epoch:  96  train mse:  1.2704934\t valid mse:  1.3904205433842578rain mse:  1.287748 96  train mse:  1.2993976 96  train mse:  1.2872379 96  train mse:  1.2645403  train mse:  1.2445762 96  train mse:  1.2611718 96  train mse:  1.2646252 96  train mse:  1.2640359 96  train mse:  1.2558166 96  train mse:  1.2767121 96  train mse:  1.2751547 96  train mse:  1.2787173 96  train mse:  1.2685047 96  train mse:  1.2711679 96  train mse:  1.270298\n",
      "Epoch:  97  train mse:  1.2461985\t valid mse:  1.3896000090263492rain mse:  1.4330664 train mse:  1.3083113 97  train mse:  1.2809843 97  train mse:  1.2637438 1.2592859 97  train mse:  1.2493303 97  train mse:  1.25874 97  train mse:  1.2660246 97  train mse:  1.2809808 97  train mse:  1.2743845 97  train mse:  1.2563809 1.2458478\n",
      "Epoch:  98  train mse:  1.2647783\t valid mse:  1.3866280602895464rain mse:  1.3027849 98  train mse:  1.3334006  train mse:  1.2962738 98  train mse:  1.2909973 98  train mse:  1.2913388 98  train mse:  1.2963485 98  train mse:  1.2879238 98  train mse:  1.2989813 98  train mse:  1.2941352 98  train mse:  1.28847311.2850174 98  train mse:  1.2721207 98  train mse:  1.2713734 98  train mse:  1.2702049\n",
      "Epoch:  99  train mse:  1.2530388\t valid mse:  1.3848635770107331rain mse:  1.2273322 99  train mse:  1.2320684 99  train mse:  1.2193549 99  train mse:  1.2072914 99  train mse:  1.2176608 99  train mse:  1.2187532 99  train mse:  1.2299132 99  train mse:  1.2338086 99  train mse:  1.2367055 99  train mse:  1.2341977 99  train mse:  1.2322551 99  train mse:  1.2485226 99  train mse:  1.2469602 99  train mse:  1.246279\n"
     ]
    }
   ],
   "source": [
    "# Custom training loop: a manual reimplementation of model.fit().\n",
    "# Relies on names defined in earlier cells: epochs, steps_per_epoch,\n",
    "# batch_size, random_batch, model, optimizer, metric, and the\n",
    "# scaled train/valid data.\n",
    "for epoch in range(epochs):\n",
    "    metric.reset_states()  # clear the accumulated metric at the start of each epoch\n",
    "    # 1. Iterate over the training set, accumulating the train metric.\n",
    "    for step in range(steps_per_epoch):\n",
    "        # Manually draw a random mini-batch (no tf.data pipeline here).\n",
    "        x_batch, y_batch = random_batch(x_train_scaled, y_train, batch_size)\n",
    "        # Record forward ops on the tape so gradients can be derived.\n",
    "        with tf.GradientTape() as tape:\n",
    "            y_pred = model(x_batch)\n",
    "            # NOTE(review): if y_batch is rank-1 and y_pred is (batch, 1),\n",
    "            # broadcasting inside mean_squared_error silently inflates the\n",
    "            # loss -- confirm the shapes match in random_batch/model.\n",
    "            loss = tf.reduce_mean(\n",
    "                keras.losses.mean_squared_error(y_batch, y_pred))\n",
    "        # Update the metric outside the tape: it needs no gradient tracking.\n",
    "        metric(y_batch, y_pred)\n",
    "        grads = tape.gradient(loss, model.variables)  # manual backprop\n",
    "        grads_and_vars = zip(grads, model.variables)  # pair each gradient with its variable\n",
    "        optimizer.apply_gradients(grads_and_vars)\n",
    "    print('\\rEpoch:', epoch, ' train mse:', metric.result().numpy(), end='')\n",
    "    # 2. After each epoch, evaluate on the validation set.\n",
    "    y_valid_pred = model(x_valid_scaled)\n",
    "    # Keras losses take (y_true, y_pred); keep the argument order consistent\n",
    "    # with the training loss above. (MSE is symmetric in value for equal\n",
    "    # shapes, but the order controls broadcasting/reduction when ranks\n",
    "    # differ, and matters for any non-symmetric loss.)\n",
    "    valid_loss = tf.reduce_mean(\n",
    "        keras.losses.mean_squared_error(y_valid, y_valid_pred))\n",
    "    print('\\t valid mse:', valid_loss.numpy())"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.10"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}
