{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Using TensorFlow backend.\n"
     ]
    }
   ],
   "source": [
    "import warnings\n",
    "warnings.filterwarnings(\"ignore\")\n",
    "import numpy as np\n",
    "import pandas as pd # shorthand alias\n",
    "import os\n",
    "import math\n",
    "from keras.models import Sequential # sequential model: layers stacked in the order of the problem-solving flow\n",
    "from keras.layers import Dense# fully-connected layer\n",
    "from keras.layers import LSTM # recurrent layer for time series (RNN family: rnn, gru, lstm)\n",
    "from keras.layers import Dropout # randomly disables neurons during training\n",
    "from keras.callbacks import EarlyStopping\n",
    "from sklearn.preprocessing import MinMaxScaler  # scikit-learn\n",
    "from sklearn.metrics import mean_squared_error # mean squared error\n",
    "from keras.models import load_model # function to load a saved model\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Example of create_data_set: how a 20-sample series becomes the training set (look_back=5, pred_length=2)\n",
    "# 1\n",
    "# 2\n",
    "# 3\n",
    "# 4\n",
    "# 5\n",
    "# 6\n",
    "# 7\n",
    "# 8\n",
    "# 9\n",
    "# 10\n",
    "# 11\n",
    "# 12\n",
    "# 12\n",
    "# 13\n",
    "# 45\n",
    "# 58\n",
    "# 41\n",
    "# 42\n",
    "# 52\n",
    "# 12\n",
    "\n",
    "#  13 samples: i = 0..12, since range(20-5-2) = range(13)\n",
    "# 12345 67\n",
    "# 23456 78\n",
    "# 34567 89\n",
    "#  。。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "class RNNModel(object):\n",
    "    \"\"\"Two-layer LSTM forecaster for a univariate time series.\n",
    "\n",
    "    Scales the series to [0, 1], builds sliding-window training samples,\n",
    "    trains with early stopping, persists the model and scaler to disk, and\n",
    "    predicts the next pred_length values from the last look_back observations.\n",
    "    \"\"\"\n",
    "    def __init__(self,df,look_back=1, pred_length=1, epochs=20, batch_size=1, verbose=2, patience=10,model_name='best_model.h5',scaler_name='s.pkl'):\n",
    "        self.look_back = look_back  # number of past points per training sample\n",
    "        self.pred_length = pred_length # number of future points to predict\n",
    "        self.epochs = epochs # training epochs; more is not always better, 50-100 usually reaches convergence\n",
    "        self.batch_size = batch_size # samples per batch; total updates = N / batch_size * epochs\n",
    "        self.verbose = verbose # training log verbosity: 0 = silent, 1/2 = show progress\n",
    "        self.patience = patience # early-stopping patience: stop once the loss fails to improve this many times (loss converged)\n",
    "        self.stock = df # input data\n",
    "        self.model_name = model_name # filename for the trained model\n",
    "        self.scaler_name = scaler_name # filename for the fitted scaler\n",
    "\n",
    "    # Load the data and normalize it to [0, 1].  data_frame is the data to train on, e.g. temp_df.csv\n",
    "    def access_data(self, data_frame):\n",
    "        # load the raw values\n",
    "        data_set = data_frame.values # first column as a 2-D array, e.g. [[2],[3],[4]...]\n",
    "        data_set = data_set.astype('float32')\n",
    "        print(data_set.shape) # show the dataset shape\n",
    "        # normalize into the 0-1 range\n",
    "        scaler = MinMaxScaler(feature_range=(0, 1)) \n",
    "        data_set = scaler.fit_transform(data_set)\n",
    "        # reshape into X=t and Y=t+1 to obtain the training and test data\n",
    "        train_x, train_y, test = self.create_data_set(data_set)\n",
    "        # reshape input to [samples, time steps, features] so it can be fed to the LSTM\n",
    "        #[[1,2,3,4,5]] 1*5-->1*1*5[[[1,2,3,4,5]],[[2,3,4,5,6]]] [[6],[7]]\n",
    "        train_x = np.reshape(train_x, (train_x.shape[0], 1, train_x.shape[1]))\n",
    "        return train_x, train_y, test, scaler # train inputs, train targets, test input, scaler\n",
    "\n",
    "    # Split the series into sliding-window training pairs plus the final test window\n",
    "    def create_data_set(self, data_set):\n",
    "        data_x, data_y = [], []\n",
    "        # NOTE(review): this range stops one step short — the last complete\n",
    "        # (look_back + pred_length) window is never used for training.\n",
    "        # Presumably intentional headroom; confirm.\n",
    "        for i in range(len(data_set)-self.look_back - self.pred_length):\n",
    "            a = data_set[i:(i + self.look_back), 0]\n",
    "            data_x.append(a)\n",
    "            data_y.append(list(data_set[i + self.look_back: i + self.look_back + self.pred_length, 0]))\n",
    "        print(data_x[:2])\n",
    "        print('*********************')\n",
    "        print(data_y[:2])\n",
    "        print('*********************')\n",
    "        print( 'test:',data_set[-self.look_back:, 0].reshape(1, 1, self.look_back))\n",
    "        return np.array(data_x), np.array(data_y), data_set[-self.look_back:, 0].reshape(1, 1, self.look_back)\n",
    "    # Build and train the RNN: two stacked LSTM layers\n",
    "    # Default sizing guideline: up to ~10k samples -> one LSTM, 32 units\n",
    "    # ~20k-100k samples -> two LSTMs, 64 + 32 units\n",
    "    # ~100k-300k samples -> two LSTMs, 64 + 64 units\n",
    "    def rnn_model(self, train_x, train_y,te_x,te_y, epochs):\n",
    "        model = Sequential()\n",
    "        model.add(LSTM(64, input_shape=(1, self.look_back), return_sequences=True))# double these units for ~300k-sample datasets\n",
    "        model.add(LSTM(32, return_sequences=False))# double these units for ~300k-sample datasets\n",
    "        model.add(Dense(16))# double these units for ~300k-sample datasets\n",
    "        model.add(Dense(self.pred_length))\n",
    "        model.compile(loss='mean_squared_error', optimizer='adam',metrics=['mae','mse']) # mean absolute error, mean squared error\n",
    "#         model.summary()\n",
    "        early_stopping = EarlyStopping('loss', patience=self.patience)\n",
    "        history = model.fit(train_x, train_y,validation_data=(te_x, te_y), epochs=epochs, batch_size=self.batch_size, verbose=self.verbose, callbacks=[early_stopping])\n",
    "        return model,history\n",
    "    # Predict on the given samples and return the (still scaled) predictions\n",
    "    def predict(self, model, data):\n",
    "        prediction = model.predict(data)\n",
    "        return prediction\n",
    "    # Run the full pipeline: train, save the model and scaler, return the forecast\n",
    "    def run(self):\n",
    "        train_x, train_y, test, scaler = self.access_data(self.stock)\n",
    "        # NOTE(review): hard-coded 10000-sample train/validation split — yields an\n",
    "        # empty validation set when the series has <= 10000 samples; confirm.\n",
    "        tr_x,te_x,tr_y,te_y = train_x[:10000],train_x[10000:],train_y[:10000],train_y[10000:]\n",
    "        model,history = self.rnn_model(tr_x, tr_y,te_x,te_y,self.epochs)\n",
    "        # save the model and the scaler\n",
    "        model.save(self.model_name)\n",
    "        import pickle\n",
    "        with open(self.scaler_name, 'wb') as fw:\n",
    "            pickle.dump(scaler, fw)\n",
    "        predict = self.predict(model, test)\n",
    "        # invert the scaling to get the forecast back in original units\n",
    "        stock = scaler.inverse_transform(predict).reshape(self.pred_length, 1)\n",
    "        print('test predict:',stock)\n",
    "        return np.squeeze(stock) # final output"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "ename": "FileNotFoundError",
     "evalue": "File b'humd_df.csv' does not exist",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mFileNotFoundError\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-5-c1edd5031ce6>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[0;31m#查看下关于湿度的数据\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      2\u001b[0m \u001b[0mp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34mr'humd_df.csv'\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m \u001b[0mdf\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpd\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mread_csv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mp\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      4\u001b[0m \u001b[0mdf\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mhead\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hz/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36mparser_f\u001b[0;34m(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, squeeze, prefix, mangle_dupe_cols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, dayfirst, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, escapechar, comment, encoding, dialect, tupleize_cols, error_bad_lines, warn_bad_lines, skipfooter, skip_footer, doublequote, delim_whitespace, as_recarray, compact_ints, use_unsigned, low_memory, buffer_lines, memory_map, float_precision)\u001b[0m\n\u001b[1;32m    653\u001b[0m                     skip_blank_lines=skip_blank_lines)\n\u001b[1;32m    654\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 655\u001b[0;31m         \u001b[0;32mreturn\u001b[0m \u001b[0m_read\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    656\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    657\u001b[0m     \u001b[0mparser_f\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m__name__\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mname\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hz/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m_read\u001b[0;34m(filepath_or_buffer, kwds)\u001b[0m\n\u001b[1;32m    403\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    404\u001b[0m     \u001b[0;31m# Create the parser.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 405\u001b[0;31m     \u001b[0mparser\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mTextFileReader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfilepath_or_buffer\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    406\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    407\u001b[0m     \u001b[0;32mif\u001b[0m \u001b[0mchunksize\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0miterator\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hz/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, f, engine, **kwds)\u001b[0m\n\u001b[1;32m    760\u001b[0m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptions\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'has_index_names'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mkwds\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'has_index_names'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    761\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 762\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_make_engine\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mengine\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    763\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    764\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0mclose\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hz/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m_make_engine\u001b[0;34m(self, engine)\u001b[0m\n\u001b[1;32m    964\u001b[0m     \u001b[0;32mdef\u001b[0m \u001b[0m_make_engine\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mengine\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'c'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    965\u001b[0m         \u001b[0;32mif\u001b[0m \u001b[0mengine\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'c'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 966\u001b[0;31m             \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_engine\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mCParserWrapper\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mf\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moptions\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    967\u001b[0m         \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    968\u001b[0m             \u001b[0;32mif\u001b[0m \u001b[0mengine\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;34m'python'\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m/home/hz/anaconda3/lib/python3.6/site-packages/pandas/io/parsers.py\u001b[0m in \u001b[0;36m__init__\u001b[0;34m(self, src, **kwds)\u001b[0m\n\u001b[1;32m   1580\u001b[0m         \u001b[0mkwds\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'allow_leading_cols'\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mindex_col\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mFalse\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1581\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1582\u001b[0;31m         \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_reader\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mparsers\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTextReader\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msrc\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1583\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1584\u001b[0m         \u001b[0;31m# XXX\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader.__cinit__ (pandas/_libs/parsers.c:4209)\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;32mpandas/_libs/parsers.pyx\u001b[0m in \u001b[0;36mpandas._libs.parsers.TextReader._setup_parser_source (pandas/_libs/parsers.c:8873)\u001b[0;34m()\u001b[0m\n",
      "\u001b[0;31mFileNotFoundError\u001b[0m: File b'humd_df.csv' does not exist"
     ]
    }
   ],
   "source": [
    "# Take a quick look at the humidity data\n",
    "p = r'humd_df.csv'\n",
    "df = pd.read_csv(p)  # NOTE(review): recorded output shows FileNotFoundError — the CSV must be in the working directory\n",
    "df.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(21714, 1)\n",
      "[array([0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 3.19182902e-04,\n",
      "       3.29822325e-03, 1.07458243e-02, 2.70241518e-02, 4.71326746e-02,\n",
      "       6.50069192e-02, 8.17108229e-02, 1.36397496e-01, 1.54271737e-01,\n",
      "       1.85764447e-01, 1.72997132e-01, 2.14065328e-01, 2.86413461e-01,\n",
      "       3.65996391e-01, 3.88551980e-01, 5.58676481e-01, 5.82083225e-01,\n",
      "       5.26651800e-01, 3.48760515e-01, 5.28992474e-01, 5.61868310e-01,\n",
      "       5.41653395e-01, 4.65049475e-01, 4.63134378e-01, 2.63113111e-01,\n",
      "       4.54942018e-01, 3.16842228e-01, 3.48334938e-01, 4.00255352e-01,\n",
      "       4.01957661e-01, 4.36323017e-01, 4.37280566e-01, 4.27705079e-01,\n",
      "       4.57708269e-01, 4.42919463e-01, 4.90584105e-01, 4.33769554e-01,\n",
      "       4.75369722e-01, 5.74848413e-01, 5.76763511e-01, 4.51750189e-01,\n",
      "       3.15459102e-01, 2.40663901e-01, 3.55676144e-01, 3.16203862e-01,\n",
      "       4.57495481e-01, 4.38238114e-01, 4.50047880e-01, 5.31971514e-01,\n",
      "       6.73795104e-01, 6.27407193e-01, 4.08447713e-01, 4.36642200e-01,\n",
      "       5.05479336e-01, 5.33461034e-01, 6.84008956e-01, 6.77412510e-01,\n",
      "       6.68900967e-01, 5.08458376e-01, 4.77178425e-01, 5.16331553e-01,\n",
      "       5.84104717e-01, 5.12288570e-01, 3.72699231e-01, 3.47377390e-01,\n",
      "       3.53229076e-01, 4.18767959e-01, 4.88669008e-01, 5.40163875e-01,\n",
      "       3.58655185e-01, 2.41727844e-01, 1.80125549e-01, 1.50547937e-01,\n",
      "       1.24694116e-01, 1.03840835e-01, 3.93658914e-02, 7.66038941e-03,\n",
      "       1.06394303e-03, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 1.06394303e-03, 8.40514991e-03,\n",
      "       2.04277057e-02, 3.73443998e-02, 5.20268120e-02, 6.20278753e-02,\n",
      "       8.51154402e-02, 1.14905842e-01, 1.67996600e-01, 2.17044368e-01,\n",
      "       2.65985757e-01, 2.36940101e-01, 2.51409739e-01, 2.96201736e-01,\n",
      "       3.29503149e-01, 3.33758920e-01, 3.81742746e-01, 3.78657311e-01,\n",
      "       4.42706674e-01, 4.43557829e-01, 4.84306842e-01, 5.68571150e-01,\n",
      "       5.86445391e-01, 5.87934911e-01, 5.19629776e-01, 4.79093522e-01,\n",
      "       3.76529425e-01, 3.50356430e-01, 3.81210774e-01, 4.04936701e-01,\n",
      "       4.00468141e-01, 4.87605065e-01, 5.40270269e-01, 5.23885548e-01,\n",
      "       4.87285882e-01, 5.40163875e-01, 5.50271332e-01, 5.01117170e-01,\n",
      "       4.81434196e-01, 5.50590515e-01, 4.85902756e-01, 4.70475584e-01,\n",
      "       5.76657116e-01, 4.97712523e-01, 4.95371848e-01, 5.10799050e-01,\n",
      "       6.40174508e-01, 6.21768296e-01, 6.33684456e-01, 5.93041837e-01,\n",
      "       4.59623367e-01, 4.21321422e-01, 5.95382512e-01, 5.81444860e-01,\n",
      "       6.70177698e-01, 7.22310901e-01, 7.08692431e-01, 7.43589759e-01],\n",
      "      dtype=float32), array([0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 3.19182902e-04, 3.29822325e-03,\n",
      "       1.07458243e-02, 2.70241518e-02, 4.71326746e-02, 6.50069192e-02,\n",
      "       8.17108229e-02, 1.36397496e-01, 1.54271737e-01, 1.85764447e-01,\n",
      "       1.72997132e-01, 2.14065328e-01, 2.86413461e-01, 3.65996391e-01,\n",
      "       3.88551980e-01, 5.58676481e-01, 5.82083225e-01, 5.26651800e-01,\n",
      "       3.48760515e-01, 5.28992474e-01, 5.61868310e-01, 5.41653395e-01,\n",
      "       4.65049475e-01, 4.63134378e-01, 2.63113111e-01, 4.54942018e-01,\n",
      "       3.16842228e-01, 3.48334938e-01, 4.00255352e-01, 4.01957661e-01,\n",
      "       4.36323017e-01, 4.37280566e-01, 4.27705079e-01, 4.57708269e-01,\n",
      "       4.42919463e-01, 4.90584105e-01, 4.33769554e-01, 4.75369722e-01,\n",
      "       5.74848413e-01, 5.76763511e-01, 4.51750189e-01, 3.15459102e-01,\n",
      "       2.40663901e-01, 3.55676144e-01, 3.16203862e-01, 4.57495481e-01,\n",
      "       4.38238114e-01, 4.50047880e-01, 5.31971514e-01, 6.73795104e-01,\n",
      "       6.27407193e-01, 4.08447713e-01, 4.36642200e-01, 5.05479336e-01,\n",
      "       5.33461034e-01, 6.84008956e-01, 6.77412510e-01, 6.68900967e-01,\n",
      "       5.08458376e-01, 4.77178425e-01, 5.16331553e-01, 5.84104717e-01,\n",
      "       5.12288570e-01, 3.72699231e-01, 3.47377390e-01, 3.53229076e-01,\n",
      "       4.18767959e-01, 4.88669008e-01, 5.40163875e-01, 3.58655185e-01,\n",
      "       2.41727844e-01, 1.80125549e-01, 1.50547937e-01, 1.24694116e-01,\n",
      "       1.03840835e-01, 3.93658914e-02, 7.66038941e-03, 1.06394303e-03,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 0.00000000e+00, 0.00000000e+00, 0.00000000e+00,\n",
      "       0.00000000e+00, 1.06394303e-03, 8.40514991e-03, 2.04277057e-02,\n",
      "       3.73443998e-02, 5.20268120e-02, 6.20278753e-02, 8.51154402e-02,\n",
      "       1.14905842e-01, 1.67996600e-01, 2.17044368e-01, 2.65985757e-01,\n",
      "       2.36940101e-01, 2.51409739e-01, 2.96201736e-01, 3.29503149e-01,\n",
      "       3.33758920e-01, 3.81742746e-01, 3.78657311e-01, 4.42706674e-01,\n",
      "       4.43557829e-01, 4.84306842e-01, 5.68571150e-01, 5.86445391e-01,\n",
      "       5.87934911e-01, 5.19629776e-01, 4.79093522e-01, 3.76529425e-01,\n",
      "       3.50356430e-01, 3.81210774e-01, 4.04936701e-01, 4.00468141e-01,\n",
      "       4.87605065e-01, 5.40270269e-01, 5.23885548e-01, 4.87285882e-01,\n",
      "       5.40163875e-01, 5.50271332e-01, 5.01117170e-01, 4.81434196e-01,\n",
      "       5.50590515e-01, 4.85902756e-01, 4.70475584e-01, 5.76657116e-01,\n",
      "       4.97712523e-01, 4.95371848e-01, 5.10799050e-01, 6.40174508e-01,\n",
      "       6.21768296e-01, 6.33684456e-01, 5.93041837e-01, 4.59623367e-01,\n",
      "       4.21321422e-01, 5.95382512e-01, 5.81444860e-01, 6.70177698e-01,\n",
      "       7.22310901e-01, 7.08692431e-01, 7.43589759e-01, 6.70603275e-01],\n",
      "      dtype=float32)]\n",
      "*********************\n",
      "[[0.6706033, 0.68007237, 0.67092246, 0.6232578, 0.67507184, 0.59250987, 0.5679328, 0.34205768, 0.4888818, 0.45866582, 0.32556656, 0.21917225, 0.18076392, 0.104585595, 0.11362911, 0.03340781, 0.007128418, 0.0035110118, 0.0009575487, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], [0.68007237, 0.67092246, 0.6232578, 0.67507184, 0.59250987, 0.5679328, 0.34205768, 0.4888818, 0.45866582, 0.32556656, 0.21917225, 0.18076392, 0.104585595, 0.11362911, 0.03340781, 0.007128418, 0.0035110118, 0.0009575487, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]]\n",
      "*********************\n",
      "test: [[[1.06394303e-03 7.55399512e-03 1.77678484e-02 3.33014168e-02\n",
      "   4.86221947e-02 5.64953722e-02 7.62847140e-02 9.47973207e-02\n",
      "   1.48739234e-01 1.61932126e-01 1.98212579e-01 2.57155031e-01\n",
      "   2.55665511e-01 2.92477936e-01 2.77582735e-01 3.46845418e-01\n",
      "   2.95988947e-01 3.58655185e-01 5.46015561e-01 5.02287507e-01\n",
      "   3.56420904e-01 4.41323549e-01 3.66741151e-01 5.93361020e-01\n",
      "   4.56644326e-01 6.72837555e-01 4.65687841e-01 4.41323549e-01\n",
      "   3.88445586e-01 5.82827985e-01 7.17523158e-01 5.14522851e-01\n",
      "   5.03883421e-01 6.65709138e-01 5.47079504e-01 3.86424094e-01\n",
      "   4.93350357e-01 4.77710396e-01 4.07702953e-01 4.44834560e-01\n",
      "   7.68486023e-01 4.71752316e-01 5.92403471e-01 6.59963846e-01\n",
      "   4.47388023e-01 4.19512719e-01 4.18023199e-01 3.83019477e-01\n",
      "   4.30684119e-01 4.09086078e-01 3.67698699e-01 3.65038842e-01\n",
      "   3.54931384e-01 3.70784134e-01 3.91211838e-01 2.84391969e-01\n",
      "   3.58761579e-01 3.16416651e-01 2.10979894e-01 2.11405471e-01\n",
      "   1.81083098e-01 1.09905310e-01 1.04053624e-01 1.09692521e-01\n",
      "   5.09628691e-02 2.85136718e-02 7.76678370e-03 3.29822325e-03\n",
      "   4.25577193e-04 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 1.06394303e-03\n",
      "   2.02149176e-03 1.18097672e-02 1.68102998e-02 2.43642945e-02\n",
      "   2.26619858e-02 3.23438682e-02 3.78763713e-02 5.84104694e-02\n",
      "   8.94776061e-02 1.17246516e-01 1.00968190e-01 1.29269078e-01\n",
      "   1.14799447e-01 1.17246516e-01 1.57037988e-01 1.38099805e-01\n",
      "   1.33205667e-01 1.20863922e-01 1.07564636e-01 1.27992347e-01\n",
      "   1.21502288e-01 1.56080440e-01 2.89073318e-01 2.03532293e-01\n",
      "   2.13107780e-01 1.72465160e-01 1.97042242e-01 1.51505485e-01\n",
      "   1.94275990e-01 1.15863390e-01 1.40546873e-01 1.63315251e-01\n",
      "   2.00872436e-01 1.87147573e-01 3.28226417e-01 1.75550595e-01\n",
      "   1.51399091e-01 2.32684329e-01 1.56506017e-01 1.82891801e-01\n",
      "   2.59389311e-01 1.37567833e-01 1.27460375e-01 9.25630406e-02\n",
      "   1.59272268e-01 1.32035330e-01 1.68847755e-01 1.22247048e-01\n",
      "   1.88211516e-01 1.62144914e-01 1.50228754e-01 1.24162145e-01\n",
      "   8.99031833e-02 1.05436750e-01 8.36259201e-02 7.20289424e-02\n",
      "   6.90499023e-02 5.53250350e-02 5.98999895e-02 4.41536345e-02\n",
      "   2.86200661e-02 2.10660715e-02 1.74486656e-02 1.35120759e-02\n",
      "   4.89413785e-03 1.59591448e-03 2.12788596e-04 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00\n",
      "   0.00000000e+00 0.00000000e+00 0.00000000e+00 0.00000000e+00]]]\n",
      "Train on 10000 samples, validate on 11426 samples\n",
      "Epoch 1/80\n",
      "10000/10000 [==============================] - 1s 108us/step - loss: 0.0283 - mae: 0.1052 - mse: 0.0283 - val_loss: 0.0065 - val_mae: 0.0567 - val_mse: 0.0065\n",
      "Epoch 2/80\n",
      "10000/10000 [==============================] - 0s 50us/step - loss: 0.0082 - mae: 0.0609 - mse: 0.0082 - val_loss: 0.0047 - val_mae: 0.0477 - val_mse: 0.0047\n",
      "Epoch 3/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0072 - mae: 0.0556 - mse: 0.0072 - val_loss: 0.0046 - val_mae: 0.0460 - val_mse: 0.0046\n",
      "Epoch 4/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0067 - mae: 0.0517 - mse: 0.0067 - val_loss: 0.0042 - val_mae: 0.0422 - val_mse: 0.0042\n",
      "Epoch 5/80\n",
      "10000/10000 [==============================] - 0s 50us/step - loss: 0.0064 - mae: 0.0491 - mse: 0.0064 - val_loss: 0.0040 - val_mae: 0.0397 - val_mse: 0.0040\n",
      "Epoch 6/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0062 - mae: 0.0476 - mse: 0.0062 - val_loss: 0.0039 - val_mae: 0.0390 - val_mse: 0.0039\n",
      "Epoch 7/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0061 - mae: 0.0465 - mse: 0.0061 - val_loss: 0.0039 - val_mae: 0.0379 - val_mse: 0.0039\n",
      "Epoch 8/80\n",
      "10000/10000 [==============================] - 0s 45us/step - loss: 0.0060 - mae: 0.0457 - mse: 0.0060 - val_loss: 0.0037 - val_mae: 0.0368 - val_mse: 0.0037\n",
      "Epoch 9/80\n",
      "10000/10000 [==============================] - 0s 48us/step - loss: 0.0059 - mae: 0.0449 - mse: 0.0059 - val_loss: 0.0037 - val_mae: 0.0361 - val_mse: 0.0037\n",
      "Epoch 10/80\n",
      "10000/10000 [==============================] - 0s 48us/step - loss: 0.0059 - mae: 0.0448 - mse: 0.0059 - val_loss: 0.0036 - val_mae: 0.0355 - val_mse: 0.0036\n",
      "Epoch 11/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0058 - mae: 0.0441 - mse: 0.0058 - val_loss: 0.0037 - val_mae: 0.0354 - val_mse: 0.0037\n",
      "Epoch 12/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0057 - mae: 0.0432 - mse: 0.0057 - val_loss: 0.0038 - val_mae: 0.0360 - val_mse: 0.0038\n",
      "Epoch 13/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0057 - mae: 0.0431 - mse: 0.0057 - val_loss: 0.0038 - val_mae: 0.0359 - val_mse: 0.0038\n",
      "Epoch 14/80\n",
      "10000/10000 [==============================] - 0s 48us/step - loss: 0.0057 - mae: 0.0429 - mse: 0.0057 - val_loss: 0.0037 - val_mae: 0.0352 - val_mse: 0.0037\n",
      "Epoch 15/80\n",
      "10000/10000 [==============================] - 0s 50us/step - loss: 0.0057 - mae: 0.0426 - mse: 0.0057 - val_loss: 0.0037 - val_mae: 0.0353 - val_mse: 0.0037\n",
      "Epoch 16/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0056 - mae: 0.0424 - mse: 0.0056 - val_loss: 0.0037 - val_mae: 0.0352 - val_mse: 0.0037\n",
      "Epoch 17/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0056 - mae: 0.0420 - mse: 0.0056 - val_loss: 0.0037 - val_mae: 0.0354 - val_mse: 0.0037\n",
      "Epoch 18/80\n",
      "10000/10000 [==============================] - 0s 50us/step - loss: 0.0056 - mae: 0.0420 - mse: 0.0056 - val_loss: 0.0036 - val_mae: 0.0345 - val_mse: 0.0036\n",
      "Epoch 19/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0056 - mae: 0.0419 - mse: 0.0056 - val_loss: 0.0042 - val_mae: 0.0383 - val_mse: 0.0042\n",
      "Epoch 20/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0055 - mae: 0.0416 - mse: 0.0055 - val_loss: 0.0037 - val_mae: 0.0348 - val_mse: 0.0037\n",
      "Epoch 21/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0055 - mae: 0.0414 - mse: 0.0055 - val_loss: 0.0036 - val_mae: 0.0341 - val_mse: 0.0036\n",
      "Epoch 22/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0055 - mae: 0.0412 - mse: 0.0055 - val_loss: 0.0036 - val_mae: 0.0342 - val_mse: 0.0036\n",
      "Epoch 23/80\n",
      "10000/10000 [==============================] - 0s 50us/step - loss: 0.0055 - mae: 0.0413 - mse: 0.0055 - val_loss: 0.0036 - val_mae: 0.0343 - val_mse: 0.0036\n",
      "Epoch 24/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0054 - mae: 0.0410 - mse: 0.0054 - val_loss: 0.0037 - val_mae: 0.0346 - val_mse: 0.0037\n",
      "Epoch 25/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0054 - mae: 0.0411 - mse: 0.0054 - val_loss: 0.0038 - val_mae: 0.0353 - val_mse: 0.0038\n",
      "Epoch 26/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0054 - mae: 0.0408 - mse: 0.0054 - val_loss: 0.0038 - val_mae: 0.0356 - val_mse: 0.0038\n",
      "Epoch 27/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0055 - mae: 0.0410 - mse: 0.0055 - val_loss: 0.0037 - val_mae: 0.0347 - val_mse: 0.0037\n",
      "Epoch 28/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0054 - mae: 0.0405 - mse: 0.0054 - val_loss: 0.0037 - val_mae: 0.0341 - val_mse: 0.0037\n",
      "Epoch 29/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0054 - mae: 0.0405 - mse: 0.0054 - val_loss: 0.0038 - val_mae: 0.0351 - val_mse: 0.0038\n",
      "Epoch 30/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0054 - mae: 0.0405 - mse: 0.0054 - val_loss: 0.0038 - val_mae: 0.0353 - val_mse: 0.0038\n",
      "Epoch 31/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0054 - mae: 0.0404 - mse: 0.0054 - val_loss: 0.0037 - val_mae: 0.0343 - val_mse: 0.0037\n",
      "Epoch 32/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0053 - mae: 0.0400 - mse: 0.0053 - val_loss: 0.0037 - val_mae: 0.0341 - val_mse: 0.0037\n",
      "Epoch 33/80\n",
      "10000/10000 [==============================] - 1s 52us/step - loss: 0.0054 - mae: 0.0406 - mse: 0.0054 - val_loss: 0.0038 - val_mae: 0.0343 - val_mse: 0.0038\n",
      "Epoch 34/80\n",
      "10000/10000 [==============================] - 1s 52us/step - loss: 0.0053 - mae: 0.0401 - mse: 0.0053 - val_loss: 0.0036 - val_mae: 0.0335 - val_mse: 0.0036\n",
      "Epoch 35/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0053 - mae: 0.0401 - mse: 0.0053 - val_loss: 0.0037 - val_mae: 0.0344 - val_mse: 0.0037\n",
      "Epoch 36/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0053 - mae: 0.0399 - mse: 0.0053 - val_loss: 0.0040 - val_mae: 0.0358 - val_mse: 0.0040\n",
      "Epoch 37/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0053 - mae: 0.0399 - mse: 0.0053 - val_loss: 0.0038 - val_mae: 0.0345 - val_mse: 0.0038\n",
      "Epoch 38/80\n",
      "10000/10000 [==============================] - 0s 50us/step - loss: 0.0053 - mae: 0.0402 - mse: 0.0053 - val_loss: 0.0038 - val_mae: 0.0352 - val_mse: 0.0038\n",
      "Epoch 39/80\n",
      "10000/10000 [==============================] - 1s 52us/step - loss: 0.0053 - mae: 0.0398 - mse: 0.0053 - val_loss: 0.0040 - val_mae: 0.0358 - val_mse: 0.0040\n",
      "Epoch 40/80\n",
      "10000/10000 [==============================] - 0s 48us/step - loss: 0.0052 - mae: 0.0397 - mse: 0.0052 - val_loss: 0.0038 - val_mae: 0.0344 - val_mse: 0.0038\n",
      "Epoch 41/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0052 - mae: 0.0395 - mse: 0.0052 - val_loss: 0.0038 - val_mae: 0.0343 - val_mse: 0.0038\n",
      "Epoch 42/80\n",
      "10000/10000 [==============================] - 0s 48us/step - loss: 0.0052 - mae: 0.0394 - mse: 0.0052 - val_loss: 0.0040 - val_mae: 0.0353 - val_mse: 0.0040\n",
      "Epoch 43/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0053 - mae: 0.0398 - mse: 0.0053 - val_loss: 0.0036 - val_mae: 0.0331 - val_mse: 0.0036\n",
      "Epoch 44/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0052 - mae: 0.0395 - mse: 0.0052 - val_loss: 0.0038 - val_mae: 0.0349 - val_mse: 0.0038\n",
      "Epoch 45/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0052 - mae: 0.0393 - mse: 0.0052 - val_loss: 0.0038 - val_mae: 0.0342 - val_mse: 0.0038\n",
      "Epoch 46/80\n",
      "10000/10000 [==============================] - 0s 48us/step - loss: 0.0052 - mae: 0.0393 - mse: 0.0052 - val_loss: 0.0039 - val_mae: 0.0349 - val_mse: 0.0039\n",
      "Epoch 47/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0051 - mae: 0.0392 - mse: 0.0051 - val_loss: 0.0039 - val_mae: 0.0347 - val_mse: 0.0039\n",
      "Epoch 48/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0051 - mae: 0.0392 - mse: 0.0051 - val_loss: 0.0036 - val_mae: 0.0330 - val_mse: 0.0036\n",
      "Epoch 49/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0051 - mae: 0.0391 - mse: 0.0051 - val_loss: 0.0037 - val_mae: 0.0336 - val_mse: 0.0037\n",
      "Epoch 50/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0051 - mae: 0.0390 - mse: 0.0051 - val_loss: 0.0037 - val_mae: 0.0342 - val_mse: 0.0037\n",
      "Epoch 51/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0051 - mae: 0.0392 - mse: 0.0051 - val_loss: 0.0037 - val_mae: 0.0338 - val_mse: 0.0037\n",
      "Epoch 52/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0051 - mae: 0.0388 - mse: 0.0051 - val_loss: 0.0039 - val_mae: 0.0352 - val_mse: 0.0039\n",
      "Epoch 53/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0051 - mae: 0.0390 - mse: 0.0051 - val_loss: 0.0038 - val_mae: 0.0344 - val_mse: 0.0038\n",
      "Epoch 54/80\n",
      "10000/10000 [==============================] - 1s 52us/step - loss: 0.0050 - mae: 0.0389 - mse: 0.0050 - val_loss: 0.0037 - val_mae: 0.0337 - val_mse: 0.0037\n",
      "Epoch 55/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0050 - mae: 0.0387 - mse: 0.0050 - val_loss: 0.0041 - val_mae: 0.0362 - val_mse: 0.0041\n",
      "Epoch 56/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0050 - mae: 0.0386 - mse: 0.0050 - val_loss: 0.0038 - val_mae: 0.0341 - val_mse: 0.0038\n",
      "Epoch 57/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0050 - mae: 0.0387 - mse: 0.0050 - val_loss: 0.0039 - val_mae: 0.0349 - val_mse: 0.0039\n",
      "Epoch 58/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0050 - mae: 0.0388 - mse: 0.0050 - val_loss: 0.0039 - val_mae: 0.0346 - val_mse: 0.0039\n",
      "Epoch 59/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0050 - mae: 0.0385 - mse: 0.0050 - val_loss: 0.0040 - val_mae: 0.0360 - val_mse: 0.0040\n",
      "Epoch 60/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0050 - mae: 0.0388 - mse: 0.0050 - val_loss: 0.0038 - val_mae: 0.0343 - val_mse: 0.0038\n",
      "Epoch 61/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0050 - mae: 0.0384 - mse: 0.0050 - val_loss: 0.0037 - val_mae: 0.0333 - val_mse: 0.0037\n",
      "Epoch 62/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0050 - mae: 0.0387 - mse: 0.0050 - val_loss: 0.0037 - val_mae: 0.0333 - val_mse: 0.0037\n",
      "Epoch 63/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0049 - mae: 0.0382 - mse: 0.0049 - val_loss: 0.0038 - val_mae: 0.0343 - val_mse: 0.0038\n",
      "Epoch 64/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0049 - mae: 0.0384 - mse: 0.0049 - val_loss: 0.0041 - val_mae: 0.0365 - val_mse: 0.0041\n",
      "Epoch 65/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0049 - mae: 0.0384 - mse: 0.0049 - val_loss: 0.0038 - val_mae: 0.0344 - val_mse: 0.0038\n",
      "Epoch 66/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0049 - mae: 0.0382 - mse: 0.0049 - val_loss: 0.0039 - val_mae: 0.0352 - val_mse: 0.0039\n",
      "Epoch 67/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0049 - mae: 0.0379 - mse: 0.0049 - val_loss: 0.0038 - val_mae: 0.0344 - val_mse: 0.0038\n",
      "Epoch 68/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0049 - mae: 0.0381 - mse: 0.0049 - val_loss: 0.0040 - val_mae: 0.0350 - val_mse: 0.0040\n",
      "Epoch 69/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0048 - mae: 0.0380 - mse: 0.0048 - val_loss: 0.0041 - val_mae: 0.0361 - val_mse: 0.0041\n",
      "Epoch 70/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0049 - mae: 0.0380 - mse: 0.0049 - val_loss: 0.0042 - val_mae: 0.0363 - val_mse: 0.0042\n",
      "Epoch 71/80\n",
      "10000/10000 [==============================] - 0s 47us/step - loss: 0.0048 - mae: 0.0381 - mse: 0.0048 - val_loss: 0.0038 - val_mae: 0.0339 - val_mse: 0.0038\n",
      "Epoch 72/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0048 - mae: 0.0380 - mse: 0.0048 - val_loss: 0.0038 - val_mae: 0.0334 - val_mse: 0.0038\n",
      "Epoch 73/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0048 - mae: 0.0378 - mse: 0.0048 - val_loss: 0.0041 - val_mae: 0.0355 - val_mse: 0.0041\n",
      "Epoch 74/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0048 - mae: 0.0376 - mse: 0.0048 - val_loss: 0.0039 - val_mae: 0.0342 - val_mse: 0.0039\n",
      "Epoch 75/80\n",
      "10000/10000 [==============================] - 0s 47us/step - loss: 0.0047 - mae: 0.0375 - mse: 0.0047 - val_loss: 0.0038 - val_mae: 0.0340 - val_mse: 0.0038\n",
      "Epoch 76/80\n",
      "10000/10000 [==============================] - 0s 49us/step - loss: 0.0048 - mae: 0.0377 - mse: 0.0048 - val_loss: 0.0040 - val_mae: 0.0349 - val_mse: 0.0040\n",
      "Epoch 77/80\n",
      "10000/10000 [==============================] - 0s 48us/step - loss: 0.0048 - mae: 0.0376 - mse: 0.0048 - val_loss: 0.0040 - val_mae: 0.0353 - val_mse: 0.0040\n",
      "Epoch 78/80\n",
      "10000/10000 [==============================] - 0s 48us/step - loss: 0.0047 - mae: 0.0375 - mse: 0.0047 - val_loss: 0.0038 - val_mae: 0.0336 - val_mse: 0.0038\n",
      "Epoch 79/80\n",
      "10000/10000 [==============================] - 1s 51us/step - loss: 0.0047 - mae: 0.0374 - mse: 0.0047 - val_loss: 0.0039 - val_mae: 0.0350 - val_mse: 0.0039\n",
      "Epoch 80/80\n",
      "10000/10000 [==============================] - 1s 50us/step - loss: 0.0047 - mae: 0.0371 - mse: 0.0047 - val_loss: 0.0040 - val_mae: 0.0347 - val_mse: 0.0040\n",
      "test predict: [[108.708046 ]\n",
      " [ 66.752304 ]\n",
      " [-20.90021  ]\n",
      " [-18.539495 ]\n",
      " [-31.803709 ]\n",
      " [ -7.889705 ]\n",
      " [-32.109943 ]\n",
      " [-14.746147 ]\n",
      " [-21.08249  ]\n",
      " [-39.467365 ]\n",
      " [-39.70882  ]\n",
      " [-40.098946 ]\n",
      " [-24.246567 ]\n",
      " [ -2.6890054]\n",
      " [-40.89765  ]\n",
      " [-47.95469  ]\n",
      " [-75.31008  ]\n",
      " [ 25.392996 ]\n",
      " [-41.55868  ]\n",
      " [ 24.469255 ]\n",
      " [ 47.70784  ]\n",
      " [-13.3379545]\n",
      " [  1.910469 ]\n",
      " [-16.010958 ]\n",
      " [ 21.400068 ]\n",
      " [ 58.409485 ]\n",
      " [ -3.110644 ]\n",
      " [ 27.423597 ]\n",
      " [ 37.860676 ]\n",
      " [ 22.77927  ]\n",
      " [-25.411833 ]\n",
      " [-14.130462 ]\n",
      " [ 35.918064 ]\n",
      " [-15.89128  ]\n",
      " [ 33.489388 ]\n",
      " [-49.667084 ]\n",
      " [ 37.50357  ]\n",
      " [-40.332    ]\n",
      " [-48.873104 ]\n",
      " [ 11.687954 ]\n",
      " [-36.22955  ]\n",
      " [-34.537674 ]\n",
      " [-30.442295 ]\n",
      " [-21.514845 ]\n",
      " [ 42.199787 ]\n",
      " [ 30.498808 ]\n",
      " [ 91.67255  ]\n",
      " [185.60054  ]]\n"
     ]
    }
   ],
   "source": [
    "# Train the illumination model and save it to disk.\n",
    "# NOTE: the original comment said \"humidity\", but the column trained here is 'illumination'.\n",
    "p = r'110553.csv'\n",
    "df = pd.read_csv(p)\n",
    "\n",
    "column = 'illumination'\n",
    "\n",
    "t = pd.DataFrame(df[column])\n",
    "mod_name = './result/' + column + '_model.h5'\n",
    "# Fixed typo: extension was '.pk1' (digit one); scalers elsewhere in this notebook use '.pkl'.\n",
    "sca_name = './result/' + column + '_scaler.pkl'\n",
    "# look_back=240: use 240 history steps per sample; pred_length=48: forecast 48 steps ahead.\n",
    "initiation = RNNModel(t,look_back=240, pred_length=48, epochs=80, batch_size=128, verbose=1, patience=10,model_name=mod_name,scaler_name=sca_name)\n",
    "illum_preds = initiation.run()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'RNNModel' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-2-79ffdd99cc10>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      7\u001b[0m \u001b[0;31m# epochs 训练的次数\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      8\u001b[0m \u001b[0;31m# batch_size 每个批次的样本数(从源数据中分批获取)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 9\u001b[0;31m \u001b[0minitiation\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mRNNModel\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mlook_back\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m50\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpred_length\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m5\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mepochs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m60\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mbatch_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m64\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mverbose\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mpatience\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m10\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mmodel_name\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'stem_model.h5'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mscaler_name\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'stem_s.pkl'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     10\u001b[0m \u001b[0mstem_preds\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0minitiation\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     11\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstem_preds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mNameError\u001b[0m: name 'RNNModel' is not defined"
     ]
    }
   ],
   "source": [
    "# Train the 'stem' model and save it to disk.\n",
    "p = r'stem_df.csv'\n",
    "df = pd.read_csv(p)\n",
    "t = pd.DataFrame(df['number'])\n",
    "# look_back: number of history items per training sample\n",
    "# pred_length: number of items to predict\n",
    "# epochs: number of training passes\n",
    "# batch_size: samples per batch (drawn from the source data)\n",
    "initiation = RNNModel(t,look_back=50, pred_length=5, epochs=60, batch_size=64, verbose=1, patience=10,model_name='stem_model.h5',scaler_name='stem_s.pkl')\n",
    "stem_preds = initiation.run()\n",
    "print(stem_preds)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pickle\n",
    "\n",
    "# Load the fitted humidity scaler from disk.\n",
    "# NOTE(review): 'humd_s.pkl' must have been written by an earlier training run;\n",
    "# the training cells in view write under './result/' or 'stem_s.pkl' — confirm the path.\n",
    "# Caution: only unpickle files you trust (pickle.load can execute arbitrary code).\n",
    "with open('humd_s.pkl', 'rb') as fr:\n",
    "    humd_s = pickle.load(fr)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Scale a single raw reading (86.2) into the model's normalized input range.\n",
    "humd_s.transform([[86.2]])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
