{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "import numpy as np\n",
    "from joblib import dump, load, Parallel, delayed\n",
    "from sklearn import preprocessing\n",
    "import xlearn\n",
    "import time\n",
    "# time.clock() was deprecated in 3.3 and removed in Python 3.8;\n",
    "# perf_counter() is the portable high-resolution replacement\n",
    "start = time.perf_counter()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load the raw training data (CSV content; file is named 'train' with no extension)\n",
    "train_org = pd.read_csv(\"train\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Feature engineering on the raw data\n",
    "# Step 1: drop 'id' (a per-row identifier with no predictive value)\n",
    "train_org.drop('id', axis = 1, inplace=True )"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Work on an explicit copy: later cells mutate `train` in place\n",
    "# (e.g. `train.hour = hr`), and a bare assignment would silently\n",
    "# propagate those edits back into train_org\n",
    "train = train_org.copy()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Downsample to 10% of the rows to keep computation tractable\n",
    "# (test_size=0.9 discards 90% into the *_dump variables)\n",
    "from sklearn.model_selection import train_test_split\n",
    "X_choose, X_dump, Y_choose, Y_dump=train_test_split(train.drop('click',axis=1),train.loc[:,'click'],random_state=42, test_size=0.9)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(4042896, 22) (4042896,)\n"
     ]
    }
   ],
   "source": [
    "# sanity-check the sampled shapes\n",
    "print(X_choose.shape, Y_choose.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Rebuild a single DataFrame from the sampled split: features plus the click label\n",
    "train = X_choose.reset_index(drop=True)\n",
    "train['click'] = Y_choose.values.reshape(-1,1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Step 2: engineer features from the raw `hour` column\n",
    "# (assumes YYMMDDHH integer format -- TODO confirm against the data source)\n",
    "# day-of-week signal: day-of-month folded mod 7\n",
    "day = train.hour % 10000 // 100\n",
    "train['day'] = day % 7\n",
    "\n",
    "# cumulative hour index (day 21 maps to 0-23); used only to split the\n",
    "# validation set by time, and the column is dropped after that split\n",
    "hr = (day - 21) * 24 + train.hour % 100\n",
    "train.hour = hr\n",
    "\n",
    "# hour-of-day signal\n",
    "train['hour_range'] = hr % 24"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Step 3: process the site/app identifier columns\n",
    "# site_app flags the traffic source; NOTE(review): 'ecad2386' appears to be\n",
    "# a sentinel site_id marking app traffic -- confirm against the dataset docs\n",
    "train['site_app'] = 0\n",
    "train.loc[train['site_id'] == 'ecad2386', 'site_app'] = 1\n",
    "\n",
    "# concatenate the six site/app id strings into a single user_id key\n",
    "user_id_list = ['site_id', 'site_domain', 'site_category',\\\n",
    "                'app_id', 'app_domain', 'app_category',]\n",
    "train['user_id'] = ''\n",
    "for each_id in user_id_list:\n",
    "    train['user_id'] = train['user_id']+ train[each_id]                        "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Concatenate the three device columns into a single device key\n",
    "device_id_list = ['device_id','device_ip','device_model']\n",
    "train['device_new_id'] = ''\n",
    "for each_id in device_id_list:\n",
    "    train['device_new_id'] = train['device_new_id']+ train[each_id] "
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Drop the original columns now folded into user_id / device_new_id\n",
    "train = train.drop(['site_id', 'site_domain', 'site_category',\\\n",
    "                              'app_id', 'app_domain', 'app_category',\\\n",
    "                             'device_id','device_ip','device_model'], axis = 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "user_id device_new_id "
     ]
    }
   ],
   "source": [
    "# Label-encode the high-cardinality 'user_id' / 'device_new_id' strings to ints\n",
    "feature_list_B = ['user_id', 'device_new_id']\n",
    "# feature_list_B = ['site_id']\n",
    "for each_feature in feature_list_B:\n",
    "    print(each_feature+\" \", end='')\n",
    "    le = preprocessing.LabelEncoder()    \n",
    "    le_data = le.fit_transform(train[each_feature].values.ravel())    \n",
    "    train[each_feature] = le_data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Time-based split: first 8 days (hour index 0-191) become training data,\n",
    "# days 9-10 become validation data; the helper 'hour' column is then dropped\n",
    "period = 192\n",
    "train_data = train.loc[(train.hour>=0) & (train.hour<=period-1)]\n",
    "train_data = train_data.drop('hour',axis = 1)\n",
    "valid_data = train.loc[(train.hour>=period)]\n",
    "valid_data = valid_data.drop('hour',axis = 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0    0\n",
       "1    0\n",
       "2    1\n",
       "3    0\n",
       "4    0\n",
       "Name: click, dtype: int64"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# peek at the label column\n",
    "train_data.loc[:,'click'].head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1619160, 17) (1619161, 17)\n"
     ]
    }
   ],
   "source": [
    "# Randomly split the training rows between two models: one half for xgboost,\n",
    "# the other for LR / FFM. (train_test_split was already imported above;\n",
    "# the re-import is redundant but harmless)\n",
    "from sklearn.model_selection import train_test_split\n",
    "X_xgb, X_ffm, Y_xgb, Y_ffm=train_test_split(train_data.drop('click',axis=1),train_data.loc[:,'click'],random_state=42, test_size=0.5)\n",
    "print(X_xgb.shape, X_ffm.shape)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Separate validation features from the click label\n",
    "X_valid = valid_data.drop('click',axis = 1)\n",
    "Y_valid = valid_data.loc[:,'click'].values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "array([0, 0, 1, ..., 0, 0, 0])"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# inspect the FFM-half labels as a raw array\n",
    "Y_ffm.values"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[0]\ttrain-logloss:0.443174\tvalid-logloss:0.435516\n",
      "[1]\ttrain-logloss:0.429964\tvalid-logloss:0.424743\n",
      "[2]\ttrain-logloss:0.426017\tvalid-logloss:0.42122\n",
      "[3]\ttrain-logloss:0.42293\tvalid-logloss:0.414241\n",
      "[4]\ttrain-logloss:0.420898\tvalid-logloss:0.413507\n",
      "[5]\ttrain-logloss:0.419516\tvalid-logloss:0.412236\n",
      "[6]\ttrain-logloss:0.418648\tvalid-logloss:0.411905\n",
      "[7]\ttrain-logloss:0.417847\tvalid-logloss:0.411199\n",
      "[8]\ttrain-logloss:0.417397\tvalid-logloss:0.410508\n",
      "[9]\ttrain-logloss:0.416515\tvalid-logloss:0.409725\n"
     ]
    }
   ],
   "source": [
    "# 进行xgboost训练\n",
    "import xgboost as xgb\n",
    "valid_dm = xgb.DMatrix(X_valid.values, label = Y_valid.reshape(-1,1))\n",
    "xgb_dm = xgb.DMatrix(X_xgb.values, label = Y_xgb.values.reshape(-1,1))\n",
    "param = {'max_depth':4, 'eta':1, 'objective':'binary:logistic', 'verbose':0,\n",
    "            'subsample':1, 'min_child_weight':50, 'gamma':0,\n",
    "            'colsample_bytree':1, 'seed': 999}\n",
    "plst = list(param.items()) + [('eval_metric', 'logloss')]\n",
    "watchlist = [(xgb_dm, 'train'),(valid_dm,'valid')]\n",
    "xgb_model = None\n",
    "xgb1 = xgb.train(plst,xgb_dm, 10, watchlist, xgb_model = xgb_model)\n",
    "#     Y_pred = xgb1.predict(test_X_dm)\n",
    "xgb1.save_model('xgb.model')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "All finished\n"
     ]
    }
   ],
   "source": [
    "# pred_leaf=True returns the leaf index per boosting round (one column per\n",
    "# tree) instead of probabilities -- these become new categorical features\n",
    "ffm_dm = xgb.DMatrix(X_ffm.values, label=Y_ffm.values.reshape(-1,1))\n",
    "ffm_new_feature = xgb1.predict(ffm_dm,pred_leaf=True)\n",
    "valid_new_feature = xgb1.predict(valid_dm,pred_leaf=True)\n",
    "# test_new_feature = xgb1.predict(test_X_dm,pred_leaf=True)\n",
    "# valid_X_dm = xgb.DMatrix(X_valid.values, label = Y_valid.values.reshape(-1,1))\n",
    "# train_new_feature = np.concatenate((train_new_feature, xgb1.predict(valid_X_dm,pred_leaf=True)), axis = 0)\n",
    "print(\"All finished\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "ffm:  (1619161, 10) valid:  (804575, 10)\n"
     ]
    }
   ],
   "source": [
    "# sanity-check the leaf-feature shapes (10 columns = 10 boosting rounds)\n",
    "print('ffm: ', ffm_new_feature.shape,'valid: ',valid_new_feature.shape)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 为新特征添加特征名\n",
    "\n",
    "column_list = []\n",
    "for i in range(ffm_new_feature.shape[1]):\n",
    "    column_list.append('nf'+'_'+str(i))\n",
    "ffm_mx = pd.DataFrame(ffm_new_feature, columns=column_list)\n",
    "valid_mx = pd.DataFrame(valid_new_feature, columns=column_list)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reset indices first so the column-wise concat aligns rows positionally\n",
    "# (mismatched indices would introduce NaN rows), then append the new leaf\n",
    "# features and re-attach the click label\n",
    "X_ffm.reset_index(drop=True, inplace=True)\n",
    "ffm_mx.reset_index(drop=True, inplace=True)\n",
    "X_valid.reset_index(drop=True,inplace=True)\n",
    "valid_mx.reset_index(drop=True, inplace=True)\n",
    "ffm_data = pd.concat([X_ffm,ffm_mx],axis=1 )\n",
    "valid_ffm_data = pd.concat([X_valid,valid_mx],axis=1)\n",
    "ffm_data['click'] = Y_ffm.values\n",
    "valid_ffm_data['click'] = Y_valid"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(1619161, 28) (804575, 28)\n"
     ]
    }
   ],
   "source": [
    "# verify both frames now carry the 17 base + 10 leaf + 1 label columns\n",
    "print(ffm_data.shape, valid_ffm_data.shape)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "0 C1\n",
      "1 banner_pos\n",
      "2 device_type\n",
      "3 device_conn_type\n",
      "4 C14\n",
      "5 C15\n",
      "6 C16\n",
      "7 C17\n",
      "8 C18\n",
      "9 C19\n",
      "10 C20\n",
      "11 C21\n",
      "12 day\n",
      "13 hour_range\n",
      "14 site_app\n",
      "15 user_id\n",
      "16 device_new_id\n",
      "17 nf_0\n",
      "18 nf_1\n",
      "19 nf_2\n",
      "20 nf_3\n",
      "21 nf_4\n",
      "22 nf_5\n",
      "23 nf_6\n",
      "24 nf_7\n",
      "25 nf_8\n",
      "26 nf_9\n",
      "27 click\n"
     ]
    }
   ],
   "source": [
    "# list the final column order with indices\n",
    "for i,n in enumerate(ffm_data.columns):\n",
    "    print(i,n)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Drop ['user_id', 'device_new_id']: far too many distinct values to one-hot encode\n",
    "feature_list_B = ['user_id', 'device_new_id']\n",
    "ffm_data = ffm_data.drop(feature_list_B, axis = 1)\n",
    "valid_ffm_data = valid_ffm_data.drop(feature_list_B, axis = 1)\n",
    "\n",
    "# stack train + valid so the encoder fitted below sees every category value\n",
    "data_fit = pd.concat([ffm_data, valid_ffm_data], axis = 0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "(2423736, 26)"
      ]
     },
     "execution_count": 26,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# combined frame used only for fitting the encoder\n",
    "data_fit.shape"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Map the -1 sentinel to 0: the legacy OneHotEncoder used below requires\n",
    "# non-negative integer inputs (NOTE(review): this merges -1 with the real\n",
    "# category 0 -- confirm that is intended)\n",
    "data_fit = data_fit.replace(-1, 0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "OneHotEncoder(categorical_features='all', dtype=<class 'numpy.float64'>,\n",
       "       handle_unknown='error', n_values='auto', sparse=True)"
      ]
     },
     "execution_count": 28,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Fit a one-hot encoder on train+valid combined so every category value is\n",
    "# known at transform time. NOTE(review): the recorded output shows the\n",
    "# pre-0.20 sklearn API ('categorical_features'/'n_values'), which was removed\n",
    "# in later sklearn versions -- this cell needs updating to run on modern sklearn\n",
    "from sklearn.preprocessing import OneHotEncoder\n",
    "enc = OneHotEncoder()\n",
    "enc.fit(data_fit.drop('click',axis = 1))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [],
   "source": [
    "# apply the same -1 -> 0 sentinel mapping that was used before fitting the encoder\n",
    "ffm_data = ffm_data.replace(-1,0)\n",
    "valid_ffm_data = valid_ffm_data.replace(-1,0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [],
   "source": [
    "# .toarray() densifies the sparse one-hot matrix (~1.6M rows x 3331 cols of\n",
    "# float64), which is very memory hungry; presumably required because the TF\n",
    "# placeholder below is fed dense arrays -- consider batched conversion instead\n",
    "ffm_ftrl = enc.transform(ffm_data.drop('click',axis = 1)).toarray()\n",
    "valid_ftrl = enc.transform(valid_ffm_data.drop('click',axis = 1)).toarray()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "3331\n"
     ]
    }
   ],
   "source": [
    "# total one-hot feature width, reused as the model's input dimension\n",
    "dimension = ffm_ftrl.shape[1]\n",
    "print(dimension)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 41,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Logistic regression trained with FTRL, built with the TF1 graph API\n",
    "# (import kept here to match the notebook's existing cell-local import style)\n",
    "import tensorflow as tf\n",
    "# placeholders for the input features and binary labels\n",
    "x = tf.placeholder('float',[None,dimension])\n",
    "y_ = tf.placeholder('float',[None,1])\n",
    "\n",
    "# weights W and bias b\n",
    "W = tf.Variable(tf.random_uniform([dimension,1],-1,1))\n",
    "b = tf.Variable(tf.zeros([1]))\n",
    "y = tf.sigmoid(tf.matmul(x, W) + b)\n",
    "# clipping y to avoid log(y) become infinite\n",
    "y = tf.clip_by_value(y, 1e-10, 1-1e-10)\n",
    "\n",
    "# cross-entropy loss, summed (not averaged) over the batch\n",
    "# NOTE(review): 'train' here shadows the earlier DataFrame of the same name\n",
    "# loss = tf.losses.log_loss(predictions = y_, labels = y)\n",
    "loss = (-tf.matmul(tf.transpose(y_), tf.log(y)) - tf.matmul(tf.transpose(1-y_), tf.log(1-y)))\n",
    "train = tf.train.FtrlOptimizer(0.1, l1_regularization_strength=0.01, l2_regularization_strength=0.01).minimize(loss)\n",
    "# train = optimizer.minimize(loss)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 42,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 0000 train log loss: [[1.6960405]] valid log loss: [[1.870174]]\n",
      "epoch: 0001 train log loss: [[1.0277225]] valid log loss: [[1.1050777]]\n",
      "epoch: 0002 train log loss: [[0.77897424]] valid log loss: [[0.8065672]]\n",
      "epoch: 0003 train log loss: [[0.67473346]] valid log loss: [[0.67864466]]\n",
      "epoch: 0004 train log loss: [[0.62339455]] valid log loss: [[0.61551124]]\n",
      "epoch: 0005 train log loss: [[0.5939056]] valid log loss: [[0.58009267]]\n",
      "epoch: 0006 train log loss: [[0.57458645]] valid log loss: [[0.5580081]]\n",
      "epoch: 0007 train log loss: [[0.56057465]] valid log loss: [[0.5430199]]\n",
      "epoch: 0008 train log loss: [[0.54965186]] valid log loss: [[0.5321554]]\n",
      "epoch: 0009 train log loss: [[0.5407248]] valid log loss: [[0.5238688]]\n",
      "epoch: 0010 train log loss: [[0.533183]] valid log loss: [[0.51728594]]\n",
      "epoch: 0011 train log loss: [[0.52667916]] valid log loss: [[0.51188534]]\n",
      "epoch: 0012 train log loss: [[0.5209837]] valid log loss: [[0.5073303]]\n",
      "epoch: 0013 train log loss: [[0.51592803]] valid log loss: [[0.5034011]]\n",
      "epoch: 0014 train log loss: [[0.5114046]] valid log loss: [[0.49994552]]\n",
      "epoch: 0015 train log loss: [[0.507317]] valid log loss: [[0.49685895]]\n",
      "epoch: 0016 train log loss: [[0.5035974]] valid log loss: [[0.494065]]\n",
      "epoch: 0017 train log loss: [[0.500188]] valid log loss: [[0.4915052]]\n",
      "epoch: 0018 train log loss: [[0.49705234]] valid log loss: [[0.4891381]]\n",
      "epoch: 0019 train log loss: [[0.49414542]] valid log loss: [[0.48693827]]\n",
      "epoch: 0020 train log loss: [[0.4914398]] valid log loss: [[0.48487926]]\n",
      "epoch: 0021 train log loss: [[0.48891014]] valid log loss: [[0.48294458]]\n",
      "epoch: 0022 train log loss: [[0.4865381]] valid log loss: [[0.48111755]]\n",
      "epoch: 0023 train log loss: [[0.48430893]] valid log loss: [[0.47938988]]\n",
      "epoch: 0024 train log loss: [[0.48220143]] valid log loss: [[0.477752]]\n",
      "epoch: 0025 train log loss: [[0.48020804]] valid log loss: [[0.47619432]]\n",
      "epoch: 0026 train log loss: [[0.47831777]] valid log loss: [[0.47471303]]\n",
      "epoch: 0027 train log loss: [[0.47652757]] valid log loss: [[0.4732991]]\n",
      "epoch: 0028 train log loss: [[0.47481272]] valid log loss: [[0.47195032]]\n",
      "epoch: 0029 train log loss: [[0.47318363]] valid log loss: [[0.47065955]]\n",
      "epoch: 0030 train log loss: [[0.47162986]] valid log loss: [[0.46942958]]\n",
      "epoch: 0031 train log loss: [[0.47014064]] valid log loss: [[0.46824315]]\n",
      "epoch: 0032 train log loss: [[0.46871796]] valid log loss: [[0.46711174]]\n",
      "epoch: 0033 train log loss: [[0.4673522]] valid log loss: [[0.46602443]]\n",
      "epoch: 0034 train log loss: [[0.4660432]] valid log loss: [[0.46497908]]\n",
      "epoch: 0035 train log loss: [[0.4647844]] valid log loss: [[0.46397614]]\n",
      "epoch: 0036 train log loss: [[0.4635779]] valid log loss: [[0.46300793]]\n",
      "epoch: 0037 train log loss: [[0.46241564]] valid log loss: [[0.46207428]]\n",
      "epoch: 0038 train log loss: [[0.46129847]] valid log loss: [[0.46117726]]\n",
      "epoch: 0039 train log loss: [[0.46022364]] valid log loss: [[0.46031308]]\n",
      "epoch: 0040 train log loss: [[0.45918244]] valid log loss: [[0.45947874]]\n",
      "epoch: 0041 train log loss: [[0.4581832]] valid log loss: [[0.4586667]]\n",
      "epoch: 0042 train log loss: [[0.45721674]] valid log loss: [[0.45788947]]\n",
      "epoch: 0043 train log loss: [[0.4562836]] valid log loss: [[0.45713374]]\n",
      "epoch: 0044 train log loss: [[0.45538303]] valid log loss: [[0.45640168]]\n",
      "epoch: 0045 train log loss: [[0.45451173]] valid log loss: [[0.45569375]]\n",
      "epoch: 0046 train log loss: [[0.45367044]] valid log loss: [[0.45500848]]\n",
      "epoch: 0047 train log loss: [[0.4528519]] valid log loss: [[0.45434046]]\n",
      "epoch: 0048 train log loss: [[0.45206428]] valid log loss: [[0.4536947]]\n",
      "epoch: 0049 train log loss: [[0.45129672]] valid log loss: [[0.4530658]]\n",
      "epoch: 0050 train log loss: [[0.4505587]] valid log loss: [[0.4524585]]\n",
      "epoch: 0051 train log loss: [[0.4498425]] valid log loss: [[0.4518644]]\n",
      "epoch: 0052 train log loss: [[0.449146]] valid log loss: [[0.45128536]]\n",
      "epoch: 0053 train log loss: [[0.4484682]] valid log loss: [[0.45072448]]\n",
      "epoch: 0054 train log loss: [[0.44781286]] valid log loss: [[0.45017865]]\n",
      "epoch: 0055 train log loss: [[0.4471742]] valid log loss: [[0.44964427]]\n",
      "epoch: 0056 train log loss: [[0.44655806]] valid log loss: [[0.44912452]]\n",
      "epoch: 0057 train log loss: [[0.4459565]] valid log loss: [[0.44862092]]\n",
      "epoch: 0058 train log loss: [[0.4453735]] valid log loss: [[0.44812793]]\n",
      "epoch: 0059 train log loss: [[0.4448056]] valid log loss: [[0.44764605]]\n",
      "epoch: 0060 train log loss: [[0.44425288]] valid log loss: [[0.44717637]]\n",
      "epoch: 0061 train log loss: [[0.44371304]] valid log loss: [[0.44671446]]\n",
      "epoch: 0062 train log loss: [[0.44319284]] valid log loss: [[0.44626758]]\n",
      "epoch: 0063 train log loss: [[0.4426821]] valid log loss: [[0.44582918]]\n",
      "epoch: 0064 train log loss: [[0.44218612]] valid log loss: [[0.44539958]]\n",
      "epoch: 0065 train log loss: [[0.441703]] valid log loss: [[0.44497946]]\n",
      "epoch: 0066 train log loss: [[0.44123372]] valid log loss: [[0.44456905]]\n",
      "epoch: 0067 train log loss: [[0.4407734]] valid log loss: [[0.44416958]]\n",
      "epoch: 0068 train log loss: [[0.44032487]] valid log loss: [[0.44377613]]\n",
      "epoch: 0069 train log loss: [[0.43989164]] valid log loss: [[0.44338936]]\n",
      "epoch: 0070 train log loss: [[0.43946713]] valid log loss: [[0.44301462]]\n",
      "epoch: 0071 train log loss: [[0.43905053]] valid log loss: [[0.44264656]]\n",
      "epoch: 0072 train log loss: [[0.438644]] valid log loss: [[0.44228163]]\n",
      "epoch: 0073 train log loss: [[0.43824822]] valid log loss: [[0.44192848]]\n",
      "epoch: 0074 train log loss: [[0.4378616]] valid log loss: [[0.4415821]]\n",
      "epoch: 0075 train log loss: [[0.4374863]] valid log loss: [[0.44124085]]\n",
      "epoch: 0076 train log loss: [[0.43711975]] valid log loss: [[0.4409076]]\n",
      "epoch: 0077 train log loss: [[0.43676212]] valid log loss: [[0.44057956]]\n",
      "epoch: 0078 train log loss: [[0.43640587]] valid log loss: [[0.44025797]]\n",
      "epoch: 0079 train log loss: [[0.4360609]] valid log loss: [[0.43994296]]\n",
      "epoch: 0080 train log loss: [[0.43572453]] valid log loss: [[0.43963286]]\n",
      "epoch: 0081 train log loss: [[0.43539762]] valid log loss: [[0.43933174]]\n",
      "epoch: 0082 train log loss: [[0.43507817]] valid log loss: [[0.43903115]]\n",
      "epoch: 0083 train log loss: [[0.43476042]] valid log loss: [[0.43873832]]\n",
      "epoch: 0084 train log loss: [[0.43445364]] valid log loss: [[0.43844947]]\n",
      "epoch: 0085 train log loss: [[0.43415302]] valid log loss: [[0.43816793]]\n",
      "epoch: 0086 train log loss: [[0.4338574]] valid log loss: [[0.4378922]]\n",
      "epoch: 0087 train log loss: [[0.43356964]] valid log loss: [[0.4376203]]\n",
      "epoch: 0088 train log loss: [[0.43328834]] valid log loss: [[0.437351]]\n",
      "epoch: 0089 train log loss: [[0.4330097]] valid log loss: [[0.43708983]]\n",
      "epoch: 0090 train log loss: [[0.43274048]] valid log loss: [[0.43682963]]\n",
      "epoch: 0091 train log loss: [[0.4324776]] valid log loss: [[0.4365759]]\n",
      "epoch: 0092 train log loss: [[0.4322158]] valid log loss: [[0.43632847]]\n",
      "epoch: 0093 train log loss: [[0.43196175]] valid log loss: [[0.43608385]]\n",
      "epoch: 0094 train log loss: [[0.4317124]] valid log loss: [[0.4358402]]\n",
      "epoch: 0095 train log loss: [[0.4314684]] valid log loss: [[0.4356025]]\n",
      "epoch: 0096 train log loss: [[0.43122664]] valid log loss: [[0.43536842]]\n",
      "epoch: 0097 train log loss: [[0.4309932]] valid log loss: [[0.43513703]]\n",
      "epoch: 0098 train log loss: [[0.43076214]] valid log loss: [[0.43491244]]\n",
      "epoch: 0099 train log loss: [[0.4305399]] valid log loss: [[0.43469143]]\n",
      "epoch: 0100 train log loss: [[0.43031985]] valid log loss: [[0.4344737]]\n",
      "epoch: 0101 train log loss: [[0.4301007]] valid log loss: [[0.43425778]]\n",
      "epoch: 0102 train log loss: [[0.4298849]] valid log loss: [[0.43404597]]\n",
      "epoch: 0103 train log loss: [[0.42967877]] valid log loss: [[0.4338365]]\n",
      "epoch: 0104 train log loss: [[0.42947307]] valid log loss: [[0.43363327]]\n",
      "epoch: 0105 train log loss: [[0.42927113]] valid log loss: [[0.433434]]\n",
      "epoch: 0106 train log loss: [[0.42907098]] valid log loss: [[0.43323275]]\n",
      "epoch: 0107 train log loss: [[0.4288796]] valid log loss: [[0.43303692]]\n",
      "epoch: 0108 train log loss: [[0.42869163]] valid log loss: [[0.43284413]]\n",
      "epoch: 0109 train log loss: [[0.42850327]] valid log loss: [[0.43265572]]\n",
      "epoch: 0110 train log loss: [[0.42832094]] valid log loss: [[0.43247092]]\n",
      "epoch: 0111 train log loss: [[0.4281418]] valid log loss: [[0.43228534]]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 0112 train log loss: [[0.42796496]] valid log loss: [[0.43210545]]\n",
      "epoch: 0113 train log loss: [[0.42779115]] valid log loss: [[0.43192682]]\n",
      "epoch: 0114 train log loss: [[0.4276205]] valid log loss: [[0.43175012]]\n",
      "epoch: 0115 train log loss: [[0.4274504]] valid log loss: [[0.43157792]]\n",
      "epoch: 0116 train log loss: [[0.42728692]] valid log loss: [[0.43140924]]\n",
      "epoch: 0117 train log loss: [[0.42712572]] valid log loss: [[0.4312425]]\n",
      "epoch: 0118 train log loss: [[0.4269669]] valid log loss: [[0.43107727]]\n",
      "epoch: 0119 train log loss: [[0.42681128]] valid log loss: [[0.43091327]]\n",
      "epoch: 0120 train log loss: [[0.4266571]] valid log loss: [[0.4307533]]\n",
      "epoch: 0121 train log loss: [[0.42650637]] valid log loss: [[0.43059617]]\n",
      "epoch: 0122 train log loss: [[0.42635912]] valid log loss: [[0.4304394]]\n",
      "epoch: 0123 train log loss: [[0.4262116]] valid log loss: [[0.43028623]]\n",
      "epoch: 0124 train log loss: [[0.42606598]] valid log loss: [[0.43013555]]\n",
      "epoch: 0125 train log loss: [[0.42592433]] valid log loss: [[0.42998704]]\n",
      "epoch: 0126 train log loss: [[0.42578644]] valid log loss: [[0.42983958]]\n",
      "epoch: 0127 train log loss: [[0.42565045]] valid log loss: [[0.42969427]]\n",
      "epoch: 0128 train log loss: [[0.4255176]] valid log loss: [[0.4295526]]\n",
      "epoch: 0129 train log loss: [[0.42538568]] valid log loss: [[0.4294125]]\n",
      "epoch: 0130 train log loss: [[0.4252562]] valid log loss: [[0.42927518]]\n",
      "epoch: 0131 train log loss: [[0.4251265]] valid log loss: [[0.42913896]]\n",
      "epoch: 0132 train log loss: [[0.42500034]] valid log loss: [[0.42900294]]\n",
      "epoch: 0133 train log loss: [[0.4248766]] valid log loss: [[0.42886934]]\n",
      "epoch: 0134 train log loss: [[0.42475447]] valid log loss: [[0.42873806]]\n",
      "epoch: 0135 train log loss: [[0.42463562]] valid log loss: [[0.4286092]]\n",
      "epoch: 0136 train log loss: [[0.4245157]] valid log loss: [[0.42848265]]\n",
      "epoch: 0137 train log loss: [[0.4244016]] valid log loss: [[0.42835757]]\n",
      "epoch: 0138 train log loss: [[0.42428777]] valid log loss: [[0.42823118]]\n",
      "epoch: 0139 train log loss: [[0.42417285]] valid log loss: [[0.42810932]]\n",
      "epoch: 0140 train log loss: [[0.4240649]] valid log loss: [[0.4279897]]\n",
      "epoch: 0141 train log loss: [[0.42395452]] valid log loss: [[0.4278696]]\n",
      "epoch: 0142 train log loss: [[0.4238467]] valid log loss: [[0.42775398]]\n",
      "epoch: 0143 train log loss: [[0.42374206]] valid log loss: [[0.42763808]]\n",
      "epoch: 0144 train log loss: [[0.4236386]] valid log loss: [[0.42752376]]\n",
      "epoch: 0145 train log loss: [[0.4235344]] valid log loss: [[0.4274101]]\n",
      "epoch: 0146 train log loss: [[0.4234331]] valid log loss: [[0.42730018]]\n",
      "epoch: 0147 train log loss: [[0.42333275]] valid log loss: [[0.42718866]]\n",
      "epoch: 0148 train log loss: [[0.4232347]] valid log loss: [[0.42708173]]\n",
      "epoch: 0149 train log loss: [[0.42313865]] valid log loss: [[0.42697462]]\n",
      "epoch: 0150 train log loss: [[0.42304254]] valid log loss: [[0.42686883]]\n",
      "epoch: 0151 train log loss: [[0.42294884]] valid log loss: [[0.4267633]]\n",
      "epoch: 0152 train log loss: [[0.4228575]] valid log loss: [[0.42666158]]\n",
      "epoch: 0153 train log loss: [[0.42276645]] valid log loss: [[0.4265598]]\n",
      "epoch: 0154 train log loss: [[0.42267433]] valid log loss: [[0.42646077]]\n",
      "epoch: 0155 train log loss: [[0.4225856]] valid log loss: [[0.4263624]]\n",
      "epoch: 0156 train log loss: [[0.42249703]] valid log loss: [[0.4262637]]\n",
      "epoch: 0157 train log loss: [[0.4224108]] valid log loss: [[0.4261666]]\n",
      "epoch: 0158 train log loss: [[0.422327]] valid log loss: [[0.4260719]]\n",
      "epoch: 0159 train log loss: [[0.42224258]] valid log loss: [[0.4259783]]\n",
      "epoch: 0160 train log loss: [[0.4221608]] valid log loss: [[0.4258836]]\n",
      "epoch: 0161 train log loss: [[0.42207932]] valid log loss: [[0.42579275]]\n",
      "epoch: 0162 train log loss: [[0.42199957]] valid log loss: [[0.42570385]]\n",
      "epoch: 0163 train log loss: [[0.42192006]] valid log loss: [[0.42561382]]\n",
      "epoch: 0164 train log loss: [[0.4218394]] valid log loss: [[0.42552486]]\n",
      "epoch: 0165 train log loss: [[0.42176175]] valid log loss: [[0.42543736]]\n",
      "epoch: 0166 train log loss: [[0.42168403]] valid log loss: [[0.4253514]]\n",
      "epoch: 0167 train log loss: [[0.4216115]] valid log loss: [[0.42526487]]\n",
      "epoch: 0168 train log loss: [[0.42153853]] valid log loss: [[0.42518237]]\n",
      "epoch: 0169 train log loss: [[0.4214664]] valid log loss: [[0.42510003]]\n",
      "epoch: 0170 train log loss: [[0.42139477]] valid log loss: [[0.4250174]]\n",
      "epoch: 0171 train log loss: [[0.421322]] valid log loss: [[0.42493433]]\n",
      "epoch: 0172 train log loss: [[0.42125332]] valid log loss: [[0.42485514]]\n",
      "epoch: 0173 train log loss: [[0.42118424]] valid log loss: [[0.4247766]]\n",
      "epoch: 0174 train log loss: [[0.4211159]] valid log loss: [[0.42469698]]\n",
      "epoch: 0175 train log loss: [[0.42104533]] valid log loss: [[0.4246232]]\n",
      "epoch: 0176 train log loss: [[0.4209773]] valid log loss: [[0.42454684]]\n",
      "epoch: 0177 train log loss: [[0.42091227]] valid log loss: [[0.42446992]]\n",
      "epoch: 0178 train log loss: [[0.4208471]] valid log loss: [[0.42439726]]\n",
      "epoch: 0179 train log loss: [[0.42078274]] valid log loss: [[0.42432323]]\n",
      "epoch: 0180 train log loss: [[0.42071965]] valid log loss: [[0.42424804]]\n",
      "epoch: 0181 train log loss: [[0.42065802]] valid log loss: [[0.42417562]]\n",
      "epoch: 0182 train log loss: [[0.4205984]] valid log loss: [[0.42410678]]\n",
      "epoch: 0183 train log loss: [[0.42053622]] valid log loss: [[0.42403525]]\n",
      "epoch: 0184 train log loss: [[0.4204767]] valid log loss: [[0.42396724]]\n",
      "epoch: 0185 train log loss: [[0.4204167]] valid log loss: [[0.423898]]\n",
      "epoch: 0186 train log loss: [[0.42035654]] valid log loss: [[0.42382917]]\n",
      "epoch: 0187 train log loss: [[0.42029646]] valid log loss: [[0.42376113]]\n",
      "epoch: 0188 train log loss: [[0.42024136]] valid log loss: [[0.42369676]]\n",
      "epoch: 0189 train log loss: [[0.4201852]] valid log loss: [[0.42362893]]\n",
      "epoch: 0190 train log loss: [[0.42013007]] valid log loss: [[0.42356393]]\n",
      "epoch: 0191 train log loss: [[0.4200737]] valid log loss: [[0.42349768]]\n",
      "epoch: 0192 train log loss: [[0.420019]] valid log loss: [[0.42343494]]\n",
      "epoch: 0193 train log loss: [[0.41996503]] valid log loss: [[0.42337105]]\n",
      "epoch: 0194 train log loss: [[0.419911]] valid log loss: [[0.42330995]]\n",
      "epoch: 0195 train log loss: [[0.41985875]] valid log loss: [[0.4232488]]\n",
      "epoch: 0196 train log loss: [[0.41980603]] valid log loss: [[0.42318758]]\n",
      "epoch: 0197 train log loss: [[0.41975516]] valid log loss: [[0.42312747]]\n",
      "epoch: 0198 train log loss: [[0.41970435]] valid log loss: [[0.4230681]]\n",
      "epoch: 0199 train log loss: [[0.41965437]] valid log loss: [[0.4230106]]\n",
      "epoch: 0200 train log loss: [[0.4196022]] valid log loss: [[0.42295098]]\n",
      "epoch: 0201 train log loss: [[0.41955423]] valid log loss: [[0.42289382]]\n",
      "epoch: 0202 train log loss: [[0.41950515]] valid log loss: [[0.4228359]]\n",
      "epoch: 0203 train log loss: [[0.4194577]] valid log loss: [[0.42277974]]\n",
      "epoch: 0204 train log loss: [[0.41940987]] valid log loss: [[0.42272258]]\n",
      "epoch: 0205 train log loss: [[0.41936305]] valid log loss: [[0.42266715]]\n",
      "epoch: 0206 train log loss: [[0.41931665]] valid log loss: [[0.42261198]]\n",
      "epoch: 0207 train log loss: [[0.4192715]] valid log loss: [[0.4225582]]\n",
      "epoch: 0208 train log loss: [[0.4192253]] valid log loss: [[0.42250332]]\n",
      "epoch: 0209 train log loss: [[0.4191776]] valid log loss: [[0.42245236]]\n",
      "epoch: 0210 train log loss: [[0.41913396]] valid log loss: [[0.42240024]]\n",
      "epoch: 0211 train log loss: [[0.41909146]] valid log loss: [[0.42234802]]\n",
      "epoch: 0212 train log loss: [[0.41904852]] valid log loss: [[0.42229635]]\n",
      "epoch: 0213 train log loss: [[0.41900596]] valid log loss: [[0.42224658]]\n",
      "epoch: 0214 train log loss: [[0.41896144]] valid log loss: [[0.42219546]]\n",
      "epoch: 0215 train log loss: [[0.41891733]] valid log loss: [[0.42214397]]\n",
      "epoch: 0216 train log loss: [[0.41887698]] valid log loss: [[0.4220948]]\n",
      "epoch: 0217 train log loss: [[0.41883558]] valid log loss: [[0.42204705]]\n",
      "epoch: 0218 train log loss: [[0.41879177]] valid log loss: [[0.4219978]]\n",
      "epoch: 0219 train log loss: [[0.41875112]] valid log loss: [[0.4219497]]\n",
      "epoch: 0220 train log loss: [[0.41871035]] valid log loss: [[0.4219045]]\n",
      "epoch: 0221 train log loss: [[0.41867185]] valid log loss: [[0.4218569]]\n",
      "epoch: 0222 train log loss: [[0.4186329]] valid log loss: [[0.42180878]]\n",
      "epoch: 0223 train log loss: [[0.4185935]] valid log loss: [[0.42176148]]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 0224 train log loss: [[0.41855526]] valid log loss: [[0.42171755]]\n",
      "epoch: 0225 train log loss: [[0.41851875]] valid log loss: [[0.42167166]]\n",
      "epoch: 0226 train log loss: [[0.41847974]] valid log loss: [[0.42162675]]\n",
      "epoch: 0227 train log loss: [[0.41844094]] valid log loss: [[0.42158222]]\n",
      "epoch: 0228 train log loss: [[0.4184055]] valid log loss: [[0.4215388]]\n",
      "epoch: 0229 train log loss: [[0.41836855]] valid log loss: [[0.42149767]]\n",
      "epoch: 0230 train log loss: [[0.4183312]] valid log loss: [[0.42145348]]\n",
      "epoch: 0231 train log loss: [[0.4182945]] valid log loss: [[0.4214109]]\n",
      "epoch: 0232 train log loss: [[0.41825897]] valid log loss: [[0.42136624]]\n",
      "epoch: 0233 train log loss: [[0.41822478]] valid log loss: [[0.4213239]]\n",
      "epoch: 0234 train log loss: [[0.41819042]] valid log loss: [[0.42128456]]\n",
      "epoch: 0235 train log loss: [[0.41815665]] valid log loss: [[0.42124358]]\n",
      "epoch: 0236 train log loss: [[0.41812208]] valid log loss: [[0.42120287]]\n",
      "epoch: 0237 train log loss: [[0.4180878]] valid log loss: [[0.42116126]]\n",
      "epoch: 0238 train log loss: [[0.41805473]] valid log loss: [[0.42112133]]\n",
      "epoch: 0239 train log loss: [[0.4180222]] valid log loss: [[0.42108122]]\n",
      "epoch: 0240 train log loss: [[0.41798776]] valid log loss: [[0.42104098]]\n",
      "epoch: 0241 train log loss: [[0.41795385]] valid log loss: [[0.42100278]]\n",
      "epoch: 0242 train log loss: [[0.41792345]] valid log loss: [[0.4209649]]\n",
      "epoch: 0243 train log loss: [[0.41789195]] valid log loss: [[0.42092595]]\n",
      "epoch: 0244 train log loss: [[0.4178599]] valid log loss: [[0.42088866]]\n",
      "epoch: 0245 train log loss: [[0.41782886]] valid log loss: [[0.42085156]]\n",
      "epoch: 0246 train log loss: [[0.41779828]] valid log loss: [[0.42081523]]\n",
      "epoch: 0247 train log loss: [[0.41776758]] valid log loss: [[0.42077962]]\n",
      "epoch: 0248 train log loss: [[0.41773707]] valid log loss: [[0.42074063]]\n",
      "epoch: 0249 train log loss: [[0.41770646]] valid log loss: [[0.42070454]]\n",
      "epoch: 0250 train log loss: [[0.41767558]] valid log loss: [[0.42066908]]\n",
      "epoch: 0251 train log loss: [[0.4176453]] valid log loss: [[0.42063382]]\n",
      "epoch: 0252 train log loss: [[0.41761473]] valid log loss: [[0.42059648]]\n",
      "epoch: 0253 train log loss: [[0.4175852]] valid log loss: [[0.42056084]]\n",
      "epoch: 0254 train log loss: [[0.4175569]] valid log loss: [[0.4205268]]\n",
      "epoch: 0255 train log loss: [[0.41753]] valid log loss: [[0.42049336]]\n",
      "epoch: 0256 train log loss: [[0.41750064]] valid log loss: [[0.42045984]]\n",
      "epoch: 0257 train log loss: [[0.41747174]] valid log loss: [[0.42042524]]\n",
      "epoch: 0258 train log loss: [[0.41744205]] valid log loss: [[0.42039284]]\n",
      "epoch: 0259 train log loss: [[0.4174145]] valid log loss: [[0.42035836]]\n",
      "epoch: 0260 train log loss: [[0.41738865]] valid log loss: [[0.42032522]]\n",
      "epoch: 0261 train log loss: [[0.41736078]] valid log loss: [[0.4202943]]\n",
      "epoch: 0262 train log loss: [[0.4173345]] valid log loss: [[0.42026317]]\n",
      "epoch: 0263 train log loss: [[0.41730848]] valid log loss: [[0.42022952]]\n",
      "epoch: 0264 train log loss: [[0.41728]] valid log loss: [[0.4201976]]\n",
      "epoch: 0265 train log loss: [[0.4172543]] valid log loss: [[0.42016608]]\n",
      "epoch: 0266 train log loss: [[0.41722777]] valid log loss: [[0.42013457]]\n",
      "epoch: 0267 train log loss: [[0.41720077]] valid log loss: [[0.4201022]]\n",
      "epoch: 0268 train log loss: [[0.4171743]] valid log loss: [[0.42007053]]\n",
      "epoch: 0269 train log loss: [[0.4171487]] valid log loss: [[0.4200409]]\n",
      "epoch: 0270 train log loss: [[0.4171228]] valid log loss: [[0.42001078]]\n",
      "epoch: 0271 train log loss: [[0.4170982]] valid log loss: [[0.4199795]]\n",
      "epoch: 0272 train log loss: [[0.41707167]] valid log loss: [[0.419953]]\n",
      "epoch: 0273 train log loss: [[0.41704786]] valid log loss: [[0.41992295]]\n",
      "epoch: 0274 train log loss: [[0.4170243]] valid log loss: [[0.41989246]]\n",
      "epoch: 0275 train log loss: [[0.41699892]] valid log loss: [[0.41986403]]\n",
      "epoch: 0276 train log loss: [[0.4169752]] valid log loss: [[0.41983393]]\n",
      "epoch: 0277 train log loss: [[0.4169519]] valid log loss: [[0.419805]]\n",
      "epoch: 0278 train log loss: [[0.41692913]] valid log loss: [[0.4197761]]\n",
      "epoch: 0279 train log loss: [[0.41690558]] valid log loss: [[0.4197489]]\n",
      "epoch: 0280 train log loss: [[0.41688284]] valid log loss: [[0.41972002]]\n",
      "epoch: 0281 train log loss: [[0.41685915]] valid log loss: [[0.4196943]]\n",
      "epoch: 0282 train log loss: [[0.41683602]] valid log loss: [[0.4196684]]\n",
      "epoch: 0283 train log loss: [[0.41681194]] valid log loss: [[0.41963962]]\n",
      "epoch: 0284 train log loss: [[0.41678816]] valid log loss: [[0.41961217]]\n",
      "epoch: 0285 train log loss: [[0.4167668]] valid log loss: [[0.41958445]]\n",
      "epoch: 0286 train log loss: [[0.41674456]] valid log loss: [[0.41955712]]\n",
      "epoch: 0287 train log loss: [[0.41672114]] valid log loss: [[0.41953152]]\n",
      "epoch: 0288 train log loss: [[0.41669852]] valid log loss: [[0.4195049]]\n",
      "epoch: 0289 train log loss: [[0.41667622]] valid log loss: [[0.4194789]]\n",
      "epoch: 0290 train log loss: [[0.41665366]] valid log loss: [[0.419453]]\n",
      "epoch: 0291 train log loss: [[0.41663247]] valid log loss: [[0.4194275]]\n",
      "epoch: 0292 train log loss: [[0.41661054]] valid log loss: [[0.41940215]]\n",
      "epoch: 0293 train log loss: [[0.41658804]] valid log loss: [[0.41937715]]\n",
      "epoch: 0294 train log loss: [[0.41656825]] valid log loss: [[0.41935366]]\n",
      "epoch: 0295 train log loss: [[0.41654682]] valid log loss: [[0.41932914]]\n",
      "epoch: 0296 train log loss: [[0.41652638]] valid log loss: [[0.41930446]]\n",
      "epoch: 0297 train log loss: [[0.41650698]] valid log loss: [[0.4192795]]\n",
      "epoch: 0298 train log loss: [[0.4164867]] valid log loss: [[0.41925436]]\n",
      "epoch: 0299 train log loss: [[0.41646641]] valid log loss: [[0.4192293]]\n",
      "epoch: 0300 train log loss: [[0.4164461]] valid log loss: [[0.4192058]]\n",
      "epoch: 0301 train log loss: [[0.41642603]] valid log loss: [[0.4191833]]\n",
      "epoch: 0302 train log loss: [[0.41640565]] valid log loss: [[0.4191605]]\n",
      "epoch: 0303 train log loss: [[0.41638604]] valid log loss: [[0.41913652]]\n",
      "epoch: 0304 train log loss: [[0.41636676]] valid log loss: [[0.41911224]]\n",
      "epoch: 0305 train log loss: [[0.4163483]] valid log loss: [[0.41908824]]\n",
      "epoch: 0306 train log loss: [[0.41632798]] valid log loss: [[0.41906533]]\n",
      "epoch: 0307 train log loss: [[0.41630965]] valid log loss: [[0.41904262]]\n",
      "epoch: 0308 train log loss: [[0.41629106]] valid log loss: [[0.41902092]]\n",
      "epoch: 0309 train log loss: [[0.41627213]] valid log loss: [[0.41899833]]\n",
      "epoch: 0310 train log loss: [[0.4162528]] valid log loss: [[0.41897622]]\n",
      "epoch: 0311 train log loss: [[0.41623384]] valid log loss: [[0.4189533]]\n",
      "epoch: 0312 train log loss: [[0.41621548]] valid log loss: [[0.41893333]]\n",
      "epoch: 0313 train log loss: [[0.41619787]] valid log loss: [[0.41891015]]\n",
      "epoch: 0314 train log loss: [[0.4161788]] valid log loss: [[0.41888756]]\n",
      "epoch: 0315 train log loss: [[0.41616005]] valid log loss: [[0.41886526]]\n",
      "epoch: 0316 train log loss: [[0.41614145]] valid log loss: [[0.41884425]]\n",
      "epoch: 0317 train log loss: [[0.41612172]] valid log loss: [[0.41882342]]\n",
      "epoch: 0318 train log loss: [[0.41610432]] valid log loss: [[0.41880238]]\n",
      "epoch: 0319 train log loss: [[0.4160868]] valid log loss: [[0.4187827]]\n",
      "epoch: 0320 train log loss: [[0.41606903]] valid log loss: [[0.41876164]]\n",
      "epoch: 0321 train log loss: [[0.41605017]] valid log loss: [[0.4187408]]\n",
      "epoch: 0322 train log loss: [[0.41603234]] valid log loss: [[0.41872013]]\n",
      "epoch: 0323 train log loss: [[0.4160146]] valid log loss: [[0.41870093]]\n",
      "epoch: 0324 train log loss: [[0.41599658]] valid log loss: [[0.4186812]]\n",
      "epoch: 0325 train log loss: [[0.41597986]] valid log loss: [[0.41866022]]\n",
      "epoch: 0326 train log loss: [[0.4159638]] valid log loss: [[0.41864035]]\n",
      "epoch: 0327 train log loss: [[0.41594648]] valid log loss: [[0.41862062]]\n",
      "epoch: 0328 train log loss: [[0.41593054]] valid log loss: [[0.418602]]\n",
      "epoch: 0329 train log loss: [[0.41591355]] valid log loss: [[0.41858238]]\n",
      "epoch: 0330 train log loss: [[0.41589773]] valid log loss: [[0.41856337]]\n",
      "epoch: 0331 train log loss: [[0.41588157]] valid log loss: [[0.41854343]]\n",
      "epoch: 0332 train log loss: [[0.41586503]] valid log loss: [[0.41852304]]\n",
      "epoch: 0333 train log loss: [[0.41585028]] valid log loss: [[0.41850555]]\n",
      "epoch: 0334 train log loss: [[0.41583502]] valid log loss: [[0.4184877]]\n",
      "epoch: 0335 train log loss: [[0.4158184]] valid log loss: [[0.4184682]]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 0336 train log loss: [[0.41580164]] valid log loss: [[0.41845024]]\n",
      "epoch: 0337 train log loss: [[0.4157859]] valid log loss: [[0.41843176]]\n",
      "epoch: 0338 train log loss: [[0.4157705]] valid log loss: [[0.4184138]]\n",
      "epoch: 0339 train log loss: [[0.415756]] valid log loss: [[0.41839388]]\n",
      "epoch: 0340 train log loss: [[0.41574]] valid log loss: [[0.41837555]]\n",
      "epoch: 0341 train log loss: [[0.41572565]] valid log loss: [[0.41835812]]\n",
      "epoch: 0342 train log loss: [[0.41570982]] valid log loss: [[0.41834053]]\n",
      "epoch: 0343 train log loss: [[0.4156942]] valid log loss: [[0.41832328]]\n",
      "epoch: 0344 train log loss: [[0.41568026]] valid log loss: [[0.4183058]]\n",
      "epoch: 0345 train log loss: [[0.41566437]] valid log loss: [[0.41828895]]\n",
      "epoch: 0346 train log loss: [[0.41564953]] valid log loss: [[0.4182708]]\n",
      "epoch: 0347 train log loss: [[0.41563347]] valid log loss: [[0.41825178]]\n",
      "epoch: 0348 train log loss: [[0.41561875]] valid log loss: [[0.41823488]]\n",
      "epoch: 0349 train log loss: [[0.41560358]] valid log loss: [[0.41821814]]\n",
      "epoch: 0350 train log loss: [[0.41558823]] valid log loss: [[0.41820073]]\n",
      "epoch: 0351 train log loss: [[0.41557372]] valid log loss: [[0.41818333]]\n",
      "epoch: 0352 train log loss: [[0.41556048]] valid log loss: [[0.41816762]]\n",
      "epoch: 0353 train log loss: [[0.41554615]] valid log loss: [[0.41815105]]\n",
      "epoch: 0354 train log loss: [[0.4155302]] valid log loss: [[0.41813543]]\n",
      "epoch: 0355 train log loss: [[0.41551632]] valid log loss: [[0.41811723]]\n",
      "epoch: 0356 train log loss: [[0.41550183]] valid log loss: [[0.41810155]]\n",
      "epoch: 0357 train log loss: [[0.41548884]] valid log loss: [[0.418086]]\n",
      "epoch: 0358 train log loss: [[0.41547373]] valid log loss: [[0.41806942]]\n",
      "epoch: 0359 train log loss: [[0.41545853]] valid log loss: [[0.4180536]]\n",
      "epoch: 0360 train log loss: [[0.41544482]] valid log loss: [[0.41803816]]\n",
      "epoch: 0361 train log loss: [[0.41543004]] valid log loss: [[0.4180238]]\n",
      "epoch: 0362 train log loss: [[0.41541654]] valid log loss: [[0.41800824]]\n",
      "epoch: 0363 train log loss: [[0.4154032]] valid log loss: [[0.41799274]]\n",
      "epoch: 0364 train log loss: [[0.41538975]] valid log loss: [[0.4179771]]\n",
      "epoch: 0365 train log loss: [[0.41537565]] valid log loss: [[0.41796142]]\n",
      "epoch: 0366 train log loss: [[0.4153618]] valid log loss: [[0.41794416]]\n",
      "epoch: 0367 train log loss: [[0.41534883]] valid log loss: [[0.41792917]]\n",
      "epoch: 0368 train log loss: [[0.4153356]] valid log loss: [[0.41791463]]\n",
      "epoch: 0369 train log loss: [[0.41532227]] valid log loss: [[0.41789895]]\n",
      "epoch: 0370 train log loss: [[0.4153096]] valid log loss: [[0.4178828]]\n",
      "epoch: 0371 train log loss: [[0.41529658]] valid log loss: [[0.41786826]]\n",
      "epoch: 0372 train log loss: [[0.4152835]] valid log loss: [[0.41785514]]\n",
      "epoch: 0373 train log loss: [[0.41527167]] valid log loss: [[0.41784]]\n",
      "epoch: 0374 train log loss: [[0.4152594]] valid log loss: [[0.41782606]]\n",
      "epoch: 0375 train log loss: [[0.41524616]] valid log loss: [[0.41781023]]\n",
      "epoch: 0376 train log loss: [[0.41523355]] valid log loss: [[0.41779625]]\n",
      "epoch: 0377 train log loss: [[0.41522074]] valid log loss: [[0.41778156]]\n",
      "epoch: 0378 train log loss: [[0.4152094]] valid log loss: [[0.4177683]]\n",
      "epoch: 0379 train log loss: [[0.4151953]] valid log loss: [[0.41775504]]\n",
      "epoch: 0380 train log loss: [[0.41518313]] valid log loss: [[0.41774186]]\n",
      "epoch: 0381 train log loss: [[0.41517025]] valid log loss: [[0.41772687]]\n",
      "epoch: 0382 train log loss: [[0.41515717]] valid log loss: [[0.41771257]]\n",
      "epoch: 0383 train log loss: [[0.41514498]] valid log loss: [[0.41770044]]\n",
      "epoch: 0384 train log loss: [[0.4151332]] valid log loss: [[0.41768533]]\n",
      "epoch: 0385 train log loss: [[0.41512024]] valid log loss: [[0.4176718]]\n",
      "epoch: 0386 train log loss: [[0.41510814]] valid log loss: [[0.41765916]]\n",
      "epoch: 0387 train log loss: [[0.41509587]] valid log loss: [[0.417644]]\n",
      "epoch: 0388 train log loss: [[0.41508305]] valid log loss: [[0.41762984]]\n",
      "epoch: 0389 train log loss: [[0.41507116]] valid log loss: [[0.41761798]]\n",
      "epoch: 0390 train log loss: [[0.41506013]] valid log loss: [[0.41760463]]\n",
      "epoch: 0391 train log loss: [[0.41504875]] valid log loss: [[0.41759208]]\n",
      "epoch: 0392 train log loss: [[0.41503716]] valid log loss: [[0.4175802]]\n",
      "epoch: 0393 train log loss: [[0.41502556]] valid log loss: [[0.4175665]]\n",
      "epoch: 0394 train log loss: [[0.41501448]] valid log loss: [[0.4175539]]\n",
      "epoch: 0395 train log loss: [[0.41500336]] valid log loss: [[0.41754118]]\n",
      "epoch: 0396 train log loss: [[0.41499212]] valid log loss: [[0.41752574]]\n",
      "epoch: 0397 train log loss: [[0.41498116]] valid log loss: [[0.41751394]]\n",
      "epoch: 0398 train log loss: [[0.41497105]] valid log loss: [[0.41750145]]\n",
      "epoch: 0399 train log loss: [[0.41495976]] valid log loss: [[0.41748908]]\n",
      "epoch: 0400 train log loss: [[0.41494834]] valid log loss: [[0.41747776]]\n",
      "epoch: 0401 train log loss: [[0.41493687]] valid log loss: [[0.41746426]]\n",
      "epoch: 0402 train log loss: [[0.4149261]] valid log loss: [[0.41745144]]\n",
      "epoch: 0403 train log loss: [[0.41491514]] valid log loss: [[0.41743803]]\n",
      "epoch: 0404 train log loss: [[0.4149035]] valid log loss: [[0.41742584]]\n",
      "epoch: 0405 train log loss: [[0.41489288]] valid log loss: [[0.41741407]]\n",
      "epoch: 0406 train log loss: [[0.41488218]] valid log loss: [[0.41740188]]\n",
      "epoch: 0407 train log loss: [[0.41487128]] valid log loss: [[0.41739184]]\n",
      "epoch: 0408 train log loss: [[0.41486007]] valid log loss: [[0.41737902]]\n",
      "epoch: 0409 train log loss: [[0.41484904]] valid log loss: [[0.41736853]]\n",
      "epoch: 0410 train log loss: [[0.41483867]] valid log loss: [[0.417356]]\n",
      "epoch: 0411 train log loss: [[0.41482702]] valid log loss: [[0.41734514]]\n",
      "epoch: 0412 train log loss: [[0.41481692]] valid log loss: [[0.4173328]]\n",
      "epoch: 0413 train log loss: [[0.4148066]] valid log loss: [[0.41732085]]\n",
      "epoch: 0414 train log loss: [[0.4147965]] valid log loss: [[0.41730827]]\n",
      "epoch: 0415 train log loss: [[0.41478568]] valid log loss: [[0.41729775]]\n",
      "epoch: 0416 train log loss: [[0.4147742]] valid log loss: [[0.4172861]]\n",
      "epoch: 0417 train log loss: [[0.4147644]] valid log loss: [[0.4172751]]\n",
      "epoch: 0418 train log loss: [[0.4147545]] valid log loss: [[0.41726258]]\n",
      "epoch: 0419 train log loss: [[0.41474327]] valid log loss: [[0.41725206]]\n",
      "epoch: 0420 train log loss: [[0.41473198]] valid log loss: [[0.41724232]]\n",
      "epoch: 0421 train log loss: [[0.41472304]] valid log loss: [[0.41722986]]\n",
      "epoch: 0422 train log loss: [[0.41471308]] valid log loss: [[0.4172194]]\n",
      "epoch: 0423 train log loss: [[0.4147025]] valid log loss: [[0.41720814]]\n",
      "epoch: 0424 train log loss: [[0.41469285]] valid log loss: [[0.41719827]]\n",
      "epoch: 0425 train log loss: [[0.41468367]] valid log loss: [[0.4171871]]\n",
      "epoch: 0426 train log loss: [[0.41467422]] valid log loss: [[0.417176]]\n",
      "epoch: 0427 train log loss: [[0.41466385]] valid log loss: [[0.41716674]]\n",
      "epoch: 0428 train log loss: [[0.41465345]] valid log loss: [[0.41715512]]\n",
      "epoch: 0429 train log loss: [[0.41464418]] valid log loss: [[0.41714436]]\n",
      "epoch: 0430 train log loss: [[0.41463357]] valid log loss: [[0.41713348]]\n",
      "epoch: 0431 train log loss: [[0.4146237]] valid log loss: [[0.41712183]]\n",
      "epoch: 0432 train log loss: [[0.4146141]] valid log loss: [[0.41711193]]\n",
      "epoch: 0433 train log loss: [[0.41460368]] valid log loss: [[0.4171023]]\n",
      "epoch: 0434 train log loss: [[0.4145928]] valid log loss: [[0.41709238]]\n",
      "epoch: 0435 train log loss: [[0.41458446]] valid log loss: [[0.41708156]]\n",
      "epoch: 0436 train log loss: [[0.41457435]] valid log loss: [[0.41707096]]\n",
      "epoch: 0437 train log loss: [[0.4145656]] valid log loss: [[0.417062]]\n",
      "epoch: 0438 train log loss: [[0.41455686]] valid log loss: [[0.41705087]]\n",
      "epoch: 0439 train log loss: [[0.4145478]] valid log loss: [[0.4170404]]\n",
      "epoch: 0440 train log loss: [[0.41453865]] valid log loss: [[0.417029]]\n",
      "epoch: 0441 train log loss: [[0.4145293]] valid log loss: [[0.4170196]]\n",
      "epoch: 0442 train log loss: [[0.41452125]] valid log loss: [[0.4170098]]\n",
      "epoch: 0443 train log loss: [[0.41451165]] valid log loss: [[0.41700026]]\n",
      "epoch: 0444 train log loss: [[0.41450363]] valid log loss: [[0.41699]]\n",
      "epoch: 0445 train log loss: [[0.41449383]] valid log loss: [[0.41698083]]\n",
      "epoch: 0446 train log loss: [[0.41448498]] valid log loss: [[0.41697136]]\n",
      "epoch: 0447 train log loss: [[0.41447577]] valid log loss: [[0.41696173]]\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "epoch: 0448 train log loss: [[0.4144662]] valid log loss: [[0.41695198]]\n",
      "epoch: 0449 train log loss: [[0.41445717]] valid log loss: [[0.41694278]]\n",
      "epoch: 0450 train log loss: [[0.41444921]] valid log loss: [[0.41693303]]\n",
      "epoch: 0451 train log loss: [[0.41444004]] valid log loss: [[0.416925]]\n",
      "epoch: 0452 train log loss: [[0.41443107]] valid log loss: [[0.41691622]]\n",
      "epoch: 0453 train log loss: [[0.4144222]] valid log loss: [[0.4169084]]\n",
      "epoch: 0454 train log loss: [[0.41441277]] valid log loss: [[0.41689888]]\n",
      "epoch: 0455 train log loss: [[0.41440478]] valid log loss: [[0.4168892]]\n",
      "epoch: 0456 train log loss: [[0.4143964]] valid log loss: [[0.4168792]]\n",
      "epoch: 0457 train log loss: [[0.41438684]] valid log loss: [[0.4168691]]\n",
      "epoch: 0458 train log loss: [[0.41437823]] valid log loss: [[0.41686043]]\n",
      "epoch: 0459 train log loss: [[0.4143697]] valid log loss: [[0.41685233]]\n",
      "epoch: 0460 train log loss: [[0.41436067]] valid log loss: [[0.4168431]]\n",
      "epoch: 0461 train log loss: [[0.41435224]] valid log loss: [[0.4168333]]\n",
      "epoch: 0462 train log loss: [[0.414345]] valid log loss: [[0.41682506]]\n",
      "epoch: 0463 train log loss: [[0.41433623]] valid log loss: [[0.41681704]]\n",
      "epoch: 0464 train log loss: [[0.41432828]] valid log loss: [[0.4168073]]\n",
      "epoch: 0465 train log loss: [[0.4143199]] valid log loss: [[0.41679892]]\n",
      "epoch: 0466 train log loss: [[0.4143114]] valid log loss: [[0.4167892]]\n",
      "epoch: 0467 train log loss: [[0.41430396]] valid log loss: [[0.41677943]]\n",
      "epoch: 0468 train log loss: [[0.4142952]] valid log loss: [[0.41677138]]\n",
      "epoch: 0469 train log loss: [[0.41428763]] valid log loss: [[0.41676185]]\n",
      "epoch: 0470 train log loss: [[0.4142802]] valid log loss: [[0.4167537]]\n",
      "epoch: 0471 train log loss: [[0.41427222]] valid log loss: [[0.41674513]]\n",
      "epoch: 0472 train log loss: [[0.41426408]] valid log loss: [[0.41673654]]\n",
      "epoch: 0473 train log loss: [[0.41425484]] valid log loss: [[0.41672888]]\n",
      "epoch: 0474 train log loss: [[0.4142462]] valid log loss: [[0.4167213]]\n",
      "epoch: 0475 train log loss: [[0.41423732]] valid log loss: [[0.41671276]]\n",
      "epoch: 0476 train log loss: [[0.41423014]] valid log loss: [[0.41670382]]\n",
      "epoch: 0477 train log loss: [[0.414222]] valid log loss: [[0.4166966]]\n",
      "epoch: 0478 train log loss: [[0.41421422]] valid log loss: [[0.41668716]]\n",
      "epoch: 0479 train log loss: [[0.4142063]] valid log loss: [[0.4166788]]\n",
      "epoch: 0480 train log loss: [[0.41419938]] valid log loss: [[0.41667083]]\n",
      "epoch: 0481 train log loss: [[0.41419178]] valid log loss: [[0.41666123]]\n",
      "epoch: 0482 train log loss: [[0.4141835]] valid log loss: [[0.4166538]]\n",
      "epoch: 0483 train log loss: [[0.4141752]] valid log loss: [[0.41664568]]\n",
      "epoch: 0484 train log loss: [[0.41416812]] valid log loss: [[0.41663733]]\n",
      "epoch: 0485 train log loss: [[0.41416016]] valid log loss: [[0.41662958]]\n",
      "epoch: 0486 train log loss: [[0.41415164]] valid log loss: [[0.41662225]]\n",
      "epoch: 0487 train log loss: [[0.41414365]] valid log loss: [[0.41661403]]\n",
      "epoch: 0488 train log loss: [[0.41413492]] valid log loss: [[0.41660547]]\n",
      "epoch: 0489 train log loss: [[0.41412893]] valid log loss: [[0.4165981]]\n",
      "epoch: 0490 train log loss: [[0.41412053]] valid log loss: [[0.41659042]]\n",
      "epoch: 0491 train log loss: [[0.4141129]] valid log loss: [[0.41658288]]\n",
      "epoch: 0492 train log loss: [[0.41410536]] valid log loss: [[0.41657472]]\n",
      "epoch: 0493 train log loss: [[0.41409826]] valid log loss: [[0.4165674]]\n",
      "epoch: 0494 train log loss: [[0.4140914]] valid log loss: [[0.41656]]\n",
      "epoch: 0495 train log loss: [[0.41408336]] valid log loss: [[0.41655266]]\n",
      "epoch: 0496 train log loss: [[0.4140752]] valid log loss: [[0.41654435]]\n",
      "epoch: 0497 train log loss: [[0.41406858]] valid log loss: [[0.4165357]]\n",
      "epoch: 0498 train log loss: [[0.4140609]] valid log loss: [[0.41652817]]\n",
      "epoch: 0499 train log loss: [[0.41405347]] valid log loss: [[0.41652125]]\n",
      "epoch: 0500 train log loss: [[0.41404647]] valid log loss: [[0.4165135]]\n",
      "epoch: 0501 train log loss: [[0.41403943]] valid log loss: [[0.41650692]]\n",
      "epoch: 0502 train log loss: [[0.4140327]] valid log loss: [[0.41649914]]\n",
      "epoch: 0503 train log loss: [[0.4140253]] valid log loss: [[0.41649076]]\n",
      "epoch: 0504 train log loss: [[0.41401836]] valid log loss: [[0.41648406]]\n",
      "epoch: 0505 train log loss: [[0.4140112]] valid log loss: [[0.41647756]]\n",
      "epoch: 0506 train log loss: [[0.41400406]] valid log loss: [[0.4164707]]\n",
      "epoch: 0507 train log loss: [[0.41399756]] valid log loss: [[0.41646394]]\n",
      "epoch: 0508 train log loss: [[0.4139903]] valid log loss: [[0.41645733]]\n",
      "epoch: 0509 train log loss: [[0.41398352]] valid log loss: [[0.4164498]]\n",
      "epoch: 0510 train log loss: [[0.4139762]] valid log loss: [[0.4164433]]\n",
      "epoch: 0511 train log loss: [[0.41396964]] valid log loss: [[0.41643623]]\n",
      "epoch: 0512 train log loss: [[0.41396233]] valid log loss: [[0.41642955]]\n",
      "epoch: 0513 train log loss: [[0.41395587]] valid log loss: [[0.4164224]]\n",
      "epoch: 0514 train log loss: [[0.41394776]] valid log loss: [[0.41641498]]\n",
      "epoch: 0515 train log loss: [[0.41394162]] valid log loss: [[0.41640922]]\n",
      "epoch: 0516 train log loss: [[0.4139352]] valid log loss: [[0.41640285]]\n",
      "epoch: 0517 train log loss: [[0.41392767]] valid log loss: [[0.4163959]]\n",
      "epoch: 0518 train log loss: [[0.41392136]] valid log loss: [[0.41638824]]\n",
      "epoch: 0519 train log loss: [[0.41391435]] valid log loss: [[0.4163828]]\n",
      "epoch: 0520 train log loss: [[0.41390747]] valid log loss: [[0.4163753]]\n",
      "epoch: 0521 train log loss: [[0.41390103]] valid log loss: [[0.4163694]]\n",
      "epoch: 0522 train log loss: [[0.41389418]] valid log loss: [[0.41636175]]\n",
      "epoch: 0523 train log loss: [[0.4138884]] valid log loss: [[0.41635364]]\n",
      "epoch: 0524 train log loss: [[0.4138815]] valid log loss: [[0.41634688]]\n",
      "epoch: 0525 train log loss: [[0.4138751]] valid log loss: [[0.4163411]]\n",
      "epoch: 0526 train log loss: [[0.41386962]] valid log loss: [[0.41633478]]\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m                         Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-42-dcab651b77f9>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m      6\u001b[0m     \u001b[0mtrain_W\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mW\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m      7\u001b[0m     \u001b[0mtrain_b\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mb\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m     \u001b[0mtrain_cost\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mffm_data\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m{\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mffm_ftrl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mffm_data\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'click'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m      9\u001b[0m     \u001b[0mvalid_cost\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0msess\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mvalid_ffm_data\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshape\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m{\u001b[0m\u001b[0mx\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mvalid_ftrl\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0mvalid_ffm_data\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m'click'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mreshape\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     10\u001b[0m \u001b[0;31m#                 print(\"sample:\", \"%d\" % (sample_index+1000000), \"train log loss:\", train_cost, \"valid log loss:\",valid_cost)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m    898\u001b[0m     \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    899\u001b[0m       result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 900\u001b[0;31m                          run_metadata_ptr)\n\u001b[0m\u001b[1;32m    901\u001b[0m       \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    902\u001b[0m         \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m   1102\u001b[0m             \u001b[0mfeed_handles\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0msubfeed_t\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msubfeed_val\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1103\u001b[0m           \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1104\u001b[0;31m             \u001b[0mnp_val\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masarray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msubfeed_val\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdtype\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0msubfeed_dtype\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m   1105\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m   1106\u001b[0m           if (not is_tensor_handle_feed and\n",
      "\u001b[0;32m~/anaconda3/lib/python3.6/site-packages/numpy/core/numeric.py\u001b[0m in \u001b[0;36masarray\u001b[0;34m(a, dtype, order)\u001b[0m\n\u001b[1;32m    490\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    491\u001b[0m     \"\"\"\n\u001b[0;32m--> 492\u001b[0;31m     \u001b[0;32mreturn\u001b[0m \u001b[0marray\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0ma\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdtype\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcopy\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;32mFalse\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0morder\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0morder\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m    493\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m    494\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
     ]
    }
   ],
   "source": [
    "# Train the FFM/LR graph defined in an earlier cell.\n",
    "# NOTE(review): `train` here is presumably the TF optimizer op from a prior cell;\n",
    "# it shadows the pandas DataFrame `train` created earlier in this notebook -- confirm.\n",
    "init = tf.global_variables_initializer()\n",
    "sess = tf.Session()\n",
    "sess.run(init)\n",
    "\n",
    "# Hoist loop-invariant work: sample counts and feed dicts (incl. the\n",
    "# .values.reshape copies) do not change between epochs.\n",
    "n_train = ffm_data.shape[0]\n",
    "n_valid = valid_ffm_data.shape[0]\n",
    "train_feed = {x: ffm_ftrl, y_: ffm_data['click'].values.reshape(-1, 1)}\n",
    "valid_feed = {x: valid_ftrl, y_: valid_ffm_data['click'].values.reshape(-1, 1)}\n",
    "\n",
    "for epoch in range(1000):\n",
    "    # One update step, then fetch weights and post-update training loss in a\n",
    "    # single run() call (the original used three separate run() calls).\n",
    "    sess.run(train, train_feed)\n",
    "    train_W, train_b, train_loss = sess.run([W, b, loss], train_feed)\n",
    "    # Divide by the sample count in Python, NOT inside sess.run():\n",
    "    # `sess.run(loss/n, ...)` adds a brand-new division node to the graph on\n",
    "    # every epoch, steadily growing the graph (memory leak in TF1).\n",
    "    train_cost = train_loss / n_train\n",
    "    valid_cost = sess.run(loss, valid_feed) / n_valid\n",
    "    print(\"epoch:\", \"%04d\" % epoch, \"train log loss:\", train_cost, \"valid log loss:\", valid_cost)\n",
    "print(\"Optimization Finished!\")"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.5"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
