{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "import pandas as pd\n",
    "from sklearn.model_selection import train_test_split\n",
    "from sklearn.impute import SimpleImputer\n",
    "\n",
    "data = pd.read_csv('./data/train.csv')\n",
    "data.dropna(axis=0, subset=['SalePrice'], inplace=True)\n",
    "y = data.SalePrice\n",
    "X = data.drop(['SalePrice'], axis=1).select_dtypes(exclude=['object'])\n",
    "train_X, test_X, train_y, test_y = train_test_split(X.values, y.values, test_size=0.25)\n",
    "\n",
    "my_imputer = SimpleImputer()\n",
    "train_X = my_imputer.fit_transform(train_X)\n",
    "test_X = my_imputer.transform(test_X)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "outputs": [
    {
     "data": {
      "text/plain": "array([[6.30000000e+01, 1.20000000e+02, 4.40000000e+01, ...,\n        0.00000000e+00, 1.00000000e+01, 2.00700000e+03],\n       [1.04000000e+02, 2.00000000e+01, 9.40000000e+01, ...,\n        0.00000000e+00, 5.00000000e+00, 2.01000000e+03],\n       [8.67000000e+02, 2.00000000e+01, 6.70000000e+01, ...,\n        0.00000000e+00, 1.10000000e+01, 2.00700000e+03],\n       ...,\n       [1.03400000e+03, 2.00000000e+01, 7.03978022e+01, ...,\n        0.00000000e+00, 2.00000000e+00, 2.00600000e+03],\n       [1.02600000e+03, 2.00000000e+01, 7.00000000e+01, ...,\n        0.00000000e+00, 3.00000000e+00, 2.00700000e+03],\n       [2.95000000e+02, 2.00000000e+01, 8.00000000e+01, ...,\n        0.00000000e+00, 1.00000000e+01, 2.00900000e+03]])"
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_X"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "outputs": [
    {
     "data": {
      "text/plain": "XGBRegressor(base_score=0.5, booster='gbtree', callbacks=None,\n             colsample_bylevel=1, colsample_bynode=1, colsample_bytree=1,\n             early_stopping_rounds=None, enable_categorical=False,\n             eval_metric=None, gamma=0, gpu_id=-1, grow_policy='depthwise',\n             importance_type=None, interaction_constraints='',\n             learning_rate=0.300000012, max_bin=256, max_cat_to_onehot=4,\n             max_delta_step=0, max_depth=6, max_leaves=0, min_child_weight=1,\n             missing=nan, monotone_constraints='()', n_estimators=100, n_jobs=0,\n             num_parallel_tree=1, predictor='auto', random_state=0, reg_alpha=0,\n             reg_lambda=1, ...)",
      "text/html": "<style>#sk-container-id-5 {color: black;background-color: white;}#sk-container-id-5 pre{padding: 0;}#sk-container-id-5 div.sk-toggleable {background-color: white;}#sk-container-id-5 label.sk-toggleable__label {cursor: pointer;display: block;width: 100%;margin-bottom: 0;padding: 0.3em;box-sizing: border-box;text-align: center;}#sk-container-id-5 label.sk-toggleable__label-arrow:before {content: \"▸\";float: left;margin-right: 0.25em;color: #696969;}#sk-container-id-5 label.sk-toggleable__label-arrow:hover:before {color: black;}#sk-container-id-5 div.sk-estimator:hover label.sk-toggleable__label-arrow:before {color: black;}#sk-container-id-5 div.sk-toggleable__content {max-height: 0;max-width: 0;overflow: hidden;text-align: left;background-color: #f0f8ff;}#sk-container-id-5 div.sk-toggleable__content pre {margin: 0.2em;color: black;border-radius: 0.25em;background-color: #f0f8ff;}#sk-container-id-5 input.sk-toggleable__control:checked~div.sk-toggleable__content {max-height: 200px;max-width: 100%;overflow: auto;}#sk-container-id-5 input.sk-toggleable__control:checked~label.sk-toggleable__label-arrow:before {content: \"▾\";}#sk-container-id-5 div.sk-estimator input.sk-toggleable__control:checked~label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-5 div.sk-label input.sk-toggleable__control:checked~label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-5 input.sk-hidden--visually {border: 0;clip: rect(1px 1px 1px 1px);clip: rect(1px, 1px, 1px, 1px);height: 1px;margin: -1px;overflow: hidden;padding: 0;position: absolute;width: 1px;}#sk-container-id-5 div.sk-estimator {font-family: monospace;background-color: #f0f8ff;border: 1px dotted black;border-radius: 0.25em;box-sizing: border-box;margin-bottom: 0.5em;}#sk-container-id-5 div.sk-estimator:hover {background-color: #d4ebff;}#sk-container-id-5 div.sk-parallel-item::after {content: \"\";width: 100%;border-bottom: 1px solid gray;flex-grow: 1;}#sk-container-id-5 
div.sk-label:hover label.sk-toggleable__label {background-color: #d4ebff;}#sk-container-id-5 div.sk-serial::before {content: \"\";position: absolute;border-left: 1px solid gray;box-sizing: border-box;top: 0;bottom: 0;left: 50%;z-index: 0;}#sk-container-id-5 div.sk-serial {display: flex;flex-direction: column;align-items: center;background-color: white;padding-right: 0.2em;padding-left: 0.2em;position: relative;}#sk-container-id-5 div.sk-item {position: relative;z-index: 1;}#sk-container-id-5 div.sk-parallel {display: flex;align-items: stretch;justify-content: center;background-color: white;position: relative;}#sk-container-id-5 div.sk-item::before, #sk-container-id-5 div.sk-parallel-item::before {content: \"\";position: absolute;border-left: 1px solid gray;box-sizing: border-box;top: 0;bottom: 0;left: 50%;z-index: -1;}#sk-container-id-5 div.sk-parallel-item {display: flex;flex-direction: column;z-index: 1;position: relative;background-color: white;}#sk-container-id-5 div.sk-parallel-item:first-child::after {align-self: flex-end;width: 50%;}#sk-container-id-5 div.sk-parallel-item:last-child::after {align-self: flex-start;width: 50%;}#sk-container-id-5 div.sk-parallel-item:only-child::after {width: 0;}#sk-container-id-5 div.sk-dashed-wrapped {border: 1px dashed gray;margin: 0 0.4em 0.5em 0.4em;box-sizing: border-box;padding-bottom: 0.4em;background-color: white;}#sk-container-id-5 div.sk-label label {font-family: monospace;font-weight: bold;display: inline-block;line-height: 1.2em;}#sk-container-id-5 div.sk-label-container {text-align: center;}#sk-container-id-5 div.sk-container {/* jupyter's `normalize.less` sets `[hidden] { display: none; }` but bootstrap.min.css set `[hidden] { display: none !important; }` so we also need the `!important` here to be able to override the default hidden behavior on the sphinx rendered scikit-learn.org. 
See: https://github.com/scikit-learn/scikit-learn/issues/21755 */display: inline-block !important;position: relative;}#sk-container-id-5 div.sk-text-repr-fallback {display: none;}</style><div id=\"sk-container-id-5\" class=\"sk-top-container\"><div class=\"sk-text-repr-fallback\"><pre>XGBRegressor(base_score=0.5, booster=&#x27;gbtree&#x27;, callbacks=None,\n             colsample_bylevel=1, colsample_bynode=1, colsample_bytree=1,\n             early_stopping_rounds=None, enable_categorical=False,\n             eval_metric=None, gamma=0, gpu_id=-1, grow_policy=&#x27;depthwise&#x27;,\n             importance_type=None, interaction_constraints=&#x27;&#x27;,\n             learning_rate=0.300000012, max_bin=256, max_cat_to_onehot=4,\n             max_delta_step=0, max_depth=6, max_leaves=0, min_child_weight=1,\n             missing=nan, monotone_constraints=&#x27;()&#x27;, n_estimators=100, n_jobs=0,\n             num_parallel_tree=1, predictor=&#x27;auto&#x27;, random_state=0, reg_alpha=0,\n             reg_lambda=1, ...)</pre><b>In a Jupyter environment, please rerun this cell to show the HTML representation or trust the notebook. 
<br />On GitHub, the HTML representation is unable to render, please try loading this page with nbviewer.org.</b></div><div class=\"sk-container\" hidden><div class=\"sk-item\"><div class=\"sk-estimator sk-toggleable\"><input class=\"sk-toggleable__control sk-hidden--visually\" id=\"sk-estimator-id-5\" type=\"checkbox\" checked><label for=\"sk-estimator-id-5\" class=\"sk-toggleable__label sk-toggleable__label-arrow\">XGBRegressor</label><div class=\"sk-toggleable__content\"><pre>XGBRegressor(base_score=0.5, booster=&#x27;gbtree&#x27;, callbacks=None,\n             colsample_bylevel=1, colsample_bynode=1, colsample_bytree=1,\n             early_stopping_rounds=None, enable_categorical=False,\n             eval_metric=None, gamma=0, gpu_id=-1, grow_policy=&#x27;depthwise&#x27;,\n             importance_type=None, interaction_constraints=&#x27;&#x27;,\n             learning_rate=0.300000012, max_bin=256, max_cat_to_onehot=4,\n             max_delta_step=0, max_depth=6, max_leaves=0, min_child_weight=1,\n             missing=nan, monotone_constraints=&#x27;()&#x27;, n_estimators=100, n_jobs=0,\n             num_parallel_tree=1, predictor=&#x27;auto&#x27;, random_state=0, reg_alpha=0,\n             reg_lambda=1, ...)</pre></div></div></div></div></div>"
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "from xgboost import XGBRegressor\n",
    "\n",
    "my_model = XGBRegressor()\n",
    "# Add silent=True to avoid printing out updates with each cycle\n",
    "my_model.fit(train_X, train_y, verbose=False)"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "outputs": [],
   "source": [
    "# make predictions\n",
    "predictions = my_model.predict(test_X)\n",
    "\n",
    "from sklearn.metrics import mean_absolute_error\n",
    "print(\"Mean Absolute Error : \" + str(mean_absolute_error(predictions, test_y)))"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "outputs": [
    {
     "data": {
      "text/plain": "['./model/xgb1.pkl']"
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import joblib\n",
    "joblib.dump(my_model, './model/xgb1.pkl')"
   ],
   "metadata": {
    "collapsed": false
   }
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "training epoch 1\n",
      "err 17955.410809075343 in epoch 1 :\n",
      "save epoch 1 with err: 17955.410809075343 \n",
      "training epoch 2\n",
      "err 17955.410809075343 in epoch 2 :\n",
      "training epoch 3\n",
      "err 17955.410809075343 in epoch 3 :\n",
      "training epoch 4\n",
      "err 17955.410809075343 in epoch 4 :\n",
      "training epoch 5\n",
      "err 17955.410809075343 in epoch 5 :\n",
      "training epoch 6\n",
      "err 17955.410809075343 in epoch 6 :\n",
      "training epoch 7\n",
      "err 17955.410809075343 in epoch 7 :\n",
      "training epoch 8\n",
      "err 17955.410809075343 in epoch 8 :\n",
      "training epoch 9\n",
      "err 17955.410809075343 in epoch 9 :\n",
      "training epoch 10\n",
      "err 17955.410809075343 in epoch 10 :\n",
      "training epoch 11\n",
      "err 17955.410809075343 in epoch 11 :\n",
      "training epoch 12\n",
      "err 17955.410809075343 in epoch 12 :\n",
      "training epoch 13\n",
      "err 17955.410809075343 in epoch 13 :\n",
      "training epoch 14\n",
      "err 17955.410809075343 in epoch 14 :\n",
      "training epoch 15\n",
      "err 17955.410809075343 in epoch 15 :\n",
      "training epoch 16\n",
      "err 17955.410809075343 in epoch 16 :\n",
      "training epoch 17\n",
      "err 17955.410809075343 in epoch 17 :\n",
      "training epoch 18\n",
      "err 17955.410809075343 in epoch 18 :\n",
      "training epoch 19\n",
      "err 17955.410809075343 in epoch 19 :\n",
      "training epoch 20\n",
      "err 17955.410809075343 in epoch 20 :\n",
      "training epoch 21\n",
      "err 17955.410809075343 in epoch 21 :\n",
      "training epoch 22\n",
      "err 17955.410809075343 in epoch 22 :\n",
      "training epoch 23\n",
      "err 17955.410809075343 in epoch 23 :\n",
      "training epoch 24\n",
      "err 17955.410809075343 in epoch 24 :\n",
      "training epoch 25\n",
      "err 17955.410809075343 in epoch 25 :\n",
      "training epoch 26\n",
      "err 17955.410809075343 in epoch 26 :\n",
      "training epoch 27\n",
      "err 17955.410809075343 in epoch 27 :\n",
      "training epoch 28\n",
      "err 17955.410809075343 in epoch 28 :\n",
      "training epoch 29\n",
      "err 17955.410809075343 in epoch 29 :\n",
      "training epoch 30\n",
      "err 17955.410809075343 in epoch 30 :\n",
      "training epoch 31\n",
      "err 17955.410809075343 in epoch 31 :\n",
      "training epoch 32\n",
      "err 17955.410809075343 in epoch 32 :\n",
      "training epoch 33\n",
      "err 17955.410809075343 in epoch 33 :\n",
      "training epoch 34\n",
      "err 17955.410809075343 in epoch 34 :\n",
      "training epoch 35\n",
      "err 17955.410809075343 in epoch 35 :\n",
      "training epoch 36\n",
      "err 17955.410809075343 in epoch 36 :\n",
      "training epoch 37\n",
      "err 17955.410809075343 in epoch 37 :\n",
      "training epoch 38\n",
      "err 17955.410809075343 in epoch 38 :\n",
      "training epoch 39\n",
      "err 17955.410809075343 in epoch 39 :\n",
      "training epoch 40\n",
      "err 17955.410809075343 in epoch 40 :\n",
      "training epoch 41\n",
      "err 17955.410809075343 in epoch 41 :\n",
      "training epoch 42\n",
      "err 17955.410809075343 in epoch 42 :\n",
      "training epoch 43\n",
      "err 17955.410809075343 in epoch 43 :\n",
      "training epoch 44\n",
      "err 17955.410809075343 in epoch 44 :\n",
      "training epoch 45\n",
      "err 17955.410809075343 in epoch 45 :\n",
      "training epoch 46\n",
      "err 17955.410809075343 in epoch 46 :\n",
      "training epoch 47\n",
      "err 17955.410809075343 in epoch 47 :\n",
      "training epoch 48\n",
      "err 17955.410809075343 in epoch 48 :\n",
      "training epoch 49\n",
      "err 17955.410809075343 in epoch 49 :\n",
      "training epoch 50\n",
      "err 17955.410809075343 in epoch 50 :\n",
      "training epoch 51\n",
      "err 17955.410809075343 in epoch 51 :\n",
      "training epoch 52\n",
      "err 17955.410809075343 in epoch 52 :\n",
      "training epoch 53\n",
      "err 17955.410809075343 in epoch 53 :\n",
      "training epoch 54\n",
      "err 17955.410809075343 in epoch 54 :\n",
      "training epoch 55\n",
      "err 17955.410809075343 in epoch 55 :\n",
      "training epoch 56\n",
      "err 17955.410809075343 in epoch 56 :\n",
      "training epoch 57\n",
      "err 17955.410809075343 in epoch 57 :\n",
      "training epoch 58\n",
      "err 17955.410809075343 in epoch 58 :\n",
      "training epoch 59\n",
      "err 17955.410809075343 in epoch 59 :\n",
      "training epoch 60\n",
      "err 17955.410809075343 in epoch 60 :\n",
      "training epoch 61\n",
      "err 17955.410809075343 in epoch 61 :\n",
      "training epoch 62\n",
      "err 17955.410809075343 in epoch 62 :\n",
      "training epoch 63\n",
      "err 17955.410809075343 in epoch 63 :\n",
      "training epoch 64\n",
      "err 17955.410809075343 in epoch 64 :\n",
      "training epoch 65\n",
      "err 17955.410809075343 in epoch 65 :\n",
      "training epoch 66\n",
      "err 17955.410809075343 in epoch 66 :\n",
      "training epoch 67\n",
      "err 17955.410809075343 in epoch 67 :\n",
      "training epoch 68\n",
      "err 17955.410809075343 in epoch 68 :\n",
      "training epoch 69\n",
      "err 17955.410809075343 in epoch 69 :\n",
      "training epoch 70\n",
      "err 17955.410809075343 in epoch 70 :\n",
      "training epoch 71\n",
      "err 17955.410809075343 in epoch 71 :\n",
      "training epoch 72\n",
      "err 17955.410809075343 in epoch 72 :\n",
      "training epoch 73\n",
      "err 17955.410809075343 in epoch 73 :\n",
      "training epoch 74\n",
      "err 17955.410809075343 in epoch 74 :\n",
      "training epoch 75\n",
      "err 17955.410809075343 in epoch 75 :\n",
      "training epoch 76\n",
      "err 17955.410809075343 in epoch 76 :\n",
      "training epoch 77\n",
      "err 17955.410809075343 in epoch 77 :\n",
      "training epoch 78\n",
      "err 17955.410809075343 in epoch 78 :\n",
      "training epoch 79\n",
      "err 17955.410809075343 in epoch 79 :\n",
      "training epoch 80\n",
      "err 17955.410809075343 in epoch 80 :\n",
      "training epoch 81\n",
      "err 17955.410809075343 in epoch 81 :\n",
      "training epoch 82\n",
      "err 17955.410809075343 in epoch 82 :\n",
      "training epoch 83\n",
      "err 17955.410809075343 in epoch 83 :\n",
      "training epoch 84\n",
      "err 17955.410809075343 in epoch 84 :\n",
      "training epoch 85\n",
      "err 17955.410809075343 in epoch 85 :\n",
      "training epoch 86\n",
      "err 17955.410809075343 in epoch 86 :\n",
      "training epoch 87\n",
      "err 17955.410809075343 in epoch 87 :\n",
      "training epoch 88\n",
      "err 17955.410809075343 in epoch 88 :\n",
      "training epoch 89\n",
      "err 17955.410809075343 in epoch 89 :\n",
      "training epoch 90\n",
      "err 17955.410809075343 in epoch 90 :\n",
      "training epoch 91\n",
      "err 17955.410809075343 in epoch 91 :\n",
      "training epoch 92\n",
      "err 17955.410809075343 in epoch 92 :\n",
      "training epoch 93\n",
      "err 17955.410809075343 in epoch 93 :\n",
      "training epoch 94\n",
      "err 17955.410809075343 in epoch 94 :\n",
      "training epoch 95\n",
      "err 17955.410809075343 in epoch 95 :\n",
      "training epoch 96\n",
      "err 17955.410809075343 in epoch 96 :\n",
      "training epoch 97\n",
      "err 17955.410809075343 in epoch 97 :\n",
      "training epoch 98\n",
      "err 17955.410809075343 in epoch 98 :\n",
      "training epoch 99\n",
      "err 17955.410809075343 in epoch 99 :\n",
      "training epoch 100\n",
      "err 17955.410809075343 in epoch 100 :\n",
      "training epoch 101\n",
      "err 17955.410809075343 in epoch 101 :\n",
      "training epoch 102\n",
      "err 17955.410809075343 in epoch 102 :\n",
      "training epoch 103\n",
      "err 17955.410809075343 in epoch 103 :\n",
      "training epoch 104\n",
      "err 17955.410809075343 in epoch 104 :\n",
      "training epoch 105\n",
      "err 17955.410809075343 in epoch 105 :\n",
      "training epoch 106\n",
      "err 17955.410809075343 in epoch 106 :\n",
      "training epoch 107\n",
      "err 17955.410809075343 in epoch 107 :\n",
      "training epoch 108\n",
      "err 17955.410809075343 in epoch 108 :\n",
      "training epoch 109\n",
      "err 17955.410809075343 in epoch 109 :\n",
      "training epoch 110\n",
      "err 17955.410809075343 in epoch 110 :\n",
      "training epoch 111\n",
      "err 17955.410809075343 in epoch 111 :\n",
      "training epoch 112\n",
      "err 17955.410809075343 in epoch 112 :\n",
      "training epoch 113\n",
      "err 17955.410809075343 in epoch 113 :\n",
      "training epoch 114\n",
      "err 17955.410809075343 in epoch 114 :\n",
      "training epoch 115\n",
      "err 17955.410809075343 in epoch 115 :\n",
      "training epoch 116\n",
      "err 17955.410809075343 in epoch 116 :\n",
      "training epoch 117\n",
      "err 17955.410809075343 in epoch 117 :\n",
      "training epoch 118\n",
      "err 17955.410809075343 in epoch 118 :\n",
      "training epoch 119\n",
      "err 17955.410809075343 in epoch 119 :\n",
      "training epoch 120\n",
      "err 17955.410809075343 in epoch 120 :\n",
      "training epoch 121\n",
      "err 17955.410809075343 in epoch 121 :\n",
      "training epoch 122\n",
      "err 17955.410809075343 in epoch 122 :\n",
      "training epoch 123\n",
      "err 17955.410809075343 in epoch 123 :\n",
      "training epoch 124\n",
      "err 17955.410809075343 in epoch 124 :\n",
      "training epoch 125\n",
      "err 17955.410809075343 in epoch 125 :\n",
      "training epoch 126\n",
      "err 17955.410809075343 in epoch 126 :\n",
      "training epoch 127\n",
      "err 17955.410809075343 in epoch 127 :\n",
      "training epoch 128\n",
      "err 17955.410809075343 in epoch 128 :\n",
      "training epoch 129\n",
      "err 17955.410809075343 in epoch 129 :\n",
      "training epoch 130\n",
      "err 17955.410809075343 in epoch 130 :\n",
      "training epoch 131\n",
      "err 17955.410809075343 in epoch 131 :\n",
      "training epoch 132\n",
      "err 17955.410809075343 in epoch 132 :\n",
      "training epoch 133\n",
      "err 17955.410809075343 in epoch 133 :\n",
      "training epoch 134\n",
      "err 17955.410809075343 in epoch 134 :\n",
      "training epoch 135\n",
      "err 17955.410809075343 in epoch 135 :\n",
      "training epoch 136\n",
      "err 17955.410809075343 in epoch 136 :\n",
      "training epoch 137\n",
      "err 17955.410809075343 in epoch 137 :\n",
      "training epoch 138\n",
      "err 17955.410809075343 in epoch 138 :\n",
      "training epoch 139\n",
      "err 17955.410809075343 in epoch 139 :\n",
      "training epoch 140\n",
      "err 17955.410809075343 in epoch 140 :\n",
      "training epoch 141\n",
      "err 17955.410809075343 in epoch 141 :\n",
      "training epoch 142\n",
      "err 17955.410809075343 in epoch 142 :\n",
      "training epoch 143\n",
      "err 17955.410809075343 in epoch 143 :\n",
      "training epoch 144\n",
      "err 17955.410809075343 in epoch 144 :\n",
      "training epoch 145\n",
      "err 17955.410809075343 in epoch 145 :\n",
      "training epoch 146\n",
      "err 17955.410809075343 in epoch 146 :\n",
      "training epoch 147\n",
      "err 17955.410809075343 in epoch 147 :\n",
      "training epoch 148\n",
      "err 17955.410809075343 in epoch 148 :\n",
      "training epoch 149\n",
      "err 17955.410809075343 in epoch 149 :\n",
      "training epoch 150\n",
      "err 17955.410809075343 in epoch 150 :\n",
      "training epoch 151\n",
      "err 17955.410809075343 in epoch 151 :\n",
      "training epoch 152\n",
      "err 17955.410809075343 in epoch 152 :\n",
      "training epoch 153\n",
      "err 17955.410809075343 in epoch 153 :\n",
      "training epoch 154\n",
      "err 17955.410809075343 in epoch 154 :\n",
      "training epoch 155\n",
      "err 17955.410809075343 in epoch 155 :\n",
      "training epoch 156\n",
      "err 17955.410809075343 in epoch 156 :\n",
      "training epoch 157\n",
      "err 17955.410809075343 in epoch 157 :\n",
      "training epoch 158\n",
      "err 17955.410809075343 in epoch 158 :\n",
      "training epoch 159\n",
      "err 17955.410809075343 in epoch 159 :\n",
      "training epoch 160\n",
      "err 17955.410809075343 in epoch 160 :\n",
      "training epoch 161\n",
      "err 17955.410809075343 in epoch 161 :\n",
      "training epoch 162\n",
      "err 17955.410809075343 in epoch 162 :\n",
      "training epoch 163\n",
      "err 17955.410809075343 in epoch 163 :\n",
      "training epoch 164\n",
      "err 17955.410809075343 in epoch 164 :\n",
      "training epoch 165\n",
      "err 17955.410809075343 in epoch 165 :\n",
      "training epoch 166\n",
      "err 17955.410809075343 in epoch 166 :\n",
      "training epoch 167\n",
      "err 17955.410809075343 in epoch 167 :\n",
      "training epoch 168\n",
      "err 17955.410809075343 in epoch 168 :\n",
      "training epoch 169\n",
      "err 17955.410809075343 in epoch 169 :\n",
      "training epoch 170\n",
      "err 17955.410809075343 in epoch 170 :\n",
      "training epoch 171\n",
      "err 17955.410809075343 in epoch 171 :\n",
      "training epoch 172\n",
      "err 17955.410809075343 in epoch 172 :\n",
      "training epoch 173\n",
      "err 17955.410809075343 in epoch 173 :\n",
      "training epoch 174\n",
      "err 17955.410809075343 in epoch 174 :\n",
      "training epoch 175\n",
      "err 17955.410809075343 in epoch 175 :\n",
      "training epoch 176\n",
      "err 17955.410809075343 in epoch 176 :\n",
      "training epoch 177\n",
      "err 17955.410809075343 in epoch 177 :\n",
      "training epoch 178\n",
      "err 17955.410809075343 in epoch 178 :\n",
      "training epoch 179\n",
      "err 17955.410809075343 in epoch 179 :\n",
      "training epoch 180\n",
      "err 17955.410809075343 in epoch 180 :\n",
      "training epoch 181\n",
      "err 17955.410809075343 in epoch 181 :\n",
      "training epoch 182\n",
      "err 17955.410809075343 in epoch 182 :\n",
      "training epoch 183\n",
      "err 17955.410809075343 in epoch 183 :\n",
      "training epoch 184\n",
      "err 17955.410809075343 in epoch 184 :\n",
      "training epoch 185\n",
      "err 17955.410809075343 in epoch 185 :\n",
      "training epoch 186\n",
      "err 17955.410809075343 in epoch 186 :\n",
      "training epoch 187\n",
      "err 17955.410809075343 in epoch 187 :\n",
      "training epoch 188\n",
      "err 17955.410809075343 in epoch 188 :\n",
      "training epoch 189\n",
      "err 17955.410809075343 in epoch 189 :\n",
      "training epoch 190\n",
      "err 17955.410809075343 in epoch 190 :\n",
      "training epoch 191\n",
      "err 17955.410809075343 in epoch 191 :\n",
      "training epoch 192\n",
      "err 17955.410809075343 in epoch 192 :\n",
      "training epoch 193\n",
      "err 17955.410809075343 in epoch 193 :\n",
      "training epoch 194\n",
      "err 17955.410809075343 in epoch 194 :\n",
      "training epoch 195\n",
      "err 17955.410809075343 in epoch 195 :\n",
      "training epoch 196\n",
      "err 17955.410809075343 in epoch 196 :\n",
      "training epoch 197\n",
      "err 17955.410809075343 in epoch 197 :\n",
      "training epoch 198\n",
      "err 17955.410809075343 in epoch 198 :\n",
      "training epoch 199\n",
      "err 17955.410809075343 in epoch 199 :\n",
      "training epoch 200\n",
      "err 17955.410809075343 in epoch 200 :\n",
      "training epoch 201\n",
      "err 17955.410809075343 in epoch 201 :\n",
      "training epoch 202\n",
      "err 17955.410809075343 in epoch 202 :\n",
      "training epoch 203\n",
      "err 17955.410809075343 in epoch 203 :\n",
      "training epoch 204\n",
      "err 17955.410809075343 in epoch 204 :\n",
      "training epoch 205\n",
      "err 17955.410809075343 in epoch 205 :\n",
      "training epoch 206\n",
      "err 17955.410809075343 in epoch 206 :\n",
      "training epoch 207\n",
      "err 17955.410809075343 in epoch 207 :\n",
      "training epoch 208\n",
      "err 17955.410809075343 in epoch 208 :\n",
      "training epoch 209\n",
      "err 17955.410809075343 in epoch 209 :\n",
      "training epoch 210\n",
      "err 17955.410809075343 in epoch 210 :\n",
      "training epoch 211\n",
      "err 17955.410809075343 in epoch 211 :\n",
      "training epoch 212\n",
      "err 17955.410809075343 in epoch 212 :\n",
      "training epoch 213\n",
      "err 17955.410809075343 in epoch 213 :\n",
      "training epoch 214\n",
      "err 17955.410809075343 in epoch 214 :\n",
      "training epoch 215\n",
      "err 17955.410809075343 in epoch 215 :\n",
      "training epoch 216\n",
      "err 17955.410809075343 in epoch 216 :\n",
      "training epoch 217\n",
      "err 17955.410809075343 in epoch 217 :\n",
      "training epoch 218\n",
      "err 17955.410809075343 in epoch 218 :\n",
      "training epoch 219\n",
      "err 17955.410809075343 in epoch 219 :\n",
      "training epoch 220\n",
      "err 17955.410809075343 in epoch 220 :\n",
      "training epoch 221\n",
      "err 17955.410809075343 in epoch 221 :\n",
      "training epoch 222\n",
      "err 17955.410809075343 in epoch 222 :\n",
      "training epoch 223\n",
      "err 17955.410809075343 in epoch 223 :\n",
      "training epoch 224\n",
      "err 17955.410809075343 in epoch 224 :\n",
      "training epoch 225\n",
      "err 17955.410809075343 in epoch 225 :\n",
      "training epoch 226\n",
      "err 17955.410809075343 in epoch 226 :\n",
      "training epoch 227\n",
      "err 17955.410809075343 in epoch 227 :\n",
      "training epoch 228\n",
      "err 17955.410809075343 in epoch 228 :\n",
      "training epoch 229\n",
      "err 17955.410809075343 in epoch 229 :\n",
      "training epoch 230\n",
      "err 17955.410809075343 in epoch 230 :\n",
      "training epoch 231\n",
      "err 17955.410809075343 in epoch 231 :\n",
      "training epoch 232\n",
      "err 17955.410809075343 in epoch 232 :\n",
      "training epoch 233\n",
      "err 17955.410809075343 in epoch 233 :\n",
      "training epoch 234\n",
      "err 17955.410809075343 in epoch 234 :\n",
      "training epoch 235\n",
      "err 17955.410809075343 in epoch 235 :\n",
      "training epoch 236\n",
      "err 17955.410809075343 in epoch 236 :\n",
      "training epoch 237\n",
      "err 17955.410809075343 in epoch 237 :\n",
      "training epoch 238\n",
      "err 17955.410809075343 in epoch 238 :\n",
      "training epoch 239\n",
      "err 17955.410809075343 in epoch 239 :\n",
      "training epoch 240\n",
      "err 17955.410809075343 in epoch 240 :\n",
      "training epoch 241\n",
      "err 17955.410809075343 in epoch 241 :\n",
      "training epoch 242\n",
      "err 17955.410809075343 in epoch 242 :\n",
      "training epoch 243\n",
      "err 17955.410809075343 in epoch 243 :\n",
      "training epoch 244\n",
      "err 17955.410809075343 in epoch 244 :\n",
      "training epoch 245\n",
      "err 17955.410809075343 in epoch 245 :\n",
      "training epoch 246\n",
      "err 17955.410809075343 in epoch 246 :\n",
      "training epoch 247\n",
      "err 17955.410809075343 in epoch 247 :\n",
      "training epoch 248\n",
      "err 17955.410809075343 in epoch 248 :\n",
      "training epoch 249\n",
      "err 17955.410809075343 in epoch 249 :\n",
      "training epoch 250\n",
      "err 17955.410809075343 in epoch 250 :\n",
      "training epoch 251\n",
      "err 17955.410809075343 in epoch 251 :\n",
      "training epoch 252\n",
      "err 17955.410809075343 in epoch 252 :\n",
      "training epoch 253\n",
      "err 17955.410809075343 in epoch 253 :\n",
      "training epoch 254\n",
      "err 17955.410809075343 in epoch 254 :\n",
      "[... output truncated: epochs 255-437 each printed the identical err 17955.410809075343 ...]\n",
      "training epoch 438\n"
     ]
    },
    {
     "ename": "KeyboardInterrupt",
     "evalue": "",
     "output_type": "error",
     "traceback": [
      "\u001B[0;31m---------------------------------------------------------------------------\u001B[0m",
      "\u001B[0;31mKeyboardInterrupt\u001B[0m                         Traceback (most recent call last)",
      "Input \u001B[0;32mIn [29]\u001B[0m, in \u001B[0;36m<cell line: 2>\u001B[0;34m()\u001B[0m\n\u001B[1;32m      3\u001B[0m \u001B[38;5;28mprint\u001B[39m(\u001B[38;5;124m'\u001B[39m\u001B[38;5;124mtraining epoch \u001B[39m\u001B[38;5;132;01m{}\u001B[39;00m\u001B[38;5;124m'\u001B[39m\u001B[38;5;241m.\u001B[39mformat(i\u001B[38;5;241m+\u001B[39m\u001B[38;5;241m1\u001B[39m))\n\u001B[1;32m      4\u001B[0m my_model \u001B[38;5;241m=\u001B[39m XGBRegressor(seed\u001B[38;5;241m=\u001B[39mi)\n\u001B[0;32m----> 5\u001B[0m \u001B[43mmy_model\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mfit\u001B[49m\u001B[43m(\u001B[49m\u001B[43mtrain_X\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mtrain_y\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m      6\u001B[0m predictions \u001B[38;5;241m=\u001B[39m my_model\u001B[38;5;241m.\u001B[39mpredict(test_X)\n\u001B[1;32m      7\u001B[0m this_epoch_err\u001B[38;5;241m=\u001B[39mmean_absolute_error(predictions, test_y)\n",
      "File \u001B[0;32m~/anaconda3/envs/mytorch/lib/python3.8/site-packages/xgboost/core.py:575\u001B[0m, in \u001B[0;36m_deprecate_positional_args.<locals>.inner_f\u001B[0;34m(*args, **kwargs)\u001B[0m\n\u001B[1;32m    573\u001B[0m \u001B[38;5;28;01mfor\u001B[39;00m k, arg \u001B[38;5;129;01min\u001B[39;00m \u001B[38;5;28mzip\u001B[39m(sig\u001B[38;5;241m.\u001B[39mparameters, args):\n\u001B[1;32m    574\u001B[0m     kwargs[k] \u001B[38;5;241m=\u001B[39m arg\n\u001B[0;32m--> 575\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mf\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m~/anaconda3/envs/mytorch/lib/python3.8/site-packages/xgboost/sklearn.py:961\u001B[0m, in \u001B[0;36mXGBModel.fit\u001B[0;34m(self, X, y, sample_weight, base_margin, eval_set, eval_metric, early_stopping_rounds, verbose, xgb_model, sample_weight_eval_set, base_margin_eval_set, feature_weights, callbacks)\u001B[0m\n\u001B[1;32m    956\u001B[0m     obj \u001B[38;5;241m=\u001B[39m \u001B[38;5;28;01mNone\u001B[39;00m\n\u001B[1;32m    958\u001B[0m model, metric, params, early_stopping_rounds, callbacks \u001B[38;5;241m=\u001B[39m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_configure_fit(\n\u001B[1;32m    959\u001B[0m     xgb_model, eval_metric, params, early_stopping_rounds, callbacks\n\u001B[1;32m    960\u001B[0m )\n\u001B[0;32m--> 961\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_Booster \u001B[38;5;241m=\u001B[39m \u001B[43mtrain\u001B[49m\u001B[43m(\u001B[49m\n\u001B[1;32m    962\u001B[0m \u001B[43m    \u001B[49m\u001B[43mparams\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    963\u001B[0m \u001B[43m    \u001B[49m\u001B[43mtrain_dmatrix\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    964\u001B[0m \u001B[43m    \u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mget_num_boosting_rounds\u001B[49m\u001B[43m(\u001B[49m\u001B[43m)\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    965\u001B[0m \u001B[43m    \u001B[49m\u001B[43mevals\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mevals\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    966\u001B[0m \u001B[43m    \u001B[49m\u001B[43mearly_stopping_rounds\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mearly_stopping_rounds\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    967\u001B[0m \u001B[43m    \u001B[49m\u001B[43mevals_result\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mevals_result\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    968\u001B[0m \u001B[43m    
\u001B[49m\u001B[43mobj\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mobj\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    969\u001B[0m \u001B[43m    \u001B[49m\u001B[43mcustom_metric\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mmetric\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    970\u001B[0m \u001B[43m    \u001B[49m\u001B[43mverbose_eval\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mverbose\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    971\u001B[0m \u001B[43m    \u001B[49m\u001B[43mxgb_model\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mmodel\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    972\u001B[0m \u001B[43m    \u001B[49m\u001B[43mcallbacks\u001B[49m\u001B[38;5;241;43m=\u001B[39;49m\u001B[43mcallbacks\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m    973\u001B[0m \u001B[43m\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    975\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_set_evaluation_result(evals_result)\n\u001B[1;32m    976\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m\n",
      "File \u001B[0;32m~/anaconda3/envs/mytorch/lib/python3.8/site-packages/xgboost/core.py:575\u001B[0m, in \u001B[0;36m_deprecate_positional_args.<locals>.inner_f\u001B[0;34m(*args, **kwargs)\u001B[0m\n\u001B[1;32m    573\u001B[0m \u001B[38;5;28;01mfor\u001B[39;00m k, arg \u001B[38;5;129;01min\u001B[39;00m \u001B[38;5;28mzip\u001B[39m(sig\u001B[38;5;241m.\u001B[39mparameters, args):\n\u001B[1;32m    574\u001B[0m     kwargs[k] \u001B[38;5;241m=\u001B[39m arg\n\u001B[0;32m--> 575\u001B[0m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43mf\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[38;5;241;43m*\u001B[39;49m\u001B[43mkwargs\u001B[49m\u001B[43m)\u001B[49m\n",
      "File \u001B[0;32m~/anaconda3/envs/mytorch/lib/python3.8/site-packages/xgboost/training.py:181\u001B[0m, in \u001B[0;36mtrain\u001B[0;34m(params, dtrain, num_boost_round, evals, obj, feval, maximize, early_stopping_rounds, evals_result, verbose_eval, xgb_model, callbacks, custom_metric)\u001B[0m\n\u001B[1;32m    179\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m cb_container\u001B[38;5;241m.\u001B[39mbefore_iteration(bst, i, dtrain, evals):\n\u001B[1;32m    180\u001B[0m     \u001B[38;5;28;01mbreak\u001B[39;00m\n\u001B[0;32m--> 181\u001B[0m \u001B[43mbst\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mupdate\u001B[49m\u001B[43m(\u001B[49m\u001B[43mdtrain\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mi\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mobj\u001B[49m\u001B[43m)\u001B[49m\n\u001B[1;32m    182\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m cb_container\u001B[38;5;241m.\u001B[39mafter_iteration(bst, i, dtrain, evals):\n\u001B[1;32m    183\u001B[0m     \u001B[38;5;28;01mbreak\u001B[39;00m\n",
      "File \u001B[0;32m~/anaconda3/envs/mytorch/lib/python3.8/site-packages/xgboost/core.py:1778\u001B[0m, in \u001B[0;36mBooster.update\u001B[0;34m(self, dtrain, iteration, fobj)\u001B[0m\n\u001B[1;32m   1775\u001B[0m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39m_validate_features(dtrain)\n\u001B[1;32m   1777\u001B[0m \u001B[38;5;28;01mif\u001B[39;00m fobj \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[0;32m-> 1778\u001B[0m     _check_call(\u001B[43m_LIB\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mXGBoosterUpdateOneIter\u001B[49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mhandle\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1779\u001B[0m \u001B[43m                                            \u001B[49m\u001B[43mctypes\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mc_int\u001B[49m\u001B[43m(\u001B[49m\u001B[43miteration\u001B[49m\u001B[43m)\u001B[49m\u001B[43m,\u001B[49m\n\u001B[1;32m   1780\u001B[0m \u001B[43m                                            \u001B[49m\u001B[43mdtrain\u001B[49m\u001B[38;5;241;43m.\u001B[39;49m\u001B[43mhandle\u001B[49m\u001B[43m)\u001B[49m)\n\u001B[1;32m   1781\u001B[0m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[1;32m   1782\u001B[0m     pred \u001B[38;5;241m=\u001B[39m \u001B[38;5;28mself\u001B[39m\u001B[38;5;241m.\u001B[39mpredict(dtrain, output_margin\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m, training\u001B[38;5;241m=\u001B[39m\u001B[38;5;28;01mTrue\u001B[39;00m)\n",
      "\u001B[0;31mKeyboardInterrupt\u001B[0m: "
     ]
    }
   ],
   "source": [
    "# NOTE(review): the seed-search loop below was interrupted by hand\n",
    "# (KeyboardInterrupt in the output above) and left commented out.\n",
    "# The captured output shows the MAE is identical on every iteration,\n",
    "# so varying only `seed` did not change the fitted model here --\n",
    "# presumably because the default hyperparameters make training\n",
    "# deterministic; confirm before re-enabling this loop.\n",
    "# `max_abs_err` is really the best (lowest) MAE seen so far -- models\n",
    "# beating it are dumped to disk; 'epoch' means an independent trial.\n",
    "max_abs_err = 19000\n",
    "# for i in range(1000):\n",
    "#     print('training epoch {}'.format(i+1))\n",
    "#     my_model = XGBRegressor(seed=i)\n",
    "#     my_model.fit(train_X, train_y)\n",
    "#     predictions = my_model.predict(test_X)\n",
    "#     this_epoch_err=mean_absolute_error(predictions, test_y)\n",
    "#     print('err {} in epoch {} :'.format(this_epoch_err,i+1))\n",
    "#     if this_epoch_err<max_abs_err:\n",
    "#         max_abs_err=this_epoch_err\n",
    "#         print('save epoch {} with err: {} '.format(i+1,this_epoch_err))\n",
    "#         joblib.dump(my_model, './model/xgb{}.pkl'.format(this_epoch_err))\n",
    "#     else:\n",
    "#         continue"
   ],
   "metadata": {
    "collapsed": false
   }
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 0
}
