{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from xgboost import XGBClassifier\n",
    "import xgboost as xgb\n",
    "\n",
    "import pandas as pd \n",
    "import numpy as np\n",
    "\n",
    "import math\n",
    "\n",
    "from sklearn.model_selection import GridSearchCV\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "\n",
    "from sklearn.metrics import log_loss\n",
    "\n",
    "from matplotlib import pyplot\n",
    "import seaborn as sns\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>bathrooms</th>\n",
       "      <th>bedrooms</th>\n",
       "      <th>price</th>\n",
       "      <th>price_bathrooms</th>\n",
       "      <th>price_bedrooms</th>\n",
       "      <th>room_diff</th>\n",
       "      <th>room_num</th>\n",
       "      <th>Year</th>\n",
       "      <th>Month</th>\n",
       "      <th>Day</th>\n",
       "      <th>...</th>\n",
       "      <th>walk</th>\n",
       "      <th>walls</th>\n",
       "      <th>war</th>\n",
       "      <th>washer</th>\n",
       "      <th>water</th>\n",
       "      <th>wheelchair</th>\n",
       "      <th>wifi</th>\n",
       "      <th>windows</th>\n",
       "      <th>work</th>\n",
       "      <th>interest_level</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.5</td>\n",
       "      <td>3</td>\n",
       "      <td>3000</td>\n",
       "      <td>1200.0</td>\n",
       "      <td>750.000000</td>\n",
       "      <td>-1.5</td>\n",
       "      <td>4.5</td>\n",
       "      <td>2016</td>\n",
       "      <td>6</td>\n",
       "      <td>24</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1.0</td>\n",
       "      <td>2</td>\n",
       "      <td>5465</td>\n",
       "      <td>2732.5</td>\n",
       "      <td>1821.666667</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>6</td>\n",
       "      <td>12</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>2850</td>\n",
       "      <td>1425.0</td>\n",
       "      <td>1425.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>17</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>3275</td>\n",
       "      <td>1637.5</td>\n",
       "      <td>1637.500000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>18</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1.0</td>\n",
       "      <td>4</td>\n",
       "      <td>3350</td>\n",
       "      <td>1675.0</td>\n",
       "      <td>670.000000</td>\n",
       "      <td>-3.0</td>\n",
       "      <td>5.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>28</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 228 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   bathrooms  bedrooms  price  price_bathrooms  price_bedrooms  room_diff  \\\n",
       "0        1.5         3   3000           1200.0      750.000000       -1.5   \n",
       "1        1.0         2   5465           2732.5     1821.666667       -1.0   \n",
       "2        1.0         1   2850           1425.0     1425.000000        0.0   \n",
       "3        1.0         1   3275           1637.5     1637.500000        0.0   \n",
       "4        1.0         4   3350           1675.0      670.000000       -3.0   \n",
       "\n",
       "   room_num  Year  Month  Day       ...        walk  walls  war  washer  \\\n",
       "0       4.5  2016      6   24       ...           0      0    0       0   \n",
       "1       3.0  2016      6   12       ...           0      0    0       0   \n",
       "2       2.0  2016      4   17       ...           0      0    0       0   \n",
       "3       2.0  2016      4   18       ...           0      0    0       0   \n",
       "4       5.0  2016      4   28       ...           0      0    1       0   \n",
       "\n",
       "   water  wheelchair  wifi  windows  work  interest_level  \n",
       "0      0           0     0        0     0               1  \n",
       "1      0           0     0        0     0               2  \n",
       "2      0           0     0        0     0               0  \n",
       "3      0           0     0        0     0               2  \n",
       "4      0           0     0        0     0               2  \n",
       "\n",
       "[5 rows x 228 columns]"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Location of the feature-engineered rental-listing training data\n",
    "dpath = './data/'\n",
    "train = pd.read_csv(dpath + 'RentListingInquries_FE_train.csv')\n",
    "train.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Split the frame into the target vector and the feature matrix\n",
    "y_train = train['interest_level']\n",
    "X_train = train.drop('interest_level', axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 5-fold stratified cross-validation with a fixed seed for reproducible splits\n",
    "kfold = StratifiedKFold(shuffle=True, n_splits=5, random_state=3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [],
   "source": [
    "def modelfit(alg, X_train, y_train, useTrainCV=True, cv_folds=None, early_stopping_rounds=10):\n",
    "    \"\"\"Optionally tune n_estimators with xgb.cv, then fit alg and report train logloss.\n",
    "\n",
    "    alg: an XGBClassifier; when useTrainCV is True its n_estimators is\n",
    "         replaced in place by the round count chosen by early stopping.\n",
    "    cv_folds: a CV splitter (e.g. StratifiedKFold) passed to xgb.cv as `folds`.\n",
    "    early_stopping_rounds: stop CV when test mlogloss fails to improve this long.\n",
    "    \"\"\"\n",
    "    if useTrainCV:\n",
    "        xgb_param = alg.get_xgb_params()\n",
    "        xgb_param['num_class'] = 3\n",
    "\n",
    "        xgtrain = xgb.DMatrix(X_train, label=y_train)\n",
    "\n",
    "        cvresult = xgb.cv(xgb_param, xgtrain, num_boost_round=alg.get_params()['n_estimators'],\n",
    "                          folds=cv_folds, metrics='mlogloss',\n",
    "                          early_stopping_rounds=early_stopping_rounds)\n",
    "\n",
    "        # The number of rows kept by early stopping is the best round count\n",
    "        n_estimators = cvresult.shape[0]\n",
    "        alg.set_params(n_estimators=n_estimators)\n",
    "\n",
    "        # xgb.cv returns a DataFrame by default; persist it for later inspection\n",
    "        cvresult.to_csv('my_preds4_2_3_699.csv', index_label='n_estimators')\n",
    "\n",
    "        # Plot train/test mlogloss (mean +/- std) against boosting rounds\n",
    "        test_means = cvresult['test-mlogloss-mean']\n",
    "        test_stds = cvresult['test-mlogloss-std']\n",
    "\n",
    "        train_means = cvresult['train-mlogloss-mean']\n",
    "        train_stds = cvresult['train-mlogloss-std']\n",
    "\n",
    "        x_axis = range(0, n_estimators)\n",
    "        pyplot.errorbar(x_axis, test_means, yerr=test_stds, label='Test')\n",
    "        pyplot.errorbar(x_axis, train_means, yerr=train_stds, label='Train')\n",
    "        pyplot.title(\"XGBoost n_estimators vs Log Loss\")\n",
    "        pyplot.xlabel('n_estimators')\n",
    "        pyplot.ylabel('Log Loss')\n",
    "        # Fix: labels were set above but never shown without a legend call\n",
    "        pyplot.legend()\n",
    "        pyplot.savefig('n_estimators4_2_3_699.png')\n",
    "\n",
    "    # Fit the algorithm on the full training data\n",
    "    alg.fit(X_train, y_train, eval_metric='mlogloss')\n",
    "\n",
    "    # Evaluate on the training set\n",
    "    train_predprob = alg.predict_proba(X_train)\n",
    "    logloss = log_loss(y_train, train_predprob)\n",
    "\n",
    "    # Fix: the original printed only the label and left `logloss` as a bare\n",
    "    # expression, which is a no-op inside a function; print the value itself.\n",
    "    print(\"logloss of train : %.5f\" % logloss)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "logloss of train :\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYkAAAEXCAYAAABYsbiOAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAIABJREFUeJzt3XmcHHWd//HXp3vOZK4ck5BM7hCOcBggAiJCFA9ABEVEQFhRV9b9Lev1U8RdfspPxfVYlVX56SKLiCi3IiCKIiCCIoQASQgEAknIfU+SyWTO/vz++NZkaibTmZ7J9HR3+v18POox3VXVVZ+unun31LeqvmXujoiISF8SuS5ARETyl0JCRETSUkiIiEhaCgkREUlLISEiImkpJEREJC2FhEiMmf2bmd2Q6zpE8oVCosCYWZWZrTCzD8XGVZvZ62Z2XmzcXDO738y2mVmjmS0xs2vMbFQ0/VIz6zSzpmh4zcz+Ocu1zzOz1dlcx0D0VY+7f93d/zFL61thZm/PxrKzYbg+r0LbLsVGIVFg3L0J+CfgWjOrj0Z/C5jv7ncBmNlJwKPAE8Bh7l4HnA50AG+ILe5v7l7l7lXA+4Fvmdkxw/NOZCDMrCTXNUiRcncNBTgANwG3AvOALcBBsWmPAz/o5/WXAo/3GvcUcFHs+dnAC0AjIXQOj007PBrXGM1zdmzamcASYCewBvgcMBLYDaSApmiYmOZ9XQf8Nnr934GZGWyPw4A/AluBpcD5g6kHuBq4JXrdNMCBjwCrgG3AJ4A3Aguj9/7D2HpmAg9Hn8dm4BdAXTTt59G6dkfruiKDbbwC+EK0rlagJHq+JnovS4HT+tgWJwDrgWRs3PuAhdHj44H5wA5gA/DdNNt0HrA6zbRa4GZgE7ASuApIRNOSwHeibbAcuDzajiVplrUCeHuaaR8HlkWf671dvzOAAd8DNkbvYxFwZLrPO9d/r4U85LwADYP84GAUsC76Q/xIbPxIoBOY18/rLyUWEtEXXyNwSPT8EGAX8A6gFLgi+mMti54vA/4tev626A/y0Oi164C3xOo8Nnqc9ksnVsdNhC/Z46MvxV8At/XzmpGEL/GPRK85JtouswdaD32HxI+BCuCdQAtwDzAOaIi+pE6N5j842l7lQD3wGHBtbNk9vgz3tY1j8z8HTAYqgUOj9zkxVl+fAQq8Crwj9vxO4Mro8d+AS6LHVcCJaZaR9vMiBMRvgOqojpeBj0XTPkH4kp4Ube+HGERIRL9Xm4Fjo236A+CxaNq7gGeAOkJgHA5M2NfnrWFwg5qbCpS7byP8BzoC+FVs0ihCM+L6rhFm9q3ouMQuM7sqNu+J0fidhL2InwOvRNM+CPzW3f/o7u3AfxK+qE4CTiR8uXzD3dvc/WHgfuDC6LXtwGwzq3H3be6+YIBv79fu/pS7dxBCYk4/858FrHD3n7p7h7s/C9wNfGCI6vmqu7e4+x8IX+q3uvtGd18D/IUQSrj7smh7tbr7JuC7wKn7WO6+tnGX77v7KnffTQj/8ui9lLr7Cnd/Nc2ybyX6PMysmvDf9a2x7XGwmY119yZ3f3IgG8PMksAFwBfdfae7ryDsOVwSzXI+8F/uvjr6Pf3GQJYf8yHgRndf4O6twBeBN5nZtOg9VBP2IM3dX3T3dbH3tz+ft8QoJAqUmV1M+A/uIeCbsUnbCM0aE7pGuPsVHo5L/Jrwn3aXJ929zt2rgYOAI4CvR9MmEpoRupaRIvwX2xBNWxWN67Iymgbh+MaZwEoz+7OZvWmAb2997HEzIZD2ZSpwQhR4jWbWSPiCOWiI6tkQe7y7j+dVAGY23sxuM7M1ZrYDuAUYu4/l7msbd1kVm74M+DRhb2djtK6JaZb9S+BcMysHzgUWuHvXuj5G2It5ycyeNrOz9lFjX8YS9nxWxsbFP/+J8bp7PR6I3tunibCX2RD9Y/JDQtPkRjO73sxqoln39/OW
GIVEATKzcYT22I8TDmKfb2ZvAXD3XYR2/HMHskx330D47/s90ai1hC/frnUaodljTTRtspnFf3+mRNNw96fd/RxCk8w9wB1dqxlITQOwCvhzFHhdQ5W7//Mw1/P1aJlHuXsNcDGhKaRL7/Xtaxv3+Rp3/6W7nxy9zun5D0J8viWEL9gzgIsIodE17RV3v5CwPb4J3GVmIzN/m2wm/Lc+NTZuz+dPaO6ZFJs2eQDLjuu9fUYCY+j+Pfu+ux8HzCaE3uej8ek+bxkEhURh+iFwj7s/Eu1iXwH8JPqvkej5R83syihQMLNJwPR0CzSzMYSDmy9Eo+4A3m1mp5lZKfC/CQdP/0oIoWbgCjMrNbN5hHC5zczKzOxDZlYbNaHsIOzZQPgPfIyZ1Q7RduhyP3CImV0S1VNqZm80s8OHuZ5qwkHp7WbWQPSlFbMBmBF7vq9tvBczO9TM3hZ9zi10H3hP55fAp4BTCMckupZzsZnVR3sujdHotMsxs4r4EM17B3BNdPr1VOCzhD2nrvf1KTNrMLM6wsH2/pT2Wk8JoXnsI2Y2J3rPXwf+7u4ros/3hGi77Yq2R6qfz1sGI9cHRTQMbADeS/gPq67X+IeBa2LPTwAeIHwJNAKLgWuAMdH0Swlt3F1n9mwk/FGOiy3jfYQDkNuBPwNHxKYdEY3bHs3zvmh8GfB7QrPXDuBp4OTY624kNBk0kv7spq/Fns+jn4Pd0XyHEs6I2hQt/2HCsYwB1UPfB65LYvOvJnZSAOGL8arYNnkm2p7PEb70V8fmPQd4PVrX5zLYxivoeaD7aMKxo52Es33u72sbxuafQviC/G2v8bdEn3cT4Z+C96Z5/bzo/fceDiYc+7ol2t6rgC/RfXZTCWFPdwvh7KbPEPY8LM16VvSxjq9F0z5BOAjf9X4nReNPI5z11UT3mWRV/X3eGgY+WLTBRUSywszOAH7s7lP7nVnyjpqbRGRImVmlmZ1pZiVRs9uXCSdNSAHSnoQUhOjA/O/6mubhinHJE2Y2gtB0dhjhuMlvgU+5+46cFiaDopAQEZG01NwkIiJpFVynYWPHjvVp06blugwRkYLyzDPPbHb3+v7n7KngQmLatGnMnz8/12WIiBQUM1vZ/1x7U3OTiIikpZAQEZG0FBIiIpKWQkJERNJSSIiISFoKCRERSUshISIiaRVNSKzatJ3Hn38JT6lreRGRTBVNSDzw3//Gyb8+gebmXbkuRUSkYBRNSBwzawoAzU2N/cwpIiJdiiYkSiqrAWjeqZAQEclU0YREsrIGgNam7TmuRESkcBRNSJSOCCHR1qyQEBHJVNGERPnIOgDamnVzLBGRTBVNSFSMDHsSHbsVEiIimSqakKisHgVAZ4tCQkQkU0UTEiOqagHwlp05rkREpHAUTUiUj6gm5QatCgkRkUwVTUhYIkmzVUBbU65LEREpGFkLCTO70cw2mtniNNPNzL5vZsvMbKGZHZutWro0M4Jku0JCRCRT2dyTuAk4fR/TzwBmRcNlwI+yWAsALYlKku3qu0lEJFNZCwl3fwzYuo9ZzgFu9uBJoM7MJmSrHoDWxAhKOxQSIiKZyuUxiQZgVez56mjcXszsMjObb2bzN23aNOgVtiVHUtapkBARyVRBHLh29+vdfa67z62vrx/0ctpLRlKWah7CykREDmy5DIk1wOTY80nRuKzpLK2iUiEhIpKxXIbEvcA/RGc5nQhsd/d12VzhaztghCskREQyVZKtBZvZrcA8YKyZrQa+DJQCuPuPgQeAM4FlQDPwkWzV0mXqxAlUrdpNR0cnJSXJbK9ORKTgZS0k3P3CfqY78C/ZWn9frLKWEkvR2LSDurpRw7lqEZGCVBAHrodKojJ0F75r+77OzBURkS5FFRKl0T0lmndsyXElIiKFoahComxkaGJq2ak9CRGRTBRVSJRH95Ro3dWY40pERApDUYVEZfVoADoUEiIiGSmqkBhZG0Kic7dCQkQkE8UVEtGehCskREQyUlQh
kSirpIVSTPe5FhHJSFGFBMAuRpJoU0iIiGSi6EKiOVFFSbtCQkQkE0UXEi3JKkrbd+a6DBGRglB0IdFaUk1Fp+5zLSKSiaILifbSaipTCgkRkUwUXUikymupUkiIiGSk6ELCK0dRSxMtbR25LkVEJO8VXUjYiFGUWIod6i5cRKRfRRcSJSPHALBz28YcVyIikv+KLiTKqkJING/fnONKRETyX9GFREVtCImWHQoJEZH+FF1IjKytB6B9p+5OJyLSn6ILiapR4wDo2KUD1yIi/Sm6kKisCc1N3qyQEBHpT9GFhJWUs4sKEi26p4SISH+KLiQAmqyaZOu2XJchIpL3ijIktngVbTpwLSLSr6yGhJmdbmZLzWyZmV3Zx/SpZvYnM1toZo+a2aRs1tMlYUY92pMQEelP1kLCzJLAdcAZwGzgQjOb3Wu2/wRudvejga8A/5GteuLaaqdTbbuHY1UiIgUtm3sSxwPL3P01d28DbgPO6TXPbODh6PEjfUzPis7KsdT5Djo6U8OxOhGRgpXNkGgAVsWer47GxT0PnBs9fh9QbWZjslhTMHIMNdbMtp27sr4qEZFClusD158DTjWzZ4FTgTVAZ++ZzOwyM5tvZvM3bdq03ytNVoWrrrdvWb/fyxIROZBlMyTWAJNjzydF4/Zw97Xufq67HwP8ezRurwsY3P16d5/r7nPr6+v3u7CymnDVddOWdfu9LBGRA1k2Q+JpYJaZTTezMuAC4N74DGY21sy6avgicGMW69lj5KjxAOzevmE4ViciUrCyFhLu3gFcDjwIvAjc4e4vmNlXzOzsaLZ5wFIzexkYD1yTrXriqkYfBEDbdt1TQkRkX0qyuXB3fwB4oNe4L8Ue3wXclc0a+lIzZiIAqSZ1Fy4isi+5PnCdE8kRo+ggAc0KCRGRfSnKkCCRoNGr2bJxba4rERHJa8UZEsBuq2AyOgVWRGRfijYkmkc0MCLRnusyRETyWtGGRFtFPaM6t+LuuS5FRCRvFW1I+MhxjKWRnS3amxARSadoQyJRcxAV1s7mzTrDSUQknaINibK6CQBs37SmnzlFRIpX0YbEiNHhgrrmbQoJEZF0ijYkauvDTfBat6mTPxGRdIo2JKqirjk6d6iTPxGRdIo2JKxyFO2UYE0KCRGRdIo2JEgk2J4YRelu9QQrIpJO8YYEsLNsHCNbFRIiIukUdUi0VI5ndOdmXXUtIpJGUYdEqnoC49nCtl1tuS5FRCQvFXVIJGsnMtJa2bBZTU4iIn0p6pCoGDMFgO3rX89xJSIi+amoQ6JmXAiJXZsVEiIifSnqkKgdPxWAvz23KMeViIjkp6IOiWRtuOp6TmpJjisREclPJbkuIKdKytmaHENFSXFvBhGRdIp6TwJgZ8VEalvVE6yISF+KPiRaq6cwwTeyvVl3qBMR6a3oQyIxahoT2cKqTY25LkVEJO/0GxJmNtPMyqPH88zsk2ZWl/3Shkfl+JkkzNm05tVclyIikncy2ZO4G+g0s4OB64HJwC8zWbiZnW5mS81smZld2cf0KWb2iJk9a2YLzezMAVU/BEY1zALgvkf/OtyrFhHJe5mERMrdO4D3AT9w988DE/p7kZklgeuAM4DZwIVmNrvXbFcBd7j7McAFwP8bSPFDYcS4mQC8pb5puFctIpL3MgmJdjO7EPgwcH80rjSD1x0PLHP319y9DbgNOKfXPA7URI9rgbUZLHdoVU+gnRKS23XVtYhIb5mExEeANwHXuPtyM5sO/DyD1zUAq2LPV0fj4q4GLjaz1cADwL/2tSAzu8zM5pvZ/E2bNmWw6gFIJNhaNoHq5tVDu1wRkQNAvyHh7kvc/ZPufquZjQKq3f2bQ7T+C4Gb3H0ScCbwczPbqyZ3v97d57r73Pr6+iFadbfmEQ2M7VhPW0dqyJctIlLIMjm76VEzqzGz0cAC4Cdm9t0Mlr2GcJC7y6RoXNzHgDsA3P1vQAUwNpPCh1Jn7VQm20bWNu4e7lWLiOS1TJqb
at19B3AucLO7nwC8PYPXPQ3MMrPpZlZGODB9b695XgdOAzCzwwkhMcTtSf0rGzOdOtvFZ29+dLhXLSKS1zIJiRIzmwCcT/eB635FZ0RdDjwIvEg4i+kFM/uKmZ0dzfa/gY+b2fPArcClnoN7idZNCqfB/sNhRX9toYhID5n0bPcVwhf9E+7+tJnNAF7JZOHu/gDhgHR83Jdij5cAb8683OyomRBComnBHXDmGTmuRkQkf/QbEu5+J3Bn7PlrwPuzWdSwGzOLFEaibESuKxERySuZHLieZGa/NrON0XC3mU0ajuKGTdkIGssmULfrVXLQ2iUikrcyaYT/KeGA88RouC8ad0Bprj2YGb6aNTrDSURkj0xCot7df+ruHdFwEzD0Fyvk2N921DPD1rJsnXqDFRHpkklIbDGzi80sGQ0XA1uyXdhwe/dpb6XMOlm38sVclyIikjcyCYmPEk5/XQ+sA84DLs1iTTkxYsFPAGhZ/UKOKxERyR+ZdMux0t3Pdvd6dx/n7u/lQDu7CeAj4Uzdkq1Lc1yIiEj+GOzVY58d0iryQXkVjWUHUbfrNTpTOsNJRAQGHxI2pFXkiaWdDcxkDcs378p1KSIieWGwIXFA/qt9yJFzmWlree71A+64vIjIoKS94trMdtJ3GBhQmbWKcqh21cMkrJ1VryyCuVNzXY6ISM6l3ZNw92p3r+ljqHb3TPp8KjiJD94MQOeqZ3JciYhIflC3p3H1h9JMBWN3LGZXa0euqxERyTmFRFwiSWv90cxJvMrC1dtzXY2ISM4pJHqpnHY8h9tKnl+5MdeliIjknEKil4ppb6TcOtj86oJclyIiknOZdBW+08x29BpWRd2HzxiOIodVw3EAtK98Wt2Gi0jRy+QspWuB1cAvCae/XgDMBBYANwLzslVcTtROYnf5GI7qfJWX1u/k8Ak1ua5IRCRnMmluOtvd/9vdd7r7Dne/HniXu98OjMpyfcPPDGs4jjm2jL+8sinX1YiI5FQmIdFsZuebWSIazgdaomkHZHtMxfQTOTixludezOhW3iIiB6xMQuJDwCXAxmi4BLjYzCqBy7NYW+5MPxWAxOuPs7utM8fFiIjkTr/HJNz9NeA9aSY/PrTl5IkJc+goreKkjsU8tWIrpx5ywN2IT0QkI5mc3TQpOpNpYzTcbWaThqO4nEmWYMly3pJczGMv67iEiBSvTJqbfgrcC0yMhvuicQe05LwrmGybePRJnQorIsUrk5Cod/efuntHNNwEHPjtLzPCcYm5vogX1u7IcTEiIrmRSUhsMbOLzSwZDRcDGd1wwcxON7OlZrbMzK7sY/r3zOy5aHjZzBoH+gaypv4wUiPHcXJiMfc9vzbX1YiI5EQmIfFR4HxgPbAOOA+4tL8XmVkSuA44A5gNXGhms+PzuPtn3H2Ou88BfgD8akDVZ5MZiVnv4NTkQm55YpmanESkKPUbEu6+0t3Pdvd6dx/n7u8F3p/Bso8Hlrn7a+7eBtwGnLOP+S8Ebs2o6uFy+HuoYRfH+SIWvJ4/OzkiIsNlsB38fTaDeRqAVbHnq6NxezGzqcB04OFB1pMdM96Kl43kjORT/PMtuhGRiBSfwYaEDWkVoT+ou9y9zyvXzOwyM5tvZvM3bRrGU1JLK7BZ7+KssmfZ3drGzpb24Vu3iEgeGGxIZNJAvwaYHHs+KRrXlwvYR1OTu1/v7nPdfW59/TCfWDX7bKo7G5ndvoSzfnBgXjsoIpJO2pBI00X4DjPbSbheoj9PA7PMbLqZlRGC4N4+1nMYoaPAvw3yPWTXwe8AS/Dh8ocpSZgOYItIUUkbEu5e7e41fQzV7p5Jdx4dhL6dHgReBO5w9xfM7CtmdnZs1guA2zxfv33Lq+DI83h76SJWb9rG6df+JdcViYgMm0zuJzFo7v4A8ECvcV/q9fzqbNYwJI79B8oW3cFZJU/xYOM83B2zoT4sIyKSf3T70kxMOxlGz+CKcU/R1NrBu659LNcViYgMC4VEJszg
mEsYv3U+J4/aRsKMVCo/W8dERIaSQiJTcy4CjC/s/h4vrd/J7fNX9fsSEZFCp5DIVPVBcOwlHJlcxfSKnVx1z2K2NLXmuioRkaxSSAzEyZ/BOlv5H/sqqZTz9QdeynVFIiJZpZAYiNEz4OgPMqNkK4fXtHL3gtX8duG6XFclIpI1ComBesvnoKOV+45+gqryJP966wKWbdyZ66pERLJCITFQ9YfAyHqS82/g4UsnkjDjrB88zq7WjlxXJiIy5BQSg/GJx6GsmnF/+zo3f/R4WtpTnPzNh2nvTOW6MhGRIaWQGIyqehgxBl7+HSe1/oX/e/YRbGtu5/hrHlJQiMgBRSExWP/6DJRVwd3/yIePquBLZ83eExRtHQoKETkwKCQGK1kCH38EEiVw7yf56JuncfV7QlDM/dof2b5b954QkcKnkNgf9YfA26+GVx6EBT/j0jdP59vnHc3Olg5O+PpDvL6lOdcViojsF4XE/jr+n6CiFu77NKyezwfmTubWy06kpT3Fqd9+hN8vXp/rCkVEBk0hsb8SCfjXBVBSBjeeDttXc+KMMRw9qRYz+MQtz/Avv1jAZnXhISIFSCExFEaOhcv+DJ6CH74Rdm/j3stPZunXzmDSqEp+u2gdJ379T9w5fxWd6j1WRAqIQmKojDscLvk1tO+Ga4+GtmZKkwke/8LbeOizp1BRmuTzdy3kyC8/yO8Xr9dtUEWkICgkhtKMU+EDN0HrDvjOodAauus4eFw1C7/8Tn540TE4zidueYYjv/wgv1u0TtdViEhes0L7j3bu3Lk+f/78XJexb4vugrs/BmXV8JlFUDlqz6SOzhS/enYN/+eexbR2pDBgQl0Fd37iJBrqKnNXs4gc0MzsGXefO+DXKSSy5LoTYdNLMP4I+NCdUDOxx+TOlPPo0o1c9vNn9hynqKss5dsfeAOnHDKW8pJkLqoWkQOUQiIfvfow3HIeJJLw0Qeh4dg+Z1u9rZnzf/w31m1voevTGDOyjGvedySnHjKOyjIFhojsH4VEvtqwBK6fB51t8J5r4dgPh3tm96G9M8UTyzbz+TsXsrmpdU9glCSMyaMqueHSNzJj7EgszetFRNJRSOSzXZvhB8dBSyMc+X4449swcsw+X9LRmeKp5Vv59O3PsWlnd2AYMKaqjCtOP4zjpo5SaIhIRhQS+S7VCY9/Fx6+JvT3dO5/wxHnpt2riHN3Vmxp5qM/fYpV23bTmXLin1oyYUyoreBb7z+a2RNrqBtRlr33ISIFSSFRKDa8AP/zTmhrgsrR4d4UtQ0DWkQq5by6qYnLfv4Mq7c105lyel+jV1tZygXHT+awg6o57KAaZtZXUVaiM55FipVCopB0dsCT18FDV4ertOumwr88BaUVg17k9t3tPL+qkavuWcTaxhZSvndwJAzMjITBpLpKrjn3KGaMrWJ8TbmarEQOcHkZEmZ2OvBfQBK4wd2/0cc85wNXAw487+4X7WuZB0RIdNm2Aq5/G+zeAqOmwalXwlEfCN2QD4H2zhTLN+/ipfU7+cYDL7JxZ2uf4QEhQBJRgEyoq+Sqd89mYl0FE2srqRtRqhARKXB5FxJmlgReBt4BrAaeBi509yWxeWYBdwBvc/dtZjbO3Tfua7kHVEh0efVhuO1iaN8FJRVw1rVDGha9pVLOuh0tLN+0i6vuWcSaxt2kHFLupPt1iO+FGNBQV8kXzzycsdXl1FeVM6aqjBFl2alXRPZfPobEm4Cr3f1d0fMvArj7f8Tm+RbwsrvfkOlyD8iQAHCHpQ/A3R8ftrDoSyrlbG5qZe32FtY27uYztz9HW0eKZML2hMi+fmMsCpGE2Z7HE+oqueJdhzGmqoyxVSFUaipLtHciMozyMSTOA05393+Mnl8CnODul8fmuYewt/FmQpPU1e7++z6WdRlwGcCUKVOOW7lyZVZqzgt9hcU7vwZHfxAqanJdHQCtHZ1sbmpjS1Mrm5ta+er9L7Ji8y4gnGnlhDOy+g0UolAxi4Il
jDCgrSNFRWmCGy89nuqKEqorSqgqL6GqokRXo4sMQqGGxP1AO3A+MAl4DDjK3RvTLfeA3ZPorSss/vwtWPccWAKOuQSO+zBMPDajU2fzQWfK2bqrjc1NrWxpauMfb36a1vYUpUkjRXibmQRKb0b3/MlYuHTtvcSft7WnKC9N8MuPn8jI8hJGlCWpKi9hRFmJzviSojHYkMhmO8YaYHLs+aRoXNxq4O/u3g4sN7OXgVmE4xfFzQwOezcceiasXQDzb4Rnfw4LfgbjjwphcdR5PToPzEfJhFFfXU59dTkAL331jLTzujst7Sl2trbT1NJBU2sHV9y1kBVbdkXTo2DwcJ1IR2f39SLuTgog1RUePSNnd3uK9/2/v6Zdd4/QSYSAge4sDoETnrRFHTOWlyb2hNHM+pF8/8JjKStJUF6S6P6ZTKhZTQpaNvckSghNSacRwuFp4CJ3fyE2z+mEg9kfNrOxwLPAHHffkm65RbMn0ZfdjbDoTvjjl0NTFAZHngvHXAzTTw19RAkALe2dNLV27Ambz9/5PCu27IpCJgRCPHDcoSM67Stp8ekD28PZF4s9sNi4zmgFJYkwNp4pFhvR3hG6lS8v7bn30yOCzGht6wRg9sQavvfBOZQkE5QkjNJkgmTCKE3aXuPkwJd3zU0AZnYmcC3heMON7n6NmX0FmO/u91r4F+s7wOlAJ3CNu9+2r2UWdUh0cYd1z8Nzvwh7GKkOSJaGfqFmnwNTThrWg93FIJVymts7aW7t4K3/+Si72zopL0vSdTpY9x5N92vaoi/10qT1Ob3rYVcvwAnrDqb49OFmvZ501ZywXtN77SF1vY+SRM+U6x1BHVEqlibj81n8JUD39isrSURh2Xetre29wjNa1vQxI/jqe4+MzsqzPad5W6+f3Wfudc/TtZiuvUDrXiyG7bWHufcJG4YluqcnrPt1FnuciDeTZnmPMy9DIhsUEr20t4RjF0vugRfvCxfnYaE5avY5MO0tIUCkYHWmnLaOFK0dndHPFO2dKT5123O8uqlpz3xde0ZdX5pl8T2OPvaI4iG25/UxXV8Ne/aw4l/+vcIR2HP9zZ6w62OhhfVtk3+mjx3JI5+bN6jXKiQE2nbBsofg/s9C8+YwLlECb7gQZr/xFPSqAAAQJklEQVQXpp8CJerXSfKLu9OZcjq7fsaH2LhUCjpSKVLudETjvnDXQpas3QEQ9uzCAvtcT5/hmWae8tgJDX0tLb6Xsy/x+fb1TdveK7B763rtrHFVPPCpU/a5znQUEtJT+25Y9ie4/9Owa1P3+JHj4OwfwLSTobwqd/WJyLDKx7ObJJdKK+Hws8LQ0QqvPgJLfgOLbodbPxjmKauCuR+Fg0+DKW+CkvLc1iwieUd7EsWmow1WPg4rHoenfgKtYVcdS8DBbw/DzNNgzMyCuRZDRPqn5iYZnNamEBiv/gkW3AwdLWF8shzmXAQzToWpJ0NVfW7rFJH9opCQobF1eQiMZQ/Dy78HD+fcU1oJb7gIpr05hEb1+NzWKSIDopCQodfZEa7HWPEXePxaaNnWPa2kEt5wAcyYF4bKutzUKCIZUUhI9nV2wPrnYcUToYlq2UPdexrl1XDcpdAwFybNhZoGHdMQySMKCRl+nR2wZn4Iiyd/DG07u6cly2DWO6HhuBAaE48JQSIiOaGQkNzraIX1i0NwrJ4PL97bfSAcoHQEHHEuNBwTwmPcEbq4T2SYKCQkPzVvhTULuoPjtUdCX1MAGDQcG7o+bzguPB4zCxLqvltkqCkkpDC4Q+PKEBxrF8CaZ+H1v0Z9TgGWhKknheaphmNDt+ijp6uHW5H9pCuupTCYwahpYTjy3DAu1QmbX44FxwL46/djr0nAhDeE5qnxR8D42TD+SBg5NhfvQKSoKCQk9xJJGHd4GI75UBjX0Qobl8CGJdHPxbDwdki1d78uWQrTToGDjgx7HAcdGZqr1E26yJDRX5Pkp5Ly0OQ08Zie45s2woYXomExLLk3XPy3
h8GEo7tDY/yR4Wee38FPJF/pmIQUvo620Fy1YTGsXxR+rnii115HeegqfdxhUH9498+yEbmrW2QY6ZiEFK+SsrC3cNCR4SpwCAfImzaEU3I3LAp7HhtfCtd0xHv2HzMLDjqqu8lq3GFQM0lnWIlEFBJyYDKD6oPCMOvt3eM7O2Dbctj4YjjWsX5ROD33hV91z1M6AsYcDPWHwthDof4QGHsIjJ6p6zqk6CgkpLgkS2DsrDDMPrt7/O7GsLexeSlsejk0X73+JCy6s+frxxwcC45DQ3iMnQUVNcP7PkSGiUJCBEIHhdPeHIa41ibY8kp3cGxeGpqslv6253zVE7uDo2vPY+yhUDVOfVhJQVNIiOxLeVXfZ1l1todu1buCY1P08+kbujs9hHB678TjegZH/SFQN1UXCEpBUEiIDEayNHzZ1x8CnNU93h12rO0ZHJtehpf/AM/eEnt9eXezV1dwjJ4RhoraYX87IukoJESGkhnUNoRh5tt6TmveCptfiYJjadgLWfssvPDrnvNVju4OjNHTux+Pmh6uMlfzlQwjhYTIcBkxGqacEIa49t2w5dVw1tXW16JheezAeeyU3bJqGD2tZ3B0hUn1RJ26K0NOISGSa6WV3dd59NbRCo2vdwfH1tdCmGx4IVxtHg+QZHkIi3hwdO2J1E4OTWQiA5TVkDCz04H/ApLADe7+jV7TLwW+DayJRv3Q3W/IZk0iBaUkduyit1QnbF/dHRx7gmQ5vPJgd8+6EHrXrZvSswmrK0xGTYPSimF7S1JYshYSZpYErgPeAawGnjaze919Sa9Zb3f3y7NVh8gBK5GEUVPDwFt7Tuu64jzefNX1ePV8aN0em9mgZmJ3YMSPh4yarmtAilw29ySOB5a5+2sAZnYbcA7QOyREZKjFrzifelLPae6we1vP4OjaE3n5Qdi1sef8I+tjTVgzeobIiNHD954kJ7IZEg3Aqtjz1cAJfcz3fjM7BXgZ+Iy7r+pjHhEZKmbhy33EaJh03N7TW3eGAOl9IH3F47Dwtp7zVtR23x+k96DjIAeEXB+4vg+41d1bzeyfgJ8Bb+s9k5ldBlwGMGXKlOGtUKTYlFeH7tYnHL33tPaWcGfBra9FZ2StCMOGJbD0d9DZ1j2vJaB2Undo1E2B2ilQNzk8rp6gCwoLQDZDYg0wOfZ8Et0HqAFw9y2xpzcA3+prQe5+PXA9hK7Ch7ZMEclYaUXo+LD+0L2npVKwc113cOwZlocA2bWp5/yJknAspCs4aieH4yt1U0Oo1ExUiOSBbIbE08AsM5tOCIcLgIviM5jZBHdfFz09G3gxi/WISDYlEt0XEvbuAwugrTmcjbX9dWhcBdtXhdN7G1fB8sdCwMTPyKLrVrdTQ4DUTooNk6GmQWdlDYOshYS7d5jZ5cCDhFNgb3T3F8zsK8B8d78X+KSZnQ10AFuBS7NVj4jkWNmIWFcmfehogx2rYdvK0KS1bUX341f+CE3r937NyPq9g6Prce2kMF0XGO4X3ZlORApDR2voF2v76u5hR+xx4ypo39XzNcmyWHD0HqJQKa/KzfsZZroznYgc2ErKu68i74s7tDT2DJH4sPwvIVR6qxwVQqOmjxCpnRROIy7iYyMKCRE5MJiFL/zKUeGWtH3p7AjHPravhh1rwnGRPUGyCpb9IVzJ3mO5ydjeSMPeIVI76YDuuVchISLFI1kSnYI7Of08LTuiAOkdIqth1VOw6C569JkFUF4T7Y2kCZGaiQV7zYhCQkQkrqImDOMO73t6KhWuSu/a++jRrLUKXnsEUh29XmThupB0eyK1k8MeUB52A6+QEBEZiESiu8uTSWmOA7c1RwfZ+wiRdQthyW96ne5LuPhw9Mw+QqQhOl7SEHoMHmYKCRGRoVY2AsYeHIa+uEPzlp4h0riq+2ytV/4QOmjsbfRM+OSC7Nbei0JCRGS4mYW7DI4cu/f907u0t8DOteHYyI41ITwOfvvw1olCQkQkP5VWdPe4m0O6FFFE
RNJSSIiISFoKCRERSUshISIiaSkkREQkLYWEiIikpZAQEZG0FBIiIpJWwd10yMw2ASsH+fKxwOYhLGe4FGLdhVgzFGbdqnn4FGLdXTVPdff6gb644EJif5jZ/MHcmSnXCrHuQqwZCrNu1Tx8CrHu/a1ZzU0iIpKWQkJERNIqtpC4PtcFDFIh1l2INUNh1q2ah08h1r1fNRfVMQkRERmYYtuTEBGRAVBIiIhIWkUTEmZ2upktNbNlZnZlrutJx8xWmNkiM3vOzOZH40ab2R/N7JXo56g8qPNGM9toZotj4/qs04LvR9t+oZkdm0c1X21ma6Lt/ZyZnRmb9sWo5qVm9q4c1TzZzB4xsyVm9oKZfSoan+/bOl3debu9zazCzJ4ys+ejmv9vNH66mf09qu12MyuLxpdHz5dF06cNd8391H2TmS2Pbes50fiB/Y64+wE/AEngVWAGUAY8D8zOdV1pal0BjO017lvAldHjK4Fv5kGdpwDHAov7qxM4E/gdYMCJwN/zqOargc/1Me/s6PekHJge/f4kc1DzBODY6HE18HJUW75v63R15+32jrZZVfS4FPh7tA3vAC6Ixv8Y+Ofo8f8Cfhw9vgC4PUfbOl3dNwHn9TH/gH5HimVP4nhgmbu/5u5twG3AOTmuaSDOAX4WPf4Z8N4c1gKAuz8GbO01Ol2d5wA3e/AkUGdmE4an0m5pak7nHOA2d2919+XAMsLv0bBy93XuviB6vBN4EWgg/7d1urrTyfn2jrZZU/S0NBoceBtwVzS+97bu+gzuAk4zMxumcvfYR93pDOh3pFhCogFYFXu+mn3/wuaSA38ws2fM7LJo3Hh3Xxc9Xg+Mz01p/UpXZ75v/8uj3e4bY015eVdz1JxxDOE/xYLZ1r3qhjze3maWNLPngI3AHwl7NI3u3tFHXXtqjqZvB8YMb8VB77rdvWtbXxNt6++ZWXk0bkDbulhCopCc7O7HAmcA/2Jmp8QnethfzPvzlgulTuBHwExgDrAO+E5uy+mbmVUBdwOfdvcd8Wn5vK37qDuvt7e7d7r7HGASYU/msByXlJHedZvZkcAXCfW/ERgNfGEwyy6WkFgDTI49nxSNyzvuvib6uRH4NeEXdUPX7mD0c2PuKtyndHXm7fZ39w3RH1gK+AndTRx5U7OZlRK+aH/h7r+KRuf9tu6r7kLY3gDu3gg8AryJ0BxT0kdde2qOptcCW4a51B5idZ8eNfm5u7cCP2WQ27pYQuJpYFZ0lkIZ4SDTvTmuaS9mNtLMqrseA+8EFhNq/XA024eB3+Smwn6lq/Ne4B+isypOBLbHmkpyqldb7PsI2xtCzRdEZ7BMB2YBT+WgPgP+B3jR3b8bm5TX2zpd3fm8vc2s3szqoseVwDsIx1IeAc6LZuu9rbs+g/OAh6O9umGVpu6XYv9EGOE4SnxbZ/47kouj8bkYCEf0Xya0Mf57rutJU+MMwhkezwMvdNVJaOf8E/AK8BAwOg9qvZXQXNBOaNP8WLo6CWdRXBdt+0XA3Dyq+edRTQujP54Jsfn/Pap5KXBGjmo+mdCUtBB4LhrOLIBtna7uvN3ewNHAs1Fti4EvReNnEAJrGXAnUB6Nr4ieL4umz8jRtk5X98PRtl4M3EL3GVAD+h1RtxwiIpJWsTQ3iYjIICgkREQkLYWEiIikpZAQEZG0FBIiIpKWQkJERNJSSIhkwMzm9OrW+mwboi7nzezTZjZiKJYlMtR0nYRIBszsUsJFR5dnYdkromVvHsBrku7eOdS1iPSmPQk5oJjZNDN70cx+Et2A5Q9RVwV9zTvTzH4f9bj7FzM7LBr/ATNbHN3E5bGoK5evAB+Mbt7yQTO71Mx+GM1/k5n9yMyeNLPXzGxe1MPpi2Z2U2x9PzKz+dbzxjCfBCYCj5jZI9G4Cy3ceGqxmX0z9vomM/uOmT0PvMnMvmHhpj4Lzew/s7NFpejl4jJyDRqyNQDTgA5gTvT8DuDiNPP+CZgVPT6B0PcOhK4KGqLH
ddHPS4Efxl675znh5i63Ebo7OAfYARxF+CfsmVgtXV1nJIFHgaOj5yuIbjRFCIzXgXqghNC1wnujaQ6cHz0eQ+i+wuJ1atAw1IP2JORAtNzdn4seP0MIjh6iLqxPAu6M+uH/b8Ld1ACeAG4ys48TvtAzcZ+7OyFgNrj7Ig89nb4QW//5ZraA0M/OEYS7sfX2RuBRd9/k4R4FvyDcUQ+gk9CrKoR7F7QA/2Nm5wLNGdYpMiAl/c8iUnBaY487gb6amxKEm8nM6T3B3T9hZicA7waeMbPjBrDOVK/1p4CSqGfTzwFvdPdtUTNURQbLjWvx6DiEu3eY2fHAaYQeSC8n3EFNZEhpT0KKkocb4Cw3sw/AnpvDvyF6PNPd/+7uXwI2Efre30m4V/Ng1QC7gO1mNp5wU6ku8WU/BZxqZmPNLAlcCPy598KiPaFad38A+Azwhv2oTSQt7UlIMfsQ8CMzu4pwX+DbCN20f9vMZhGOMfwpGvc6cGXUNPUfA12Ruz9vZs8CLxFuHflEbPL1wO/NbK27vzU6tfaRaP2/dfe+7h9SDfzGzCqi+T470JpEMqFTYEVEJC01N4mISFpqbpIDnpldB7y51+j/cvef5qIekUKi5iYREUlLzU0iIpKWQkJERNJSSIiISFoKCRERSev/A9/X8pXHc0vNAAAAAElFTkSuQmCC\n",
      "text/plain": [
       "<Figure size 432x288 with 1 Axes>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Using the parameters tuned in the previous rounds, re-learn with\n",
    "# learning_rate=0.1 and n_estimators=1000, using xgb's cross-validation.\n",
    "xgb1_upd = XGBClassifier(\n",
    "    learning_rate=0.1,\n",
    "    n_estimators=1000,\n",
    "    max_depth=5,\n",
    "    min_child_weight=5,\n",
    "    gamma=0,\n",
    "    subsample=0.85,\n",
    "    colsample_bytree=0.85,\n",
    "    colsample_bylevel=0.7,\n",
    "    reg_alpha=2,\n",
    "    reg_lambda=0.0,\n",
    "    objective='multi:softprob',\n",
    "    seed=3)\n",
    "\n",
    "modelfit(xgb1_upd, X_train, y_train, cv_folds=kfold)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=0.7,\n",
       "       colsample_bytree=0.85, gamma=0, learning_rate=0.1, max_delta_step=0,\n",
       "       max_depth=5, min_child_weight=5, missing=None, n_estimators=336,\n",
       "       n_jobs=1, nthread=None, objective='multi:softprob', random_state=0,\n",
       "       reg_alpha=2, reg_lambda=0.0, scale_pos_weight=1, seed=3,\n",
       "       silent=True, subsample=0.85)"
      ]
     },
     "execution_count": 35,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xgb1_upd"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 36,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/home/fei/.local/lib/python3.6/site-packages/ipykernel_launcher.py:1: FutureWarning: from_csv is deprecated. Please use read_csv(...) instead. Note that some of the default arguments are different, so please refer to the documentation for from_csv when changing your function calls\n",
      "  \"\"\"Entry point for launching an IPython kernel.\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>n_estimators</th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "      <th></th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.039063</td>\n",
       "      <td>0.000175</td>\n",
       "      <td>1.039675</td>\n",
       "      <td>0.000397</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.988945</td>\n",
       "      <td>0.000548</td>\n",
       "      <td>0.990182</td>\n",
       "      <td>0.000339</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.946050</td>\n",
       "      <td>0.000511</td>\n",
       "      <td>0.947743</td>\n",
       "      <td>0.000350</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.909195</td>\n",
       "      <td>0.000625</td>\n",
       "      <td>0.911434</td>\n",
       "      <td>0.000759</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.877251</td>\n",
       "      <td>0.000500</td>\n",
       "      <td>0.879968</td>\n",
       "      <td>0.000748</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.849259</td>\n",
       "      <td>0.000562</td>\n",
       "      <td>0.852465</td>\n",
       "      <td>0.000718</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.824768</td>\n",
       "      <td>0.000539</td>\n",
       "      <td>0.828483</td>\n",
       "      <td>0.000884</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.803366</td>\n",
       "      <td>0.000554</td>\n",
       "      <td>0.807648</td>\n",
       "      <td>0.001027</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.784510</td>\n",
       "      <td>0.000635</td>\n",
       "      <td>0.789300</td>\n",
       "      <td>0.001010</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.767545</td>\n",
       "      <td>0.000743</td>\n",
       "      <td>0.772906</td>\n",
       "      <td>0.001053</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.752624</td>\n",
       "      <td>0.000905</td>\n",
       "      <td>0.758442</td>\n",
       "      <td>0.000999</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.739487</td>\n",
       "      <td>0.000917</td>\n",
       "      <td>0.745676</td>\n",
       "      <td>0.001106</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.727632</td>\n",
       "      <td>0.000808</td>\n",
       "      <td>0.734392</td>\n",
       "      <td>0.001243</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.717015</td>\n",
       "      <td>0.000810</td>\n",
       "      <td>0.724224</td>\n",
       "      <td>0.001363</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.707501</td>\n",
       "      <td>0.001087</td>\n",
       "      <td>0.715241</td>\n",
       "      <td>0.001240</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.698919</td>\n",
       "      <td>0.000905</td>\n",
       "      <td>0.707135</td>\n",
       "      <td>0.001460</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.691206</td>\n",
       "      <td>0.000996</td>\n",
       "      <td>0.699850</td>\n",
       "      <td>0.001436</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.684131</td>\n",
       "      <td>0.000934</td>\n",
       "      <td>0.693251</td>\n",
       "      <td>0.001502</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.677738</td>\n",
       "      <td>0.000940</td>\n",
       "      <td>0.687395</td>\n",
       "      <td>0.001612</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.672062</td>\n",
       "      <td>0.000974</td>\n",
       "      <td>0.682118</td>\n",
       "      <td>0.001625</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.666853</td>\n",
       "      <td>0.000836</td>\n",
       "      <td>0.677362</td>\n",
       "      <td>0.001808</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.661953</td>\n",
       "      <td>0.000953</td>\n",
       "      <td>0.672857</td>\n",
       "      <td>0.001788</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.657440</td>\n",
       "      <td>0.001092</td>\n",
       "      <td>0.668802</td>\n",
       "      <td>0.001638</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.653430</td>\n",
       "      <td>0.001072</td>\n",
       "      <td>0.665272</td>\n",
       "      <td>0.001727</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.649538</td>\n",
       "      <td>0.001146</td>\n",
       "      <td>0.661815</td>\n",
       "      <td>0.001771</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.645914</td>\n",
       "      <td>0.001234</td>\n",
       "      <td>0.658633</td>\n",
       "      <td>0.001730</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.642620</td>\n",
       "      <td>0.001281</td>\n",
       "      <td>0.655866</td>\n",
       "      <td>0.001845</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.639603</td>\n",
       "      <td>0.001166</td>\n",
       "      <td>0.653325</td>\n",
       "      <td>0.001865</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.636601</td>\n",
       "      <td>0.001225</td>\n",
       "      <td>0.650784</td>\n",
       "      <td>0.001860</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.633826</td>\n",
       "      <td>0.001380</td>\n",
       "      <td>0.648547</td>\n",
       "      <td>0.001778</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>306</th>\n",
       "      <td>0.476309</td>\n",
       "      <td>0.000678</td>\n",
       "      <td>0.583883</td>\n",
       "      <td>0.002765</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>307</th>\n",
       "      <td>0.476001</td>\n",
       "      <td>0.000700</td>\n",
       "      <td>0.583858</td>\n",
       "      <td>0.002781</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>308</th>\n",
       "      <td>0.475683</td>\n",
       "      <td>0.000704</td>\n",
       "      <td>0.583860</td>\n",
       "      <td>0.002767</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>309</th>\n",
       "      <td>0.475383</td>\n",
       "      <td>0.000662</td>\n",
       "      <td>0.583822</td>\n",
       "      <td>0.002770</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>310</th>\n",
       "      <td>0.475050</td>\n",
       "      <td>0.000612</td>\n",
       "      <td>0.583783</td>\n",
       "      <td>0.002771</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>311</th>\n",
       "      <td>0.474746</td>\n",
       "      <td>0.000648</td>\n",
       "      <td>0.583810</td>\n",
       "      <td>0.002780</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>312</th>\n",
       "      <td>0.474446</td>\n",
       "      <td>0.000623</td>\n",
       "      <td>0.583779</td>\n",
       "      <td>0.002805</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>313</th>\n",
       "      <td>0.474197</td>\n",
       "      <td>0.000691</td>\n",
       "      <td>0.583787</td>\n",
       "      <td>0.002791</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>314</th>\n",
       "      <td>0.473892</td>\n",
       "      <td>0.000671</td>\n",
       "      <td>0.583782</td>\n",
       "      <td>0.002775</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>315</th>\n",
       "      <td>0.473599</td>\n",
       "      <td>0.000670</td>\n",
       "      <td>0.583788</td>\n",
       "      <td>0.002776</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>316</th>\n",
       "      <td>0.473323</td>\n",
       "      <td>0.000747</td>\n",
       "      <td>0.583794</td>\n",
       "      <td>0.002755</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>317</th>\n",
       "      <td>0.473002</td>\n",
       "      <td>0.000783</td>\n",
       "      <td>0.583751</td>\n",
       "      <td>0.002770</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>318</th>\n",
       "      <td>0.472686</td>\n",
       "      <td>0.000793</td>\n",
       "      <td>0.583743</td>\n",
       "      <td>0.002777</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>319</th>\n",
       "      <td>0.472390</td>\n",
       "      <td>0.000764</td>\n",
       "      <td>0.583746</td>\n",
       "      <td>0.002775</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>320</th>\n",
       "      <td>0.472091</td>\n",
       "      <td>0.000790</td>\n",
       "      <td>0.583735</td>\n",
       "      <td>0.002776</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>321</th>\n",
       "      <td>0.471786</td>\n",
       "      <td>0.000736</td>\n",
       "      <td>0.583740</td>\n",
       "      <td>0.002771</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>322</th>\n",
       "      <td>0.471484</td>\n",
       "      <td>0.000754</td>\n",
       "      <td>0.583676</td>\n",
       "      <td>0.002773</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>323</th>\n",
       "      <td>0.471189</td>\n",
       "      <td>0.000766</td>\n",
       "      <td>0.583649</td>\n",
       "      <td>0.002777</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>324</th>\n",
       "      <td>0.470923</td>\n",
       "      <td>0.000745</td>\n",
       "      <td>0.583644</td>\n",
       "      <td>0.002780</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>325</th>\n",
       "      <td>0.470644</td>\n",
       "      <td>0.000728</td>\n",
       "      <td>0.583614</td>\n",
       "      <td>0.002752</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>326</th>\n",
       "      <td>0.470404</td>\n",
       "      <td>0.000734</td>\n",
       "      <td>0.583603</td>\n",
       "      <td>0.002787</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>327</th>\n",
       "      <td>0.470156</td>\n",
       "      <td>0.000739</td>\n",
       "      <td>0.583607</td>\n",
       "      <td>0.002772</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>328</th>\n",
       "      <td>0.469873</td>\n",
       "      <td>0.000701</td>\n",
       "      <td>0.583543</td>\n",
       "      <td>0.002781</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>329</th>\n",
       "      <td>0.469553</td>\n",
       "      <td>0.000705</td>\n",
       "      <td>0.583548</td>\n",
       "      <td>0.002786</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>330</th>\n",
       "      <td>0.469193</td>\n",
       "      <td>0.000703</td>\n",
       "      <td>0.583515</td>\n",
       "      <td>0.002814</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>331</th>\n",
       "      <td>0.468856</td>\n",
       "      <td>0.000690</td>\n",
       "      <td>0.583517</td>\n",
       "      <td>0.002803</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>332</th>\n",
       "      <td>0.468507</td>\n",
       "      <td>0.000694</td>\n",
       "      <td>0.583491</td>\n",
       "      <td>0.002808</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>333</th>\n",
       "      <td>0.468215</td>\n",
       "      <td>0.000739</td>\n",
       "      <td>0.583487</td>\n",
       "      <td>0.002802</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>334</th>\n",
       "      <td>0.467940</td>\n",
       "      <td>0.000747</td>\n",
       "      <td>0.583503</td>\n",
       "      <td>0.002797</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>335</th>\n",
       "      <td>0.467657</td>\n",
       "      <td>0.000777</td>\n",
       "      <td>0.583480</td>\n",
       "      <td>0.002790</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>336 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "              train-mlogloss-mean  train-mlogloss-std  test-mlogloss-mean  \\\n",
       "n_estimators                                                                \n",
       "0                        1.039063            0.000175            1.039675   \n",
       "1                        0.988945            0.000548            0.990182   \n",
       "2                        0.946050            0.000511            0.947743   \n",
       "3                        0.909195            0.000625            0.911434   \n",
       "4                        0.877251            0.000500            0.879968   \n",
       "5                        0.849259            0.000562            0.852465   \n",
       "6                        0.824768            0.000539            0.828483   \n",
       "7                        0.803366            0.000554            0.807648   \n",
       "8                        0.784510            0.000635            0.789300   \n",
       "9                        0.767545            0.000743            0.772906   \n",
       "10                       0.752624            0.000905            0.758442   \n",
       "11                       0.739487            0.000917            0.745676   \n",
       "12                       0.727632            0.000808            0.734392   \n",
       "13                       0.717015            0.000810            0.724224   \n",
       "14                       0.707501            0.001087            0.715241   \n",
       "15                       0.698919            0.000905            0.707135   \n",
       "16                       0.691206            0.000996            0.699850   \n",
       "17                       0.684131            0.000934            0.693251   \n",
       "18                       0.677738            0.000940            0.687395   \n",
       "19                       0.672062            0.000974            0.682118   \n",
       "20                       0.666853            0.000836            0.677362   \n",
       "21                       0.661953            0.000953            0.672857   \n",
       "22                       0.657440            0.001092            0.668802   \n",
       "23                       0.653430            0.001072            0.665272   \n",
       "24                       0.649538            0.001146            0.661815   \n",
       "25                       0.645914            0.001234            0.658633   \n",
       "26                       0.642620            0.001281            0.655866   \n",
       "27                       0.639603            0.001166            0.653325   \n",
       "28                       0.636601            0.001225            0.650784   \n",
       "29                       0.633826            0.001380            0.648547   \n",
       "...                           ...                 ...                 ...   \n",
       "306                      0.476309            0.000678            0.583883   \n",
       "307                      0.476001            0.000700            0.583858   \n",
       "308                      0.475683            0.000704            0.583860   \n",
       "309                      0.475383            0.000662            0.583822   \n",
       "310                      0.475050            0.000612            0.583783   \n",
       "311                      0.474746            0.000648            0.583810   \n",
       "312                      0.474446            0.000623            0.583779   \n",
       "313                      0.474197            0.000691            0.583787   \n",
       "314                      0.473892            0.000671            0.583782   \n",
       "315                      0.473599            0.000670            0.583788   \n",
       "316                      0.473323            0.000747            0.583794   \n",
       "317                      0.473002            0.000783            0.583751   \n",
       "318                      0.472686            0.000793            0.583743   \n",
       "319                      0.472390            0.000764            0.583746   \n",
       "320                      0.472091            0.000790            0.583735   \n",
       "321                      0.471786            0.000736            0.583740   \n",
       "322                      0.471484            0.000754            0.583676   \n",
       "323                      0.471189            0.000766            0.583649   \n",
       "324                      0.470923            0.000745            0.583644   \n",
       "325                      0.470644            0.000728            0.583614   \n",
       "326                      0.470404            0.000734            0.583603   \n",
       "327                      0.470156            0.000739            0.583607   \n",
       "328                      0.469873            0.000701            0.583543   \n",
       "329                      0.469553            0.000705            0.583548   \n",
       "330                      0.469193            0.000703            0.583515   \n",
       "331                      0.468856            0.000690            0.583517   \n",
       "332                      0.468507            0.000694            0.583491   \n",
       "333                      0.468215            0.000739            0.583487   \n",
       "334                      0.467940            0.000747            0.583503   \n",
       "335                      0.467657            0.000777            0.583480   \n",
       "\n",
       "              test-mlogloss-std  \n",
       "n_estimators                     \n",
       "0                      0.000397  \n",
       "1                      0.000339  \n",
       "2                      0.000350  \n",
       "3                      0.000759  \n",
       "4                      0.000748  \n",
       "5                      0.000718  \n",
       "6                      0.000884  \n",
       "7                      0.001027  \n",
       "8                      0.001010  \n",
       "9                      0.001053  \n",
       "10                     0.000999  \n",
       "11                     0.001106  \n",
       "12                     0.001243  \n",
       "13                     0.001363  \n",
       "14                     0.001240  \n",
       "15                     0.001460  \n",
       "16                     0.001436  \n",
       "17                     0.001502  \n",
       "18                     0.001612  \n",
       "19                     0.001625  \n",
       "20                     0.001808  \n",
       "21                     0.001788  \n",
       "22                     0.001638  \n",
       "23                     0.001727  \n",
       "24                     0.001771  \n",
       "25                     0.001730  \n",
       "26                     0.001845  \n",
       "27                     0.001865  \n",
       "28                     0.001860  \n",
       "29                     0.001778  \n",
       "...                         ...  \n",
       "306                    0.002765  \n",
       "307                    0.002781  \n",
       "308                    0.002767  \n",
       "309                    0.002770  \n",
       "310                    0.002771  \n",
       "311                    0.002780  \n",
       "312                    0.002805  \n",
       "313                    0.002791  \n",
       "314                    0.002775  \n",
       "315                    0.002776  \n",
       "316                    0.002755  \n",
       "317                    0.002770  \n",
       "318                    0.002777  \n",
       "319                    0.002775  \n",
       "320                    0.002776  \n",
       "321                    0.002771  \n",
       "322                    0.002773  \n",
       "323                    0.002777  \n",
       "324                    0.002780  \n",
       "325                    0.002752  \n",
       "326                    0.002787  \n",
       "327                    0.002772  \n",
       "328                    0.002781  \n",
       "329                    0.002786  \n",
       "330                    0.002814  \n",
       "331                    0.002803  \n",
       "332                    0.002808  \n",
       "333                    0.002802  \n",
       "334                    0.002797  \n",
       "335                    0.002790  \n",
       "\n",
       "[336 rows x 4 columns]"
      ]
     },
     "execution_count": 36,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "cvresult = pd.read_csv('my_preds4_2_3_699.csv', index_col=0)  # DataFrame.from_csv was deprecated in pandas 0.21 and removed in 1.0\n",
    "cvresult"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 37,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.4936174813311426"
      ]
     },
     "execution_count": 37,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_predprob = xgb1_upd.predict_proba(X_train)\n",
    "logloss = log_loss(y_train, train_predprob)\n",
    "logloss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 38,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "336"
      ]
     },
     "execution_count": 38,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xgb1_upd.n_estimators"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 40,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Save the trained model to disk with pickle\n",
    "import pickle\n",
    "\n",
    "pickle.dump(xgb1_upd,open(\"xgb1_upd_model.pkl\",\"wb\"))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 45,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "0.4936174813311426"
      ]
     },
     "execution_count": 45,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Verify the save succeeded: reload the model and check the training logloss matches\n",
    "import pickle\n",
    "from sklearn import metrics\n",
    "from sklearn.metrics import log_loss\n",
    "\n",
    "xgb_test=pickle.load(open(\"xgb1_upd_model.pkl\",\"rb\"))\n",
    "\n",
    "train_predprob = xgb_test.predict_proba(X_train)\n",
    "logloss = log_loss(y_train, train_predprob)\n",
    "logloss"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
     "Comparing this logloss against the one obtained in step a) (initial estimate of the number of weak learners) shows that xgb1_upd has converged. The optimal classifier is:\n",
     "```\n",
     "XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=0.7,\n",
     "       colsample_bytree=0.85, gamma=0, learning_rate=0.1, max_delta_step=0,\n",
     "       max_depth=5, min_child_weight=5, missing=None, n_estimators=336,\n",
     "       n_jobs=1, nthread=None, objective='multi:softprob', random_state=0,\n",
     "       reg_alpha=2, reg_lambda=0.0, scale_pos_weight=1, seed=3,\n",
     "       silent=True, subsample=0.85)\n",
     "```"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
