{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Rental Listing Inquiries\n",
    "========================\n",
    "模型拟合 - use original features\n",
    "------------------------\n",
    "> Rental Listing Inquiries数据集是Kaggle平台上的一个分类竞赛任务，需要根据公寓的特征来预测其受欢迎程度（用户感兴趣程度分为高、中、低三类）。其中房屋的特征x共有14维，响应值y为用户对该公寓的感兴趣程度。评价标准为logloss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Modeling stack: xgboost for gradient boosting, scikit-learn for CV/metrics,\n",
    "# pandas/numpy for data handling, matplotlib for plots.\n",
    "from xgboost import XGBClassifier\n",
    "import xgboost as xgb\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.model_selection import GridSearchCV, StratifiedKFold\n",
    "import pandas as pd\n",
    "import multiprocessing\n",
    "import numpy as np\n",
    "from functools import reduce\n",
    "import time\n",
    "# Render matplotlib figures inline in the notebook.\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "1. 用xgboost库中的cv初步确定n_estimators\n",
    "--------------------------------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Data directory. NOTE(review): Windows-style separators are hardcoded;\n",
    "# prefer os.path.join / pathlib for portability -- confirm target platform.\n",
    "dpath = '.\\\\data\\\\'\n",
    "# Load the pre-built feature-engineered train/test sets:\n",
    "# binary DMatrix files for xgboost, CSV copies via pandas.\n",
    "data_train_dmatrix = xgb.DMatrix(dpath + 'RentListingInquries_FE_train.bin')\n",
    "data_train_pd = pd.read_csv(dpath + 'RentListingInquries_FE_train.csv')\n",
    "data_test_dmatrix = xgb.DMatrix(dpath + 'RentListingInquries_FE_test.bin')\n",
    "data_test_pd = pd.read_csv(dpath + 'RentListingInquries_FE_test.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "49352"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Sanity check: number of training rows (output below shows 49352).\n",
    "data_train_dmatrix.num_row()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sanity check: number of test rows.\n",
    "data_test_dmatrix.num_row()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.037356</td>\n",
       "      <td>0.000135</td>\n",
       "      <td>1.036054</td>\n",
       "      <td>0.000115</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.985894</td>\n",
       "      <td>0.000258</td>\n",
       "      <td>0.983379</td>\n",
       "      <td>0.000250</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.942203</td>\n",
       "      <td>0.000337</td>\n",
       "      <td>0.938533</td>\n",
       "      <td>0.000455</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.904744</td>\n",
       "      <td>0.000486</td>\n",
       "      <td>0.899911</td>\n",
       "      <td>0.000578</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.872476</td>\n",
       "      <td>0.000486</td>\n",
       "      <td>0.866475</td>\n",
       "      <td>0.000835</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.844465</td>\n",
       "      <td>0.000492</td>\n",
       "      <td>0.837385</td>\n",
       "      <td>0.000961</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.819919</td>\n",
       "      <td>0.000483</td>\n",
       "      <td>0.811849</td>\n",
       "      <td>0.001045</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.798460</td>\n",
       "      <td>0.000521</td>\n",
       "      <td>0.789216</td>\n",
       "      <td>0.001136</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.779551</td>\n",
       "      <td>0.000524</td>\n",
       "      <td>0.769261</td>\n",
       "      <td>0.001301</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.763002</td>\n",
       "      <td>0.000685</td>\n",
       "      <td>0.751600</td>\n",
       "      <td>0.001245</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.748170</td>\n",
       "      <td>0.000680</td>\n",
       "      <td>0.735757</td>\n",
       "      <td>0.001281</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.735044</td>\n",
       "      <td>0.000755</td>\n",
       "      <td>0.721494</td>\n",
       "      <td>0.001314</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.723287</td>\n",
       "      <td>0.000730</td>\n",
       "      <td>0.708769</td>\n",
       "      <td>0.001395</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.712855</td>\n",
       "      <td>0.000855</td>\n",
       "      <td>0.697212</td>\n",
       "      <td>0.001359</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.703515</td>\n",
       "      <td>0.000802</td>\n",
       "      <td>0.686809</td>\n",
       "      <td>0.001443</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.695209</td>\n",
       "      <td>0.000802</td>\n",
       "      <td>0.677419</td>\n",
       "      <td>0.001467</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.687667</td>\n",
       "      <td>0.000832</td>\n",
       "      <td>0.668852</td>\n",
       "      <td>0.001458</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.681004</td>\n",
       "      <td>0.000808</td>\n",
       "      <td>0.661043</td>\n",
       "      <td>0.001383</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.674957</td>\n",
       "      <td>0.000730</td>\n",
       "      <td>0.653981</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.669496</td>\n",
       "      <td>0.000761</td>\n",
       "      <td>0.647483</td>\n",
       "      <td>0.001541</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.664649</td>\n",
       "      <td>0.000672</td>\n",
       "      <td>0.641642</td>\n",
       "      <td>0.001628</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.660075</td>\n",
       "      <td>0.000721</td>\n",
       "      <td>0.636147</td>\n",
       "      <td>0.001640</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.656106</td>\n",
       "      <td>0.000662</td>\n",
       "      <td>0.631181</td>\n",
       "      <td>0.001765</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.652255</td>\n",
       "      <td>0.000764</td>\n",
       "      <td>0.626487</td>\n",
       "      <td>0.001651</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.648844</td>\n",
       "      <td>0.000808</td>\n",
       "      <td>0.622197</td>\n",
       "      <td>0.001742</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.645775</td>\n",
       "      <td>0.000893</td>\n",
       "      <td>0.618116</td>\n",
       "      <td>0.001716</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.642924</td>\n",
       "      <td>0.000952</td>\n",
       "      <td>0.614328</td>\n",
       "      <td>0.001738</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.640237</td>\n",
       "      <td>0.000977</td>\n",
       "      <td>0.610703</td>\n",
       "      <td>0.001831</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.637790</td>\n",
       "      <td>0.001081</td>\n",
       "      <td>0.607183</td>\n",
       "      <td>0.001697</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.635566</td>\n",
       "      <td>0.001176</td>\n",
       "      <td>0.604032</td>\n",
       "      <td>0.001735</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>234</th>\n",
       "      <td>0.586599</td>\n",
       "      <td>0.002159</td>\n",
       "      <td>0.442165</td>\n",
       "      <td>0.001156</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>235</th>\n",
       "      <td>0.586569</td>\n",
       "      <td>0.002115</td>\n",
       "      <td>0.441768</td>\n",
       "      <td>0.001231</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>236</th>\n",
       "      <td>0.586576</td>\n",
       "      <td>0.002128</td>\n",
       "      <td>0.441256</td>\n",
       "      <td>0.001275</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>237</th>\n",
       "      <td>0.586565</td>\n",
       "      <td>0.002134</td>\n",
       "      <td>0.440691</td>\n",
       "      <td>0.001306</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>238</th>\n",
       "      <td>0.586563</td>\n",
       "      <td>0.002123</td>\n",
       "      <td>0.440189</td>\n",
       "      <td>0.001299</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>239</th>\n",
       "      <td>0.586565</td>\n",
       "      <td>0.002139</td>\n",
       "      <td>0.439815</td>\n",
       "      <td>0.001369</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>240</th>\n",
       "      <td>0.586559</td>\n",
       "      <td>0.002128</td>\n",
       "      <td>0.439534</td>\n",
       "      <td>0.001299</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>241</th>\n",
       "      <td>0.586541</td>\n",
       "      <td>0.002160</td>\n",
       "      <td>0.439125</td>\n",
       "      <td>0.001271</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>242</th>\n",
       "      <td>0.586560</td>\n",
       "      <td>0.002138</td>\n",
       "      <td>0.438867</td>\n",
       "      <td>0.001368</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>243</th>\n",
       "      <td>0.586553</td>\n",
       "      <td>0.002111</td>\n",
       "      <td>0.438501</td>\n",
       "      <td>0.001342</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>244</th>\n",
       "      <td>0.586527</td>\n",
       "      <td>0.002109</td>\n",
       "      <td>0.438013</td>\n",
       "      <td>0.001423</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>245</th>\n",
       "      <td>0.586530</td>\n",
       "      <td>0.002120</td>\n",
       "      <td>0.437637</td>\n",
       "      <td>0.001426</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>246</th>\n",
       "      <td>0.586540</td>\n",
       "      <td>0.002157</td>\n",
       "      <td>0.437163</td>\n",
       "      <td>0.001379</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>247</th>\n",
       "      <td>0.586531</td>\n",
       "      <td>0.002119</td>\n",
       "      <td>0.436917</td>\n",
       "      <td>0.001369</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>248</th>\n",
       "      <td>0.586557</td>\n",
       "      <td>0.002091</td>\n",
       "      <td>0.436441</td>\n",
       "      <td>0.001350</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>249</th>\n",
       "      <td>0.586551</td>\n",
       "      <td>0.002114</td>\n",
       "      <td>0.436194</td>\n",
       "      <td>0.001377</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>250</th>\n",
       "      <td>0.586547</td>\n",
       "      <td>0.002133</td>\n",
       "      <td>0.435772</td>\n",
       "      <td>0.001297</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>251</th>\n",
       "      <td>0.586524</td>\n",
       "      <td>0.002147</td>\n",
       "      <td>0.435444</td>\n",
       "      <td>0.001379</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>252</th>\n",
       "      <td>0.586468</td>\n",
       "      <td>0.002115</td>\n",
       "      <td>0.434996</td>\n",
       "      <td>0.001573</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>253</th>\n",
       "      <td>0.586487</td>\n",
       "      <td>0.002096</td>\n",
       "      <td>0.434628</td>\n",
       "      <td>0.001442</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>254</th>\n",
       "      <td>0.586507</td>\n",
       "      <td>0.002065</td>\n",
       "      <td>0.434194</td>\n",
       "      <td>0.001362</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>255</th>\n",
       "      <td>0.586487</td>\n",
       "      <td>0.002052</td>\n",
       "      <td>0.433752</td>\n",
       "      <td>0.001422</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>256</th>\n",
       "      <td>0.586472</td>\n",
       "      <td>0.002040</td>\n",
       "      <td>0.433280</td>\n",
       "      <td>0.001546</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>257</th>\n",
       "      <td>0.586456</td>\n",
       "      <td>0.002052</td>\n",
       "      <td>0.432652</td>\n",
       "      <td>0.001563</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>258</th>\n",
       "      <td>0.586450</td>\n",
       "      <td>0.002065</td>\n",
       "      <td>0.432207</td>\n",
       "      <td>0.001551</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>259</th>\n",
       "      <td>0.586464</td>\n",
       "      <td>0.002075</td>\n",
       "      <td>0.431823</td>\n",
       "      <td>0.001614</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>260</th>\n",
       "      <td>0.586436</td>\n",
       "      <td>0.002055</td>\n",
       "      <td>0.431405</td>\n",
       "      <td>0.001565</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>261</th>\n",
       "      <td>0.586438</td>\n",
       "      <td>0.002013</td>\n",
       "      <td>0.431157</td>\n",
       "      <td>0.001608</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>262</th>\n",
       "      <td>0.586434</td>\n",
       "      <td>0.001974</td>\n",
       "      <td>0.430689</td>\n",
       "      <td>0.001459</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>263</th>\n",
       "      <td>0.586434</td>\n",
       "      <td>0.001950</td>\n",
       "      <td>0.430315</td>\n",
       "      <td>0.001325</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>264 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.037356           0.000135             1.036054   \n",
       "1              0.985894           0.000258             0.983379   \n",
       "2              0.942203           0.000337             0.938533   \n",
       "3              0.904744           0.000486             0.899911   \n",
       "4              0.872476           0.000486             0.866475   \n",
       "5              0.844465           0.000492             0.837385   \n",
       "6              0.819919           0.000483             0.811849   \n",
       "7              0.798460           0.000521             0.789216   \n",
       "8              0.779551           0.000524             0.769261   \n",
       "9              0.763002           0.000685             0.751600   \n",
       "10             0.748170           0.000680             0.735757   \n",
       "11             0.735044           0.000755             0.721494   \n",
       "12             0.723287           0.000730             0.708769   \n",
       "13             0.712855           0.000855             0.697212   \n",
       "14             0.703515           0.000802             0.686809   \n",
       "15             0.695209           0.000802             0.677419   \n",
       "16             0.687667           0.000832             0.668852   \n",
       "17             0.681004           0.000808             0.661043   \n",
       "18             0.674957           0.000730             0.653981   \n",
       "19             0.669496           0.000761             0.647483   \n",
       "20             0.664649           0.000672             0.641642   \n",
       "21             0.660075           0.000721             0.636147   \n",
       "22             0.656106           0.000662             0.631181   \n",
       "23             0.652255           0.000764             0.626487   \n",
       "24             0.648844           0.000808             0.622197   \n",
       "25             0.645775           0.000893             0.618116   \n",
       "26             0.642924           0.000952             0.614328   \n",
       "27             0.640237           0.000977             0.610703   \n",
       "28             0.637790           0.001081             0.607183   \n",
       "29             0.635566           0.001176             0.604032   \n",
       "..                  ...                ...                  ...   \n",
       "234            0.586599           0.002159             0.442165   \n",
       "235            0.586569           0.002115             0.441768   \n",
       "236            0.586576           0.002128             0.441256   \n",
       "237            0.586565           0.002134             0.440691   \n",
       "238            0.586563           0.002123             0.440189   \n",
       "239            0.586565           0.002139             0.439815   \n",
       "240            0.586559           0.002128             0.439534   \n",
       "241            0.586541           0.002160             0.439125   \n",
       "242            0.586560           0.002138             0.438867   \n",
       "243            0.586553           0.002111             0.438501   \n",
       "244            0.586527           0.002109             0.438013   \n",
       "245            0.586530           0.002120             0.437637   \n",
       "246            0.586540           0.002157             0.437163   \n",
       "247            0.586531           0.002119             0.436917   \n",
       "248            0.586557           0.002091             0.436441   \n",
       "249            0.586551           0.002114             0.436194   \n",
       "250            0.586547           0.002133             0.435772   \n",
       "251            0.586524           0.002147             0.435444   \n",
       "252            0.586468           0.002115             0.434996   \n",
       "253            0.586487           0.002096             0.434628   \n",
       "254            0.586507           0.002065             0.434194   \n",
       "255            0.586487           0.002052             0.433752   \n",
       "256            0.586472           0.002040             0.433280   \n",
       "257            0.586456           0.002052             0.432652   \n",
       "258            0.586450           0.002065             0.432207   \n",
       "259            0.586464           0.002075             0.431823   \n",
       "260            0.586436           0.002055             0.431405   \n",
       "261            0.586438           0.002013             0.431157   \n",
       "262            0.586434           0.001974             0.430689   \n",
       "263            0.586434           0.001950             0.430315   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000115  \n",
       "1              0.000250  \n",
       "2              0.000455  \n",
       "3              0.000578  \n",
       "4              0.000835  \n",
       "5              0.000961  \n",
       "6              0.001045  \n",
       "7              0.001136  \n",
       "8              0.001301  \n",
       "9              0.001245  \n",
       "10             0.001281  \n",
       "11             0.001314  \n",
       "12             0.001395  \n",
       "13             0.001359  \n",
       "14             0.001443  \n",
       "15             0.001467  \n",
       "16             0.001458  \n",
       "17             0.001383  \n",
       "18             0.001520  \n",
       "19             0.001541  \n",
       "20             0.001628  \n",
       "21             0.001640  \n",
       "22             0.001765  \n",
       "23             0.001651  \n",
       "24             0.001742  \n",
       "25             0.001716  \n",
       "26             0.001738  \n",
       "27             0.001831  \n",
       "28             0.001697  \n",
       "29             0.001735  \n",
       "..                  ...  \n",
       "234            0.001156  \n",
       "235            0.001231  \n",
       "236            0.001275  \n",
       "237            0.001306  \n",
       "238            0.001299  \n",
       "239            0.001369  \n",
       "240            0.001299  \n",
       "241            0.001271  \n",
       "242            0.001368  \n",
       "243            0.001342  \n",
       "244            0.001423  \n",
       "245            0.001426  \n",
       "246            0.001379  \n",
       "247            0.001369  \n",
       "248            0.001350  \n",
       "249            0.001377  \n",
       "250            0.001297  \n",
       "251            0.001379  \n",
       "252            0.001573  \n",
       "253            0.001442  \n",
       "254            0.001362  \n",
       "255            0.001422  \n",
       "256            0.001546  \n",
       "257            0.001563  \n",
       "258            0.001551  \n",
       "259            0.001614  \n",
       "260            0.001565  \n",
       "261            0.001608  \n",
       "262            0.001459  \n",
       "263            0.001325  \n",
       "\n",
       "[264 rows x 4 columns]"
      ]
     },
     "execution_count": 5,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# xgboost parameters for the 3-class interest-level task.\n",
    "# NOTE(review): 'silent' is deprecated in newer xgboost (replaced by 'verbosity') -- confirm installed version.\n",
    "# NOTE(review): 'multi:softmax' predicts class labels; competitions scored by logloss usually need\n",
    "# class probabilities ('multi:softprob') at submission time -- confirm before predicting on test data.\n",
    "param = {\n",
    "    'silent': 0, \n",
    "    'eta': 0.1,\n",
    "    'objective': 'multi:softmax',\n",
    "    'eval_metric': 'mlogloss',\n",
    "    'num_class': 3\n",
    "}\n",
    "# Stratified 5-fold CV with a fixed seed so each fold preserves the class distribution.\n",
    "kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=3)\n",
    "# Run CV for up to 2000 boosting rounds, stopping early when test mlogloss has not improved\n",
    "# for 100 rounds; the number of rows in cv_result suggests a good n_estimators.\n",
    "cv_result = xgb.cv(param, data_train_dmatrix, folds =kfold, num_boost_round = 2000, early_stopping_rounds = 100, metrics='mlogloss')\n",
    "cv_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAEXCAYAAABCjVgAAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvNQv5yAAAIABJREFUeJzt3XmYXGWZ9/HvXVW9r1k6a4ckkACGISKGTVFAZGRxWJUhiCO4oL4yjo7KwMjlMIwoOi6jwjiiA7hBXgR1EBlQdvUFJAHCaiCEQJpsnZCkO530Wvf7x3m6U+lUdVcnXX26u36f6yqoOufUqfvU6dSvnuc5dY65OyIiIgCJuAsQEZHRQ6EgIiJ9FAoiItJHoSAiIn0UCiIi0kehICIifRQKIhnM7J/N7Edx1yESF4XCGGNm1Wa22szOz5hWY2avmdn7MqYtMrM7zWyLmW01s+fN7GozmxDmX2hmPWa2PdxWmdknC1z78WbWVMjXGIps9bj7V9z9owV6vdVm9u5CrLsQRmp/jbX3ZbxTKIwx7r4duBj4jpk1hMlfB5a6+20AZvY24EHgT8DB7l4PnAx0A2/OWN0j7l7t7tXA+4Cvm9lbRmZLZCjMLBV3DVIk3F23MXgDbgJuAY4HNgPTM+b9EfjeIM+/EPhjv2l/Bs7PeHw68BywlShk3pQx701h2tawzOkZ804FngdagdeBzwNVwE4gDWwPtxk5tus64Lfh+Y8BB+TxfhwM/B54A1gBnLs39QBXAj8Lz5sDOHARsAbYAnwCOAJ4Omz7tRmvcwBwf9gfm4CfA/Vh3k/Da+0Mr3VpHu/xauCfwmt1AKnw+PWwLSuAE7O8F0cD64FkxrSzgKfD/SOBpUALsAH4Vo739HigKce8OuAnQDPwKnAFkAjzksA3w3vwCnBJeB9TOda1Gnh3jnkfA1aG/XpH798MYMC3gY3AtvAe/VWu/R33v9exdIu9AN32csfBBGBd+Id3Ucb0KqAHOH6Q519IRiiED7qtwIHh8YFAG3ASUAJcGv5xlobHK4F/Do/fFf4BHhSeuw54R0adh4f7OT9kMuq4KXwAHBk+BH8OLBnkOVVEH9oXheccHt6XQ4ZaD9lD4b+AcuCvgXbg18AUYGb4UDouLD8vvF9lQAPwMPAfGeve7cNvoPc4Y/mngFlABXBQ2M4ZGfVlDUzgZeCkjMe/AC4L9x8BPhjuVwNH51hHzv1FFAj/A9SEOl4EPhLmfYLoQ7kxvN/3shehEP6uNoX9WQZ8D3g4zHsPsAyoJwqINxG+GOXa37rld1P30Rjl7luIvmFWAr/MmDWBqFtwfe8EM/t6GFdoM7MrMpY9OkzfTtRK+CnwUpj3t8Bv3f337t4FfIPog+ltRN9Eq4Fr3L3T3e8H7gQWh+d2AQvMrNbdt7j7E0PcvF+6+5/dvZsoFA4bZPn3Aqvd/UZ37w6vdztRl9hw1PNv7t7u7r8j+hC/xd03uvvrwB+AtwC4+8rwfnW4ezPwLeC4AdY70Hvc67vuvsbddxKFfVnYlhJ3X+3uL+dY9y2E/WFmNUTfnm/JeD/mmdlkd9/u7o8O5c0ws2So/XJ3b3X31UQtgw+GRc4FvuPuTeHv9JqhrD/DB4Ab3P0Jd+8ALgeOMbM5YRtqiFqI5u4vuPu6jO3bl/1d1BQKY5SZXUD0De1e4GsZs7YQdVNM753g7pd6NK7wK6Jv0r0edfd6j8YUpgGHAF8J82YQdQv0riNN9C11Zpi3Jkzr9WqYB3AO0YfQq2b2kJkdM8TNW59xfwdRAA1kNnBUCLitZraV6ANl2jDVsyHj/s4sj6sBzGyKmS0xs9fNrAX4GTB5gPUO9B73WpMxfyXwGaLWzMbwWjNyrPtm4GwzKwPOBp5w997X+ghRK+UvZva4mb13gBqzmUzUQnw1Y1rm/p+RWXe/+0PR//3ZTtQ1NzN8EbmWqKtxg5ldb2a1YdF93d9F
TaEwBpnZFKL+1I8BHwfONbN3Arh7G1E//NlDWae7byD6dv03YdJaog/b3tc0om6M18O8WWaW+fezX5iHuz/u7mcQdbH8Gri192WGUtMQrAEeCgHXe6t290+OcD1fDetc6O61wAVEXRu9+r/eQO9x1ue4+83ufmx4nrP7F4LM5Z4n+kA9BTifKCR6573k7ouJ3o+vAbeZWVX+m8kmom/jszOm9e1/ou6bxox5s4aw7kz9358qYBK7/s6+6+5vJfoycyDwhTA91/6WPCgUxqZrgV+7+wOhyXwp8MPwrZDw+MNmdlkIEMysEZiba4VmNoloMPK5MOlW4DQzO9HMSoDPEQ12/j+i0GkDLjWzEjM7nihMlphZqZl9wMzqQpdIC1G3B0TfsCeZWd0wvQ+97gQONLMPhnpKzOwIM3vTCNdTQzSIvNXMZhI+pDJsAPbPeDzQe7wHMzvIzN4V9nM7USulJ9uywc3Ap4F3Eo0p9K7nAjNrCC2TrWFyzvWYWXnmjagleitwdTgcejbwj0Qto97t+gczm2lm9USD44Mp6fc6qVD/RWZ2WNjmrwCPufvqsH+PCu9bW3g/egbZ35KPuAc1dBvaDTiT6BtUfb/p9wFXZzw+CriL6B/9VuBZ4GpgUph/IdE/lt4jbzYS9TlPyVjHWUQDhtuAhwgDt2HeIWHatrDMWWF6KXA3UTdWC/A4cGzG824g6gLYSu6jj76c8fh4BhmcDssdRHTEUnNY//1EYxFDqofsA82pjOWbyBjEJ/ogvCLjPVkW3s+niD7kmzKWPQN4LbzW5/N4j1ez+8D0QqKxn1aiwfg7s72HGcvvR/QB/tt+038W9vd2oi8BZ+Z4/vFh+/vf5hGNXf0svN9rgC+x6+ijFFFLdjPR0UefJWpZWI7XWZ3lNb4c5n2CaNC8d3sbw/QTiY442s6uI72qB9vfug1+s/AGi4gUhJmdAvyXu88edGGJnbqPRGRYmVmFmZ1qZqnQjfYvRAc5yBigloKMCWb2DuB/s83z6OgpGSXMrJKoK+xgonGP3wL/4O4tsRYmeVEoiIhIH3UfiYhInzF3kq3Jkyf7nDlz4i5DRGRMWbZs2SZ3bxhsuTEXCnPmzGHp0qVxlyEiMqaY2auDL6XuIxERyaBQEBGRPgoFERHpo1AQEZE+CgUREemjUBARkT4KBRER6VM0obByYyu3L2sindZpPUREcimaULjvhY187hfL2dGl622IiORSNKFQVRb9eLutozvmSkRERq+iCYXqEArbFQoiIjkVXSiopSAiklvRhEKVWgoiIoMqmlDo6z5qVyiIiORSNKFQVZYEoK1ToSAikkvRhEJ1eW/3kQ5JFRHJpXhCQQPNIiKDKppQqChJkjCFgojIQIomFMyMqtIUrRpoFhHJqWhCAaLDUtVSEBHJrahCobo8paOPREQGULBQMLMbzGyjmT2bY76Z2XfNbKWZPW1mhxeqll5VZSkdfSQiMoBCthRuAk4eYP4pwPxwuxj4fgFrAaC6LKnuIxGRARQsFNz9YeCNARY5A/iJRx4F6s1seqHqAagqTekXzSIiA4hzTGEmsCbjcVOYVjDV5Smd+0hEZABxhoJlmZb1smhmdrGZLTWzpc3NzXv9gtVlGmgWERlInKHQBMzKeNwIrM22oLtf7+6L3H1RQ0PDXr+gDkkVERlYnKFwB/B34Siko4Ft7r6ukC9YXZaiq8fp6NYRSCIi2aQKtWIzuwU4HphsZk3AvwAlAO7+X8BdwKnASmAHcFGhaum16/xHPZSlkoV+ORGRMadgoeDuiweZ78CnCvX62VRlXFNhYlXpSL60iMiYUFy/aA7XVNARSCIi2RVVKNSUlwDQ2t4VcyUiIqNTkYVC1H2kM6WKiGRXVKFQG1oKLWopiIhkVVShoJaCiMjAiiwUQkthp1oKIiLZFFUolKYSlJckaNXRRyIiWRVVKEA0rqCWgohIdkUXCjXluk6ziEguRRcKtRUlOvpIRCSH4guF8hJa1FIQ
Ecmq6EKhpjxFq8YURESyKrpQiLqP1FIQEcmm6EKhpjylMQURkRyKLhRqy0vo7E7T3qUL7YiI9FeEoaBTXYiI5FJ8oVChk+KJiORSdKGgk+KJiORWdKFQq5PiiYjkVHShUKNrKoiI5FR0oVDXO6awU91HIiL9FV0o1FdGobB1Z2fMlYiIjD5FFwrlJUnKSxJs26HuIxGR/oouFADqK0rZskMtBRGR/oozFCpL2KqWgojIHooyFOoqStiqQ1JFRPZQlKFQX1miMQURkSyKMxQqSnX0kYhIFsUZChpTEBHJqihDoa6yhA6dPltEZA9FGQr1FaUAai2IiPRTlKEwIfyqWb9VEBHZXVGGQl3vqS7UUhAR2U1RhkJv99E2HYEkIrKb4gwFtRRERLIqaCiY2clmtsLMVprZZVnmzzaz+8zsaTN70MwaC1lPr11nSlUoiIhkKlgomFkSuA44BVgALDazBf0W+wbwE3dfCFwFfLVQ9WSqKElSmkqwpU3dRyIimQrZUjgSWOnuq9y9E1gCnNFvmQXAfeH+A1nmF4SZMbGylM0KBRGR3RQyFGYCazIeN4VpmZYD54T7ZwE1Zjap/4rM7GIzW2pmS5ubm4eluIlVpbyhUBAR2U0hQ8GyTPN+jz8PHGdmTwLHAa8De1wn092vd/dF7r6ooaFhWIqbVK2WgohIf6kCrrsJmJXxuBFYm7mAu68FzgYws2rgHHffVsCa+kyqKmX15raReCkRkTGjkC2Fx4H5ZjbXzEqB84A7Mhcws8lm1lvD5cANBaxnNxOrynhju1oKIiKZChYK7t4NXALcA7wA3Oruz5nZVWZ2eljseGCFmb0ITAWuLlQ9/U2qLqWts0cnxRMRyVDI7iPc/S7grn7TvpRx/zbgtkLWkMvEquhXzW+0dTKjviKOEkRERp2i/EUz7B4KIiISKdpQmBRCQUcgiYjsUrShsKul0BFzJSIio0fRhsKkqjIANusIJBGRPkUbCrUVKVIJ05iCiEiGog0FM2OCTnUhIrKbog0FgLaObu5+bn3cZYiIjBoF/Z3CaPfW2RNoad/jVEsiIkWrqFsKU2rK2dSqo49ERHoVdSg01JTR3NqBe/+Tt4qIFKeiD4XOnjTbdFlOERFAoQBAs7qQRESAYg+FaoWCiEimQUPBzA4ws7Jw/3gz+7SZ1Re+tMKbUhtCYbtCQUQE8msp3A70mNk84L+BucDNBa1qhPR2H21sUSiIiEB+oZAOF8w5C/gPd/8sML2wZY2MmrIUZamEWgoiIkE+odBlZouBDwF3hmklhStp5JhZ32GpIiKSXyhcBBwDXO3ur5jZXOBnhS1r5CgURER2GfQ0F+7+PPBpADObANS4+zWFLmykrN7URntXOu4yRERGhXyOPnrQzGrNbCKwHLjRzL5V+NJGxulvnkFJ0uIuQ0RkVMin+6jO3VuAs4Eb3f2twLsLW9bImVZXQUt7N20dOjGeiEg+oZAys+nAuewaaB43ptVFh6Wub2mPuRIRkfjlEwpXAfcAL7v742a2P/BSYcsaOdNqKwDYsE2hICKSz0DzL4BfZDxeBZxTyKJG0vS6cgDWKRRERPIaaG40s1+Z2UYz22Bmt5tZ40gUNxKmhVBQ95GISH7dRzcCdwAzgJnAb8K0caG8JEl9ZQnr1VIQEckrFBrc/UZ37w63m4CGAtc1oqbVlqv7SESE/EJhk5ldYGbJcLsA2FzowkbStLpy1rfsjLsMEZHY5RMKHyY6HHU9sA54H9GpL8aN6XXlrN+mU12IiAwaCu7+mruf7u4N7j7F3c8k+iHbuDG9roJN2zto7+qJuxQRkVjt7ZXX/nFYq4jZzProtwoaVxCRYre3oTCuThbUOCEKhaYtO2KuREQkXnsbCj6sVcSscWIlAK9v0WCziBS3nL9oNrNWsn/4G1BRsIpiMLWmjGTCaFIoiEiRyxkK7l4zkoXEKZVMML2uXN1HIlL09rb7KC9mdrKZrTCzlWZ2WZb5+5nZA2b2pJk9
bWanFrKegTROqOD1rWopiEhxK1gomFkSuA44BVgALDazBf0WuwK41d3fApwH/Geh6hnMzPpKdR+JSNErZEvhSGClu69y905gCXBGv2UcqA3364C1BaxnQI0TKljf0k5nty7NKSLFq5ChMBNYk/G4KUzLdCVwgZk1AXcBf59tRWZ2sZktNbOlzc3NhaiV3yxfizus26bWgogUr3xOnd1qZi39bmvC6bT3H+ipWab1P5ppMXCTuzcCpwI/NbM9anL36919kbsvamgozLn4rjlnIQCvbtZgs4gUr0EvsgN8i6hb52aiD/rzgGnACuAG4Pgcz2sCZmU8bmTP7qGPACcDuPsjZlYOTAY25lf+8JkzKfqtwqub2xhnJ4EVEclbPt1HJ7v7D9y91d1b3P164FR3/7/AhAGe9zgw38zmmlkpUZjc0W+Z14ATAczsTUA5UJj+oUE01JRRUZJktVoKIlLE8gmFtJmda2aJcDs3Y17OXza7ezdwCdH1nV8gOsroOTO7ysxOD4t9DviYmS0HbgEudPdYfi1tZsyeVBlaCiIixSmf7qMPAN9h1+GijxANDlcQfejn5O53EQ0gZ077Usb954G3D6XgQpo9qZKXmxUKIlK8Bg0Fd18F/E2O2X8c3nLiNWdSFQ/8pZmetJNMjKtz/omI5CWfo48aw5FGG81sg5ndbmaNI1HcSJs9qYrOnjTrW3QKbREpTvmMKdxINEA8g+h3Br8J08ad3iOQVm9SF5KIFKd8QqHB3W909+5wu4lxeszm3IYqAFY1b4+5EhGReOQTCpvM7AIzS4bbBcDmQhcWh2m15VSXpVi5UaEgIsUpn1D4MHAusB5YB7wPuKiQRcXFzDigoYqVaimISJEaNBTc/TV3P93dG9x9irufCZw9ArXF4oAp1WopiEjR2tsT4v3jsFYxisybUs2Glg5a2rviLkVEZMTtbSiM24P45zVUA/CyWgsiUoT2NhRiORXFSJg3JQoFdSGJSDHK+YtmM2sl+4e/ARUFqyhm+02spDSV4MUNrXGXIiIy4nKGgrvXjGQho0UqmeDAqdX8Zb1CQUSKTyGvvDZmHTytVqEgIkVJoZDFwdNqaG7tYNP2jrhLEREZUQqFLA6eVgvACrUWRKTIKBSyOHh6NJyiLiQRKTYKhSwmV5dRkjS+/+DKuEsRERlRCoUc3j5vMpOry+IuQ0RkRCkUclg4s44XN7Sys7Mn7lJEREaMQiGHQxvrSTs8v25b3KWIiIwYhUIOCxvrAHi6SaEgIsVDoZDD1NpyGmrKeEahICJFRKEwgK6eNHc9uy7uMkRERoxCYQAfe8f+tHel2dLWGXcpIiIjQqEwgLfOngDAk2u2xFyJiMjIUCgMYGFjHcmEsexVhYKIFAeFwgAqS1MsmF6rUBCRopHzegoS2djaTvPaDjq705SmlKEiMr7pU24Q/3r6IaQdljdtjbsUEZGCUygM4qi5kzCDR17eHHcpIiIFp1AYxISqUt40rVahICJFQaGQh2MOmMSy17bQ3qWT44nI+KZQyMOx8ybT2Z3mz6+8EXcpIiIFpVDIw9H7T6I0leChF5vjLkVEpKAKGgpmdrKZrTCzlWZ2WZb53zazp8LtRTMblYf4VJQmOWruRB5csTHuUkRECqpgoWBmSeA64BRgAbDYzBZkLuPun3X3w9z9MOB7wC8LVc++Ov6gKbzc3MaaN3bEXYqISMEUsqVwJLDS3Ve5eyewBDhjgOUXA7cUsJ59cuLBUwC457n1MVciIlI4hQyFmcCajMdNYdoezGw2MBe4v4D17JM5k6tYML2W3z6jU2mLyPhVyFCwLNM8x7LnAbe5e9ZjPs3sYjNbamZLm5vjG+w9beF0nnxtK69v3RlbDSIihVTIUGgCZmU8bgTW5lj2PAboOnL36919kbsvamhoGMYSh+a0Q6cDsPj6R2KrQUSkkAoZCo8D881srpmVEn3w39F/ITM7CJgAjPpP2jmTqzhkRi0Tq8riLkVEpCAKFgru
3g1cAtwDvADc6u7PmdlVZnZ6xqKLgSXunqtraVQ5beF0nlqzVUchici4VNDfKbj7Xe5+oLsf4O5Xh2lfcvc7Mpa50t33+A3DaNXbhXSXBpxFZBzSL5qHaPakKg7fr54lj68hnR4TjRsRkbwpFPbCB4+ZzSub2vjjyk1xlyIiMqwUCnvh1EOnk0oYn17yZNyliIgMK4XCXihLJfn4cfvTsrOLpi0acBaR8UOhsJfOP2o2AD9/7LWYKxERGT4Khb00s76CuooSfvjwKto6uuMuR0RkWCgU9sF/X3gE3WnnZrUWRGScUCjsg8P3m8Cx8ybzg4dXsaNTrQURGfsUCvvosyfNZ9P2Dq5/eFXcpYiI7DOFwj566+yJnLZwOj94aBXrt7XHXY6IyD5RKAyDy04+mJ608+/3rIi7FBGRfaJQGAazJlZy0bFzuP2JJpavGZWXmRYRyYtCYZh86oR5TK0t4wu3Lae9K+u1gkRERj2FwjCpLS/hmnMW8uKG7ZzwjQfjLkdEZK8oFIbRCQdNYfGRs9jQ0s6yV9+IuxwRkSFTKAyzL562gJJkgsXXP0Zza0fc5YiIDIlCYZhVl6W4/ZNvI5GAT/5sGZ3d6bhLEhHJm0KhAP5qZh3feP+bWfrqFq749TOMkSuNiogoFArlvQtnMLO+nFuXNnHN3X9RMIjImKBQKKA//tO7mFJTxg8eWsV371sZdzkiIoNSKBSQmfHo5SdyzuGNfPveF3nbV+9Ti0FERjWFQoElEsbX37eQxUfOYu22dhZ9+V4NPovIqKVQGAHJhPGVsw7lC+85iM1tnRz+b79n7dadcZclIrIHhcIIMTM+dcI8rj3/Lezo7OadX3+Ae5/fEHdZIiK7USiMsPcunMF9nzue0lSCj/5kKZfd/jSt7V1xlyUiAigUYjF3chVPfukkpteVs+TxNbz1y/dyz3PrNQgtIrFTKMSkLJXkkctPZMH0GpJmfPyny3jzv/6Ov6xvibs0ESliNta+nS5atMiXLl0adxnDqrsnzc1/fo2rfvM83WlncnUpP/7wkRwyoy7u0kRknDCzZe6+aLDl1FIYBVLJBH93zByWXvFuptWWsWl7J6d9948ceuU93P3senrSYyu4RWTsUkthFNq2o4u/ufYPrHljJw6UpRJMrS3j1586lolVpXGXJyJjUL4tBYXCKNbdk+beFzbwhdueprW9G4D6ihL+5fQFnLRgGtVlqZgrFJGxQqEwzrywroUP3/Q467e107vHJlaV8pWzDuWdB06mslQBISK5KRTGqXTaeeK1Lfyfnz/BG22ddIfxhmTCmFlfzg0XHsEBDdWYWcyVishoolAoAt09aR575Q0+d+tyNra20zsebcCk6lIufc/BHL3/JGZNrFBIiBQ5hUIRatqygwt+9BhNW3bSk/a+biYj6mr69InzmT+1mgOn1jC5uizOUkVkhI2KUDCzk4HvAEngR+5+TZZlzgWuBBxY7u7nD7ROhUJ+3J2VG7fz6KrNfPvel9jS1knmnk4ljIrSJBUlSS551zzmT6nhwKnVTFJYiIxLsYeCmSWBF4GTgCbgcWCxuz+fscx84FbgXe6+xcymuPvGgdarUNg77s7G1g5e3NDKF3/1LDs7e9jZ1cP2ju7dljOgujzFGYfNYNaESmbUVzBzQgUz6ytoqC4jkVA3lMhYlG8oFPKQlSOBle6+KhS0BDgDeD5jmY8B17n7FoDBAkH2npkxtbacqbXlPHzpCX3T3Z0NLVFYXPHrZ1m3bSfucMuf12T90ZwZVJelOGnBVBrrK5ix261cR0GJjHGF/Bc8E1iT8bgJOKrfMgcCmNmfiLqYrnT3u/uvyMwuBi4G2G+//QpSbLEyM6bVlTOtbvewAGht72Lt1nbWbt1J09ad/OcDK2lu7cAd7ly+js6ePS8WlDAoTSbo6kljZpjB1JpyPn7c/tRVllJXUcLEylImVJUwqaqMitLkSG2qiOShkN1H7wfe4+4fDY8/CBzp7n+fscyd
QBdwLtAI/AH4K3ffmmu96j4aPbp70qxvae8Ljm/+bgVdPWk6e5xtO6IxjHzO0GHsaoGUJBO0tHdhZsyoK+cL7zmY+soSqstSVJenqAn/ryhJ6ogqkSEYDd1HTcCsjMeNwNosyzzq7l3AK2a2AphPNP4go1wqmaBxQiWNEyoBOPMtM7Mu19mdZtvOLrbt7GLrjk627OhiS1sn37v/Jda3RD/Gc4e0O9s7uunqccBZvXkHn7r5iQFr6A2UytIUyQS0dfRgBpOqSjn3iP2oLktSXVayW6BUlaaoKU9RXZaiqixFaUqnABPpVciWQopooPlE4HWiD/rz3f25jGVOJhp8/pCZTQaeBA5z98251quWQnHo6kmzZUcnW9q62LKjk7aObrb33tq7+fEjq6OuLACHytIkPe7s7OzBHYbyV20WrQOLQiaaFt2rLU+RMKMlXAjJwhOm1pTx0XfsT3lJgvKS6Ciu3qO5ysP98pIkpckEpckEJSmjJJkglTC1cCQWsR99FIo4FfgPovGCG9z9ajO7Cljq7ndY9K/jm8DJQA9wtbsvGWidCgXJVzrt7OjqYXt7N9s7umhtj0KlraOb1vZurr1/JT1pp8edTdujsRIIgeIeTkaYJO1OZ3d6SEEzmIRBwmzXYH5GIFWUJklg7Ojq6Z3Vp76yBAO27tz9an29yzTUlGFmbGzt2O15M+rK+eQJ80gljGQiCqhkwkgljFQIq4RF85IJ+u4nzEgljaQZiUT0/2Qiur/bc8xIJNj1nPA6CsDRY1SEQiEoFCRuPWmnvauH9q7osN72rnTf/d5DfXvnd3ZHYyxdPWm6utMsefw1mls7mFhViju8saOTzH+CTjS24u60dXTvFkRONIjvHrWkeqeNdhb+k7mdvVGRShqG0ZVO7zYdoLwkiQE7Qzj2X2dFaQoDdnR27zG/Mpwsckfnns+tCgc3tGWZB1BdFs3f3rHn/N6TUPY/lBugJsxrzTKv91Bvg76TW+723DCvJcs8gLqKEiAK/bs/886sywxmNIwpiIxLyYRRFcYjhurvT5xfgIp2SYeWT3eP051O05N2unqcnnT2x9FyTndPmh530mnC/72vFdUT7qf3uA89YZ03/Gk1G1radw+pkAKTq8twYNP2jj0BfOsIAAAG/klEQVTqra8oxXG27tjzOuWlqQQ4tHfveZQbRK0tIPT/7f6aOR+z6+CHXF+Ie88GkG1+d28YZ5nXNcA8Jxpbi15/z/ntXbnnOdAWgqa+siRrzcNJoSAyjiQSRgKjJAlRr+3IuPDtc0fstaSwdNiFiIj0USiIiEgfhYKIiPRRKIiISB+FgoiI9FEoiIhIH4WCiIj0USiIiEgfhYKIiPQZc+c+MrNm4NW9fPpkYNMwljNaaTvHF23n+BLXds5294bBFhpzobAvzGxpPieEGuu0neOLtnN8Ge3bqe4jERHpo1AQEZE+xRYK18ddwAjRdo4v2s7xZVRvZ1GNKYiIyMCKraUgIiIDUCiIiEifogkFMzvZzFaY2UozuyzueoaTma02s2fM7CkzWxqmTTSz35vZS+H/E+Kuc6jM7AYz22hmz2ZMy7pdFvlu2L9Pm9nh8VU+NDm280ozez3s06fM7NSMeZeH7VxhZu+Jp+qhMbNZZvaAmb1gZs+Z2T+E6eNqfw6wnWNnf7r7uL8RXZfwZWB/oBRYDiyIu65h3L7VwOR+074OXBbuXwZ8Le4692K73gkcDjw72HYBpwL/S3SN9KOBx+Kufx+380rg81mWXRD+fsuAueHvOhn3NuSxjdOBw8P9GuDFsC3jan8OsJ1jZn8WS0vhSGClu69y905gCXBGzDUV2hnAj8P9HwNnxljLXnH3h4E3+k3OtV1nAD/xyKNAvZlNH5lK902O7czlDGCJu3e4+yvASqK/71HN3de5+xPhfivwAjCTcbY/B9jOXEbd/iyWUJgJrMl43MTAO2qsceB3ZrbMzC4O06a6+zqI/lCBKbFVN7xybdd43MeXhK6T
GzK6/8b8dprZHOAtwGOM4/3ZbzthjOzPYgkFyzJtPB2L+3Z3Pxw4BfiUmb0z7oJiMN728feBA4DDgHXAN8P0Mb2dZlYN3A58xt1bBlo0y7SxvJ1jZn8WSyg0AbMyHjcCa2OqZdi5+9rw/43Ar4ianxt6m9vh/xvjq3BY5dqucbWP3X2Du/e4exr4Ibu6FMbsdppZCdEH5c/d/Zdh8rjbn9m2cyztz2IJhceB+WY218xKgfOAO2KuaViYWZWZ1fTeB/4aeJZo+z4UFvsQ8D/xVDjscm3XHcDfhaNWjga29XZLjEX9+s/PItqnEG3neWZWZmZzgfnAn0e6vqEyMwP+G3jB3b+VMWtc7c9c2zmm9mfco/UjdSM6muFFotH9L8ZdzzBu1/5ERy8sB57r3TZgEnAf8FL4/8S4a92LbbuFqKndRfSN6iO5touoGX5d2L/PAIvirn8ft/OnYTueJvrgmJ6x/BfDdq4ATom7/jy38ViibpGngafC7dTxtj8H2M4xsz91mgsREelTLN1HIiKSB4WCiIj0USiIiEgfhYKIiPRRKIiISB+FgoiI9FEoiOTBzA7rd7rj04frFOxm9hkzqxyOdYnsK/1OQSQPZnYh0Q+oLinAuleHdW8awnOS7t4z3LWIqKUg44qZzQkXOPlhuMjJ78ysIseyB5jZ3eHssn8ws4PD9Peb2bNmttzMHg6nRrkK+NtwgZS/NbMLzezasPxNZvb9cHGVVWZ2XDgT5gtmdlPG633fzJaGuv41TPs0MAN4wMweCNMWW3TRpGfN7GsZz99uZleZ2WPAMWZ2jZk9H868+Y3CvKNSdOL+SbVuug3nDZgDdAOHhce3AhfkWPY+YH64fxRwf7j/DDAz3K8P/78QuDbjuX2PgZuIrtFhROfHbwEOJfrStSyjlt5TOCSBB4GF4fFqwkWSiALiNaABSAH3A2eGeQ6c27suotMiWGaduum2rze1FGQ8esXdnwr3lxEFxW7CqY3fBvzCzJ4CfkB01SyAPwE3mdnHiD7A8/Ebd3eiQNng7s94dEbM5zJe/1wzewJ4EjiE6Kpb/R0BPOjuze7eDfyc6MpsAD1EZ9+EKHjagR+Z2dnAjjzrFBlQKu4CRAqgI+N+D5Ct+ygBbHX3w/rPcPdPmNlRwGnAU2a2xzIDvGa63+ungVQ4A+bngSPcfUvoVirPsp5s59fv1e5hHMHdu83sSOBEorP+XgK8K486RQakloIUJY8ufPKKmb0f+i4U/+Zw/wB3f8zdvwRsIjrffSvRNXf3Vi3QBmwzs6lEF0Tqlbnux4DjzGyymSWBxcBD/VcWWjp17n4X8Bmii7eI7DO1FKSYfQD4vpldAZQQjQssB/7dzOYTfWu/L0x7DbgsdDV9dagv5O7LzexJou6kVURdVL2uB/7XzNa5+wlmdjnwQHj9u9w927UwaoD/MbPysNxnh1qTSDY6JFVERPqo+0hERPqo+0jGPTO7Dnh7v8nfcfcb46hHZDRT95GIiPRR95GIiPRRKIiISB+FgoiI9FEoiIhIn/8PxodSGnpGXPgAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x22497cff5c0>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# plot\n",
    "test_means = cv_result['test-mlogloss-mean']\n",
    "test_stds = cv_result['test-mlogloss-std'] \n",
    "\n",
    "x_axis = range(0, cv_result.shape[0])\n",
    "        \n",
    "plt.errorbar(x_axis, test_means, yerr=test_stds ,label='Test')\n",
    "\n",
    "plt.title(\"XGBoost n_estimators vs Log Loss\")\n",
    "plt.xlabel( 'n_estimators' )\n",
    "plt.ylabel( 'Log Loss' )\n",
    "\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Best n_estimators: 264\n"
     ]
    }
   ],
   "source": [
    "#最佳参数n_estimators\n",
    "n_estimators = cv_result.shape[0]\n",
    "print(\"Best n_estimators:\", n_estimators)\n",
    "    \n",
    "# 采用交叉验证得到的最佳参数n_estimators，训练模型\n",
    "bst = xgb.train(param, data_train_dmatrix, n_estimators)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "train Accuracy: 81.30%\n"
     ]
    }
   ],
   "source": [
    "# make prediction\n",
    "preds = bst.predict(data_train_dmatrix)\n",
    "y_pred = [round(value) for value in preds]\n",
    "y_test = data_train_dmatrix.get_label()\n",
    "test_accuracy = accuracy_score(y_test, y_pred)\n",
    "print(\"train Accuracy: %.2f%%\" % (test_accuracy * 100.0))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "2. 对树的最大深度（可选）和min_children_weight进行调优（可选）\n",
    "-------------------------------------------------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 53,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\Programs\\Anaconda3\\lib\\site-packages\\sklearn\\model_selection\\_search.py:761: DeprecationWarning: The grid_scores_ attribute was deprecated in version 0.18 in favor of the more elaborate cv_results_ attribute. The grid_scores_ attribute will not be available from 0.20\n",
      "  DeprecationWarning)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "([mean: -0.60005, std: 0.00342, params: {'max_depth': 3, 'min_child_weight': 1},\n",
       "  mean: -0.60005, std: 0.00398, params: {'max_depth': 3, 'min_child_weight': 3},\n",
       "  mean: -0.60052, std: 0.00392, params: {'max_depth': 3, 'min_child_weight': 5},\n",
       "  mean: -0.59223, std: 0.00356, params: {'max_depth': 5, 'min_child_weight': 1},\n",
       "  mean: -0.59169, std: 0.00281, params: {'max_depth': 5, 'min_child_weight': 3},\n",
       "  mean: -0.59104, std: 0.00330, params: {'max_depth': 5, 'min_child_weight': 5},\n",
       "  mean: -0.59591, std: 0.00439, params: {'max_depth': 7, 'min_child_weight': 1},\n",
       "  mean: -0.59462, std: 0.00417, params: {'max_depth': 7, 'min_child_weight': 3},\n",
       "  mean: -0.59374, std: 0.00435, params: {'max_depth': 7, 'min_child_weight': 5},\n",
       "  mean: -0.61311, std: 0.00494, params: {'max_depth': 9, 'min_child_weight': 1},\n",
       "  mean: -0.60562, std: 0.00564, params: {'max_depth': 9, 'min_child_weight': 3},\n",
       "  mean: -0.60390, std: 0.00478, params: {'max_depth': 9, 'min_child_weight': 5}],\n",
       " {'max_depth': 5, 'min_child_weight': 5},\n",
       " -0.5910407124955356)"
      ]
     },
     "execution_count": 53,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# # this is too slow, wo use native xgboost api instand\n",
    "# max_depth = range(3,10,2)\n",
    "# min_child_weight = range(1,6,2)\n",
    "# param_search_depth_weight = dict(max_depth=max_depth, min_child_weight=min_child_weight)\n",
    "\n",
    "# xgb_depth_weight = XGBClassifier(\n",
    "#         learning_rate =0.1,\n",
    "#         n_estimators=n_estimators,  #第一轮参数调整得到的n_estimators最优值\n",
    "#         max_depth=5,\n",
    "#         min_child_weight=1,\n",
    "#         gamma=0,\n",
    "#         subsample=0.3,\n",
    "#         colsample_bytree=0.8,\n",
    "#         colsample_bylevel = 0.7,\n",
    "#         objective= 'multi:softmax',\n",
    "#         seed=3)\n",
    "\n",
    "\n",
    "# gsearch_depth_weight = GridSearchCV(xgb_depth_weight, param_grid = param_search_depth_weight, scoring='neg_log_loss',n_jobs=-1, cv=5)\n",
    "# gsearch_depth_weight.fit(data_train_pd.drop(['interest_level'], axis = 1) , data_train_pd['interest_level'])\n",
    "\n",
    "# gsearch_depth_weight.grid_scores_, gsearch_depth_weight.best_params_,     gsearch_depth_weight.best_score_"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "function build Time: 0.0 seconds\n"
     ]
    }
   ],
   "source": [
    "tmp = time.time()\n",
    "\n",
    "param_default = {\n",
    "    'silent': 0, \n",
    "    'eta': 0.1,\n",
    "    'objective': 'multi:softmax',\n",
    "    'eval_metric': 'mlogloss',\n",
    "    'num_class': 3,\n",
    "    'nthread': multiprocessing.cpu_count() - 1, #leave one cpu to let other task run, so we can continue edit code\n",
    "    'tree_method': 'gpu_exact'\n",
    "}\n",
    "\n",
    "param_cv_default = {\n",
    "    'num_boost_round' : 2000, # use a max value to let model stop automatically\n",
    "    'early_stopping_rounds': 100,  # use a max value to let model stop automatically\n",
    "    'folds' : StratifiedKFold(n_splits=5, shuffle=True, random_state=3),\n",
    "    'metrics': 'mlogloss'\n",
    "}\n",
    "\n",
    "#this is major function we used to search best parameter\n",
    "def SearchUseXgboostNative(param_grid, data_dmatrix, param_default = param_default, param_cv_default = param_cv_default):\n",
    "    if type(param_grid) == list:\n",
    "        tmp_grid = param_grid\n",
    "    elif type(param_grid) == dict:\n",
    "        tmp_grid = [ param_grid ]\n",
    "    else:\n",
    "        raise TypeError('unsupport parameter type...')\n",
    "        \n",
    "    param_search_list = []\n",
    "    for grid in tmp_grid:\n",
    "        for key,item in grid.items():\n",
    "            if type(item) != list:\n",
    "                raise TypeError('unsupport parameter type...')\n",
    "            if param_search_list:\n",
    "                tmp = []\n",
    "                for x in param_search_list:\n",
    "                    for y in item:\n",
    "                        param_dict = x.copy()\n",
    "                        param_dict[key] = y\n",
    "                        tmp.append(param_dict)\n",
    "                param_search_list = tmp\n",
    "            else:\n",
    "                param_search_list = [{key: x} for x in item]\n",
    "    \n",
    "    ret = []\n",
    "    for param_dict in param_search_list:\n",
    "        tmp_param_dict = param_default.copy()\n",
    "        tmp_param_dict.update(param_dict)\n",
    "        print('param : ', param_dict, end = \" \")\n",
    "        cv_result = xgb.cv(tmp_param_dict, data_dmatrix, **param_cv_default)\n",
    "        print('estimaters：', cv_result.shape[0], 'cv_resultmin test_score : ', cv_result['test-mlogloss-mean'].min())\n",
    "        ret.append((cv_result, tmp_param_dict))\n",
    "        \n",
    "                \n",
    "    return ret\n",
    "print(\"function build Time: %s seconds\" % (str(time.time() - tmp)))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "param :  {'max_depth': 3, 'min_child_weight': 1} estimaters： 899 cv_resultmin test_score :  0.5884840000000001\n",
      "param :  {'max_depth': 3, 'min_child_weight': 3} estimaters： 1025 cv_resultmin test_score :  0.5880606666666667\n",
      "param :  {'max_depth': 3, 'min_child_weight': 5} estimaters： 944 cv_resultmin test_score :  0.587897\n",
      "param :  {'max_depth': 5, 'min_child_weight': 1} estimaters： 326 cv_resultmin test_score :  0.5859676666666667\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3} estimaters： 359 cv_resultmin test_score :  0.5855566666666667\n",
      "param :  {'max_depth': 5, 'min_child_weight': 5} estimaters： 344 cv_resultmin test_score :  0.5857623333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 1} estimaters： 193 cv_resultmin test_score :  0.587263\n",
      "param :  {'max_depth': 7, 'min_child_weight': 3} estimaters： 169 cv_resultmin test_score :  0.5864053333333333\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5} estimaters： 193 cv_resultmin test_score :  0.5866793333333333\n",
      "param :  {'max_depth': 9, 'min_child_weight': 1} estimaters： 102 cv_resultmin test_score :  0.592766\n",
      "param :  {'max_depth': 9, 'min_child_weight': 3} estimaters： 105 cv_resultmin test_score :  0.5906536666666667\n",
      "param :  {'max_depth': 9, 'min_child_weight': 5} estimaters： 97 cv_resultmin test_score :  0.5902433333333333\n",
      "{'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 5, 'min_child_weight': 3}\n",
      "Training Time: 699.7747926712036 seconds\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.038764</td>\n",
       "      <td>0.000194</td>\n",
       "      <td>1.038024</td>\n",
       "      <td>0.000175</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.988537</td>\n",
       "      <td>0.000393</td>\n",
       "      <td>0.987108</td>\n",
       "      <td>0.000245</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.945870</td>\n",
       "      <td>0.000449</td>\n",
       "      <td>0.943750</td>\n",
       "      <td>0.000441</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.909230</td>\n",
       "      <td>0.000595</td>\n",
       "      <td>0.906493</td>\n",
       "      <td>0.000520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.877649</td>\n",
       "      <td>0.000700</td>\n",
       "      <td>0.874315</td>\n",
       "      <td>0.000632</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.850008</td>\n",
       "      <td>0.000713</td>\n",
       "      <td>0.846125</td>\n",
       "      <td>0.000825</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.826063</td>\n",
       "      <td>0.000711</td>\n",
       "      <td>0.821615</td>\n",
       "      <td>0.000891</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.804993</td>\n",
       "      <td>0.000755</td>\n",
       "      <td>0.800017</td>\n",
       "      <td>0.001011</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.786520</td>\n",
       "      <td>0.000772</td>\n",
       "      <td>0.781012</td>\n",
       "      <td>0.001053</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.770057</td>\n",
       "      <td>0.000844</td>\n",
       "      <td>0.764016</td>\n",
       "      <td>0.001085</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.755569</td>\n",
       "      <td>0.000829</td>\n",
       "      <td>0.748981</td>\n",
       "      <td>0.001213</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.742745</td>\n",
       "      <td>0.000842</td>\n",
       "      <td>0.735580</td>\n",
       "      <td>0.001258</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.731359</td>\n",
       "      <td>0.000839</td>\n",
       "      <td>0.723648</td>\n",
       "      <td>0.001374</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.721175</td>\n",
       "      <td>0.000751</td>\n",
       "      <td>0.712848</td>\n",
       "      <td>0.001459</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.711928</td>\n",
       "      <td>0.000663</td>\n",
       "      <td>0.703092</td>\n",
       "      <td>0.001552</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.703844</td>\n",
       "      <td>0.000747</td>\n",
       "      <td>0.694363</td>\n",
       "      <td>0.001499</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.696552</td>\n",
       "      <td>0.000713</td>\n",
       "      <td>0.686581</td>\n",
       "      <td>0.001580</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.690000</td>\n",
       "      <td>0.000748</td>\n",
       "      <td>0.679398</td>\n",
       "      <td>0.001518</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.684249</td>\n",
       "      <td>0.000812</td>\n",
       "      <td>0.673071</td>\n",
       "      <td>0.001473</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.678897</td>\n",
       "      <td>0.000783</td>\n",
       "      <td>0.667106</td>\n",
       "      <td>0.001434</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.674060</td>\n",
       "      <td>0.000734</td>\n",
       "      <td>0.661728</td>\n",
       "      <td>0.001478</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.669659</td>\n",
       "      <td>0.000787</td>\n",
       "      <td>0.656795</td>\n",
       "      <td>0.001468</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.665737</td>\n",
       "      <td>0.000806</td>\n",
       "      <td>0.652210</td>\n",
       "      <td>0.001407</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.661993</td>\n",
       "      <td>0.000863</td>\n",
       "      <td>0.647932</td>\n",
       "      <td>0.001415</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.658576</td>\n",
       "      <td>0.000971</td>\n",
       "      <td>0.643955</td>\n",
       "      <td>0.001292</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.655558</td>\n",
       "      <td>0.001017</td>\n",
       "      <td>0.640371</td>\n",
       "      <td>0.001224</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.652632</td>\n",
       "      <td>0.001041</td>\n",
       "      <td>0.636893</td>\n",
       "      <td>0.001318</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.649970</td>\n",
       "      <td>0.001148</td>\n",
       "      <td>0.633653</td>\n",
       "      <td>0.001229</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.647414</td>\n",
       "      <td>0.001106</td>\n",
       "      <td>0.630523</td>\n",
       "      <td>0.001262</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.645133</td>\n",
       "      <td>0.001140</td>\n",
       "      <td>0.627714</td>\n",
       "      <td>0.001224</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>329</th>\n",
       "      <td>0.585787</td>\n",
       "      <td>0.002048</td>\n",
       "      <td>0.465708</td>\n",
       "      <td>0.001063</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>330</th>\n",
       "      <td>0.585789</td>\n",
       "      <td>0.002076</td>\n",
       "      <td>0.465346</td>\n",
       "      <td>0.001046</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>331</th>\n",
       "      <td>0.585771</td>\n",
       "      <td>0.002082</td>\n",
       "      <td>0.465074</td>\n",
       "      <td>0.001053</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>332</th>\n",
       "      <td>0.585750</td>\n",
       "      <td>0.002098</td>\n",
       "      <td>0.464775</td>\n",
       "      <td>0.001055</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>333</th>\n",
       "      <td>0.585722</td>\n",
       "      <td>0.002082</td>\n",
       "      <td>0.464524</td>\n",
       "      <td>0.000979</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>334</th>\n",
       "      <td>0.585712</td>\n",
       "      <td>0.002083</td>\n",
       "      <td>0.464291</td>\n",
       "      <td>0.000923</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>335</th>\n",
       "      <td>0.585669</td>\n",
       "      <td>0.002103</td>\n",
       "      <td>0.464000</td>\n",
       "      <td>0.000864</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>336</th>\n",
       "      <td>0.585641</td>\n",
       "      <td>0.002102</td>\n",
       "      <td>0.463725</td>\n",
       "      <td>0.000942</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>337</th>\n",
       "      <td>0.585648</td>\n",
       "      <td>0.002097</td>\n",
       "      <td>0.463478</td>\n",
       "      <td>0.000928</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>338</th>\n",
       "      <td>0.585617</td>\n",
       "      <td>0.002098</td>\n",
       "      <td>0.463127</td>\n",
       "      <td>0.000920</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>339</th>\n",
       "      <td>0.585623</td>\n",
       "      <td>0.002093</td>\n",
       "      <td>0.462917</td>\n",
       "      <td>0.000891</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>340</th>\n",
       "      <td>0.585652</td>\n",
       "      <td>0.002083</td>\n",
       "      <td>0.462679</td>\n",
       "      <td>0.000760</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>341</th>\n",
       "      <td>0.585630</td>\n",
       "      <td>0.002089</td>\n",
       "      <td>0.462272</td>\n",
       "      <td>0.000693</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>342</th>\n",
       "      <td>0.585643</td>\n",
       "      <td>0.002077</td>\n",
       "      <td>0.461936</td>\n",
       "      <td>0.000695</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>343</th>\n",
       "      <td>0.585651</td>\n",
       "      <td>0.002086</td>\n",
       "      <td>0.461704</td>\n",
       "      <td>0.000654</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>344</th>\n",
       "      <td>0.585672</td>\n",
       "      <td>0.002058</td>\n",
       "      <td>0.461324</td>\n",
       "      <td>0.000613</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>345</th>\n",
       "      <td>0.585693</td>\n",
       "      <td>0.002045</td>\n",
       "      <td>0.460983</td>\n",
       "      <td>0.000611</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>346</th>\n",
       "      <td>0.585699</td>\n",
       "      <td>0.002027</td>\n",
       "      <td>0.460722</td>\n",
       "      <td>0.000711</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>347</th>\n",
       "      <td>0.585658</td>\n",
       "      <td>0.002057</td>\n",
       "      <td>0.460431</td>\n",
       "      <td>0.000750</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>348</th>\n",
       "      <td>0.585686</td>\n",
       "      <td>0.002064</td>\n",
       "      <td>0.460175</td>\n",
       "      <td>0.000763</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>349</th>\n",
       "      <td>0.585663</td>\n",
       "      <td>0.002099</td>\n",
       "      <td>0.459873</td>\n",
       "      <td>0.000809</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>350</th>\n",
       "      <td>0.585650</td>\n",
       "      <td>0.002121</td>\n",
       "      <td>0.459558</td>\n",
       "      <td>0.000863</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>351</th>\n",
       "      <td>0.585639</td>\n",
       "      <td>0.002073</td>\n",
       "      <td>0.459190</td>\n",
       "      <td>0.000896</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>352</th>\n",
       "      <td>0.585624</td>\n",
       "      <td>0.002071</td>\n",
       "      <td>0.458948</td>\n",
       "      <td>0.000896</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>353</th>\n",
       "      <td>0.585584</td>\n",
       "      <td>0.002107</td>\n",
       "      <td>0.458639</td>\n",
       "      <td>0.000901</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>354</th>\n",
       "      <td>0.585605</td>\n",
       "      <td>0.002117</td>\n",
       "      <td>0.458401</td>\n",
       "      <td>0.000966</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>355</th>\n",
       "      <td>0.585614</td>\n",
       "      <td>0.002144</td>\n",
       "      <td>0.458165</td>\n",
       "      <td>0.000915</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>356</th>\n",
       "      <td>0.585595</td>\n",
       "      <td>0.002170</td>\n",
       "      <td>0.457867</td>\n",
       "      <td>0.000829</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>357</th>\n",
       "      <td>0.585597</td>\n",
       "      <td>0.002164</td>\n",
       "      <td>0.457611</td>\n",
       "      <td>0.000843</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>358</th>\n",
       "      <td>0.585557</td>\n",
       "      <td>0.002149</td>\n",
       "      <td>0.457399</td>\n",
       "      <td>0.000799</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>359 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.038764           0.000194             1.038024   \n",
       "1              0.988537           0.000393             0.987108   \n",
       "2              0.945870           0.000449             0.943750   \n",
       "3              0.909230           0.000595             0.906493   \n",
       "4              0.877649           0.000700             0.874315   \n",
       "5              0.850008           0.000713             0.846125   \n",
       "6              0.826063           0.000711             0.821615   \n",
       "7              0.804993           0.000755             0.800017   \n",
       "8              0.786520           0.000772             0.781012   \n",
       "9              0.770057           0.000844             0.764016   \n",
       "10             0.755569           0.000829             0.748981   \n",
       "11             0.742745           0.000842             0.735580   \n",
       "12             0.731359           0.000839             0.723648   \n",
       "13             0.721175           0.000751             0.712848   \n",
       "14             0.711928           0.000663             0.703092   \n",
       "15             0.703844           0.000747             0.694363   \n",
       "16             0.696552           0.000713             0.686581   \n",
       "17             0.690000           0.000748             0.679398   \n",
       "18             0.684249           0.000812             0.673071   \n",
       "19             0.678897           0.000783             0.667106   \n",
       "20             0.674060           0.000734             0.661728   \n",
       "21             0.669659           0.000787             0.656795   \n",
       "22             0.665737           0.000806             0.652210   \n",
       "23             0.661993           0.000863             0.647932   \n",
       "24             0.658576           0.000971             0.643955   \n",
       "25             0.655558           0.001017             0.640371   \n",
       "26             0.652632           0.001041             0.636893   \n",
       "27             0.649970           0.001148             0.633653   \n",
       "28             0.647414           0.001106             0.630523   \n",
       "29             0.645133           0.001140             0.627714   \n",
       "..                  ...                ...                  ...   \n",
       "329            0.585787           0.002048             0.465708   \n",
       "330            0.585789           0.002076             0.465346   \n",
       "331            0.585771           0.002082             0.465074   \n",
       "332            0.585750           0.002098             0.464775   \n",
       "333            0.585722           0.002082             0.464524   \n",
       "334            0.585712           0.002083             0.464291   \n",
       "335            0.585669           0.002103             0.464000   \n",
       "336            0.585641           0.002102             0.463725   \n",
       "337            0.585648           0.002097             0.463478   \n",
       "338            0.585617           0.002098             0.463127   \n",
       "339            0.585623           0.002093             0.462917   \n",
       "340            0.585652           0.002083             0.462679   \n",
       "341            0.585630           0.002089             0.462272   \n",
       "342            0.585643           0.002077             0.461936   \n",
       "343            0.585651           0.002086             0.461704   \n",
       "344            0.585672           0.002058             0.461324   \n",
       "345            0.585693           0.002045             0.460983   \n",
       "346            0.585699           0.002027             0.460722   \n",
       "347            0.585658           0.002057             0.460431   \n",
       "348            0.585686           0.002064             0.460175   \n",
       "349            0.585663           0.002099             0.459873   \n",
       "350            0.585650           0.002121             0.459558   \n",
       "351            0.585639           0.002073             0.459190   \n",
       "352            0.585624           0.002071             0.458948   \n",
       "353            0.585584           0.002107             0.458639   \n",
       "354            0.585605           0.002117             0.458401   \n",
       "355            0.585614           0.002144             0.458165   \n",
       "356            0.585595           0.002170             0.457867   \n",
       "357            0.585597           0.002164             0.457611   \n",
       "358            0.585557           0.002149             0.457399   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000175  \n",
       "1              0.000245  \n",
       "2              0.000441  \n",
       "3              0.000520  \n",
       "4              0.000632  \n",
       "5              0.000825  \n",
       "6              0.000891  \n",
       "7              0.001011  \n",
       "8              0.001053  \n",
       "9              0.001085  \n",
       "10             0.001213  \n",
       "11             0.001258  \n",
       "12             0.001374  \n",
       "13             0.001459  \n",
       "14             0.001552  \n",
       "15             0.001499  \n",
       "16             0.001580  \n",
       "17             0.001518  \n",
       "18             0.001473  \n",
       "19             0.001434  \n",
       "20             0.001478  \n",
       "21             0.001468  \n",
       "22             0.001407  \n",
       "23             0.001415  \n",
       "24             0.001292  \n",
       "25             0.001224  \n",
       "26             0.001318  \n",
       "27             0.001229  \n",
       "28             0.001262  \n",
       "29             0.001224  \n",
       "..                  ...  \n",
       "329            0.001063  \n",
       "330            0.001046  \n",
       "331            0.001053  \n",
       "332            0.001055  \n",
       "333            0.000979  \n",
       "334            0.000923  \n",
       "335            0.000864  \n",
       "336            0.000942  \n",
       "337            0.000928  \n",
       "338            0.000920  \n",
       "339            0.000891  \n",
       "340            0.000760  \n",
       "341            0.000693  \n",
       "342            0.000695  \n",
       "343            0.000654  \n",
       "344            0.000613  \n",
       "345            0.000611  \n",
       "346            0.000711  \n",
       "347            0.000750  \n",
       "348            0.000763  \n",
       "349            0.000809  \n",
       "350            0.000863  \n",
       "351            0.000896  \n",
       "352            0.000896  \n",
       "353            0.000901  \n",
       "354            0.000966  \n",
       "355            0.000915  \n",
       "356            0.000829  \n",
       "357            0.000843  \n",
       "358            0.000799  \n",
       "\n",
       "[359 rows x 4 columns]"
      ]
     },
     "execution_count": 12,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Tune the tree-structure hyper-parameters (max_depth, min_child_weight)\n",
    "# with xgboost's native cross-validation. SearchUseXgboostNative is\n",
    "# presumably defined in an earlier cell — it appears to return a list of\n",
    "# (cv_result DataFrame, params dict) pairs; confirm against its definition.\n",
    "tmp = time.time()\n",
    "max_depth = range(3,10,2)  # candidate depths: 3, 5, 7, 9\n",
    "min_child_weight = range(1,6,2)  # candidate child weights: 1, 3, 5\n",
    "param_search_depth_weight = {'max_depth':list(max_depth), 'min_child_weight':list(min_child_weight)}\n",
    "\n",
    "searchResult_depth_weight = SearchUseXgboostNative(param_search_depth_weight, data_train_dmatrix)\n",
    "# Rank candidates by their best (minimum) cross-validated test mlogloss,\n",
    "# so element 0 becomes the winning (cv_result, params) pair.\n",
    "searchResult_depth_weight.sort(key = lambda x: x[0]['test-mlogloss-mean'].min())\n",
    "print(searchResult_depth_weight[0][1])\n",
    "print(\"Training Time: %s seconds\" % (str(time.time() - tmp)))\n",
    "# Last expression: display the winning CV history as the cell output.\n",
    "searchResult_depth_weight[0][0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "3. 对正则参数进行调优\n",
    "------------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0, 'reg_lambda': 0.01} estimaters： 350 cv_resultmin test_score :  0.5863280000000001\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0, 'reg_lambda': 0.1} estimaters： 355 cv_resultmin test_score :  0.5865446666666666\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0, 'reg_lambda': 1.0} estimaters： 359 cv_resultmin test_score :  0.5855566666666667\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0, 'reg_lambda': 10.0} estimaters： 479 cv_resultmin test_score :  0.5845686666666666\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0, 'reg_lambda': 100.0} estimaters： 684 cv_resultmin test_score :  0.584533\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 0.01} estimaters： 361 cv_resultmin test_score :  0.5866233333333333\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 0.1} estimaters： 336 cv_resultmin test_score :  0.5863839999999999\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 1.0} estimaters： 352 cv_resultmin test_score :  0.585973\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 10.0} estimaters： 449 cv_resultmin test_score :  0.584654\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.1, 'reg_lambda': 0.01} estimaters： 340 cv_resultmin test_score :  0.5861299999999999\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.1, 'reg_lambda': 0.1} estimaters： 296 cv_resultmin test_score :  0.5864893333333333\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.1, 'reg_lambda': 1.0} estimaters： 347 cv_resultmin test_score :  0.5860666666666666\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.1, 'reg_lambda': 10.0} estimaters： 462 cv_resultmin test_score :  0.5845666666666667\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.1, 'reg_lambda': 100.0} estimaters： 713 cv_resultmin test_score :  0.584472\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 1.0, 'reg_lambda': 0.01} estimaters： 344 cv_resultmin test_score :  0.5856260000000001\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 1.0, 'reg_lambda': 0.1} estimaters： 305 cv_resultmin test_score :  0.5860893333333334\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 1.0, 'reg_lambda': 1.0} estimaters： 332 cv_resultmin test_score :  0.5853076666666667\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 1.0, 'reg_lambda': 10.0} estimaters： 441 cv_resultmin test_score :  0.5851586666666666\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 1.0, 'reg_lambda': 100.0} estimaters： 696 cv_resultmin test_score :  0.5844713333333333\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 10.0, 'reg_lambda': 0.01} estimaters： 399 cv_resultmin test_score :  0.585967\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 10.0, 'reg_lambda': 0.1} estimaters： 435 cv_resultmin test_score :  0.5857363333333333\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 10.0, 'reg_lambda': 1.0} estimaters： 388 cv_resultmin test_score :  0.585943\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 10.0, 'reg_lambda': 10.0} estimaters： 406 cv_resultmin test_score :  0.5862243333333333\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 10.0, 'reg_lambda': 100.0} estimaters： 648 cv_resultmin test_score :  0.5868236666666666\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 100.0, 'reg_lambda': 0.01} estimaters： 284 cv_resultmin test_score :  0.6174726666666667\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 100.0, 'reg_lambda': 0.1} estimaters： 284 cv_resultmin test_score :  0.6176056666666666\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 100.0, 'reg_lambda': 1.0} estimaters： 274 cv_resultmin test_score :  0.6177583333333333\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 100.0, 'reg_lambda': 10.0} estimaters： 289 cv_resultmin test_score :  0.6176163333333334\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 100.0, 'reg_lambda': 100.0} estimaters： 304 cv_resultmin test_score :  0.6176983333333333\n",
      "{'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0}\n",
      "Training Time: 1890.5579733848572 seconds\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.042925</td>\n",
       "      <td>0.000234</td>\n",
       "      <td>1.042385</td>\n",
       "      <td>0.000122</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.995595</td>\n",
       "      <td>0.000295</td>\n",
       "      <td>0.994601</td>\n",
       "      <td>0.000302</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.954940</td>\n",
       "      <td>0.000554</td>\n",
       "      <td>0.953415</td>\n",
       "      <td>0.000284</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.919797</td>\n",
       "      <td>0.000740</td>\n",
       "      <td>0.917735</td>\n",
       "      <td>0.000370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.889200</td>\n",
       "      <td>0.000940</td>\n",
       "      <td>0.886591</td>\n",
       "      <td>0.000457</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.862461</td>\n",
       "      <td>0.001007</td>\n",
       "      <td>0.859392</td>\n",
       "      <td>0.000540</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.838936</td>\n",
       "      <td>0.001091</td>\n",
       "      <td>0.835406</td>\n",
       "      <td>0.000676</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.818252</td>\n",
       "      <td>0.001186</td>\n",
       "      <td>0.814258</td>\n",
       "      <td>0.000797</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.799962</td>\n",
       "      <td>0.001276</td>\n",
       "      <td>0.795517</td>\n",
       "      <td>0.000865</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.783708</td>\n",
       "      <td>0.001287</td>\n",
       "      <td>0.778825</td>\n",
       "      <td>0.000921</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.769308</td>\n",
       "      <td>0.001258</td>\n",
       "      <td>0.764012</td>\n",
       "      <td>0.001027</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.756213</td>\n",
       "      <td>0.001349</td>\n",
       "      <td>0.750525</td>\n",
       "      <td>0.000941</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.744660</td>\n",
       "      <td>0.001365</td>\n",
       "      <td>0.738561</td>\n",
       "      <td>0.000984</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.734309</td>\n",
       "      <td>0.001315</td>\n",
       "      <td>0.727767</td>\n",
       "      <td>0.000997</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.725029</td>\n",
       "      <td>0.001256</td>\n",
       "      <td>0.718074</td>\n",
       "      <td>0.001170</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.716714</td>\n",
       "      <td>0.001316</td>\n",
       "      <td>0.709357</td>\n",
       "      <td>0.001124</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.709220</td>\n",
       "      <td>0.001464</td>\n",
       "      <td>0.701495</td>\n",
       "      <td>0.001077</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.702427</td>\n",
       "      <td>0.001454</td>\n",
       "      <td>0.694344</td>\n",
       "      <td>0.001123</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.696217</td>\n",
       "      <td>0.001382</td>\n",
       "      <td>0.687805</td>\n",
       "      <td>0.001196</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.690596</td>\n",
       "      <td>0.001368</td>\n",
       "      <td>0.681821</td>\n",
       "      <td>0.001235</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.685498</td>\n",
       "      <td>0.001306</td>\n",
       "      <td>0.676395</td>\n",
       "      <td>0.001311</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.680826</td>\n",
       "      <td>0.001398</td>\n",
       "      <td>0.671293</td>\n",
       "      <td>0.001311</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.676539</td>\n",
       "      <td>0.001420</td>\n",
       "      <td>0.666732</td>\n",
       "      <td>0.001330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.672664</td>\n",
       "      <td>0.001411</td>\n",
       "      <td>0.662544</td>\n",
       "      <td>0.001400</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.669183</td>\n",
       "      <td>0.001408</td>\n",
       "      <td>0.658687</td>\n",
       "      <td>0.001370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.665934</td>\n",
       "      <td>0.001387</td>\n",
       "      <td>0.655106</td>\n",
       "      <td>0.001293</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.662950</td>\n",
       "      <td>0.001368</td>\n",
       "      <td>0.651760</td>\n",
       "      <td>0.001408</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.660091</td>\n",
       "      <td>0.001513</td>\n",
       "      <td>0.648586</td>\n",
       "      <td>0.001339</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.657548</td>\n",
       "      <td>0.001507</td>\n",
       "      <td>0.645699</td>\n",
       "      <td>0.001301</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.655063</td>\n",
       "      <td>0.001415</td>\n",
       "      <td>0.642957</td>\n",
       "      <td>0.001353</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>713</th>\n",
       "      <td>0.584250</td>\n",
       "      <td>0.001509</td>\n",
       "      <td>0.474831</td>\n",
       "      <td>0.001424</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>714</th>\n",
       "      <td>0.584247</td>\n",
       "      <td>0.001521</td>\n",
       "      <td>0.474725</td>\n",
       "      <td>0.001436</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>715</th>\n",
       "      <td>0.584245</td>\n",
       "      <td>0.001526</td>\n",
       "      <td>0.474635</td>\n",
       "      <td>0.001452</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>716</th>\n",
       "      <td>0.584243</td>\n",
       "      <td>0.001527</td>\n",
       "      <td>0.474552</td>\n",
       "      <td>0.001480</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>717</th>\n",
       "      <td>0.584244</td>\n",
       "      <td>0.001523</td>\n",
       "      <td>0.474444</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>718</th>\n",
       "      <td>0.584246</td>\n",
       "      <td>0.001519</td>\n",
       "      <td>0.474392</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>719</th>\n",
       "      <td>0.584246</td>\n",
       "      <td>0.001507</td>\n",
       "      <td>0.474271</td>\n",
       "      <td>0.001519</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>720</th>\n",
       "      <td>0.584251</td>\n",
       "      <td>0.001501</td>\n",
       "      <td>0.474173</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>721</th>\n",
       "      <td>0.584236</td>\n",
       "      <td>0.001499</td>\n",
       "      <td>0.474102</td>\n",
       "      <td>0.001497</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>722</th>\n",
       "      <td>0.584233</td>\n",
       "      <td>0.001491</td>\n",
       "      <td>0.473991</td>\n",
       "      <td>0.001476</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>723</th>\n",
       "      <td>0.584224</td>\n",
       "      <td>0.001476</td>\n",
       "      <td>0.473874</td>\n",
       "      <td>0.001501</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>724</th>\n",
       "      <td>0.584221</td>\n",
       "      <td>0.001458</td>\n",
       "      <td>0.473785</td>\n",
       "      <td>0.001524</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>725</th>\n",
       "      <td>0.584236</td>\n",
       "      <td>0.001457</td>\n",
       "      <td>0.473679</td>\n",
       "      <td>0.001523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>726</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001448</td>\n",
       "      <td>0.473554</td>\n",
       "      <td>0.001523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>727</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001444</td>\n",
       "      <td>0.473440</td>\n",
       "      <td>0.001509</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>728</th>\n",
       "      <td>0.584222</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.473368</td>\n",
       "      <td>0.001531</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>729</th>\n",
       "      <td>0.584237</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.473288</td>\n",
       "      <td>0.001565</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>730</th>\n",
       "      <td>0.584238</td>\n",
       "      <td>0.001448</td>\n",
       "      <td>0.473219</td>\n",
       "      <td>0.001583</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>731</th>\n",
       "      <td>0.584240</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.473132</td>\n",
       "      <td>0.001606</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>732</th>\n",
       "      <td>0.584234</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.473043</td>\n",
       "      <td>0.001605</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>733</th>\n",
       "      <td>0.584237</td>\n",
       "      <td>0.001446</td>\n",
       "      <td>0.472958</td>\n",
       "      <td>0.001640</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>734</th>\n",
       "      <td>0.584233</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.472890</td>\n",
       "      <td>0.001645</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>735</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.472833</td>\n",
       "      <td>0.001648</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>736</th>\n",
       "      <td>0.584225</td>\n",
       "      <td>0.001444</td>\n",
       "      <td>0.472781</td>\n",
       "      <td>0.001639</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>737</th>\n",
       "      <td>0.584225</td>\n",
       "      <td>0.001442</td>\n",
       "      <td>0.472688</td>\n",
       "      <td>0.001592</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>738</th>\n",
       "      <td>0.584210</td>\n",
       "      <td>0.001438</td>\n",
       "      <td>0.472617</td>\n",
       "      <td>0.001544</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>739</th>\n",
       "      <td>0.584215</td>\n",
       "      <td>0.001434</td>\n",
       "      <td>0.472543</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>740</th>\n",
       "      <td>0.584209</td>\n",
       "      <td>0.001425</td>\n",
       "      <td>0.472451</td>\n",
       "      <td>0.001526</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>741</th>\n",
       "      <td>0.584207</td>\n",
       "      <td>0.001424</td>\n",
       "      <td>0.472388</td>\n",
       "      <td>0.001501</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>742</th>\n",
       "      <td>0.584196</td>\n",
       "      <td>0.001415</td>\n",
       "      <td>0.472310</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>743 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.042925           0.000234             1.042385   \n",
       "1              0.995595           0.000295             0.994601   \n",
       "2              0.954940           0.000554             0.953415   \n",
       "3              0.919797           0.000740             0.917735   \n",
       "4              0.889200           0.000940             0.886591   \n",
       "5              0.862461           0.001007             0.859392   \n",
       "6              0.838936           0.001091             0.835406   \n",
       "7              0.818252           0.001186             0.814258   \n",
       "8              0.799962           0.001276             0.795517   \n",
       "9              0.783708           0.001287             0.778825   \n",
       "10             0.769308           0.001258             0.764012   \n",
       "11             0.756213           0.001349             0.750525   \n",
       "12             0.744660           0.001365             0.738561   \n",
       "13             0.734309           0.001315             0.727767   \n",
       "14             0.725029           0.001256             0.718074   \n",
       "15             0.716714           0.001316             0.709357   \n",
       "16             0.709220           0.001464             0.701495   \n",
       "17             0.702427           0.001454             0.694344   \n",
       "18             0.696217           0.001382             0.687805   \n",
       "19             0.690596           0.001368             0.681821   \n",
       "20             0.685498           0.001306             0.676395   \n",
       "21             0.680826           0.001398             0.671293   \n",
       "22             0.676539           0.001420             0.666732   \n",
       "23             0.672664           0.001411             0.662544   \n",
       "24             0.669183           0.001408             0.658687   \n",
       "25             0.665934           0.001387             0.655106   \n",
       "26             0.662950           0.001368             0.651760   \n",
       "27             0.660091           0.001513             0.648586   \n",
       "28             0.657548           0.001507             0.645699   \n",
       "29             0.655063           0.001415             0.642957   \n",
       "..                  ...                ...                  ...   \n",
       "713            0.584250           0.001509             0.474831   \n",
       "714            0.584247           0.001521             0.474725   \n",
       "715            0.584245           0.001526             0.474635   \n",
       "716            0.584243           0.001527             0.474552   \n",
       "717            0.584244           0.001523             0.474444   \n",
       "718            0.584246           0.001519             0.474392   \n",
       "719            0.584246           0.001507             0.474271   \n",
       "720            0.584251           0.001501             0.474173   \n",
       "721            0.584236           0.001499             0.474102   \n",
       "722            0.584233           0.001491             0.473991   \n",
       "723            0.584224           0.001476             0.473874   \n",
       "724            0.584221           0.001458             0.473785   \n",
       "725            0.584236           0.001457             0.473679   \n",
       "726            0.584232           0.001448             0.473554   \n",
       "727            0.584232           0.001444             0.473440   \n",
       "728            0.584222           0.001445             0.473368   \n",
       "729            0.584237           0.001449             0.473288   \n",
       "730            0.584238           0.001448             0.473219   \n",
       "731            0.584240           0.001449             0.473132   \n",
       "732            0.584234           0.001445             0.473043   \n",
       "733            0.584237           0.001446             0.472958   \n",
       "734            0.584233           0.001449             0.472890   \n",
       "735            0.584232           0.001445             0.472833   \n",
       "736            0.584225           0.001444             0.472781   \n",
       "737            0.584225           0.001442             0.472688   \n",
       "738            0.584210           0.001438             0.472617   \n",
       "739            0.584215           0.001434             0.472543   \n",
       "740            0.584209           0.001425             0.472451   \n",
       "741            0.584207           0.001424             0.472388   \n",
       "742            0.584196           0.001415             0.472310   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000122  \n",
       "1              0.000302  \n",
       "2              0.000284  \n",
       "3              0.000370  \n",
       "4              0.000457  \n",
       "5              0.000540  \n",
       "6              0.000676  \n",
       "7              0.000797  \n",
       "8              0.000865  \n",
       "9              0.000921  \n",
       "10             0.001027  \n",
       "11             0.000941  \n",
       "12             0.000984  \n",
       "13             0.000997  \n",
       "14             0.001170  \n",
       "15             0.001124  \n",
       "16             0.001077  \n",
       "17             0.001123  \n",
       "18             0.001196  \n",
       "19             0.001235  \n",
       "20             0.001311  \n",
       "21             0.001311  \n",
       "22             0.001330  \n",
       "23             0.001400  \n",
       "24             0.001370  \n",
       "25             0.001293  \n",
       "26             0.001408  \n",
       "27             0.001339  \n",
       "28             0.001301  \n",
       "29             0.001353  \n",
       "..                  ...  \n",
       "713            0.001424  \n",
       "714            0.001436  \n",
       "715            0.001452  \n",
       "716            0.001480  \n",
       "717            0.001520  \n",
       "718            0.001500  \n",
       "719            0.001519  \n",
       "720            0.001500  \n",
       "721            0.001497  \n",
       "722            0.001476  \n",
       "723            0.001501  \n",
       "724            0.001524  \n",
       "725            0.001523  \n",
       "726            0.001523  \n",
       "727            0.001509  \n",
       "728            0.001531  \n",
       "729            0.001565  \n",
       "730            0.001583  \n",
       "731            0.001606  \n",
       "732            0.001605  \n",
       "733            0.001640  \n",
       "734            0.001645  \n",
       "735            0.001648  \n",
       "736            0.001639  \n",
       "737            0.001592  \n",
       "738            0.001544  \n",
       "739            0.001520  \n",
       "740            0.001526  \n",
       "741            0.001501  \n",
       "742            0.001500  \n",
       "\n",
       "[743 rows x 4 columns]"
      ]
     },
     "execution_count": 13,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tmp = time.time()  # wall-clock start; elapsed time is printed at the end\n",
    "# Candidate L1 (reg_alpha) and L2 (reg_lambda) regularisation strengths:\n",
    "# five log-spaced values in [1e-2, 1e2]; alpha additionally tries 0 (no L1 penalty).\n",
    "reg_alpha = list(np.logspace(-2, 2, 5))\n",
    "reg_alpha.insert(0, 0)\n",
    "reg_lambda = list(np.logspace(-2, 2, 5))\n",
    "param_search_reg = {\n",
    "    'max_depth': [ searchResult_depth_weight[0][1]['max_depth'] ],  # best max_depth from the previous search step\n",
    "    'min_child_weight': [ searchResult_depth_weight[0][1]['min_child_weight'] ], # best min_child_weight from the previous search step\n",
    "    'reg_alpha': reg_alpha,\n",
    "    'reg_lambda': reg_lambda,\n",
    "}\n",
    "\n",
    "# Run the grid search with xgboost's native cv (helper defined earlier in the notebook).\n",
    "# Judging by the sort key and the prints below, each element appears to be a\n",
    "# (cv-result DataFrame, parameter dict) pair -- confirm against the helper's definition.\n",
    "searchResult_reg = SearchUseXgboostNative(param_search_reg, data_train_dmatrix)\n",
    "# Rank candidates by their best (lowest) cross-validated multiclass logloss.\n",
    "searchResult_reg.sort(key = lambda x: x[0]['test-mlogloss-mean'].min())\n",
    "print(searchResult_reg[0][1])\n",
    "print(\"Training Time: %s seconds\" % (str(time.time() - tmp)))\n",
    "searchResult_reg[0][0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "4. 重新调整弱学习器数目\n",
    "--------------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "best n_estimators  743\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.042925</td>\n",
       "      <td>0.000234</td>\n",
       "      <td>1.042385</td>\n",
       "      <td>0.000122</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.995595</td>\n",
       "      <td>0.000295</td>\n",
       "      <td>0.994601</td>\n",
       "      <td>0.000302</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.954940</td>\n",
       "      <td>0.000554</td>\n",
       "      <td>0.953415</td>\n",
       "      <td>0.000284</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.919797</td>\n",
       "      <td>0.000740</td>\n",
       "      <td>0.917735</td>\n",
       "      <td>0.000370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.889200</td>\n",
       "      <td>0.000940</td>\n",
       "      <td>0.886591</td>\n",
       "      <td>0.000457</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.862461</td>\n",
       "      <td>0.001007</td>\n",
       "      <td>0.859392</td>\n",
       "      <td>0.000540</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.838936</td>\n",
       "      <td>0.001091</td>\n",
       "      <td>0.835406</td>\n",
       "      <td>0.000676</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.818252</td>\n",
       "      <td>0.001186</td>\n",
       "      <td>0.814258</td>\n",
       "      <td>0.000797</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.799962</td>\n",
       "      <td>0.001276</td>\n",
       "      <td>0.795517</td>\n",
       "      <td>0.000865</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.783708</td>\n",
       "      <td>0.001287</td>\n",
       "      <td>0.778825</td>\n",
       "      <td>0.000921</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.769308</td>\n",
       "      <td>0.001258</td>\n",
       "      <td>0.764012</td>\n",
       "      <td>0.001027</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.756213</td>\n",
       "      <td>0.001349</td>\n",
       "      <td>0.750525</td>\n",
       "      <td>0.000941</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.744660</td>\n",
       "      <td>0.001365</td>\n",
       "      <td>0.738561</td>\n",
       "      <td>0.000984</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.734309</td>\n",
       "      <td>0.001315</td>\n",
       "      <td>0.727767</td>\n",
       "      <td>0.000997</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.725029</td>\n",
       "      <td>0.001256</td>\n",
       "      <td>0.718074</td>\n",
       "      <td>0.001170</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.716714</td>\n",
       "      <td>0.001316</td>\n",
       "      <td>0.709357</td>\n",
       "      <td>0.001124</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.709220</td>\n",
       "      <td>0.001464</td>\n",
       "      <td>0.701495</td>\n",
       "      <td>0.001077</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.702427</td>\n",
       "      <td>0.001454</td>\n",
       "      <td>0.694344</td>\n",
       "      <td>0.001123</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.696217</td>\n",
       "      <td>0.001382</td>\n",
       "      <td>0.687805</td>\n",
       "      <td>0.001196</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.690596</td>\n",
       "      <td>0.001368</td>\n",
       "      <td>0.681821</td>\n",
       "      <td>0.001235</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.685498</td>\n",
       "      <td>0.001306</td>\n",
       "      <td>0.676395</td>\n",
       "      <td>0.001311</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.680826</td>\n",
       "      <td>0.001398</td>\n",
       "      <td>0.671293</td>\n",
       "      <td>0.001311</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.676539</td>\n",
       "      <td>0.001420</td>\n",
       "      <td>0.666732</td>\n",
       "      <td>0.001330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.672664</td>\n",
       "      <td>0.001411</td>\n",
       "      <td>0.662544</td>\n",
       "      <td>0.001400</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.669183</td>\n",
       "      <td>0.001408</td>\n",
       "      <td>0.658687</td>\n",
       "      <td>0.001370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.665934</td>\n",
       "      <td>0.001387</td>\n",
       "      <td>0.655106</td>\n",
       "      <td>0.001293</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.662950</td>\n",
       "      <td>0.001368</td>\n",
       "      <td>0.651760</td>\n",
       "      <td>0.001408</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.660091</td>\n",
       "      <td>0.001513</td>\n",
       "      <td>0.648586</td>\n",
       "      <td>0.001339</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.657548</td>\n",
       "      <td>0.001507</td>\n",
       "      <td>0.645699</td>\n",
       "      <td>0.001301</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.655063</td>\n",
       "      <td>0.001415</td>\n",
       "      <td>0.642957</td>\n",
       "      <td>0.001353</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>713</th>\n",
       "      <td>0.584250</td>\n",
       "      <td>0.001509</td>\n",
       "      <td>0.474831</td>\n",
       "      <td>0.001424</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>714</th>\n",
       "      <td>0.584247</td>\n",
       "      <td>0.001521</td>\n",
       "      <td>0.474725</td>\n",
       "      <td>0.001436</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>715</th>\n",
       "      <td>0.584245</td>\n",
       "      <td>0.001526</td>\n",
       "      <td>0.474635</td>\n",
       "      <td>0.001452</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>716</th>\n",
       "      <td>0.584243</td>\n",
       "      <td>0.001527</td>\n",
       "      <td>0.474552</td>\n",
       "      <td>0.001480</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>717</th>\n",
       "      <td>0.584244</td>\n",
       "      <td>0.001523</td>\n",
       "      <td>0.474444</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>718</th>\n",
       "      <td>0.584246</td>\n",
       "      <td>0.001519</td>\n",
       "      <td>0.474392</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>719</th>\n",
       "      <td>0.584246</td>\n",
       "      <td>0.001507</td>\n",
       "      <td>0.474271</td>\n",
       "      <td>0.001519</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>720</th>\n",
       "      <td>0.584251</td>\n",
       "      <td>0.001501</td>\n",
       "      <td>0.474173</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>721</th>\n",
       "      <td>0.584236</td>\n",
       "      <td>0.001499</td>\n",
       "      <td>0.474102</td>\n",
       "      <td>0.001497</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>722</th>\n",
       "      <td>0.584233</td>\n",
       "      <td>0.001491</td>\n",
       "      <td>0.473991</td>\n",
       "      <td>0.001476</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>723</th>\n",
       "      <td>0.584224</td>\n",
       "      <td>0.001476</td>\n",
       "      <td>0.473874</td>\n",
       "      <td>0.001501</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>724</th>\n",
       "      <td>0.584221</td>\n",
       "      <td>0.001458</td>\n",
       "      <td>0.473785</td>\n",
       "      <td>0.001524</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>725</th>\n",
       "      <td>0.584236</td>\n",
       "      <td>0.001457</td>\n",
       "      <td>0.473679</td>\n",
       "      <td>0.001523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>726</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001448</td>\n",
       "      <td>0.473554</td>\n",
       "      <td>0.001523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>727</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001444</td>\n",
       "      <td>0.473440</td>\n",
       "      <td>0.001509</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>728</th>\n",
       "      <td>0.584222</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.473368</td>\n",
       "      <td>0.001531</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>729</th>\n",
       "      <td>0.584237</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.473288</td>\n",
       "      <td>0.001565</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>730</th>\n",
       "      <td>0.584238</td>\n",
       "      <td>0.001448</td>\n",
       "      <td>0.473219</td>\n",
       "      <td>0.001583</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>731</th>\n",
       "      <td>0.584240</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.473132</td>\n",
       "      <td>0.001606</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>732</th>\n",
       "      <td>0.584234</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.473043</td>\n",
       "      <td>0.001605</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>733</th>\n",
       "      <td>0.584237</td>\n",
       "      <td>0.001446</td>\n",
       "      <td>0.472958</td>\n",
       "      <td>0.001640</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>734</th>\n",
       "      <td>0.584233</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.472890</td>\n",
       "      <td>0.001645</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>735</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.472833</td>\n",
       "      <td>0.001648</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>736</th>\n",
       "      <td>0.584225</td>\n",
       "      <td>0.001444</td>\n",
       "      <td>0.472781</td>\n",
       "      <td>0.001639</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>737</th>\n",
       "      <td>0.584225</td>\n",
       "      <td>0.001442</td>\n",
       "      <td>0.472688</td>\n",
       "      <td>0.001592</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>738</th>\n",
       "      <td>0.584210</td>\n",
       "      <td>0.001438</td>\n",
       "      <td>0.472617</td>\n",
       "      <td>0.001544</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>739</th>\n",
       "      <td>0.584215</td>\n",
       "      <td>0.001434</td>\n",
       "      <td>0.472543</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>740</th>\n",
       "      <td>0.584209</td>\n",
       "      <td>0.001425</td>\n",
       "      <td>0.472451</td>\n",
       "      <td>0.001526</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>741</th>\n",
       "      <td>0.584207</td>\n",
       "      <td>0.001424</td>\n",
       "      <td>0.472388</td>\n",
       "      <td>0.001501</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>742</th>\n",
       "      <td>0.584196</td>\n",
       "      <td>0.001415</td>\n",
       "      <td>0.472310</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>743 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.042925           0.000234             1.042385   \n",
       "1              0.995595           0.000295             0.994601   \n",
       "2              0.954940           0.000554             0.953415   \n",
       "3              0.919797           0.000740             0.917735   \n",
       "4              0.889200           0.000940             0.886591   \n",
       "5              0.862461           0.001007             0.859392   \n",
       "6              0.838936           0.001091             0.835406   \n",
       "7              0.818252           0.001186             0.814258   \n",
       "8              0.799962           0.001276             0.795517   \n",
       "9              0.783708           0.001287             0.778825   \n",
       "10             0.769308           0.001258             0.764012   \n",
       "11             0.756213           0.001349             0.750525   \n",
       "12             0.744660           0.001365             0.738561   \n",
       "13             0.734309           0.001315             0.727767   \n",
       "14             0.725029           0.001256             0.718074   \n",
       "15             0.716714           0.001316             0.709357   \n",
       "16             0.709220           0.001464             0.701495   \n",
       "17             0.702427           0.001454             0.694344   \n",
       "18             0.696217           0.001382             0.687805   \n",
       "19             0.690596           0.001368             0.681821   \n",
       "20             0.685498           0.001306             0.676395   \n",
       "21             0.680826           0.001398             0.671293   \n",
       "22             0.676539           0.001420             0.666732   \n",
       "23             0.672664           0.001411             0.662544   \n",
       "24             0.669183           0.001408             0.658687   \n",
       "25             0.665934           0.001387             0.655106   \n",
       "26             0.662950           0.001368             0.651760   \n",
       "27             0.660091           0.001513             0.648586   \n",
       "28             0.657548           0.001507             0.645699   \n",
       "29             0.655063           0.001415             0.642957   \n",
       "..                  ...                ...                  ...   \n",
       "713            0.584250           0.001509             0.474831   \n",
       "714            0.584247           0.001521             0.474725   \n",
       "715            0.584245           0.001526             0.474635   \n",
       "716            0.584243           0.001527             0.474552   \n",
       "717            0.584244           0.001523             0.474444   \n",
       "718            0.584246           0.001519             0.474392   \n",
       "719            0.584246           0.001507             0.474271   \n",
       "720            0.584251           0.001501             0.474173   \n",
       "721            0.584236           0.001499             0.474102   \n",
       "722            0.584233           0.001491             0.473991   \n",
       "723            0.584224           0.001476             0.473874   \n",
       "724            0.584221           0.001458             0.473785   \n",
       "725            0.584236           0.001457             0.473679   \n",
       "726            0.584232           0.001448             0.473554   \n",
       "727            0.584232           0.001444             0.473440   \n",
       "728            0.584222           0.001445             0.473368   \n",
       "729            0.584237           0.001449             0.473288   \n",
       "730            0.584238           0.001448             0.473219   \n",
       "731            0.584240           0.001449             0.473132   \n",
       "732            0.584234           0.001445             0.473043   \n",
       "733            0.584237           0.001446             0.472958   \n",
       "734            0.584233           0.001449             0.472890   \n",
       "735            0.584232           0.001445             0.472833   \n",
       "736            0.584225           0.001444             0.472781   \n",
       "737            0.584225           0.001442             0.472688   \n",
       "738            0.584210           0.001438             0.472617   \n",
       "739            0.584215           0.001434             0.472543   \n",
       "740            0.584209           0.001425             0.472451   \n",
       "741            0.584207           0.001424             0.472388   \n",
       "742            0.584196           0.001415             0.472310   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000122  \n",
       "1              0.000302  \n",
       "2              0.000284  \n",
       "3              0.000370  \n",
       "4              0.000457  \n",
       "5              0.000540  \n",
       "6              0.000676  \n",
       "7              0.000797  \n",
       "8              0.000865  \n",
       "9              0.000921  \n",
       "10             0.001027  \n",
       "11             0.000941  \n",
       "12             0.000984  \n",
       "13             0.000997  \n",
       "14             0.001170  \n",
       "15             0.001124  \n",
       "16             0.001077  \n",
       "17             0.001123  \n",
       "18             0.001196  \n",
       "19             0.001235  \n",
       "20             0.001311  \n",
       "21             0.001311  \n",
       "22             0.001330  \n",
       "23             0.001400  \n",
       "24             0.001370  \n",
       "25             0.001293  \n",
       "26             0.001408  \n",
       "27             0.001339  \n",
       "28             0.001301  \n",
       "29             0.001353  \n",
       "..                  ...  \n",
       "713            0.001424  \n",
       "714            0.001436  \n",
       "715            0.001452  \n",
       "716            0.001480  \n",
       "717            0.001520  \n",
       "718            0.001500  \n",
       "719            0.001519  \n",
       "720            0.001500  \n",
       "721            0.001497  \n",
       "722            0.001476  \n",
       "723            0.001501  \n",
       "724            0.001524  \n",
       "725            0.001523  \n",
       "726            0.001523  \n",
       "727            0.001509  \n",
       "728            0.001531  \n",
       "729            0.001565  \n",
       "730            0.001583  \n",
       "731            0.001606  \n",
       "732            0.001605  \n",
       "733            0.001640  \n",
       "734            0.001645  \n",
       "735            0.001648  \n",
       "736            0.001639  \n",
       "737            0.001592  \n",
       "738            0.001544  \n",
       "739            0.001520  \n",
       "740            0.001526  \n",
       "741            0.001501  \n",
       "742            0.001500  \n",
       "\n",
       "[743 rows x 4 columns]"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Merge the best regularisation parameters (winner of the previous search)\n",
    "# into the notebook's default xgboost parameter set.\n",
    "tmp_parameter = param_default.copy()\n",
    "tmp_parameter.update(searchResult_reg[0][1])\n",
    "\n",
    "# 5-fold stratified CV with a fixed seed for reproducibility.\n",
    "# NOTE(review): xgb.cv is given the StratifiedKFold object via `folds`; stratification\n",
    "# requires labels, which xgb.cv takes from the DMatrix -- confirm this works with the\n",
    "# installed xgboost version.\n",
    "kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=3)\n",
    "# Re-estimate the number of boosting rounds under the tuned parameters: train up to\n",
    "# 2000 rounds, stopping once test mlogloss has not improved for 100 rounds.\n",
    "cv_result = xgb.cv(tmp_parameter, data_train_dmatrix, folds =kfold, num_boost_round = 2000, early_stopping_rounds = 100, metrics='mlogloss')\n",
    "# With early stopping, the returned frame keeps one row per retained round,\n",
    "# so its row count is the selected n_estimators (743 in the captured run).\n",
    "n_estimators = cv_result.shape[0]\n",
    "print('best n_estimators ', n_estimators)\n",
    "cv_result"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 48,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.1}   min test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.2}   min test_score :  0.5850523333333333\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.30000000000000004}   min test_score :  0.5859163333333334\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.4}   min test_score :  0.5859386666666667\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.5}   min test_score :  0.5875706666666667\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.6}   min test_score :  0.5888203333333334\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.7000000000000001}   min test_score :  0.5895493333333334\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.8}   min test_score :  0.5926633333333333\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 0.9}   min test_score :  0.594459\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'eta': 1.0}   min test_score :  0.59463\n",
      "{'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0}\n",
      "Training Time: 350.03132128715515 seconds\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.042925</td>\n",
       "      <td>0.000234</td>\n",
       "      <td>1.042385</td>\n",
       "      <td>0.000122</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.995595</td>\n",
       "      <td>0.000295</td>\n",
       "      <td>0.994601</td>\n",
       "      <td>0.000302</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.954940</td>\n",
       "      <td>0.000554</td>\n",
       "      <td>0.953415</td>\n",
       "      <td>0.000284</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.919797</td>\n",
       "      <td>0.000740</td>\n",
       "      <td>0.917735</td>\n",
       "      <td>0.000370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.889200</td>\n",
       "      <td>0.000940</td>\n",
       "      <td>0.886591</td>\n",
       "      <td>0.000457</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.862461</td>\n",
       "      <td>0.001007</td>\n",
       "      <td>0.859392</td>\n",
       "      <td>0.000540</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.838936</td>\n",
       "      <td>0.001091</td>\n",
       "      <td>0.835406</td>\n",
       "      <td>0.000676</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.818252</td>\n",
       "      <td>0.001186</td>\n",
       "      <td>0.814258</td>\n",
       "      <td>0.000797</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.799962</td>\n",
       "      <td>0.001276</td>\n",
       "      <td>0.795517</td>\n",
       "      <td>0.000865</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.783708</td>\n",
       "      <td>0.001287</td>\n",
       "      <td>0.778825</td>\n",
       "      <td>0.000921</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.769308</td>\n",
       "      <td>0.001258</td>\n",
       "      <td>0.764012</td>\n",
       "      <td>0.001027</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.756213</td>\n",
       "      <td>0.001349</td>\n",
       "      <td>0.750525</td>\n",
       "      <td>0.000941</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.744660</td>\n",
       "      <td>0.001365</td>\n",
       "      <td>0.738561</td>\n",
       "      <td>0.000984</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.734309</td>\n",
       "      <td>0.001315</td>\n",
       "      <td>0.727767</td>\n",
       "      <td>0.000997</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.725029</td>\n",
       "      <td>0.001256</td>\n",
       "      <td>0.718074</td>\n",
       "      <td>0.001170</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.716714</td>\n",
       "      <td>0.001316</td>\n",
       "      <td>0.709357</td>\n",
       "      <td>0.001124</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.709220</td>\n",
       "      <td>0.001464</td>\n",
       "      <td>0.701495</td>\n",
       "      <td>0.001077</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.702427</td>\n",
       "      <td>0.001454</td>\n",
       "      <td>0.694344</td>\n",
       "      <td>0.001123</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.696217</td>\n",
       "      <td>0.001382</td>\n",
       "      <td>0.687805</td>\n",
       "      <td>0.001196</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.690596</td>\n",
       "      <td>0.001368</td>\n",
       "      <td>0.681821</td>\n",
       "      <td>0.001235</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.685498</td>\n",
       "      <td>0.001306</td>\n",
       "      <td>0.676395</td>\n",
       "      <td>0.001311</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.680826</td>\n",
       "      <td>0.001398</td>\n",
       "      <td>0.671293</td>\n",
       "      <td>0.001311</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.676539</td>\n",
       "      <td>0.001420</td>\n",
       "      <td>0.666732</td>\n",
       "      <td>0.001330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.672664</td>\n",
       "      <td>0.001411</td>\n",
       "      <td>0.662544</td>\n",
       "      <td>0.001400</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.669183</td>\n",
       "      <td>0.001408</td>\n",
       "      <td>0.658687</td>\n",
       "      <td>0.001370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.665934</td>\n",
       "      <td>0.001387</td>\n",
       "      <td>0.655106</td>\n",
       "      <td>0.001293</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.662950</td>\n",
       "      <td>0.001368</td>\n",
       "      <td>0.651760</td>\n",
       "      <td>0.001408</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.660091</td>\n",
       "      <td>0.001513</td>\n",
       "      <td>0.648586</td>\n",
       "      <td>0.001339</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.657548</td>\n",
       "      <td>0.001507</td>\n",
       "      <td>0.645699</td>\n",
       "      <td>0.001301</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.655063</td>\n",
       "      <td>0.001415</td>\n",
       "      <td>0.642957</td>\n",
       "      <td>0.001353</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>713</th>\n",
       "      <td>0.584250</td>\n",
       "      <td>0.001509</td>\n",
       "      <td>0.474831</td>\n",
       "      <td>0.001424</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>714</th>\n",
       "      <td>0.584247</td>\n",
       "      <td>0.001521</td>\n",
       "      <td>0.474725</td>\n",
       "      <td>0.001436</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>715</th>\n",
       "      <td>0.584245</td>\n",
       "      <td>0.001526</td>\n",
       "      <td>0.474635</td>\n",
       "      <td>0.001452</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>716</th>\n",
       "      <td>0.584243</td>\n",
       "      <td>0.001527</td>\n",
       "      <td>0.474552</td>\n",
       "      <td>0.001480</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>717</th>\n",
       "      <td>0.584244</td>\n",
       "      <td>0.001523</td>\n",
       "      <td>0.474444</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>718</th>\n",
       "      <td>0.584246</td>\n",
       "      <td>0.001519</td>\n",
       "      <td>0.474392</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>719</th>\n",
       "      <td>0.584246</td>\n",
       "      <td>0.001507</td>\n",
       "      <td>0.474271</td>\n",
       "      <td>0.001519</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>720</th>\n",
       "      <td>0.584251</td>\n",
       "      <td>0.001501</td>\n",
       "      <td>0.474173</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>721</th>\n",
       "      <td>0.584236</td>\n",
       "      <td>0.001499</td>\n",
       "      <td>0.474102</td>\n",
       "      <td>0.001497</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>722</th>\n",
       "      <td>0.584233</td>\n",
       "      <td>0.001491</td>\n",
       "      <td>0.473991</td>\n",
       "      <td>0.001476</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>723</th>\n",
       "      <td>0.584224</td>\n",
       "      <td>0.001476</td>\n",
       "      <td>0.473874</td>\n",
       "      <td>0.001501</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>724</th>\n",
       "      <td>0.584221</td>\n",
       "      <td>0.001458</td>\n",
       "      <td>0.473785</td>\n",
       "      <td>0.001524</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>725</th>\n",
       "      <td>0.584236</td>\n",
       "      <td>0.001457</td>\n",
       "      <td>0.473679</td>\n",
       "      <td>0.001523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>726</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001448</td>\n",
       "      <td>0.473554</td>\n",
       "      <td>0.001523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>727</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001444</td>\n",
       "      <td>0.473440</td>\n",
       "      <td>0.001509</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>728</th>\n",
       "      <td>0.584222</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.473368</td>\n",
       "      <td>0.001531</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>729</th>\n",
       "      <td>0.584237</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.473288</td>\n",
       "      <td>0.001565</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>730</th>\n",
       "      <td>0.584238</td>\n",
       "      <td>0.001448</td>\n",
       "      <td>0.473219</td>\n",
       "      <td>0.001583</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>731</th>\n",
       "      <td>0.584240</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.473132</td>\n",
       "      <td>0.001606</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>732</th>\n",
       "      <td>0.584234</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.473043</td>\n",
       "      <td>0.001605</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>733</th>\n",
       "      <td>0.584237</td>\n",
       "      <td>0.001446</td>\n",
       "      <td>0.472958</td>\n",
       "      <td>0.001640</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>734</th>\n",
       "      <td>0.584233</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.472890</td>\n",
       "      <td>0.001645</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>735</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.472833</td>\n",
       "      <td>0.001648</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>736</th>\n",
       "      <td>0.584225</td>\n",
       "      <td>0.001444</td>\n",
       "      <td>0.472781</td>\n",
       "      <td>0.001639</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>737</th>\n",
       "      <td>0.584225</td>\n",
       "      <td>0.001442</td>\n",
       "      <td>0.472688</td>\n",
       "      <td>0.001592</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>738</th>\n",
       "      <td>0.584210</td>\n",
       "      <td>0.001438</td>\n",
       "      <td>0.472617</td>\n",
       "      <td>0.001544</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>739</th>\n",
       "      <td>0.584215</td>\n",
       "      <td>0.001434</td>\n",
       "      <td>0.472543</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>740</th>\n",
       "      <td>0.584209</td>\n",
       "      <td>0.001425</td>\n",
       "      <td>0.472451</td>\n",
       "      <td>0.001526</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>741</th>\n",
       "      <td>0.584207</td>\n",
       "      <td>0.001424</td>\n",
       "      <td>0.472388</td>\n",
       "      <td>0.001501</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>742</th>\n",
       "      <td>0.584196</td>\n",
       "      <td>0.001415</td>\n",
       "      <td>0.472310</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>743 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.042925           0.000234             1.042385   \n",
       "1              0.995595           0.000295             0.994601   \n",
       "2              0.954940           0.000554             0.953415   \n",
       "3              0.919797           0.000740             0.917735   \n",
       "4              0.889200           0.000940             0.886591   \n",
       "5              0.862461           0.001007             0.859392   \n",
       "6              0.838936           0.001091             0.835406   \n",
       "7              0.818252           0.001186             0.814258   \n",
       "8              0.799962           0.001276             0.795517   \n",
       "9              0.783708           0.001287             0.778825   \n",
       "10             0.769308           0.001258             0.764012   \n",
       "11             0.756213           0.001349             0.750525   \n",
       "12             0.744660           0.001365             0.738561   \n",
       "13             0.734309           0.001315             0.727767   \n",
       "14             0.725029           0.001256             0.718074   \n",
       "15             0.716714           0.001316             0.709357   \n",
       "16             0.709220           0.001464             0.701495   \n",
       "17             0.702427           0.001454             0.694344   \n",
       "18             0.696217           0.001382             0.687805   \n",
       "19             0.690596           0.001368             0.681821   \n",
       "20             0.685498           0.001306             0.676395   \n",
       "21             0.680826           0.001398             0.671293   \n",
       "22             0.676539           0.001420             0.666732   \n",
       "23             0.672664           0.001411             0.662544   \n",
       "24             0.669183           0.001408             0.658687   \n",
       "25             0.665934           0.001387             0.655106   \n",
       "26             0.662950           0.001368             0.651760   \n",
       "27             0.660091           0.001513             0.648586   \n",
       "28             0.657548           0.001507             0.645699   \n",
       "29             0.655063           0.001415             0.642957   \n",
       "..                  ...                ...                  ...   \n",
       "713            0.584250           0.001509             0.474831   \n",
       "714            0.584247           0.001521             0.474725   \n",
       "715            0.584245           0.001526             0.474635   \n",
       "716            0.584243           0.001527             0.474552   \n",
       "717            0.584244           0.001523             0.474444   \n",
       "718            0.584246           0.001519             0.474392   \n",
       "719            0.584246           0.001507             0.474271   \n",
       "720            0.584251           0.001501             0.474173   \n",
       "721            0.584236           0.001499             0.474102   \n",
       "722            0.584233           0.001491             0.473991   \n",
       "723            0.584224           0.001476             0.473874   \n",
       "724            0.584221           0.001458             0.473785   \n",
       "725            0.584236           0.001457             0.473679   \n",
       "726            0.584232           0.001448             0.473554   \n",
       "727            0.584232           0.001444             0.473440   \n",
       "728            0.584222           0.001445             0.473368   \n",
       "729            0.584237           0.001449             0.473288   \n",
       "730            0.584238           0.001448             0.473219   \n",
       "731            0.584240           0.001449             0.473132   \n",
       "732            0.584234           0.001445             0.473043   \n",
       "733            0.584237           0.001446             0.472958   \n",
       "734            0.584233           0.001449             0.472890   \n",
       "735            0.584232           0.001445             0.472833   \n",
       "736            0.584225           0.001444             0.472781   \n",
       "737            0.584225           0.001442             0.472688   \n",
       "738            0.584210           0.001438             0.472617   \n",
       "739            0.584215           0.001434             0.472543   \n",
       "740            0.584209           0.001425             0.472451   \n",
       "741            0.584207           0.001424             0.472388   \n",
       "742            0.584196           0.001415             0.472310   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000122  \n",
       "1              0.000302  \n",
       "2              0.000284  \n",
       "3              0.000370  \n",
       "4              0.000457  \n",
       "5              0.000540  \n",
       "6              0.000676  \n",
       "7              0.000797  \n",
       "8              0.000865  \n",
       "9              0.000921  \n",
       "10             0.001027  \n",
       "11             0.000941  \n",
       "12             0.000984  \n",
       "13             0.000997  \n",
       "14             0.001170  \n",
       "15             0.001124  \n",
       "16             0.001077  \n",
       "17             0.001123  \n",
       "18             0.001196  \n",
       "19             0.001235  \n",
       "20             0.001311  \n",
       "21             0.001311  \n",
       "22             0.001330  \n",
       "23             0.001400  \n",
       "24             0.001370  \n",
       "25             0.001293  \n",
       "26             0.001408  \n",
       "27             0.001339  \n",
       "28             0.001301  \n",
       "29             0.001353  \n",
       "..                  ...  \n",
       "713            0.001424  \n",
       "714            0.001436  \n",
       "715            0.001452  \n",
       "716            0.001480  \n",
       "717            0.001520  \n",
       "718            0.001500  \n",
       "719            0.001519  \n",
       "720            0.001500  \n",
       "721            0.001497  \n",
       "722            0.001476  \n",
       "723            0.001501  \n",
       "724            0.001524  \n",
       "725            0.001523  \n",
       "726            0.001523  \n",
       "727            0.001509  \n",
       "728            0.001531  \n",
       "729            0.001565  \n",
       "730            0.001583  \n",
       "731            0.001606  \n",
       "732            0.001605  \n",
       "733            0.001640  \n",
       "734            0.001645  \n",
       "735            0.001648  \n",
       "736            0.001639  \n",
       "737            0.001592  \n",
       "738            0.001544  \n",
       "739            0.001520  \n",
       "740            0.001526  \n",
       "741            0.001501  \n",
       "742            0.001500  \n",
       "\n",
       "[743 rows x 4 columns]"
      ]
     },
     "execution_count": 48,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Step 4 of the tuning pipeline: search the learning rate (eta) while\n",
     "# holding every other hyperparameter fixed at the best value found by\n",
     "# the earlier grid searches (depth/child-weight and regularization).\n",
     "tmp = time.time()  # wall-clock start, used for the training-time report below\n",
     "# Candidate learning rates: 0.1, 0.2, ..., 1.0 (10 evenly spaced values)\n",
     "eta = list(np.linspace(0.1, 1, 10))\n",
     "param_search_eta = {\n",
     "    'max_depth': [ searchResult_depth_weight[0][1]['max_depth'] ],  # use previously selected parameter\n",
     "    'min_child_weight': [ searchResult_depth_weight[0][1]['min_child_weight'] ], # use previously selected parameter\n",
     "    'reg_alpha': [ searchResult_reg[0][1]['reg_alpha'] ],  # use previously selected parameter\n",
     "    'reg_lambda': [ searchResult_reg[0][1]['reg_lambda'] ],  # use previously selected parameter\n",
     "    'eta': eta,\n",
     "}\n",
     "\n",
     "# Run the grid search via the notebook's helper (defined in an earlier cell;\n",
     "# presumably wraps xgb.cv per parameter combination -- each result appears to be\n",
     "# a (cv_result_DataFrame, param_dict) pair). Rank combinations by their best\n",
     "# (minimum) cross-validated test mlogloss, ascending, so index 0 is the winner.\n",
     "searchResult_eta = SearchUseXgboostNative(param_search_eta, data_train_dmatrix)\n",
     "searchResult_eta.sort(key = lambda x: x[0]['test-mlogloss-mean'].min())\n",
     "print(searchResult_eta[0][1])  # best parameter combination\n",
     "print(\"Training Time: %s seconds\" % (str(time.time() - tmp)))\n",
     "searchResult_eta[0][0]  # cv history (DataFrame) of the best combination"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "5. 行列重采样参数调整\n",
    "------------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.3} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.5333333333333333} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.7666666666666666} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 1.0} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.5333333333333333, 'colsample_bytree': 0.3} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.5333333333333333, 'colsample_bytree': 0.5333333333333333} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.5333333333333333, 'colsample_bytree': 0.7666666666666666} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.5333333333333333, 'colsample_bytree': 1.0} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.7666666666666666, 'colsample_bytree': 0.3} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.7666666666666666, 'colsample_bytree': 0.5333333333333333} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.7666666666666666, 'colsample_bytree': 0.7666666666666666} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.7666666666666666, 'colsample_bytree': 1.0} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 1.0, 'colsample_bytree': 0.3} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 1.0, 'colsample_bytree': 0.5333333333333333} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 1.0, 'colsample_bytree': 0.7666666666666666} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 1.0, 'colsample_bytree': 1.0} estimaters： 743 cv_resultmin test_score :  0.584196\n",
      "{'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.3}\n",
      "Training Time: 1653.5483856201172 seconds\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.042925</td>\n",
       "      <td>0.000234</td>\n",
       "      <td>1.042385</td>\n",
       "      <td>0.000122</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.995595</td>\n",
       "      <td>0.000295</td>\n",
       "      <td>0.994601</td>\n",
       "      <td>0.000302</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.954940</td>\n",
       "      <td>0.000554</td>\n",
       "      <td>0.953415</td>\n",
       "      <td>0.000284</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.919797</td>\n",
       "      <td>0.000740</td>\n",
       "      <td>0.917735</td>\n",
       "      <td>0.000370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.889200</td>\n",
       "      <td>0.000940</td>\n",
       "      <td>0.886591</td>\n",
       "      <td>0.000457</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.862461</td>\n",
       "      <td>0.001007</td>\n",
       "      <td>0.859392</td>\n",
       "      <td>0.000540</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.838936</td>\n",
       "      <td>0.001091</td>\n",
       "      <td>0.835406</td>\n",
       "      <td>0.000676</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.818252</td>\n",
       "      <td>0.001186</td>\n",
       "      <td>0.814258</td>\n",
       "      <td>0.000797</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.799962</td>\n",
       "      <td>0.001276</td>\n",
       "      <td>0.795517</td>\n",
       "      <td>0.000865</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.783708</td>\n",
       "      <td>0.001287</td>\n",
       "      <td>0.778825</td>\n",
       "      <td>0.000921</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.769308</td>\n",
       "      <td>0.001258</td>\n",
       "      <td>0.764012</td>\n",
       "      <td>0.001027</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.756213</td>\n",
       "      <td>0.001349</td>\n",
       "      <td>0.750525</td>\n",
       "      <td>0.000941</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.744660</td>\n",
       "      <td>0.001365</td>\n",
       "      <td>0.738561</td>\n",
       "      <td>0.000984</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.734309</td>\n",
       "      <td>0.001315</td>\n",
       "      <td>0.727767</td>\n",
       "      <td>0.000997</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.725029</td>\n",
       "      <td>0.001256</td>\n",
       "      <td>0.718074</td>\n",
       "      <td>0.001170</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.716714</td>\n",
       "      <td>0.001316</td>\n",
       "      <td>0.709357</td>\n",
       "      <td>0.001124</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.709220</td>\n",
       "      <td>0.001464</td>\n",
       "      <td>0.701495</td>\n",
       "      <td>0.001077</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.702427</td>\n",
       "      <td>0.001454</td>\n",
       "      <td>0.694344</td>\n",
       "      <td>0.001123</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.696217</td>\n",
       "      <td>0.001382</td>\n",
       "      <td>0.687805</td>\n",
       "      <td>0.001196</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.690596</td>\n",
       "      <td>0.001368</td>\n",
       "      <td>0.681821</td>\n",
       "      <td>0.001235</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.685498</td>\n",
       "      <td>0.001306</td>\n",
       "      <td>0.676395</td>\n",
       "      <td>0.001311</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.680826</td>\n",
       "      <td>0.001398</td>\n",
       "      <td>0.671293</td>\n",
       "      <td>0.001311</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.676539</td>\n",
       "      <td>0.001420</td>\n",
       "      <td>0.666732</td>\n",
       "      <td>0.001330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.672664</td>\n",
       "      <td>0.001411</td>\n",
       "      <td>0.662544</td>\n",
       "      <td>0.001400</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.669183</td>\n",
       "      <td>0.001408</td>\n",
       "      <td>0.658687</td>\n",
       "      <td>0.001370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.665934</td>\n",
       "      <td>0.001387</td>\n",
       "      <td>0.655106</td>\n",
       "      <td>0.001293</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.662950</td>\n",
       "      <td>0.001368</td>\n",
       "      <td>0.651760</td>\n",
       "      <td>0.001408</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.660091</td>\n",
       "      <td>0.001513</td>\n",
       "      <td>0.648586</td>\n",
       "      <td>0.001339</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.657548</td>\n",
       "      <td>0.001507</td>\n",
       "      <td>0.645699</td>\n",
       "      <td>0.001301</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.655063</td>\n",
       "      <td>0.001415</td>\n",
       "      <td>0.642957</td>\n",
       "      <td>0.001353</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>713</th>\n",
       "      <td>0.584250</td>\n",
       "      <td>0.001509</td>\n",
       "      <td>0.474831</td>\n",
       "      <td>0.001424</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>714</th>\n",
       "      <td>0.584247</td>\n",
       "      <td>0.001521</td>\n",
       "      <td>0.474725</td>\n",
       "      <td>0.001436</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>715</th>\n",
       "      <td>0.584245</td>\n",
       "      <td>0.001526</td>\n",
       "      <td>0.474635</td>\n",
       "      <td>0.001452</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>716</th>\n",
       "      <td>0.584243</td>\n",
       "      <td>0.001527</td>\n",
       "      <td>0.474552</td>\n",
       "      <td>0.001480</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>717</th>\n",
       "      <td>0.584244</td>\n",
       "      <td>0.001523</td>\n",
       "      <td>0.474444</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>718</th>\n",
       "      <td>0.584246</td>\n",
       "      <td>0.001519</td>\n",
       "      <td>0.474392</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>719</th>\n",
       "      <td>0.584246</td>\n",
       "      <td>0.001507</td>\n",
       "      <td>0.474271</td>\n",
       "      <td>0.001519</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>720</th>\n",
       "      <td>0.584251</td>\n",
       "      <td>0.001501</td>\n",
       "      <td>0.474173</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>721</th>\n",
       "      <td>0.584236</td>\n",
       "      <td>0.001499</td>\n",
       "      <td>0.474102</td>\n",
       "      <td>0.001497</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>722</th>\n",
       "      <td>0.584233</td>\n",
       "      <td>0.001491</td>\n",
       "      <td>0.473991</td>\n",
       "      <td>0.001476</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>723</th>\n",
       "      <td>0.584224</td>\n",
       "      <td>0.001476</td>\n",
       "      <td>0.473874</td>\n",
       "      <td>0.001501</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>724</th>\n",
       "      <td>0.584221</td>\n",
       "      <td>0.001458</td>\n",
       "      <td>0.473785</td>\n",
       "      <td>0.001524</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>725</th>\n",
       "      <td>0.584236</td>\n",
       "      <td>0.001457</td>\n",
       "      <td>0.473679</td>\n",
       "      <td>0.001523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>726</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001448</td>\n",
       "      <td>0.473554</td>\n",
       "      <td>0.001523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>727</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001444</td>\n",
       "      <td>0.473440</td>\n",
       "      <td>0.001509</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>728</th>\n",
       "      <td>0.584222</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.473368</td>\n",
       "      <td>0.001531</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>729</th>\n",
       "      <td>0.584237</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.473288</td>\n",
       "      <td>0.001565</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>730</th>\n",
       "      <td>0.584238</td>\n",
       "      <td>0.001448</td>\n",
       "      <td>0.473219</td>\n",
       "      <td>0.001583</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>731</th>\n",
       "      <td>0.584240</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.473132</td>\n",
       "      <td>0.001606</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>732</th>\n",
       "      <td>0.584234</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.473043</td>\n",
       "      <td>0.001605</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>733</th>\n",
       "      <td>0.584237</td>\n",
       "      <td>0.001446</td>\n",
       "      <td>0.472958</td>\n",
       "      <td>0.001640</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>734</th>\n",
       "      <td>0.584233</td>\n",
       "      <td>0.001449</td>\n",
       "      <td>0.472890</td>\n",
       "      <td>0.001645</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>735</th>\n",
       "      <td>0.584232</td>\n",
       "      <td>0.001445</td>\n",
       "      <td>0.472833</td>\n",
       "      <td>0.001648</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>736</th>\n",
       "      <td>0.584225</td>\n",
       "      <td>0.001444</td>\n",
       "      <td>0.472781</td>\n",
       "      <td>0.001639</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>737</th>\n",
       "      <td>0.584225</td>\n",
       "      <td>0.001442</td>\n",
       "      <td>0.472688</td>\n",
       "      <td>0.001592</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>738</th>\n",
       "      <td>0.584210</td>\n",
       "      <td>0.001438</td>\n",
       "      <td>0.472617</td>\n",
       "      <td>0.001544</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>739</th>\n",
       "      <td>0.584215</td>\n",
       "      <td>0.001434</td>\n",
       "      <td>0.472543</td>\n",
       "      <td>0.001520</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>740</th>\n",
       "      <td>0.584209</td>\n",
       "      <td>0.001425</td>\n",
       "      <td>0.472451</td>\n",
       "      <td>0.001526</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>741</th>\n",
       "      <td>0.584207</td>\n",
       "      <td>0.001424</td>\n",
       "      <td>0.472388</td>\n",
       "      <td>0.001501</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>742</th>\n",
       "      <td>0.584196</td>\n",
       "      <td>0.001415</td>\n",
       "      <td>0.472310</td>\n",
       "      <td>0.001500</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>743 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.042925           0.000234             1.042385   \n",
       "1              0.995595           0.000295             0.994601   \n",
       "2              0.954940           0.000554             0.953415   \n",
       "3              0.919797           0.000740             0.917735   \n",
       "4              0.889200           0.000940             0.886591   \n",
       "5              0.862461           0.001007             0.859392   \n",
       "6              0.838936           0.001091             0.835406   \n",
       "7              0.818252           0.001186             0.814258   \n",
       "8              0.799962           0.001276             0.795517   \n",
       "9              0.783708           0.001287             0.778825   \n",
       "10             0.769308           0.001258             0.764012   \n",
       "11             0.756213           0.001349             0.750525   \n",
       "12             0.744660           0.001365             0.738561   \n",
       "13             0.734309           0.001315             0.727767   \n",
       "14             0.725029           0.001256             0.718074   \n",
       "15             0.716714           0.001316             0.709357   \n",
       "16             0.709220           0.001464             0.701495   \n",
       "17             0.702427           0.001454             0.694344   \n",
       "18             0.696217           0.001382             0.687805   \n",
       "19             0.690596           0.001368             0.681821   \n",
       "20             0.685498           0.001306             0.676395   \n",
       "21             0.680826           0.001398             0.671293   \n",
       "22             0.676539           0.001420             0.666732   \n",
       "23             0.672664           0.001411             0.662544   \n",
       "24             0.669183           0.001408             0.658687   \n",
       "25             0.665934           0.001387             0.655106   \n",
       "26             0.662950           0.001368             0.651760   \n",
       "27             0.660091           0.001513             0.648586   \n",
       "28             0.657548           0.001507             0.645699   \n",
       "29             0.655063           0.001415             0.642957   \n",
       "..                  ...                ...                  ...   \n",
       "713            0.584250           0.001509             0.474831   \n",
       "714            0.584247           0.001521             0.474725   \n",
       "715            0.584245           0.001526             0.474635   \n",
       "716            0.584243           0.001527             0.474552   \n",
       "717            0.584244           0.001523             0.474444   \n",
       "718            0.584246           0.001519             0.474392   \n",
       "719            0.584246           0.001507             0.474271   \n",
       "720            0.584251           0.001501             0.474173   \n",
       "721            0.584236           0.001499             0.474102   \n",
       "722            0.584233           0.001491             0.473991   \n",
       "723            0.584224           0.001476             0.473874   \n",
       "724            0.584221           0.001458             0.473785   \n",
       "725            0.584236           0.001457             0.473679   \n",
       "726            0.584232           0.001448             0.473554   \n",
       "727            0.584232           0.001444             0.473440   \n",
       "728            0.584222           0.001445             0.473368   \n",
       "729            0.584237           0.001449             0.473288   \n",
       "730            0.584238           0.001448             0.473219   \n",
       "731            0.584240           0.001449             0.473132   \n",
       "732            0.584234           0.001445             0.473043   \n",
       "733            0.584237           0.001446             0.472958   \n",
       "734            0.584233           0.001449             0.472890   \n",
       "735            0.584232           0.001445             0.472833   \n",
       "736            0.584225           0.001444             0.472781   \n",
       "737            0.584225           0.001442             0.472688   \n",
       "738            0.584210           0.001438             0.472617   \n",
       "739            0.584215           0.001434             0.472543   \n",
       "740            0.584209           0.001425             0.472451   \n",
       "741            0.584207           0.001424             0.472388   \n",
       "742            0.584196           0.001415             0.472310   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000122  \n",
       "1              0.000302  \n",
       "2              0.000284  \n",
       "3              0.000370  \n",
       "4              0.000457  \n",
       "5              0.000540  \n",
       "6              0.000676  \n",
       "7              0.000797  \n",
       "8              0.000865  \n",
       "9              0.000921  \n",
       "10             0.001027  \n",
       "11             0.000941  \n",
       "12             0.000984  \n",
       "13             0.000997  \n",
       "14             0.001170  \n",
       "15             0.001124  \n",
       "16             0.001077  \n",
       "17             0.001123  \n",
       "18             0.001196  \n",
       "19             0.001235  \n",
       "20             0.001311  \n",
       "21             0.001311  \n",
       "22             0.001330  \n",
       "23             0.001400  \n",
       "24             0.001370  \n",
       "25             0.001293  \n",
       "26             0.001408  \n",
       "27             0.001339  \n",
       "28             0.001301  \n",
       "29             0.001353  \n",
       "..                  ...  \n",
       "713            0.001424  \n",
       "714            0.001436  \n",
       "715            0.001452  \n",
       "716            0.001480  \n",
       "717            0.001520  \n",
       "718            0.001500  \n",
       "719            0.001519  \n",
       "720            0.001500  \n",
       "721            0.001497  \n",
       "722            0.001476  \n",
       "723            0.001501  \n",
       "724            0.001524  \n",
       "725            0.001523  \n",
       "726            0.001523  \n",
       "727            0.001509  \n",
       "728            0.001531  \n",
       "729            0.001565  \n",
       "730            0.001583  \n",
       "731            0.001606  \n",
       "732            0.001605  \n",
       "733            0.001640  \n",
       "734            0.001645  \n",
       "735            0.001648  \n",
       "736            0.001639  \n",
       "737            0.001592  \n",
       "738            0.001544  \n",
       "739            0.001520  \n",
       "740            0.001526  \n",
       "741            0.001501  \n",
       "742            0.001500  \n",
       "\n",
       "[743 rows x 4 columns]"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
     "# Grid-search the sampling hyper-parameters (subsample, colsample_bytree)\n",
     "# while keeping the previously tuned depth/weight and regularisation values fixed.\n",
     "tmp = time.time()\n",
     "# 4 evenly spaced candidate ratios in [0.3, 1.0] for row and column sampling\n",
     "subsample = list(np.linspace(0.3, 1, 4))\n",
     "colsample_bytree = list(np.linspace(0.3, 1, 4))\n",
     "param_search_sample = {\n",
     "    'max_depth': [ searchResult_depth_weight[0][1]['max_depth'] ],  # best value from the earlier depth/weight search\n",
     "    'min_child_weight': [ searchResult_depth_weight[0][1]['min_child_weight'] ],  # best value from the earlier depth/weight search\n",
     "    'reg_alpha': [ searchResult_reg[0][1]['reg_alpha'] ],  # best value from the earlier regularisation search\n",
     "    'reg_lambda': [ searchResult_reg[0][1]['reg_lambda'] ],  # best value from the earlier regularisation search\n",
     "    'subsample': subsample,\n",
     "    'colsample_bytree': colsample_bytree,\n",
     "}\n",
     "\n",
     "# SearchUseXgboostNative (defined earlier in the notebook) appears to return a\n",
     "# list of (cv-history DataFrame, parameter dict) pairs; sort by best CV test mlogloss.\n",
     "searchResult_sample = SearchUseXgboostNative(param_search_sample, data_train_dmatrix)\n",
     "searchResult_sample.sort(key = lambda x: x[0]['test-mlogloss-mean'].min())\n",
     "print(searchResult_sample[0][1])\n",
     "print(\"Training Time: %s seconds\" % (str(time.time() - tmp)))\n",
     "# Show the CV history of the winning combination (last expression = cell output)\n",
     "searchResult_sample[0][0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "6. 调用模型进行测试\n",
    "----------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "best parameter:  {'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 5, 'min_child_weight': 3, 'reg_alpha': 0.01, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.3}\n"
     ]
    }
   ],
   "source": [
    "#use best parameter to train a model\n",
    "best_parameter = param_default.copy()\n",
    "best_parameter.update(searchResult_sample[0][1])\n",
    "n_estimators = searchResult_sample[0][0].shape[0]\n",
    "print('best parameter: ', best_parameter)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# this parameter we find last from last search, we can use this in case program crash\n",
    "best_parameter = {'silent': 0, \n",
    "                  'eta': 0.1, \n",
    "                  'objective': 'multi:softmax', \n",
    "                  'eval_metric': 'mlogloss', \n",
    "                  'num_class': 3, \n",
    "                  'nthread': 7, \n",
    "                  'tree_method': 'gpu_exact', \n",
    "                  'max_depth': 5, \n",
    "                  'min_child_weight': 3, \n",
    "                  'reg_alpha': 0.01, \n",
    "                  'reg_lambda': 100.0, \n",
    "                  'subsample': 0.3, \n",
    "                  'colsample_bytree': 0.3}\n",
    "n_estimators = 743\n",
    "\n",
    "# xgb_best = XGBClassifier(\n",
    "#         learning_rate = best_parameter['eta'],\n",
    "#         n_jobs = best_parameter['nthread'],\n",
    "#         tree_method = best_parameter['tree_method'],\n",
    "#         objective= best_parameter['objective'],\n",
    "#         num_class = best_parameter['num_class'],\n",
    "#         n_estimators=n_estimators,\n",
    "#         max_depth=best_parameter['max_depth'],\n",
    "#         min_child_weight=best_parameter['min_child_weight'],\n",
    "#         reg_alpha = best_parameter['reg_alpha'],\n",
    "#         reg_lambda = best_parameter['reg_lambda'],\n",
    "#         subsample=best_parameter['subsample'],\n",
    "#         colsample_bytree=best_parameter['colsample_bytree']\n",
    "#         )\n",
    "\n",
    "# xgb_best.fit(data_train_pd.drop(['interest_level'], axis = 1), data_train_pd['interest_level'], eval_metric = best_parameter['eval_metric'])\n",
    "xgb_best = xgb.train(best_parameter, data_train_dmatrix, n_estimators)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "train Accuracy: 79.38%\n"
     ]
    }
   ],
   "source": [
     "# Sanity check of accuracy on the training set, disabled by default\n",
     "# (flip the guard to True to run it).\n",
     "# NOTE(review): the original comment claimed \"we can only predict once\";\n",
     "# a Booster's predict() is normally repeatable -- this guard just skips an\n",
     "# extra pass over the training data. Confirm before relying on it.\n",
     "if False:\n",
     "    # make prediction\n",
     "    # preds = xgb_best.predict(data_train_pd.drop(['interest_level'], axis = 1))\n",
     "    # y_pred = [round(value) for value in preds]\n",
     "    # y_test = data_train_pd['interest_level']\n",
     "    preds = xgb_best.predict(data_train_dmatrix)\n",
     "    # objective is 'multi:softmax', so preds are class indices (as floats)\n",
     "    y_pred = [round(value) for value in preds]\n",
     "    y_test = data_train_dmatrix.get_label()\n",
     "    \n",
     "    train_accuracy = accuracy_score(y_test, y_pred)\n",
     "    print(\"train Accuracy: %.2f%%\" % (train_accuracy * 100.0))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
     "# Predict on the test set. With objective 'multi:softmax' this yields class\n",
     "# indices, while the competition metric is logloss, which needs class\n",
     "# probabilities ('multi:softprob') -- NOTE(review): verify the expected\n",
     "# submission format before uploading.\n",
     "y_test_pred = xgb_best.predict(data_test_dmatrix)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "#save result\n",
    "dpath_out = '.\\\\out\\\\'\n",
    "data_test_result = data_test_pd.copy()\n",
    "data_test_result['interest_level'] = pd.Series(y_test_pred)\n",
    "data_test_result.to_csv(dpath_out + 'Rent_Listing_Inquries_result.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
