{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Rental Listing Inquiries\n",
    "========================\n",
    "模型拟合 - use processed features with 20% of samples held out as the test set\n",
    "--------------------------------------------------------------\n",
    "> Rental Listing Inquiries数据集是Kaggle平台上的一个分类竞赛任务，需要根据公寓的特征来预测其受欢迎程度（用户感兴趣程度分为高、中、低三类）。其中房屋的特征x共有14维，响应值y为用户对该公寓的感兴趣程度。评价标准为logloss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from xgboost import XGBClassifier\n",
    "import xgboost as xgb\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.metrics import accuracy_score\n",
    "from sklearn.model_selection import GridSearchCV, StratifiedKFold\n",
    "import pandas as pd\n",
    "import multiprocessing\n",
    "import numpy as np\n",
    "from functools import reduce\n",
    "import seaborn as sns\n",
    "import time\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "读入数据\n",
    "------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Directory holding the feature-engineered CSVs produced by the FE step.\n",
    "# Forward slashes are portable: pandas/Python accept them on Windows as well,\n",
    "# unlike the original '.\\\\out\\\\' which only worked on Windows.\n",
    "dpath_out = './out/'\n",
    "data_train = pd.read_csv(dpath_out + 'RentListingInquries_FE_train_train.csv')\n",
    "data_verify = pd.read_csv(dpath_out + 'RentListingInquries_FE_train_test.csv')\n",
    "\n",
    "# Wrap both splits in DMatrix once so every xgb.cv call below reuses them;\n",
    "# 'interest_level' is the target column, everything else is a feature.\n",
    "data_train_dmatrix = xgb.DMatrix(data_train.drop(['interest_level'], axis = 1), label = data_train['interest_level'])\n",
    "data_verify_dmatrix = xgb.DMatrix(data_verify.drop(['interest_level'], axis = 1), label = data_verify['interest_level'])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "> 编写调参用的search函数：sklearn的GridSearchCV太慢，我们需要用原始的xgboost接口调参"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "function build Time: 0.0010027885437011719 seconds\n"
     ]
    }
   ],
   "source": [
    "tmp = time.time()\n",
    "\n",
    "# Base booster parameters shared by every cross-validation run below.\n",
    "# NOTE(review): 'silent' and tree_method='gpu_exact' are deprecated/removed in\n",
    "# xgboost >= 1.0 (use 'verbosity' / 'gpu_hist' there) -- confirm installed version.\n",
    "param_default = {\n",
    "    'silent': 0, \n",
    "    'eta': 0.1,\n",
    "    'objective': 'multi:softmax',\n",
    "    'eval_metric': 'mlogloss',\n",
    "    'num_class': 3,\n",
    "    'nthread': multiprocessing.cpu_count() - 1, #leave one cpu to let other task run, so we can continue edit code\n",
    "    'tree_method': 'gpu_exact'\n",
    "}\n",
    "\n",
    "# Defaults forwarded verbatim to xgb.cv(); the large round cap plus early\n",
    "# stopping lets each run pick its own best tree count.\n",
    "param_cv_default = {\n",
    "    'num_boost_round' : 2000, # use a max value to let model stop automatically\n",
    "    'early_stopping_rounds': 100,  # use a max value to let model stop automatically\n",
    "    'folds' : StratifiedKFold(n_splits=5, shuffle=True, random_state=3),\n",
    "    'metrics': 'mlogloss'\n",
    "}\n",
    "\n",
    "#this is major function we used to search best parameter\n",
    "def SearchUseXgboostNative(param_grid, data_dmatrix, param_default = param_default, param_cv_default = param_cv_default):\n",
    "    \"\"\"Grid-search xgboost parameters through the native xgb.cv interface.\n",
    "\n",
    "    param_grid: dict mapping parameter name -> list of candidate values,\n",
    "        or a list of such dicts.  All key/value lists are expanded into\n",
    "        their full cross product.\n",
    "    data_dmatrix: xgb.DMatrix used for every cv run.\n",
    "    Returns a list of (cv_result DataFrame, full parameter dict) tuples,\n",
    "    one per candidate combination, in grid order.\n",
    "    Raises TypeError if param_grid is not a dict/list or any value is not a list.\n",
    "    \"\"\"\n",
    "    if isinstance(param_grid, list):\n",
    "        tmp_grid = param_grid\n",
    "    elif isinstance(param_grid, dict):\n",
    "        tmp_grid = [ param_grid ]\n",
    "    else:\n",
    "        raise TypeError('unsupported parameter type...')\n",
    "\n",
    "    # Expand every key's candidate list into the running cross product.\n",
    "    # NOTE(review): when param_grid is a list, keys from *all* dicts are crossed\n",
    "    # together rather than searched as independent grids (unlike sklearn's\n",
    "    # GridSearchCV semantics) -- behavior kept as-is, confirm it is intended.\n",
    "    param_search_list = []\n",
    "    for grid in tmp_grid:\n",
    "        for key, values in grid.items():\n",
    "            if not isinstance(values, list):\n",
    "                raise TypeError('unsupported parameter type...')\n",
    "            if param_search_list:\n",
    "                expanded = []\n",
    "                for partial in param_search_list:\n",
    "                    for value in values:\n",
    "                        candidate = partial.copy()\n",
    "                        candidate[key] = value\n",
    "                        expanded.append(candidate)\n",
    "                param_search_list = expanded\n",
    "            else:\n",
    "                param_search_list = [{key: value} for value in values]\n",
    "\n",
    "    ret = []\n",
    "    for param_dict in param_search_list:\n",
    "        # Overlay the candidate on top of the shared defaults.\n",
    "        tmp_param_dict = param_default.copy()\n",
    "        tmp_param_dict.update(param_dict)\n",
    "        print('param : ', param_dict, end = \" \")\n",
    "        cv_result = xgb.cv(tmp_param_dict, data_dmatrix, **param_cv_default)\n",
    "        print('cv_result estimaters：', cv_result.shape[0], 'min test_score : ', cv_result['test-mlogloss-mean'].min())\n",
    "        ret.append((cv_result, tmp_param_dict))\n",
    "\n",
    "    return ret\n",
    "print(\"function build Time: %s seconds\" % (str(time.time() - tmp)))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "搜索最优max_depth 和 min_child_weight\n",
    "-------------------------------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "param :  {'max_depth': 3, 'min_child_weight': 1} cv_result estimaters： 959 min test_score :  0.587114\n",
      "param :  {'max_depth': 3, 'min_child_weight': 3} cv_result estimaters： 931 min test_score :  0.5870879999999999\n",
      "param :  {'max_depth': 3, 'min_child_weight': 5} cv_result estimaters： 860 min test_score :  0.5869209999999999\n",
      "param :  {'max_depth': 5, 'min_child_weight': 1} cv_result estimaters： 324 min test_score :  0.5844796666666666\n",
      "param :  {'max_depth': 5, 'min_child_weight': 3} cv_result estimaters： 314 min test_score :  0.584409\n",
      "param :  {'max_depth': 5, 'min_child_weight': 5} cv_result estimaters： 289 min test_score :  0.5844873333333335\n",
      "param :  {'max_depth': 7, 'min_child_weight': 1} cv_result estimaters： 161 min test_score :  0.5852043333333333\n",
      "param :  {'max_depth': 7, 'min_child_weight': 3} cv_result estimaters： 164 min test_score :  0.5859886666666667\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5} cv_result estimaters： 166 min test_score :  0.584274\n",
      "param :  {'max_depth': 9, 'min_child_weight': 1} cv_result estimaters： 89 min test_score :  0.5925266666666666\n",
      "param :  {'max_depth': 9, 'min_child_weight': 3} cv_result estimaters： 91 min test_score :  0.592494\n",
      "param :  {'max_depth': 9, 'min_child_weight': 5} cv_result estimaters： 111 min test_score :  0.5899646666666666\n",
      "{'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 7, 'min_child_weight': 5}\n",
      "Training Time: 2723.04571723938 seconds\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.035848</td>\n",
       "      <td>0.000240</td>\n",
       "      <td>1.033512</td>\n",
       "      <td>0.000227</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.983270</td>\n",
       "      <td>0.000559</td>\n",
       "      <td>0.978552</td>\n",
       "      <td>0.000336</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.938713</td>\n",
       "      <td>0.000782</td>\n",
       "      <td>0.931677</td>\n",
       "      <td>0.000355</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.900515</td>\n",
       "      <td>0.000977</td>\n",
       "      <td>0.891204</td>\n",
       "      <td>0.000418</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.867424</td>\n",
       "      <td>0.001161</td>\n",
       "      <td>0.856020</td>\n",
       "      <td>0.000463</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.838857</td>\n",
       "      <td>0.001247</td>\n",
       "      <td>0.825321</td>\n",
       "      <td>0.000523</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.813742</td>\n",
       "      <td>0.001535</td>\n",
       "      <td>0.798197</td>\n",
       "      <td>0.000346</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.791720</td>\n",
       "      <td>0.001603</td>\n",
       "      <td>0.774169</td>\n",
       "      <td>0.000419</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.772361</td>\n",
       "      <td>0.001647</td>\n",
       "      <td>0.752716</td>\n",
       "      <td>0.000534</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.755232</td>\n",
       "      <td>0.001664</td>\n",
       "      <td>0.733577</td>\n",
       "      <td>0.000576</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.740083</td>\n",
       "      <td>0.001828</td>\n",
       "      <td>0.716431</td>\n",
       "      <td>0.000521</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.726610</td>\n",
       "      <td>0.001847</td>\n",
       "      <td>0.701054</td>\n",
       "      <td>0.000691</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.714758</td>\n",
       "      <td>0.001961</td>\n",
       "      <td>0.687316</td>\n",
       "      <td>0.000676</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.704245</td>\n",
       "      <td>0.002105</td>\n",
       "      <td>0.674797</td>\n",
       "      <td>0.000621</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.694700</td>\n",
       "      <td>0.002252</td>\n",
       "      <td>0.663342</td>\n",
       "      <td>0.000670</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.686163</td>\n",
       "      <td>0.002199</td>\n",
       "      <td>0.653012</td>\n",
       "      <td>0.000755</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.678545</td>\n",
       "      <td>0.002198</td>\n",
       "      <td>0.643568</td>\n",
       "      <td>0.000753</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.671734</td>\n",
       "      <td>0.002157</td>\n",
       "      <td>0.634870</td>\n",
       "      <td>0.000777</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.665756</td>\n",
       "      <td>0.002153</td>\n",
       "      <td>0.626873</td>\n",
       "      <td>0.000764</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.660346</td>\n",
       "      <td>0.002100</td>\n",
       "      <td>0.619519</td>\n",
       "      <td>0.000814</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.655255</td>\n",
       "      <td>0.002061</td>\n",
       "      <td>0.612889</td>\n",
       "      <td>0.000882</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.650690</td>\n",
       "      <td>0.002008</td>\n",
       "      <td>0.606649</td>\n",
       "      <td>0.001012</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.646646</td>\n",
       "      <td>0.002040</td>\n",
       "      <td>0.600819</td>\n",
       "      <td>0.001132</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.643111</td>\n",
       "      <td>0.002037</td>\n",
       "      <td>0.595559</td>\n",
       "      <td>0.001213</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.639644</td>\n",
       "      <td>0.002008</td>\n",
       "      <td>0.590557</td>\n",
       "      <td>0.001123</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.636654</td>\n",
       "      <td>0.002077</td>\n",
       "      <td>0.585710</td>\n",
       "      <td>0.001203</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.633758</td>\n",
       "      <td>0.002078</td>\n",
       "      <td>0.581204</td>\n",
       "      <td>0.001280</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.631049</td>\n",
       "      <td>0.002082</td>\n",
       "      <td>0.577118</td>\n",
       "      <td>0.001220</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.628686</td>\n",
       "      <td>0.002081</td>\n",
       "      <td>0.573068</td>\n",
       "      <td>0.001138</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.626427</td>\n",
       "      <td>0.002178</td>\n",
       "      <td>0.569290</td>\n",
       "      <td>0.001043</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>136</th>\n",
       "      <td>0.584768</td>\n",
       "      <td>0.003496</td>\n",
       "      <td>0.422632</td>\n",
       "      <td>0.004246</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>137</th>\n",
       "      <td>0.584644</td>\n",
       "      <td>0.003517</td>\n",
       "      <td>0.421897</td>\n",
       "      <td>0.004114</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>138</th>\n",
       "      <td>0.584604</td>\n",
       "      <td>0.003562</td>\n",
       "      <td>0.421158</td>\n",
       "      <td>0.004053</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>139</th>\n",
       "      <td>0.584532</td>\n",
       "      <td>0.003506</td>\n",
       "      <td>0.420350</td>\n",
       "      <td>0.003898</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>140</th>\n",
       "      <td>0.584426</td>\n",
       "      <td>0.003502</td>\n",
       "      <td>0.419374</td>\n",
       "      <td>0.003925</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>141</th>\n",
       "      <td>0.584440</td>\n",
       "      <td>0.003468</td>\n",
       "      <td>0.418607</td>\n",
       "      <td>0.003724</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>142</th>\n",
       "      <td>0.584451</td>\n",
       "      <td>0.003504</td>\n",
       "      <td>0.418170</td>\n",
       "      <td>0.003806</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>143</th>\n",
       "      <td>0.584414</td>\n",
       "      <td>0.003473</td>\n",
       "      <td>0.417515</td>\n",
       "      <td>0.003924</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>144</th>\n",
       "      <td>0.584430</td>\n",
       "      <td>0.003494</td>\n",
       "      <td>0.416610</td>\n",
       "      <td>0.003848</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>145</th>\n",
       "      <td>0.584376</td>\n",
       "      <td>0.003482</td>\n",
       "      <td>0.415800</td>\n",
       "      <td>0.003798</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>146</th>\n",
       "      <td>0.584379</td>\n",
       "      <td>0.003526</td>\n",
       "      <td>0.415057</td>\n",
       "      <td>0.003834</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>147</th>\n",
       "      <td>0.584354</td>\n",
       "      <td>0.003526</td>\n",
       "      <td>0.414144</td>\n",
       "      <td>0.003841</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>148</th>\n",
       "      <td>0.584347</td>\n",
       "      <td>0.003528</td>\n",
       "      <td>0.413269</td>\n",
       "      <td>0.003856</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>149</th>\n",
       "      <td>0.584370</td>\n",
       "      <td>0.003488</td>\n",
       "      <td>0.412530</td>\n",
       "      <td>0.003775</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>150</th>\n",
       "      <td>0.584331</td>\n",
       "      <td>0.003466</td>\n",
       "      <td>0.411690</td>\n",
       "      <td>0.003640</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>151</th>\n",
       "      <td>0.584382</td>\n",
       "      <td>0.003432</td>\n",
       "      <td>0.411012</td>\n",
       "      <td>0.003613</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>152</th>\n",
       "      <td>0.584391</td>\n",
       "      <td>0.003488</td>\n",
       "      <td>0.410301</td>\n",
       "      <td>0.003369</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>153</th>\n",
       "      <td>0.584349</td>\n",
       "      <td>0.003456</td>\n",
       "      <td>0.409519</td>\n",
       "      <td>0.003693</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>154</th>\n",
       "      <td>0.584423</td>\n",
       "      <td>0.003459</td>\n",
       "      <td>0.408746</td>\n",
       "      <td>0.003686</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>155</th>\n",
       "      <td>0.584451</td>\n",
       "      <td>0.003503</td>\n",
       "      <td>0.408068</td>\n",
       "      <td>0.003830</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>156</th>\n",
       "      <td>0.584424</td>\n",
       "      <td>0.003552</td>\n",
       "      <td>0.407498</td>\n",
       "      <td>0.003889</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>157</th>\n",
       "      <td>0.584468</td>\n",
       "      <td>0.003504</td>\n",
       "      <td>0.406884</td>\n",
       "      <td>0.003879</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>158</th>\n",
       "      <td>0.584475</td>\n",
       "      <td>0.003476</td>\n",
       "      <td>0.406264</td>\n",
       "      <td>0.003837</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>159</th>\n",
       "      <td>0.584437</td>\n",
       "      <td>0.003476</td>\n",
       "      <td>0.405267</td>\n",
       "      <td>0.003945</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>160</th>\n",
       "      <td>0.584445</td>\n",
       "      <td>0.003492</td>\n",
       "      <td>0.404266</td>\n",
       "      <td>0.003987</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>161</th>\n",
       "      <td>0.584467</td>\n",
       "      <td>0.003426</td>\n",
       "      <td>0.403542</td>\n",
       "      <td>0.003855</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>162</th>\n",
       "      <td>0.584428</td>\n",
       "      <td>0.003445</td>\n",
       "      <td>0.402802</td>\n",
       "      <td>0.003757</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>163</th>\n",
       "      <td>0.584414</td>\n",
       "      <td>0.003353</td>\n",
       "      <td>0.402141</td>\n",
       "      <td>0.003817</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>164</th>\n",
       "      <td>0.584362</td>\n",
       "      <td>0.003298</td>\n",
       "      <td>0.401376</td>\n",
       "      <td>0.003797</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>165</th>\n",
       "      <td>0.584274</td>\n",
       "      <td>0.003347</td>\n",
       "      <td>0.400695</td>\n",
       "      <td>0.003893</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>166 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.035848           0.000240             1.033512   \n",
       "1              0.983270           0.000559             0.978552   \n",
       "2              0.938713           0.000782             0.931677   \n",
       "3              0.900515           0.000977             0.891204   \n",
       "4              0.867424           0.001161             0.856020   \n",
       "5              0.838857           0.001247             0.825321   \n",
       "6              0.813742           0.001535             0.798197   \n",
       "7              0.791720           0.001603             0.774169   \n",
       "8              0.772361           0.001647             0.752716   \n",
       "9              0.755232           0.001664             0.733577   \n",
       "10             0.740083           0.001828             0.716431   \n",
       "11             0.726610           0.001847             0.701054   \n",
       "12             0.714758           0.001961             0.687316   \n",
       "13             0.704245           0.002105             0.674797   \n",
       "14             0.694700           0.002252             0.663342   \n",
       "15             0.686163           0.002199             0.653012   \n",
       "16             0.678545           0.002198             0.643568   \n",
       "17             0.671734           0.002157             0.634870   \n",
       "18             0.665756           0.002153             0.626873   \n",
       "19             0.660346           0.002100             0.619519   \n",
       "20             0.655255           0.002061             0.612889   \n",
       "21             0.650690           0.002008             0.606649   \n",
       "22             0.646646           0.002040             0.600819   \n",
       "23             0.643111           0.002037             0.595559   \n",
       "24             0.639644           0.002008             0.590557   \n",
       "25             0.636654           0.002077             0.585710   \n",
       "26             0.633758           0.002078             0.581204   \n",
       "27             0.631049           0.002082             0.577118   \n",
       "28             0.628686           0.002081             0.573068   \n",
       "29             0.626427           0.002178             0.569290   \n",
       "..                  ...                ...                  ...   \n",
       "136            0.584768           0.003496             0.422632   \n",
       "137            0.584644           0.003517             0.421897   \n",
       "138            0.584604           0.003562             0.421158   \n",
       "139            0.584532           0.003506             0.420350   \n",
       "140            0.584426           0.003502             0.419374   \n",
       "141            0.584440           0.003468             0.418607   \n",
       "142            0.584451           0.003504             0.418170   \n",
       "143            0.584414           0.003473             0.417515   \n",
       "144            0.584430           0.003494             0.416610   \n",
       "145            0.584376           0.003482             0.415800   \n",
       "146            0.584379           0.003526             0.415057   \n",
       "147            0.584354           0.003526             0.414144   \n",
       "148            0.584347           0.003528             0.413269   \n",
       "149            0.584370           0.003488             0.412530   \n",
       "150            0.584331           0.003466             0.411690   \n",
       "151            0.584382           0.003432             0.411012   \n",
       "152            0.584391           0.003488             0.410301   \n",
       "153            0.584349           0.003456             0.409519   \n",
       "154            0.584423           0.003459             0.408746   \n",
       "155            0.584451           0.003503             0.408068   \n",
       "156            0.584424           0.003552             0.407498   \n",
       "157            0.584468           0.003504             0.406884   \n",
       "158            0.584475           0.003476             0.406264   \n",
       "159            0.584437           0.003476             0.405267   \n",
       "160            0.584445           0.003492             0.404266   \n",
       "161            0.584467           0.003426             0.403542   \n",
       "162            0.584428           0.003445             0.402802   \n",
       "163            0.584414           0.003353             0.402141   \n",
       "164            0.584362           0.003298             0.401376   \n",
       "165            0.584274           0.003347             0.400695   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000227  \n",
       "1              0.000336  \n",
       "2              0.000355  \n",
       "3              0.000418  \n",
       "4              0.000463  \n",
       "5              0.000523  \n",
       "6              0.000346  \n",
       "7              0.000419  \n",
       "8              0.000534  \n",
       "9              0.000576  \n",
       "10             0.000521  \n",
       "11             0.000691  \n",
       "12             0.000676  \n",
       "13             0.000621  \n",
       "14             0.000670  \n",
       "15             0.000755  \n",
       "16             0.000753  \n",
       "17             0.000777  \n",
       "18             0.000764  \n",
       "19             0.000814  \n",
       "20             0.000882  \n",
       "21             0.001012  \n",
       "22             0.001132  \n",
       "23             0.001213  \n",
       "24             0.001123  \n",
       "25             0.001203  \n",
       "26             0.001280  \n",
       "27             0.001220  \n",
       "28             0.001138  \n",
       "29             0.001043  \n",
       "..                  ...  \n",
       "136            0.004246  \n",
       "137            0.004114  \n",
       "138            0.004053  \n",
       "139            0.003898  \n",
       "140            0.003925  \n",
       "141            0.003724  \n",
       "142            0.003806  \n",
       "143            0.003924  \n",
       "144            0.003848  \n",
       "145            0.003798  \n",
       "146            0.003834  \n",
       "147            0.003841  \n",
       "148            0.003856  \n",
       "149            0.003775  \n",
       "150            0.003640  \n",
       "151            0.003613  \n",
       "152            0.003369  \n",
       "153            0.003693  \n",
       "154            0.003686  \n",
       "155            0.003830  \n",
       "156            0.003889  \n",
       "157            0.003879  \n",
       "158            0.003837  \n",
       "159            0.003945  \n",
       "160            0.003987  \n",
       "161            0.003855  \n",
       "162            0.003757  \n",
       "163            0.003817  \n",
       "164            0.003797  \n",
       "165            0.003893  \n",
       "\n",
       "[166 rows x 4 columns]"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tmp = time.time()\n",
    "\n",
    "# Coarse grid: odd depths 3..9 crossed with min_child_weight 1, 3, 5.\n",
    "max_depth = range(3, 10, 2)\n",
    "min_child_weight = range(1, 6, 2)\n",
    "param_search_depth_weight = {\n",
    "    'max_depth': list(max_depth),\n",
    "    'min_child_weight': list(min_child_weight)\n",
    "}\n",
    "\n",
    "searchResult_depth_weight = SearchUseXgboostNative(param_search_depth_weight, data_train_dmatrix)\n",
    "\n",
    "# Rank candidates by their best (minimum) cross-validated test mlogloss,\n",
    "# then show the winning parameter set and its full cv history.\n",
    "searchResult_depth_weight.sort(key=lambda entry: entry[0]['test-mlogloss-mean'].min())\n",
    "print(searchResult_depth_weight[0][1])\n",
    "print(\"Training Time: %s seconds\" % (str(time.time() - tmp)))\n",
    "searchResult_depth_weight[0][0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "搜索最优的正则参数\n",
    "----------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 0.01} cv_result estimaters： 140 min test_score :  0.5849506666666667\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 0.1} cv_result estimaters： 148 min test_score :  0.5852603333333333\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 1.0} cv_result estimaters： 166 min test_score :  0.584274\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 10.0} cv_result estimaters： 194 min test_score :  0.583469\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.01, 'reg_lambda': 0.01} cv_result estimaters： 150 min test_score :  0.5856286666666667\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.01, 'reg_lambda': 0.1} cv_result estimaters： 153 min test_score :  0.5855673333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.01, 'reg_lambda': 1.0} cv_result estimaters： 157 min test_score :  0.5849733333333335\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.01, 'reg_lambda': 10.0} cv_result estimaters： 209 min test_score :  0.5822790000000001\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.01, 'reg_lambda': 100.0} cv_result estimaters： 304 min test_score :  0.5824036666666667\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.1, 'reg_lambda': 0.01} cv_result estimaters： 151 min test_score :  0.585693\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.1, 'reg_lambda': 0.1} cv_result estimaters： 148 min test_score :  0.5853856666666667\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.1, 'reg_lambda': 1.0} cv_result estimaters： 165 min test_score :  0.5848633333333333\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.1, 'reg_lambda': 10.0} cv_result estimaters： 197 min test_score :  0.58291\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0.1, 'reg_lambda': 100.0} cv_result estimaters： 328 min test_score :  0.5820393333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 1.0, 'reg_lambda': 0.01} cv_result estimaters： 142 min test_score :  0.5855503333333333\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 1.0, 'reg_lambda': 0.1} cv_result estimaters： 142 min test_score :  0.584267\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 1.0, 'reg_lambda': 1.0} cv_result estimaters： 161 min test_score :  0.5842553333333335\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 1.0, 'reg_lambda': 10.0} cv_result estimaters： 183 min test_score :  0.5830143333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 1.0, 'reg_lambda': 100.0} cv_result estimaters： 332 min test_score :  0.5823676666666667\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 10.0, 'reg_lambda': 0.01} cv_result estimaters： 217 min test_score :  0.5844520000000001\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 10.0, 'reg_lambda': 0.1} cv_result estimaters： 241 min test_score :  0.5838533333333333\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 10.0, 'reg_lambda': 1.0} cv_result estimaters： 228 min test_score :  0.583511\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 10.0, 'reg_lambda': 10.0} cv_result estimaters： 255 min test_score :  0.5840086666666667\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 10.0, 'reg_lambda': 100.0} cv_result estimaters： 340 min test_score :  0.584326\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 100.0, 'reg_lambda': 0.01} cv_result estimaters： 232 min test_score :  0.6199133333333333\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 100.0, 'reg_lambda': 0.1} cv_result estimaters： 233 min test_score :  0.6197643333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 100.0, 'reg_lambda': 1.0} cv_result estimaters： 227 min test_score :  0.619808\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 100.0, 'reg_lambda': 10.0} cv_result estimaters： 227 min test_score :  0.6197946666666666\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 100.0, 'reg_lambda': 100.0} cv_result estimaters： 246 min test_score :  0.6200586666666666\n",
      "{'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0}\n",
      "Training Time: 6932.767691850662 seconds\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.042582</td>\n",
       "      <td>0.000076</td>\n",
       "      <td>1.041286</td>\n",
       "      <td>0.000131</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.994701</td>\n",
       "      <td>0.000091</td>\n",
       "      <td>0.992401</td>\n",
       "      <td>0.000313</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.953581</td>\n",
       "      <td>0.000229</td>\n",
       "      <td>0.950225</td>\n",
       "      <td>0.000333</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.917925</td>\n",
       "      <td>0.000334</td>\n",
       "      <td>0.913502</td>\n",
       "      <td>0.000484</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.886963</td>\n",
       "      <td>0.000577</td>\n",
       "      <td>0.881458</td>\n",
       "      <td>0.000493</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.859825</td>\n",
       "      <td>0.000613</td>\n",
       "      <td>0.853254</td>\n",
       "      <td>0.000587</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.835959</td>\n",
       "      <td>0.000827</td>\n",
       "      <td>0.828371</td>\n",
       "      <td>0.000591</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.814804</td>\n",
       "      <td>0.000953</td>\n",
       "      <td>0.806257</td>\n",
       "      <td>0.000672</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.796083</td>\n",
       "      <td>0.001265</td>\n",
       "      <td>0.786595</td>\n",
       "      <td>0.000652</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.779316</td>\n",
       "      <td>0.001536</td>\n",
       "      <td>0.768879</td>\n",
       "      <td>0.000431</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.764408</td>\n",
       "      <td>0.001607</td>\n",
       "      <td>0.753079</td>\n",
       "      <td>0.000462</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.751028</td>\n",
       "      <td>0.001742</td>\n",
       "      <td>0.738792</td>\n",
       "      <td>0.000394</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.739179</td>\n",
       "      <td>0.001750</td>\n",
       "      <td>0.726071</td>\n",
       "      <td>0.000442</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.728467</td>\n",
       "      <td>0.001782</td>\n",
       "      <td>0.714473</td>\n",
       "      <td>0.000483</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.718854</td>\n",
       "      <td>0.001974</td>\n",
       "      <td>0.703955</td>\n",
       "      <td>0.000386</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.710024</td>\n",
       "      <td>0.002090</td>\n",
       "      <td>0.694309</td>\n",
       "      <td>0.000482</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.702217</td>\n",
       "      <td>0.002172</td>\n",
       "      <td>0.685645</td>\n",
       "      <td>0.000526</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.695147</td>\n",
       "      <td>0.002152</td>\n",
       "      <td>0.677694</td>\n",
       "      <td>0.000636</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.688734</td>\n",
       "      <td>0.002191</td>\n",
       "      <td>0.670443</td>\n",
       "      <td>0.000636</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.682780</td>\n",
       "      <td>0.002201</td>\n",
       "      <td>0.663809</td>\n",
       "      <td>0.000776</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.677580</td>\n",
       "      <td>0.002266</td>\n",
       "      <td>0.657716</td>\n",
       "      <td>0.000759</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.672737</td>\n",
       "      <td>0.002238</td>\n",
       "      <td>0.652055</td>\n",
       "      <td>0.000770</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.668234</td>\n",
       "      <td>0.002302</td>\n",
       "      <td>0.646704</td>\n",
       "      <td>0.000656</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.664178</td>\n",
       "      <td>0.002341</td>\n",
       "      <td>0.641937</td>\n",
       "      <td>0.000604</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.660289</td>\n",
       "      <td>0.002453</td>\n",
       "      <td>0.637208</td>\n",
       "      <td>0.000477</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.656680</td>\n",
       "      <td>0.002446</td>\n",
       "      <td>0.632980</td>\n",
       "      <td>0.000481</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.653412</td>\n",
       "      <td>0.002396</td>\n",
       "      <td>0.628935</td>\n",
       "      <td>0.000673</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.650493</td>\n",
       "      <td>0.002465</td>\n",
       "      <td>0.625325</td>\n",
       "      <td>0.000660</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.647760</td>\n",
       "      <td>0.002442</td>\n",
       "      <td>0.621814</td>\n",
       "      <td>0.000780</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.645217</td>\n",
       "      <td>0.002465</td>\n",
       "      <td>0.618476</td>\n",
       "      <td>0.000589</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>309</th>\n",
       "      <td>0.581914</td>\n",
       "      <td>0.003092</td>\n",
       "      <td>0.445500</td>\n",
       "      <td>0.001295</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>310</th>\n",
       "      <td>0.581893</td>\n",
       "      <td>0.003088</td>\n",
       "      <td>0.445239</td>\n",
       "      <td>0.001246</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>311</th>\n",
       "      <td>0.581864</td>\n",
       "      <td>0.003096</td>\n",
       "      <td>0.444972</td>\n",
       "      <td>0.001304</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>312</th>\n",
       "      <td>0.581835</td>\n",
       "      <td>0.003107</td>\n",
       "      <td>0.444628</td>\n",
       "      <td>0.001330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>313</th>\n",
       "      <td>0.581841</td>\n",
       "      <td>0.003129</td>\n",
       "      <td>0.444370</td>\n",
       "      <td>0.001270</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>314</th>\n",
       "      <td>0.581838</td>\n",
       "      <td>0.003120</td>\n",
       "      <td>0.443970</td>\n",
       "      <td>0.001334</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>315</th>\n",
       "      <td>0.581868</td>\n",
       "      <td>0.003129</td>\n",
       "      <td>0.443594</td>\n",
       "      <td>0.001288</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>316</th>\n",
       "      <td>0.581876</td>\n",
       "      <td>0.003140</td>\n",
       "      <td>0.443314</td>\n",
       "      <td>0.001225</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>317</th>\n",
       "      <td>0.581872</td>\n",
       "      <td>0.003151</td>\n",
       "      <td>0.443016</td>\n",
       "      <td>0.001155</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>318</th>\n",
       "      <td>0.581886</td>\n",
       "      <td>0.003124</td>\n",
       "      <td>0.442692</td>\n",
       "      <td>0.001092</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>319</th>\n",
       "      <td>0.581902</td>\n",
       "      <td>0.003131</td>\n",
       "      <td>0.442382</td>\n",
       "      <td>0.000996</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>320</th>\n",
       "      <td>0.581906</td>\n",
       "      <td>0.003124</td>\n",
       "      <td>0.442040</td>\n",
       "      <td>0.001011</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>321</th>\n",
       "      <td>0.581907</td>\n",
       "      <td>0.003112</td>\n",
       "      <td>0.441748</td>\n",
       "      <td>0.001010</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>322</th>\n",
       "      <td>0.581921</td>\n",
       "      <td>0.003090</td>\n",
       "      <td>0.441465</td>\n",
       "      <td>0.001004</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>323</th>\n",
       "      <td>0.581923</td>\n",
       "      <td>0.003049</td>\n",
       "      <td>0.441190</td>\n",
       "      <td>0.000892</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>324</th>\n",
       "      <td>0.581925</td>\n",
       "      <td>0.003052</td>\n",
       "      <td>0.440961</td>\n",
       "      <td>0.000911</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>325</th>\n",
       "      <td>0.581935</td>\n",
       "      <td>0.003028</td>\n",
       "      <td>0.440694</td>\n",
       "      <td>0.000972</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>326</th>\n",
       "      <td>0.581951</td>\n",
       "      <td>0.003038</td>\n",
       "      <td>0.440507</td>\n",
       "      <td>0.001052</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>327</th>\n",
       "      <td>0.581950</td>\n",
       "      <td>0.003047</td>\n",
       "      <td>0.440223</td>\n",
       "      <td>0.001115</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>328</th>\n",
       "      <td>0.581947</td>\n",
       "      <td>0.003041</td>\n",
       "      <td>0.439918</td>\n",
       "      <td>0.001113</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>329</th>\n",
       "      <td>0.581935</td>\n",
       "      <td>0.003046</td>\n",
       "      <td>0.439676</td>\n",
       "      <td>0.001136</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>330</th>\n",
       "      <td>0.581936</td>\n",
       "      <td>0.003050</td>\n",
       "      <td>0.439425</td>\n",
       "      <td>0.001129</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>331</th>\n",
       "      <td>0.581889</td>\n",
       "      <td>0.003045</td>\n",
       "      <td>0.439154</td>\n",
       "      <td>0.001242</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>332</th>\n",
       "      <td>0.581881</td>\n",
       "      <td>0.003055</td>\n",
       "      <td>0.438793</td>\n",
       "      <td>0.001369</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>333</th>\n",
       "      <td>0.581869</td>\n",
       "      <td>0.003041</td>\n",
       "      <td>0.438566</td>\n",
       "      <td>0.001408</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>334</th>\n",
       "      <td>0.581868</td>\n",
       "      <td>0.003072</td>\n",
       "      <td>0.438280</td>\n",
       "      <td>0.001494</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>335</th>\n",
       "      <td>0.581863</td>\n",
       "      <td>0.003070</td>\n",
       "      <td>0.438065</td>\n",
       "      <td>0.001512</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>336</th>\n",
       "      <td>0.581853</td>\n",
       "      <td>0.003065</td>\n",
       "      <td>0.437792</td>\n",
       "      <td>0.001486</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>337</th>\n",
       "      <td>0.581829</td>\n",
       "      <td>0.003082</td>\n",
       "      <td>0.437574</td>\n",
       "      <td>0.001463</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>338</th>\n",
       "      <td>0.581805</td>\n",
       "      <td>0.003077</td>\n",
       "      <td>0.437330</td>\n",
       "      <td>0.001469</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>339 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.042582           0.000076             1.041286   \n",
       "1              0.994701           0.000091             0.992401   \n",
       "2              0.953581           0.000229             0.950225   \n",
       "3              0.917925           0.000334             0.913502   \n",
       "4              0.886963           0.000577             0.881458   \n",
       "5              0.859825           0.000613             0.853254   \n",
       "6              0.835959           0.000827             0.828371   \n",
       "7              0.814804           0.000953             0.806257   \n",
       "8              0.796083           0.001265             0.786595   \n",
       "9              0.779316           0.001536             0.768879   \n",
       "10             0.764408           0.001607             0.753079   \n",
       "11             0.751028           0.001742             0.738792   \n",
       "12             0.739179           0.001750             0.726071   \n",
       "13             0.728467           0.001782             0.714473   \n",
       "14             0.718854           0.001974             0.703955   \n",
       "15             0.710024           0.002090             0.694309   \n",
       "16             0.702217           0.002172             0.685645   \n",
       "17             0.695147           0.002152             0.677694   \n",
       "18             0.688734           0.002191             0.670443   \n",
       "19             0.682780           0.002201             0.663809   \n",
       "20             0.677580           0.002266             0.657716   \n",
       "21             0.672737           0.002238             0.652055   \n",
       "22             0.668234           0.002302             0.646704   \n",
       "23             0.664178           0.002341             0.641937   \n",
       "24             0.660289           0.002453             0.637208   \n",
       "25             0.656680           0.002446             0.632980   \n",
       "26             0.653412           0.002396             0.628935   \n",
       "27             0.650493           0.002465             0.625325   \n",
       "28             0.647760           0.002442             0.621814   \n",
       "29             0.645217           0.002465             0.618476   \n",
       "..                  ...                ...                  ...   \n",
       "309            0.581914           0.003092             0.445500   \n",
       "310            0.581893           0.003088             0.445239   \n",
       "311            0.581864           0.003096             0.444972   \n",
       "312            0.581835           0.003107             0.444628   \n",
       "313            0.581841           0.003129             0.444370   \n",
       "314            0.581838           0.003120             0.443970   \n",
       "315            0.581868           0.003129             0.443594   \n",
       "316            0.581876           0.003140             0.443314   \n",
       "317            0.581872           0.003151             0.443016   \n",
       "318            0.581886           0.003124             0.442692   \n",
       "319            0.581902           0.003131             0.442382   \n",
       "320            0.581906           0.003124             0.442040   \n",
       "321            0.581907           0.003112             0.441748   \n",
       "322            0.581921           0.003090             0.441465   \n",
       "323            0.581923           0.003049             0.441190   \n",
       "324            0.581925           0.003052             0.440961   \n",
       "325            0.581935           0.003028             0.440694   \n",
       "326            0.581951           0.003038             0.440507   \n",
       "327            0.581950           0.003047             0.440223   \n",
       "328            0.581947           0.003041             0.439918   \n",
       "329            0.581935           0.003046             0.439676   \n",
       "330            0.581936           0.003050             0.439425   \n",
       "331            0.581889           0.003045             0.439154   \n",
       "332            0.581881           0.003055             0.438793   \n",
       "333            0.581869           0.003041             0.438566   \n",
       "334            0.581868           0.003072             0.438280   \n",
       "335            0.581863           0.003070             0.438065   \n",
       "336            0.581853           0.003065             0.437792   \n",
       "337            0.581829           0.003082             0.437574   \n",
       "338            0.581805           0.003077             0.437330   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000131  \n",
       "1              0.000313  \n",
       "2              0.000333  \n",
       "3              0.000484  \n",
       "4              0.000493  \n",
       "5              0.000587  \n",
       "6              0.000591  \n",
       "7              0.000672  \n",
       "8              0.000652  \n",
       "9              0.000431  \n",
       "10             0.000462  \n",
       "11             0.000394  \n",
       "12             0.000442  \n",
       "13             0.000483  \n",
       "14             0.000386  \n",
       "15             0.000482  \n",
       "16             0.000526  \n",
       "17             0.000636  \n",
       "18             0.000636  \n",
       "19             0.000776  \n",
       "20             0.000759  \n",
       "21             0.000770  \n",
       "22             0.000656  \n",
       "23             0.000604  \n",
       "24             0.000477  \n",
       "25             0.000481  \n",
       "26             0.000673  \n",
       "27             0.000660  \n",
       "28             0.000780  \n",
       "29             0.000589  \n",
       "..                  ...  \n",
       "309            0.001295  \n",
       "310            0.001246  \n",
       "311            0.001304  \n",
       "312            0.001330  \n",
       "313            0.001270  \n",
       "314            0.001334  \n",
       "315            0.001288  \n",
       "316            0.001225  \n",
       "317            0.001155  \n",
       "318            0.001092  \n",
       "319            0.000996  \n",
       "320            0.001011  \n",
       "321            0.001010  \n",
       "322            0.001004  \n",
       "323            0.000892  \n",
       "324            0.000911  \n",
       "325            0.000972  \n",
       "326            0.001052  \n",
       "327            0.001115  \n",
       "328            0.001113  \n",
       "329            0.001136  \n",
       "330            0.001129  \n",
       "331            0.001242  \n",
       "332            0.001369  \n",
       "333            0.001408  \n",
       "334            0.001494  \n",
       "335            0.001512  \n",
       "336            0.001486  \n",
       "337            0.001463  \n",
       "338            0.001469  \n",
       "\n",
       "[339 rows x 4 columns]"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tmp = time.time()\n",
    "reg_alpha = list(np.logspace(-2, 2, 5))\n",
    "reg_alpha.insert(0, 0)\n",
    "reg_lambda = list(np.logspace(-2, 2, 5))\n",
    "param_search_reg = {\n",
    "    'max_depth': [ searchResult_depth_weight[0][1]['max_depth'] ],  # reuse the best max_depth selected in the previous depth/weight search\n",
    "    'min_child_weight': [ searchResult_depth_weight[0][1]['min_child_weight'] ], # reuse the best min_child_weight selected in the previous depth/weight search\n",
    "    'reg_alpha': reg_alpha,\n",
    "    'reg_lambda': reg_lambda,\n",
    "}\n",
    "\n",
    "searchResult_reg = SearchUseXgboostNative(param_search_reg, data_train_dmatrix)\n",
    "searchResult_reg.sort(key = lambda x: x[0]['test-mlogloss-mean'].min())\n",
    "print(searchResult_reg[0][1])\n",
    "print(\"Training Time: %s seconds\" % (str(time.time() - tmp)))\n",
    "searchResult_reg[0][0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "搜索最优的采样比例\n",
    "---------------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.3} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.5333333333333333} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.7666666666666666} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 1.0} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.5333333333333333, 'colsample_bytree': 0.3} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.5333333333333333, 'colsample_bytree': 0.5333333333333333} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.5333333333333333, 'colsample_bytree': 0.7666666666666666} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.5333333333333333, 'colsample_bytree': 1.0} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.7666666666666666, 'colsample_bytree': 0.3} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.7666666666666666, 'colsample_bytree': 0.5333333333333333} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.7666666666666666, 'colsample_bytree': 0.7666666666666666} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.7666666666666666, 'colsample_bytree': 1.0} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 1.0, 'colsample_bytree': 0.3} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 1.0, 'colsample_bytree': 0.5333333333333333} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 1.0, 'colsample_bytree': 0.7666666666666666} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "param :  {'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 1.0, 'colsample_bytree': 1.0} cv_result estimaters： 339 min test_score :  0.5818053333333334\n",
      "{'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.3}\n",
      "Training Time: 5192.070896625519 seconds\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style scoped>\n",
       "    .dataframe tbody tr th:only-of-type {\n",
       "        vertical-align: middle;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: right;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>test-mlogloss-mean</th>\n",
       "      <th>test-mlogloss-std</th>\n",
       "      <th>train-mlogloss-mean</th>\n",
       "      <th>train-mlogloss-std</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.042582</td>\n",
       "      <td>0.000076</td>\n",
       "      <td>1.041286</td>\n",
       "      <td>0.000131</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>0.994701</td>\n",
       "      <td>0.000091</td>\n",
       "      <td>0.992401</td>\n",
       "      <td>0.000313</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>0.953581</td>\n",
       "      <td>0.000229</td>\n",
       "      <td>0.950225</td>\n",
       "      <td>0.000333</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>0.917925</td>\n",
       "      <td>0.000334</td>\n",
       "      <td>0.913502</td>\n",
       "      <td>0.000484</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>0.886963</td>\n",
       "      <td>0.000577</td>\n",
       "      <td>0.881458</td>\n",
       "      <td>0.000493</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>0.859825</td>\n",
       "      <td>0.000613</td>\n",
       "      <td>0.853254</td>\n",
       "      <td>0.000587</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>0.835959</td>\n",
       "      <td>0.000827</td>\n",
       "      <td>0.828371</td>\n",
       "      <td>0.000591</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>0.814804</td>\n",
       "      <td>0.000953</td>\n",
       "      <td>0.806257</td>\n",
       "      <td>0.000672</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>0.796083</td>\n",
       "      <td>0.001265</td>\n",
       "      <td>0.786595</td>\n",
       "      <td>0.000652</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>0.779316</td>\n",
       "      <td>0.001536</td>\n",
       "      <td>0.768879</td>\n",
       "      <td>0.000431</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>0.764408</td>\n",
       "      <td>0.001607</td>\n",
       "      <td>0.753079</td>\n",
       "      <td>0.000462</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>0.751028</td>\n",
       "      <td>0.001742</td>\n",
       "      <td>0.738792</td>\n",
       "      <td>0.000394</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>0.739179</td>\n",
       "      <td>0.001750</td>\n",
       "      <td>0.726071</td>\n",
       "      <td>0.000442</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>0.728467</td>\n",
       "      <td>0.001782</td>\n",
       "      <td>0.714473</td>\n",
       "      <td>0.000483</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>0.718854</td>\n",
       "      <td>0.001974</td>\n",
       "      <td>0.703955</td>\n",
       "      <td>0.000386</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>0.710024</td>\n",
       "      <td>0.002090</td>\n",
       "      <td>0.694309</td>\n",
       "      <td>0.000482</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>0.702217</td>\n",
       "      <td>0.002172</td>\n",
       "      <td>0.685645</td>\n",
       "      <td>0.000526</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>0.695147</td>\n",
       "      <td>0.002152</td>\n",
       "      <td>0.677694</td>\n",
       "      <td>0.000636</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>0.688734</td>\n",
       "      <td>0.002191</td>\n",
       "      <td>0.670443</td>\n",
       "      <td>0.000636</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>0.682780</td>\n",
       "      <td>0.002201</td>\n",
       "      <td>0.663809</td>\n",
       "      <td>0.000776</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>0.677580</td>\n",
       "      <td>0.002266</td>\n",
       "      <td>0.657716</td>\n",
       "      <td>0.000759</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>0.672737</td>\n",
       "      <td>0.002238</td>\n",
       "      <td>0.652055</td>\n",
       "      <td>0.000770</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>0.668234</td>\n",
       "      <td>0.002302</td>\n",
       "      <td>0.646704</td>\n",
       "      <td>0.000656</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>0.664178</td>\n",
       "      <td>0.002341</td>\n",
       "      <td>0.641937</td>\n",
       "      <td>0.000604</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>0.660289</td>\n",
       "      <td>0.002453</td>\n",
       "      <td>0.637208</td>\n",
       "      <td>0.000477</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>0.656680</td>\n",
       "      <td>0.002446</td>\n",
       "      <td>0.632980</td>\n",
       "      <td>0.000481</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>0.653412</td>\n",
       "      <td>0.002396</td>\n",
       "      <td>0.628935</td>\n",
       "      <td>0.000673</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>0.650493</td>\n",
       "      <td>0.002465</td>\n",
       "      <td>0.625325</td>\n",
       "      <td>0.000660</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>0.647760</td>\n",
       "      <td>0.002442</td>\n",
       "      <td>0.621814</td>\n",
       "      <td>0.000780</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>0.645217</td>\n",
       "      <td>0.002465</td>\n",
       "      <td>0.618476</td>\n",
       "      <td>0.000589</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>...</th>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "      <td>...</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>309</th>\n",
       "      <td>0.581914</td>\n",
       "      <td>0.003092</td>\n",
       "      <td>0.445500</td>\n",
       "      <td>0.001295</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>310</th>\n",
       "      <td>0.581893</td>\n",
       "      <td>0.003088</td>\n",
       "      <td>0.445239</td>\n",
       "      <td>0.001246</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>311</th>\n",
       "      <td>0.581864</td>\n",
       "      <td>0.003096</td>\n",
       "      <td>0.444972</td>\n",
       "      <td>0.001304</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>312</th>\n",
       "      <td>0.581835</td>\n",
       "      <td>0.003107</td>\n",
       "      <td>0.444628</td>\n",
       "      <td>0.001330</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>313</th>\n",
       "      <td>0.581841</td>\n",
       "      <td>0.003129</td>\n",
       "      <td>0.444370</td>\n",
       "      <td>0.001270</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>314</th>\n",
       "      <td>0.581838</td>\n",
       "      <td>0.003120</td>\n",
       "      <td>0.443970</td>\n",
       "      <td>0.001334</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>315</th>\n",
       "      <td>0.581868</td>\n",
       "      <td>0.003129</td>\n",
       "      <td>0.443594</td>\n",
       "      <td>0.001288</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>316</th>\n",
       "      <td>0.581876</td>\n",
       "      <td>0.003140</td>\n",
       "      <td>0.443314</td>\n",
       "      <td>0.001225</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>317</th>\n",
       "      <td>0.581872</td>\n",
       "      <td>0.003151</td>\n",
       "      <td>0.443016</td>\n",
       "      <td>0.001155</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>318</th>\n",
       "      <td>0.581886</td>\n",
       "      <td>0.003124</td>\n",
       "      <td>0.442692</td>\n",
       "      <td>0.001092</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>319</th>\n",
       "      <td>0.581902</td>\n",
       "      <td>0.003131</td>\n",
       "      <td>0.442382</td>\n",
       "      <td>0.000996</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>320</th>\n",
       "      <td>0.581906</td>\n",
       "      <td>0.003124</td>\n",
       "      <td>0.442040</td>\n",
       "      <td>0.001011</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>321</th>\n",
       "      <td>0.581907</td>\n",
       "      <td>0.003112</td>\n",
       "      <td>0.441748</td>\n",
       "      <td>0.001010</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>322</th>\n",
       "      <td>0.581921</td>\n",
       "      <td>0.003090</td>\n",
       "      <td>0.441465</td>\n",
       "      <td>0.001004</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>323</th>\n",
       "      <td>0.581923</td>\n",
       "      <td>0.003049</td>\n",
       "      <td>0.441190</td>\n",
       "      <td>0.000892</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>324</th>\n",
       "      <td>0.581925</td>\n",
       "      <td>0.003052</td>\n",
       "      <td>0.440961</td>\n",
       "      <td>0.000911</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>325</th>\n",
       "      <td>0.581935</td>\n",
       "      <td>0.003028</td>\n",
       "      <td>0.440694</td>\n",
       "      <td>0.000972</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>326</th>\n",
       "      <td>0.581951</td>\n",
       "      <td>0.003038</td>\n",
       "      <td>0.440507</td>\n",
       "      <td>0.001052</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>327</th>\n",
       "      <td>0.581950</td>\n",
       "      <td>0.003047</td>\n",
       "      <td>0.440223</td>\n",
       "      <td>0.001115</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>328</th>\n",
       "      <td>0.581947</td>\n",
       "      <td>0.003041</td>\n",
       "      <td>0.439918</td>\n",
       "      <td>0.001113</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>329</th>\n",
       "      <td>0.581935</td>\n",
       "      <td>0.003046</td>\n",
       "      <td>0.439676</td>\n",
       "      <td>0.001136</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>330</th>\n",
       "      <td>0.581936</td>\n",
       "      <td>0.003050</td>\n",
       "      <td>0.439425</td>\n",
       "      <td>0.001129</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>331</th>\n",
       "      <td>0.581889</td>\n",
       "      <td>0.003045</td>\n",
       "      <td>0.439154</td>\n",
       "      <td>0.001242</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>332</th>\n",
       "      <td>0.581881</td>\n",
       "      <td>0.003055</td>\n",
       "      <td>0.438793</td>\n",
       "      <td>0.001369</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>333</th>\n",
       "      <td>0.581869</td>\n",
       "      <td>0.003041</td>\n",
       "      <td>0.438566</td>\n",
       "      <td>0.001408</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>334</th>\n",
       "      <td>0.581868</td>\n",
       "      <td>0.003072</td>\n",
       "      <td>0.438280</td>\n",
       "      <td>0.001494</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>335</th>\n",
       "      <td>0.581863</td>\n",
       "      <td>0.003070</td>\n",
       "      <td>0.438065</td>\n",
       "      <td>0.001512</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>336</th>\n",
       "      <td>0.581853</td>\n",
       "      <td>0.003065</td>\n",
       "      <td>0.437792</td>\n",
       "      <td>0.001486</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>337</th>\n",
       "      <td>0.581829</td>\n",
       "      <td>0.003082</td>\n",
       "      <td>0.437574</td>\n",
       "      <td>0.001463</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>338</th>\n",
       "      <td>0.581805</td>\n",
       "      <td>0.003077</td>\n",
       "      <td>0.437330</td>\n",
       "      <td>0.001469</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>339 rows × 4 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
       "0              1.042582           0.000076             1.041286   \n",
       "1              0.994701           0.000091             0.992401   \n",
       "2              0.953581           0.000229             0.950225   \n",
       "3              0.917925           0.000334             0.913502   \n",
       "4              0.886963           0.000577             0.881458   \n",
       "5              0.859825           0.000613             0.853254   \n",
       "6              0.835959           0.000827             0.828371   \n",
       "7              0.814804           0.000953             0.806257   \n",
       "8              0.796083           0.001265             0.786595   \n",
       "9              0.779316           0.001536             0.768879   \n",
       "10             0.764408           0.001607             0.753079   \n",
       "11             0.751028           0.001742             0.738792   \n",
       "12             0.739179           0.001750             0.726071   \n",
       "13             0.728467           0.001782             0.714473   \n",
       "14             0.718854           0.001974             0.703955   \n",
       "15             0.710024           0.002090             0.694309   \n",
       "16             0.702217           0.002172             0.685645   \n",
       "17             0.695147           0.002152             0.677694   \n",
       "18             0.688734           0.002191             0.670443   \n",
       "19             0.682780           0.002201             0.663809   \n",
       "20             0.677580           0.002266             0.657716   \n",
       "21             0.672737           0.002238             0.652055   \n",
       "22             0.668234           0.002302             0.646704   \n",
       "23             0.664178           0.002341             0.641937   \n",
       "24             0.660289           0.002453             0.637208   \n",
       "25             0.656680           0.002446             0.632980   \n",
       "26             0.653412           0.002396             0.628935   \n",
       "27             0.650493           0.002465             0.625325   \n",
       "28             0.647760           0.002442             0.621814   \n",
       "29             0.645217           0.002465             0.618476   \n",
       "..                  ...                ...                  ...   \n",
       "309            0.581914           0.003092             0.445500   \n",
       "310            0.581893           0.003088             0.445239   \n",
       "311            0.581864           0.003096             0.444972   \n",
       "312            0.581835           0.003107             0.444628   \n",
       "313            0.581841           0.003129             0.444370   \n",
       "314            0.581838           0.003120             0.443970   \n",
       "315            0.581868           0.003129             0.443594   \n",
       "316            0.581876           0.003140             0.443314   \n",
       "317            0.581872           0.003151             0.443016   \n",
       "318            0.581886           0.003124             0.442692   \n",
       "319            0.581902           0.003131             0.442382   \n",
       "320            0.581906           0.003124             0.442040   \n",
       "321            0.581907           0.003112             0.441748   \n",
       "322            0.581921           0.003090             0.441465   \n",
       "323            0.581923           0.003049             0.441190   \n",
       "324            0.581925           0.003052             0.440961   \n",
       "325            0.581935           0.003028             0.440694   \n",
       "326            0.581951           0.003038             0.440507   \n",
       "327            0.581950           0.003047             0.440223   \n",
       "328            0.581947           0.003041             0.439918   \n",
       "329            0.581935           0.003046             0.439676   \n",
       "330            0.581936           0.003050             0.439425   \n",
       "331            0.581889           0.003045             0.439154   \n",
       "332            0.581881           0.003055             0.438793   \n",
       "333            0.581869           0.003041             0.438566   \n",
       "334            0.581868           0.003072             0.438280   \n",
       "335            0.581863           0.003070             0.438065   \n",
       "336            0.581853           0.003065             0.437792   \n",
       "337            0.581829           0.003082             0.437574   \n",
       "338            0.581805           0.003077             0.437330   \n",
       "\n",
       "     train-mlogloss-std  \n",
       "0              0.000131  \n",
       "1              0.000313  \n",
       "2              0.000333  \n",
       "3              0.000484  \n",
       "4              0.000493  \n",
       "5              0.000587  \n",
       "6              0.000591  \n",
       "7              0.000672  \n",
       "8              0.000652  \n",
       "9              0.000431  \n",
       "10             0.000462  \n",
       "11             0.000394  \n",
       "12             0.000442  \n",
       "13             0.000483  \n",
       "14             0.000386  \n",
       "15             0.000482  \n",
       "16             0.000526  \n",
       "17             0.000636  \n",
       "18             0.000636  \n",
       "19             0.000776  \n",
       "20             0.000759  \n",
       "21             0.000770  \n",
       "22             0.000656  \n",
       "23             0.000604  \n",
       "24             0.000477  \n",
       "25             0.000481  \n",
       "26             0.000673  \n",
       "27             0.000660  \n",
       "28             0.000780  \n",
       "29             0.000589  \n",
       "..                  ...  \n",
       "309            0.001295  \n",
       "310            0.001246  \n",
       "311            0.001304  \n",
       "312            0.001330  \n",
       "313            0.001270  \n",
       "314            0.001334  \n",
       "315            0.001288  \n",
       "316            0.001225  \n",
       "317            0.001155  \n",
       "318            0.001092  \n",
       "319            0.000996  \n",
       "320            0.001011  \n",
       "321            0.001010  \n",
       "322            0.001004  \n",
       "323            0.000892  \n",
       "324            0.000911  \n",
       "325            0.000972  \n",
       "326            0.001052  \n",
       "327            0.001115  \n",
       "328            0.001113  \n",
       "329            0.001136  \n",
       "330            0.001129  \n",
       "331            0.001242  \n",
       "332            0.001369  \n",
       "333            0.001408  \n",
       "334            0.001494  \n",
       "335            0.001512  \n",
       "336            0.001486  \n",
       "337            0.001463  \n",
       "338            0.001469  \n",
       "\n",
       "[339 rows x 4 columns]"
      ]
     },
     "execution_count": 8,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "tmp = time.time()\n",
    "subsample = list(np.linspace(0.3, 1, 4))\n",
    "colsample_bytree = list(np.linspace(0.3, 1, 4))\n",
    "param_search_sample = {\n",
    "    'max_depth': [ searchResult_depth_weight[0][1]['max_depth'] ],  #use last select parameter\n",
    "    'min_child_weight': [ searchResult_depth_weight[0][1]['min_child_weight'] ], #use last select parameter\n",
    "    'reg_alpha': [ searchResult_reg[0][1]['reg_alpha'] ],\n",
    "    'reg_lambda': [ searchResult_reg[0][1]['reg_lambda'] ],\n",
    "    'subsample': subsample,\n",
    "    'colsample_bytree': colsample_bytree,\n",
    "}\n",
    "\n",
    "searchResult_sample = SearchUseXgboostNative(param_search_sample, data_train_dmatrix)\n",
    "searchResult_sample.sort(key = lambda x: x[0]['test-mlogloss-mean'].min())\n",
    "print(searchResult_sample[0][1])\n",
    "print(\"Training Time: %s seconds\" % (str(time.time() - tmp)))\n",
    "searchResult_sample[0][0]"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "模型预测\n",
    "------"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "best parameter:  {'silent': 0, 'eta': 0.1, 'objective': 'multi:softmax', 'eval_metric': 'mlogloss', 'num_class': 3, 'nthread': 7, 'tree_method': 'gpu_exact', 'max_depth': 7, 'min_child_weight': 5, 'reg_alpha': 0, 'reg_lambda': 100.0, 'subsample': 0.3, 'colsample_bytree': 0.3}\n"
     ]
    }
   ],
   "source": [
    "#use best parameter to train a model\n",
    "best_parameter = param_default.copy()\n",
    "best_parameter.update(searchResult_sample[0][1])\n",
    "n_estimators = searchResult_sample[0][0].shape[0]\n",
    "print('best parameter: ', best_parameter)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,\n",
       "       colsample_bytree=0.3, gamma=0, learning_rate=0.1, max_delta_step=0,\n",
       "       max_depth=7, min_child_weight=5, missing=None, n_estimators=339,\n",
       "       n_jobs=7, nthread=None, num_class=3, objective='multi:softmax',\n",
       "       random_state=0, reg_alpha=0, reg_lambda=100.0, scale_pos_weight=1,\n",
       "       seed=None, silent=True, subsample=0.3, tree_method='gpu_exact')"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xgb_best = XGBClassifier(\n",
    "        learning_rate = best_parameter['eta'],\n",
    "        n_jobs = best_parameter['nthread'],\n",
    "        tree_method = best_parameter['tree_method'],\n",
    "        objective= best_parameter['objective'],\n",
    "        num_class = best_parameter['num_class'],\n",
    "        n_estimators=n_estimators,\n",
    "        max_depth=best_parameter['max_depth'],\n",
    "        min_child_weight=best_parameter['min_child_weight'],\n",
    "        reg_alpha = best_parameter['reg_alpha'],\n",
    "        reg_lambda = best_parameter['reg_lambda'],\n",
    "        subsample=best_parameter['subsample'],\n",
    "        colsample_bytree=best_parameter['colsample_bytree']\n",
    "        )\n",
    "xgb_best"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=1,\n",
       "       colsample_bytree=0.3, gamma=0, learning_rate=0.1, max_delta_step=0,\n",
       "       max_depth=7, min_child_weight=5, missing=None, n_estimators=339,\n",
       "       n_jobs=7, nthread=None, num_class=3, objective='multi:softprob',\n",
       "       random_state=0, reg_alpha=0, reg_lambda=100.0, scale_pos_weight=1,\n",
       "       seed=None, silent=True, subsample=0.3, tree_method='gpu_exact')"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xgb_best.fit(data_train.drop(['interest_level'], axis = 1), data_train['interest_level'], eval_metric = best_parameter['eval_metric'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\Programs\\Anaconda3\\lib\\site-packages\\sklearn\\preprocessing\\label.py:151: DeprecationWarning: The truth value of an empty array is ambiguous. Returning False, but in future this will result in an error. Use `array.size > 0` to check that an array is not empty.\n",
      "  if diff:\n"
     ]
    }
   ],
   "source": [
    "data_verify_pred = xgb_best.predict(data_verify.drop(['interest_level'], axis = 1))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "verify Accuracy: 73.47%\n"
     ]
    }
   ],
   "source": [
    "y_pred = [round(value) for value in data_verify_pred]\n",
    "\n",
    "verify_accuracy = accuracy_score(data_verify['interest_level'], y_pred)\n",
    "print(\"verify Accuracy: %.2f%%\" % (verify_accuracy * 100.0))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "xgb_best.score(data_train.drop(['interest_level'], axis = 1), data_train['interest_level'])"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "ename": "NameError",
     "evalue": "name 'xgb_best' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
      "\u001b[1;31mNameError\u001b[0m                                 Traceback (most recent call last)",
      "\u001b[1;32m<ipython-input-1-2c8fabb74cec>\u001b[0m in \u001b[0;36m<module>\u001b[1;34m()\u001b[0m\n\u001b[0;32m      1\u001b[0m \u001b[1;31m#predict\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m----> 2\u001b[1;33m \u001b[0my_test_pred\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mxgb_best\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mpredict\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mdata_test_dmatrix\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m      3\u001b[0m \u001b[1;31m#save result\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      4\u001b[0m \u001b[0mdpath_out\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;34m'.\\\\out\\\\'\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m      5\u001b[0m \u001b[0mdata_test_result\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mdata_test_pd\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mcopy\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
      "\u001b[1;31mNameError\u001b[0m: name 'xgb_best' is not defined"
     ]
    }
   ],
   "source": [
    "#predict\n",
    "y_test_pred = xgb_best.predict(data_test_dmatrix)\n",
    "#save result\n",
    "dpath_out = '.\\\\out\\\\'\n",
    "data_test_result = data_test_pd.copy()\n",
    "data_test_result['interest_level'] = pd.Series(y_test_pred)\n",
    "data_test_result.to_csv(dpath_out + 'Rent_Listing_Inquries_result.csv')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
