{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# XGBoost Parameter Tuning for RentListingInquries"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### 4. 用GridSearchCV调整subsample与colsample_bytree"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Import the modules used for this tuning notebook\n",
    "from xgboost import XGBClassifier\n",
    "import xgboost as xgb\n",
    "\n",
    "import pandas as pd \n",
    "import numpy as np\n",
    "\n",
    "from sklearn.model_selection import GridSearchCV\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "\n",
    "from sklearn.metrics import log_loss\n",
    "\n",
    "from matplotlib import pyplot\n",
    "import seaborn as sns\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>bathrooms</th>\n",
       "      <th>bedrooms</th>\n",
       "      <th>price</th>\n",
       "      <th>price_bathrooms</th>\n",
       "      <th>price_bedrooms</th>\n",
       "      <th>room_diff</th>\n",
       "      <th>room_num</th>\n",
       "      <th>Year</th>\n",
       "      <th>Month</th>\n",
       "      <th>Day</th>\n",
       "      <th>...</th>\n",
       "      <th>walk</th>\n",
       "      <th>walls</th>\n",
       "      <th>war</th>\n",
       "      <th>washer</th>\n",
       "      <th>water</th>\n",
       "      <th>wheelchair</th>\n",
       "      <th>wifi</th>\n",
       "      <th>windows</th>\n",
       "      <th>work</th>\n",
       "      <th>interest_level</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.5</td>\n",
       "      <td>3</td>\n",
       "      <td>3000</td>\n",
       "      <td>1200.0</td>\n",
       "      <td>750.000000</td>\n",
       "      <td>-1.5</td>\n",
       "      <td>4.5</td>\n",
       "      <td>2016</td>\n",
       "      <td>6</td>\n",
       "      <td>24</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1.0</td>\n",
       "      <td>2</td>\n",
       "      <td>5465</td>\n",
       "      <td>2732.5</td>\n",
       "      <td>1821.666667</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>6</td>\n",
       "      <td>12</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>2850</td>\n",
       "      <td>1425.0</td>\n",
       "      <td>1425.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>17</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>3275</td>\n",
       "      <td>1637.5</td>\n",
       "      <td>1637.500000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>18</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>1.0</td>\n",
       "      <td>4</td>\n",
       "      <td>3350</td>\n",
       "      <td>1675.0</td>\n",
       "      <td>670.000000</td>\n",
       "      <td>-3.0</td>\n",
       "      <td>5.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>28</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 228 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   bathrooms  bedrooms  price  price_bathrooms  price_bedrooms  room_diff  \\\n",
       "0        1.5         3   3000           1200.0      750.000000       -1.5   \n",
       "1        1.0         2   5465           2732.5     1821.666667       -1.0   \n",
       "2        1.0         1   2850           1425.0     1425.000000        0.0   \n",
       "3        1.0         1   3275           1637.5     1637.500000        0.0   \n",
       "4        1.0         4   3350           1675.0      670.000000       -3.0   \n",
       "\n",
       "   room_num  Year  Month  Day       ...        walk  walls  war  washer  \\\n",
       "0       4.5  2016      6   24       ...           0      0    0       0   \n",
       "1       3.0  2016      6   12       ...           0      0    0       0   \n",
       "2       2.0  2016      4   17       ...           0      0    0       0   \n",
       "3       2.0  2016      4   18       ...           0      0    0       0   \n",
       "4       5.0  2016      4   28       ...           0      0    1       0   \n",
       "\n",
       "   water  wheelchair  wifi  windows  work  interest_level  \n",
       "0      0           0     0        0     0               1  \n",
       "1      0           0     0        0     0               2  \n",
       "2      0           0     0        0     0               0  \n",
       "3      0           0     0        0     0               2  \n",
       "4      0           0     0        0     0               2  \n",
       "\n",
       "[5 rows x 228 columns]"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Read the training data file and display the first 5 rows\n",
    "train = pd.read_csv(\"./data/RentListingInquries_FE_train.csv\")\n",
    "train.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Separate the feature columns from the target column\n",
    "y_train = train['interest_level']\n",
    "\n",
    "X_train = train.drop(['interest_level'], axis=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# The countplot from the earlier notebook showed the three target classes are imbalanced,\n",
    "# so use stratified sampling. To limit runtime, n_splits is set to 3, which may slightly\n",
    "# affect the final tuned parameters.\n",
    "kfold = StratifiedKFold(n_splits=3, shuffle=True, random_state=3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'colsample_bytree': [0.5, 0.6, 0.7, 0.8, 0.9],\n",
       " 'subsample': [0.4, 0.5, 0.6, 0.7, 0.8, 0.9]}"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Tune the two parameters together: define the search grid for subsample and colsample_bytree\n",
    "subsample = [i/10.0 for i in range(4,10)]\n",
    "colsample_bytree=[i/10.0 for i in range(5,10)]\n",
    "param_t4 = dict(subsample = subsample,colsample_bytree = colsample_bytree )\n",
    "param_t4"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Instantiate the XGBClassifier whose subsample and colsample_bytree will be tuned\n",
    "xgb4_1 = XGBClassifier(\n",
    "        learning_rate =0.1,\n",
    "        n_estimators=220,  # best n_estimators value from the first tuning round\n",
    "        max_depth=5, # best max_depth value from the second tuning round\n",
    "        min_child_weight=7, # best min_child_weight value from the third tuning round\n",
    "        gamma=0,\n",
    "        subsample=0.3,  # placeholder; overridden by the grid search below\n",
    "        colsample_bytree=0.8,  # placeholder; overridden by the grid search below\n",
    "        colsample_bylevel = 0.7,\n",
    "        objective= 'multi:softprob',\n",
    "        seed=3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "GridSearchCV(cv=StratifiedKFold(n_splits=3, random_state=3, shuffle=True),\n",
       "       error_score='raise',\n",
       "       estimator=XGBClassifier(base_score=0.5, booster='gbtree', colsample_bylevel=0.7,\n",
       "       colsample_bytree=0.8, gamma=0, learning_rate=0.1, max_delta_step=0,\n",
       "       max_depth=5, min_child_weight=7, missing=None, n_estimators=220,\n",
       "       n_jobs=1, nthread=None, objective='multi:softprob', random_state=0,\n",
       "       reg_alpha=0, reg_lambda=1, scale_pos_weight=1, seed=3, silent=True,\n",
       "       subsample=0.3),\n",
       "       fit_params=None, iid=True, n_jobs=-1,\n",
       "       param_grid={'subsample': [0.4, 0.5, 0.6, 0.7, 0.8, 0.9], 'colsample_bytree': [0.5, 0.6, 0.7, 0.8, 0.9]},\n",
       "       pre_dispatch='2*n_jobs', refit=True, return_train_score='warn',\n",
       "       scoring='neg_log_loss', verbose=0)"
      ]
     },
     "execution_count": 17,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Build the GridSearchCV for subsample/colsample_bytree, passing in the grid and estimator defined above\n",
    "gsearch4_1 = GridSearchCV(xgb4_1, param_grid = param_t4, scoring='neg_log_loss',n_jobs=-1, cv=kfold)\n",
    "gsearch4_1.fit(X_train , y_train) # train using the configured GridSearchCV instance"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('mean_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split0_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split1_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split2_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('std_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>mean_fit_time</th>\n",
       "      <th>mean_score_time</th>\n",
       "      <th>mean_test_score</th>\n",
       "      <th>mean_train_score</th>\n",
       "      <th>param_colsample_bytree</th>\n",
       "      <th>param_subsample</th>\n",
       "      <th>params</th>\n",
       "      <th>rank_test_score</th>\n",
       "      <th>split0_test_score</th>\n",
       "      <th>split0_train_score</th>\n",
       "      <th>split1_test_score</th>\n",
       "      <th>split1_train_score</th>\n",
       "      <th>split2_test_score</th>\n",
       "      <th>split2_train_score</th>\n",
       "      <th>std_fit_time</th>\n",
       "      <th>std_score_time</th>\n",
       "      <th>std_test_score</th>\n",
       "      <th>std_train_score</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>184.872241</td>\n",
       "      <td>1.107730</td>\n",
       "      <td>-0.592135</td>\n",
       "      <td>-0.522147</td>\n",
       "      <td>0.5</td>\n",
       "      <td>0.4</td>\n",
       "      <td>{'colsample_bytree': 0.5, 'subsample': 0.4}</td>\n",
       "      <td>30</td>\n",
       "      <td>-0.586561</td>\n",
       "      <td>-0.524816</td>\n",
       "      <td>-0.595289</td>\n",
       "      <td>-0.521162</td>\n",
       "      <td>-0.594556</td>\n",
       "      <td>-0.520464</td>\n",
       "      <td>7.803396</td>\n",
       "      <td>0.042900</td>\n",
       "      <td>0.003953</td>\n",
       "      <td>0.001908</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>218.749178</td>\n",
       "      <td>1.212069</td>\n",
       "      <td>-0.591104</td>\n",
       "      <td>-0.519631</td>\n",
       "      <td>0.5</td>\n",
       "      <td>0.5</td>\n",
       "      <td>{'colsample_bytree': 0.5, 'subsample': 0.5}</td>\n",
       "      <td>27</td>\n",
       "      <td>-0.585935</td>\n",
       "      <td>-0.522621</td>\n",
       "      <td>-0.593692</td>\n",
       "      <td>-0.518491</td>\n",
       "      <td>-0.593684</td>\n",
       "      <td>-0.517780</td>\n",
       "      <td>35.766181</td>\n",
       "      <td>0.170638</td>\n",
       "      <td>0.003655</td>\n",
       "      <td>0.002134</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>229.759475</td>\n",
       "      <td>1.247738</td>\n",
       "      <td>-0.589622</td>\n",
       "      <td>-0.516882</td>\n",
       "      <td>0.5</td>\n",
       "      <td>0.6</td>\n",
       "      <td>{'colsample_bytree': 0.5, 'subsample': 0.6}</td>\n",
       "      <td>22</td>\n",
       "      <td>-0.585184</td>\n",
       "      <td>-0.520676</td>\n",
       "      <td>-0.591215</td>\n",
       "      <td>-0.514633</td>\n",
       "      <td>-0.592468</td>\n",
       "      <td>-0.515338</td>\n",
       "      <td>31.127118</td>\n",
       "      <td>0.178018</td>\n",
       "      <td>0.003180</td>\n",
       "      <td>0.002698</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>233.833375</td>\n",
       "      <td>1.239404</td>\n",
       "      <td>-0.589326</td>\n",
       "      <td>-0.516019</td>\n",
       "      <td>0.5</td>\n",
       "      <td>0.7</td>\n",
       "      <td>{'colsample_bytree': 0.5, 'subsample': 0.7}</td>\n",
       "      <td>21</td>\n",
       "      <td>-0.584294</td>\n",
       "      <td>-0.518159</td>\n",
       "      <td>-0.591018</td>\n",
       "      <td>-0.513941</td>\n",
       "      <td>-0.592667</td>\n",
       "      <td>-0.515956</td>\n",
       "      <td>29.325226</td>\n",
       "      <td>0.153328</td>\n",
       "      <td>0.003622</td>\n",
       "      <td>0.001723</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>208.760940</td>\n",
       "      <td>1.165400</td>\n",
       "      <td>-0.588123</td>\n",
       "      <td>-0.515470</td>\n",
       "      <td>0.5</td>\n",
       "      <td>0.8</td>\n",
       "      <td>{'colsample_bytree': 0.5, 'subsample': 0.8}</td>\n",
       "      <td>13</td>\n",
       "      <td>-0.583541</td>\n",
       "      <td>-0.518615</td>\n",
       "      <td>-0.589462</td>\n",
       "      <td>-0.512571</td>\n",
       "      <td>-0.591366</td>\n",
       "      <td>-0.515225</td>\n",
       "      <td>8.874768</td>\n",
       "      <td>0.069143</td>\n",
       "      <td>0.003332</td>\n",
       "      <td>0.002473</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>5</th>\n",
       "      <td>226.261608</td>\n",
       "      <td>1.181068</td>\n",
       "      <td>-0.587884</td>\n",
       "      <td>-0.515914</td>\n",
       "      <td>0.5</td>\n",
       "      <td>0.9</td>\n",
       "      <td>{'colsample_bytree': 0.5, 'subsample': 0.9}</td>\n",
       "      <td>11</td>\n",
       "      <td>-0.583318</td>\n",
       "      <td>-0.516821</td>\n",
       "      <td>-0.589832</td>\n",
       "      <td>-0.515521</td>\n",
       "      <td>-0.590502</td>\n",
       "      <td>-0.515401</td>\n",
       "      <td>32.353289</td>\n",
       "      <td>0.175373</td>\n",
       "      <td>0.003240</td>\n",
       "      <td>0.000643</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>6</th>\n",
       "      <td>207.421531</td>\n",
       "      <td>1.114064</td>\n",
       "      <td>-0.591066</td>\n",
       "      <td>-0.517208</td>\n",
       "      <td>0.6</td>\n",
       "      <td>0.4</td>\n",
       "      <td>{'colsample_bytree': 0.6, 'subsample': 0.4}</td>\n",
       "      <td>26</td>\n",
       "      <td>-0.585634</td>\n",
       "      <td>-0.518937</td>\n",
       "      <td>-0.593124</td>\n",
       "      <td>-0.515653</td>\n",
       "      <td>-0.594440</td>\n",
       "      <td>-0.517034</td>\n",
       "      <td>10.328690</td>\n",
       "      <td>0.050289</td>\n",
       "      <td>0.003878</td>\n",
       "      <td>0.001346</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>7</th>\n",
       "      <td>250.996023</td>\n",
       "      <td>1.300408</td>\n",
       "      <td>-0.589885</td>\n",
       "      <td>-0.515008</td>\n",
       "      <td>0.6</td>\n",
       "      <td>0.5</td>\n",
       "      <td>{'colsample_bytree': 0.6, 'subsample': 0.5}</td>\n",
       "      <td>24</td>\n",
       "      <td>-0.585587</td>\n",
       "      <td>-0.517520</td>\n",
       "      <td>-0.591258</td>\n",
       "      <td>-0.512951</td>\n",
       "      <td>-0.592811</td>\n",
       "      <td>-0.514552</td>\n",
       "      <td>36.599875</td>\n",
       "      <td>0.175347</td>\n",
       "      <td>0.003105</td>\n",
       "      <td>0.001893</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>8</th>\n",
       "      <td>231.768590</td>\n",
       "      <td>1.110063</td>\n",
       "      <td>-0.589279</td>\n",
       "      <td>-0.514278</td>\n",
       "      <td>0.6</td>\n",
       "      <td>0.6</td>\n",
       "      <td>{'colsample_bytree': 0.6, 'subsample': 0.6}</td>\n",
       "      <td>20</td>\n",
       "      <td>-0.583893</td>\n",
       "      <td>-0.516682</td>\n",
       "      <td>-0.590819</td>\n",
       "      <td>-0.512205</td>\n",
       "      <td>-0.593125</td>\n",
       "      <td>-0.513948</td>\n",
       "      <td>11.038396</td>\n",
       "      <td>0.059316</td>\n",
       "      <td>0.003923</td>\n",
       "      <td>0.001843</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>9</th>\n",
       "      <td>259.478841</td>\n",
       "      <td>1.284073</td>\n",
       "      <td>-0.588738</td>\n",
       "      <td>-0.513262</td>\n",
       "      <td>0.6</td>\n",
       "      <td>0.7</td>\n",
       "      <td>{'colsample_bytree': 0.6, 'subsample': 0.7}</td>\n",
       "      <td>16</td>\n",
       "      <td>-0.584421</td>\n",
       "      <td>-0.516528</td>\n",
       "      <td>-0.590395</td>\n",
       "      <td>-0.510976</td>\n",
       "      <td>-0.591397</td>\n",
       "      <td>-0.512283</td>\n",
       "      <td>33.028455</td>\n",
       "      <td>0.224677</td>\n",
       "      <td>0.003079</td>\n",
       "      <td>0.002370</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>10</th>\n",
       "      <td>256.223988</td>\n",
       "      <td>1.215736</td>\n",
       "      <td>-0.588009</td>\n",
       "      <td>-0.513207</td>\n",
       "      <td>0.6</td>\n",
       "      <td>0.8</td>\n",
       "      <td>{'colsample_bytree': 0.6, 'subsample': 0.8}</td>\n",
       "      <td>12</td>\n",
       "      <td>-0.583954</td>\n",
       "      <td>-0.514940</td>\n",
       "      <td>-0.588873</td>\n",
       "      <td>-0.511263</td>\n",
       "      <td>-0.591199</td>\n",
       "      <td>-0.513417</td>\n",
       "      <td>42.277474</td>\n",
       "      <td>0.147526</td>\n",
       "      <td>0.003020</td>\n",
       "      <td>0.001509</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>11</th>\n",
       "      <td>237.015223</td>\n",
       "      <td>1.094062</td>\n",
       "      <td>-0.587830</td>\n",
       "      <td>-0.513429</td>\n",
       "      <td>0.6</td>\n",
       "      <td>0.9</td>\n",
       "      <td>{'colsample_bytree': 0.6, 'subsample': 0.9}</td>\n",
       "      <td>9</td>\n",
       "      <td>-0.583329</td>\n",
       "      <td>-0.515677</td>\n",
       "      <td>-0.589195</td>\n",
       "      <td>-0.511208</td>\n",
       "      <td>-0.590968</td>\n",
       "      <td>-0.513400</td>\n",
       "      <td>8.248604</td>\n",
       "      <td>0.082786</td>\n",
       "      <td>0.003264</td>\n",
       "      <td>0.001825</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>12</th>\n",
       "      <td>246.225417</td>\n",
       "      <td>1.278740</td>\n",
       "      <td>-0.591399</td>\n",
       "      <td>-0.514949</td>\n",
       "      <td>0.7</td>\n",
       "      <td>0.4</td>\n",
       "      <td>{'colsample_bytree': 0.7, 'subsample': 0.4}</td>\n",
       "      <td>29</td>\n",
       "      <td>-0.586760</td>\n",
       "      <td>-0.518161</td>\n",
       "      <td>-0.593476</td>\n",
       "      <td>-0.511877</td>\n",
       "      <td>-0.593963</td>\n",
       "      <td>-0.514810</td>\n",
       "      <td>27.119144</td>\n",
       "      <td>0.214588</td>\n",
       "      <td>0.003287</td>\n",
       "      <td>0.002567</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>13</th>\n",
       "      <td>246.318422</td>\n",
       "      <td>1.109730</td>\n",
       "      <td>-0.589708</td>\n",
       "      <td>-0.512774</td>\n",
       "      <td>0.7</td>\n",
       "      <td>0.5</td>\n",
       "      <td>{'colsample_bytree': 0.7, 'subsample': 0.5}</td>\n",
       "      <td>23</td>\n",
       "      <td>-0.584554</td>\n",
       "      <td>-0.514984</td>\n",
       "      <td>-0.592318</td>\n",
       "      <td>-0.510281</td>\n",
       "      <td>-0.592251</td>\n",
       "      <td>-0.513058</td>\n",
       "      <td>11.346934</td>\n",
       "      <td>0.040436</td>\n",
       "      <td>0.003644</td>\n",
       "      <td>0.001930</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>14</th>\n",
       "      <td>280.323033</td>\n",
       "      <td>1.287740</td>\n",
       "      <td>-0.588990</td>\n",
       "      <td>-0.511567</td>\n",
       "      <td>0.7</td>\n",
       "      <td>0.6</td>\n",
       "      <td>{'colsample_bytree': 0.7, 'subsample': 0.6}</td>\n",
       "      <td>17</td>\n",
       "      <td>-0.585051</td>\n",
       "      <td>-0.513526</td>\n",
       "      <td>-0.590442</td>\n",
       "      <td>-0.508610</td>\n",
       "      <td>-0.591477</td>\n",
       "      <td>-0.512564</td>\n",
       "      <td>42.165272</td>\n",
       "      <td>0.184695</td>\n",
       "      <td>0.002817</td>\n",
       "      <td>0.002127</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>15</th>\n",
       "      <td>274.789717</td>\n",
       "      <td>1.241404</td>\n",
       "      <td>-0.587423</td>\n",
       "      <td>-0.510649</td>\n",
       "      <td>0.7</td>\n",
       "      <td>0.7</td>\n",
       "      <td>{'colsample_bytree': 0.7, 'subsample': 0.7}</td>\n",
       "      <td>5</td>\n",
       "      <td>-0.582859</td>\n",
       "      <td>-0.513095</td>\n",
       "      <td>-0.588574</td>\n",
       "      <td>-0.507916</td>\n",
       "      <td>-0.590837</td>\n",
       "      <td>-0.510938</td>\n",
       "      <td>26.532031</td>\n",
       "      <td>0.154940</td>\n",
       "      <td>0.003357</td>\n",
       "      <td>0.002124</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>16</th>\n",
       "      <td>260.397561</td>\n",
       "      <td>1.107730</td>\n",
       "      <td>-0.587390</td>\n",
       "      <td>-0.509997</td>\n",
       "      <td>0.7</td>\n",
       "      <td>0.8</td>\n",
       "      <td>{'colsample_bytree': 0.7, 'subsample': 0.8}</td>\n",
       "      <td>3</td>\n",
       "      <td>-0.583205</td>\n",
       "      <td>-0.512063</td>\n",
       "      <td>-0.588367</td>\n",
       "      <td>-0.508269</td>\n",
       "      <td>-0.590597</td>\n",
       "      <td>-0.509660</td>\n",
       "      <td>12.690308</td>\n",
       "      <td>0.066809</td>\n",
       "      <td>0.003096</td>\n",
       "      <td>0.001567</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>17</th>\n",
       "      <td>290.032589</td>\n",
       "      <td>1.210403</td>\n",
       "      <td>-0.587854</td>\n",
       "      <td>-0.511694</td>\n",
       "      <td>0.7</td>\n",
       "      <td>0.9</td>\n",
       "      <td>{'colsample_bytree': 0.7, 'subsample': 0.9}</td>\n",
       "      <td>10</td>\n",
       "      <td>-0.583778</td>\n",
       "      <td>-0.514390</td>\n",
       "      <td>-0.588713</td>\n",
       "      <td>-0.508413</td>\n",
       "      <td>-0.591071</td>\n",
       "      <td>-0.512278</td>\n",
       "      <td>38.399058</td>\n",
       "      <td>0.161007</td>\n",
       "      <td>0.003039</td>\n",
       "      <td>0.002475</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>18</th>\n",
       "      <td>268.258343</td>\n",
       "      <td>1.182068</td>\n",
       "      <td>-0.591379</td>\n",
       "      <td>-0.513537</td>\n",
       "      <td>0.8</td>\n",
       "      <td>0.4</td>\n",
       "      <td>{'colsample_bytree': 0.8, 'subsample': 0.4}</td>\n",
       "      <td>28</td>\n",
       "      <td>-0.587030</td>\n",
       "      <td>-0.516277</td>\n",
       "      <td>-0.593300</td>\n",
       "      <td>-0.511764</td>\n",
       "      <td>-0.593807</td>\n",
       "      <td>-0.512572</td>\n",
       "      <td>41.055340</td>\n",
       "      <td>0.143684</td>\n",
       "      <td>0.003082</td>\n",
       "      <td>0.001965</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>19</th>\n",
       "      <td>270.094448</td>\n",
       "      <td>1.118064</td>\n",
       "      <td>-0.589180</td>\n",
       "      <td>-0.510045</td>\n",
       "      <td>0.8</td>\n",
       "      <td>0.5</td>\n",
       "      <td>{'colsample_bytree': 0.8, 'subsample': 0.5}</td>\n",
       "      <td>19</td>\n",
       "      <td>-0.585103</td>\n",
       "      <td>-0.512504</td>\n",
       "      <td>-0.590557</td>\n",
       "      <td>-0.507558</td>\n",
       "      <td>-0.591879</td>\n",
       "      <td>-0.510071</td>\n",
       "      <td>12.565007</td>\n",
       "      <td>0.050342</td>\n",
       "      <td>0.002933</td>\n",
       "      <td>0.002019</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>20</th>\n",
       "      <td>314.606661</td>\n",
       "      <td>1.242738</td>\n",
       "      <td>-0.588243</td>\n",
       "      <td>-0.508720</td>\n",
       "      <td>0.8</td>\n",
       "      <td>0.6</td>\n",
       "      <td>{'colsample_bytree': 0.8, 'subsample': 0.6}</td>\n",
       "      <td>14</td>\n",
       "      <td>-0.583982</td>\n",
       "      <td>-0.511361</td>\n",
       "      <td>-0.590071</td>\n",
       "      <td>-0.506903</td>\n",
       "      <td>-0.590676</td>\n",
       "      <td>-0.507896</td>\n",
       "      <td>37.572580</td>\n",
       "      <td>0.138018</td>\n",
       "      <td>0.003023</td>\n",
       "      <td>0.001911</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>21</th>\n",
       "      <td>281.422763</td>\n",
       "      <td>1.101730</td>\n",
       "      <td>-0.587405</td>\n",
       "      <td>-0.508287</td>\n",
       "      <td>0.8</td>\n",
       "      <td>0.7</td>\n",
       "      <td>{'colsample_bytree': 0.8, 'subsample': 0.7}</td>\n",
       "      <td>4</td>\n",
       "      <td>-0.582829</td>\n",
       "      <td>-0.510417</td>\n",
       "      <td>-0.589334</td>\n",
       "      <td>-0.506683</td>\n",
       "      <td>-0.590052</td>\n",
       "      <td>-0.507763</td>\n",
       "      <td>13.259700</td>\n",
       "      <td>0.039754</td>\n",
       "      <td>0.003249</td>\n",
       "      <td>0.001569</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>22</th>\n",
       "      <td>311.609490</td>\n",
       "      <td>1.169734</td>\n",
       "      <td>-0.586808</td>\n",
       "      <td>-0.508027</td>\n",
       "      <td>0.8</td>\n",
       "      <td>0.8</td>\n",
       "      <td>{'colsample_bytree': 0.8, 'subsample': 0.8}</td>\n",
       "      <td>1</td>\n",
       "      <td>-0.582447</td>\n",
       "      <td>-0.509414</td>\n",
       "      <td>-0.588102</td>\n",
       "      <td>-0.506329</td>\n",
       "      <td>-0.589876</td>\n",
       "      <td>-0.508337</td>\n",
       "      <td>48.983323</td>\n",
       "      <td>0.155809</td>\n",
       "      <td>0.003168</td>\n",
       "      <td>0.001278</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>23</th>\n",
       "      <td>315.158026</td>\n",
       "      <td>1.219736</td>\n",
       "      <td>-0.587514</td>\n",
       "      <td>-0.509413</td>\n",
       "      <td>0.8</td>\n",
       "      <td>0.9</td>\n",
       "      <td>{'colsample_bytree': 0.8, 'subsample': 0.9}</td>\n",
       "      <td>6</td>\n",
       "      <td>-0.583083</td>\n",
       "      <td>-0.511427</td>\n",
       "      <td>-0.588333</td>\n",
       "      <td>-0.506369</td>\n",
       "      <td>-0.591128</td>\n",
       "      <td>-0.510444</td>\n",
       "      <td>33.706019</td>\n",
       "      <td>0.141887</td>\n",
       "      <td>0.003335</td>\n",
       "      <td>0.002190</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>24</th>\n",
       "      <td>268.944049</td>\n",
       "      <td>1.124398</td>\n",
       "      <td>-0.590438</td>\n",
       "      <td>-0.511034</td>\n",
       "      <td>0.9</td>\n",
       "      <td>0.4</td>\n",
       "      <td>{'colsample_bytree': 0.9, 'subsample': 0.4}</td>\n",
       "      <td>25</td>\n",
       "      <td>-0.586719</td>\n",
       "      <td>-0.512963</td>\n",
       "      <td>-0.591736</td>\n",
       "      <td>-0.510671</td>\n",
       "      <td>-0.592858</td>\n",
       "      <td>-0.509470</td>\n",
       "      <td>11.298871</td>\n",
       "      <td>0.054067</td>\n",
       "      <td>0.002669</td>\n",
       "      <td>0.001449</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>25</th>\n",
       "      <td>317.812844</td>\n",
       "      <td>1.118731</td>\n",
       "      <td>-0.589066</td>\n",
       "      <td>-0.508616</td>\n",
       "      <td>0.9</td>\n",
       "      <td>0.5</td>\n",
       "      <td>{'colsample_bytree': 0.9, 'subsample': 0.5}</td>\n",
       "      <td>18</td>\n",
       "      <td>-0.584143</td>\n",
       "      <td>-0.511093</td>\n",
       "      <td>-0.590964</td>\n",
       "      <td>-0.506753</td>\n",
       "      <td>-0.592090</td>\n",
       "      <td>-0.508002</td>\n",
       "      <td>46.298370</td>\n",
       "      <td>0.027184</td>\n",
       "      <td>0.003511</td>\n",
       "      <td>0.001824</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>26</th>\n",
       "      <td>305.227458</td>\n",
       "      <td>1.103063</td>\n",
       "      <td>-0.588424</td>\n",
       "      <td>-0.507541</td>\n",
       "      <td>0.9</td>\n",
       "      <td>0.6</td>\n",
       "      <td>{'colsample_bytree': 0.9, 'subsample': 0.6}</td>\n",
       "      <td>15</td>\n",
       "      <td>-0.584220</td>\n",
       "      <td>-0.510471</td>\n",
       "      <td>-0.590221</td>\n",
       "      <td>-0.505106</td>\n",
       "      <td>-0.590831</td>\n",
       "      <td>-0.507045</td>\n",
       "      <td>16.131373</td>\n",
       "      <td>0.046886</td>\n",
       "      <td>0.002983</td>\n",
       "      <td>0.002218</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>27</th>\n",
       "      <td>344.928062</td>\n",
       "      <td>1.217736</td>\n",
       "      <td>-0.587722</td>\n",
       "      <td>-0.505861</td>\n",
       "      <td>0.9</td>\n",
       "      <td>0.7</td>\n",
       "      <td>{'colsample_bytree': 0.9, 'subsample': 0.7}</td>\n",
       "      <td>8</td>\n",
       "      <td>-0.583192</td>\n",
       "      <td>-0.507960</td>\n",
       "      <td>-0.589158</td>\n",
       "      <td>-0.502744</td>\n",
       "      <td>-0.590817</td>\n",
       "      <td>-0.506880</td>\n",
       "      <td>47.285013</td>\n",
       "      <td>0.181376</td>\n",
       "      <td>0.003274</td>\n",
       "      <td>0.002248</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>28</th>\n",
       "      <td>341.576204</td>\n",
       "      <td>1.274740</td>\n",
       "      <td>-0.587247</td>\n",
       "      <td>-0.506754</td>\n",
       "      <td>0.9</td>\n",
       "      <td>0.8</td>\n",
       "      <td>{'colsample_bytree': 0.9, 'subsample': 0.8}</td>\n",
       "      <td>2</td>\n",
       "      <td>-0.582958</td>\n",
       "      <td>-0.509167</td>\n",
       "      <td>-0.588256</td>\n",
       "      <td>-0.504561</td>\n",
       "      <td>-0.590527</td>\n",
       "      <td>-0.506533</td>\n",
       "      <td>46.257670</td>\n",
       "      <td>0.123997</td>\n",
       "      <td>0.003171</td>\n",
       "      <td>0.001887</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>29</th>\n",
       "      <td>275.226075</td>\n",
       "      <td>0.908385</td>\n",
       "      <td>-0.587537</td>\n",
       "      <td>-0.507966</td>\n",
       "      <td>0.9</td>\n",
       "      <td>0.9</td>\n",
       "      <td>{'colsample_bytree': 0.9, 'subsample': 0.9}</td>\n",
       "      <td>7</td>\n",
       "      <td>-0.583583</td>\n",
       "      <td>-0.511219</td>\n",
       "      <td>-0.588609</td>\n",
       "      <td>-0.505398</td>\n",
       "      <td>-0.590419</td>\n",
       "      <td>-0.507280</td>\n",
       "      <td>30.504428</td>\n",
       "      <td>0.079827</td>\n",
       "      <td>0.002892</td>\n",
       "      <td>0.002425</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "</div>"
      ],
      "text/plain": [
       "    mean_fit_time  mean_score_time  mean_test_score  mean_train_score  \\\n",
       "0      184.872241         1.107730        -0.592135         -0.522147   \n",
       "1      218.749178         1.212069        -0.591104         -0.519631   \n",
       "2      229.759475         1.247738        -0.589622         -0.516882   \n",
       "3      233.833375         1.239404        -0.589326         -0.516019   \n",
       "4      208.760940         1.165400        -0.588123         -0.515470   \n",
       "5      226.261608         1.181068        -0.587884         -0.515914   \n",
       "6      207.421531         1.114064        -0.591066         -0.517208   \n",
       "7      250.996023         1.300408        -0.589885         -0.515008   \n",
       "8      231.768590         1.110063        -0.589279         -0.514278   \n",
       "9      259.478841         1.284073        -0.588738         -0.513262   \n",
       "10     256.223988         1.215736        -0.588009         -0.513207   \n",
       "11     237.015223         1.094062        -0.587830         -0.513429   \n",
       "12     246.225417         1.278740        -0.591399         -0.514949   \n",
       "13     246.318422         1.109730        -0.589708         -0.512774   \n",
       "14     280.323033         1.287740        -0.588990         -0.511567   \n",
       "15     274.789717         1.241404        -0.587423         -0.510649   \n",
       "16     260.397561         1.107730        -0.587390         -0.509997   \n",
       "17     290.032589         1.210403        -0.587854         -0.511694   \n",
       "18     268.258343         1.182068        -0.591379         -0.513537   \n",
       "19     270.094448         1.118064        -0.589180         -0.510045   \n",
       "20     314.606661         1.242738        -0.588243         -0.508720   \n",
       "21     281.422763         1.101730        -0.587405         -0.508287   \n",
       "22     311.609490         1.169734        -0.586808         -0.508027   \n",
       "23     315.158026         1.219736        -0.587514         -0.509413   \n",
       "24     268.944049         1.124398        -0.590438         -0.511034   \n",
       "25     317.812844         1.118731        -0.589066         -0.508616   \n",
       "26     305.227458         1.103063        -0.588424         -0.507541   \n",
       "27     344.928062         1.217736        -0.587722         -0.505861   \n",
       "28     341.576204         1.274740        -0.587247         -0.506754   \n",
       "29     275.226075         0.908385        -0.587537         -0.507966   \n",
       "\n",
       "   param_colsample_bytree param_subsample  \\\n",
       "0                     0.5             0.4   \n",
       "1                     0.5             0.5   \n",
       "2                     0.5             0.6   \n",
       "3                     0.5             0.7   \n",
       "4                     0.5             0.8   \n",
       "5                     0.5             0.9   \n",
       "6                     0.6             0.4   \n",
       "7                     0.6             0.5   \n",
       "8                     0.6             0.6   \n",
       "9                     0.6             0.7   \n",
       "10                    0.6             0.8   \n",
       "11                    0.6             0.9   \n",
       "12                    0.7             0.4   \n",
       "13                    0.7             0.5   \n",
       "14                    0.7             0.6   \n",
       "15                    0.7             0.7   \n",
       "16                    0.7             0.8   \n",
       "17                    0.7             0.9   \n",
       "18                    0.8             0.4   \n",
       "19                    0.8             0.5   \n",
       "20                    0.8             0.6   \n",
       "21                    0.8             0.7   \n",
       "22                    0.8             0.8   \n",
       "23                    0.8             0.9   \n",
       "24                    0.9             0.4   \n",
       "25                    0.9             0.5   \n",
       "26                    0.9             0.6   \n",
       "27                    0.9             0.7   \n",
       "28                    0.9             0.8   \n",
       "29                    0.9             0.9   \n",
       "\n",
       "                                         params  rank_test_score  \\\n",
       "0   {'colsample_bytree': 0.5, 'subsample': 0.4}               30   \n",
       "1   {'colsample_bytree': 0.5, 'subsample': 0.5}               27   \n",
       "2   {'colsample_bytree': 0.5, 'subsample': 0.6}               22   \n",
       "3   {'colsample_bytree': 0.5, 'subsample': 0.7}               21   \n",
       "4   {'colsample_bytree': 0.5, 'subsample': 0.8}               13   \n",
       "5   {'colsample_bytree': 0.5, 'subsample': 0.9}               11   \n",
       "6   {'colsample_bytree': 0.6, 'subsample': 0.4}               26   \n",
       "7   {'colsample_bytree': 0.6, 'subsample': 0.5}               24   \n",
       "8   {'colsample_bytree': 0.6, 'subsample': 0.6}               20   \n",
       "9   {'colsample_bytree': 0.6, 'subsample': 0.7}               16   \n",
       "10  {'colsample_bytree': 0.6, 'subsample': 0.8}               12   \n",
       "11  {'colsample_bytree': 0.6, 'subsample': 0.9}                9   \n",
       "12  {'colsample_bytree': 0.7, 'subsample': 0.4}               29   \n",
       "13  {'colsample_bytree': 0.7, 'subsample': 0.5}               23   \n",
       "14  {'colsample_bytree': 0.7, 'subsample': 0.6}               17   \n",
       "15  {'colsample_bytree': 0.7, 'subsample': 0.7}                5   \n",
       "16  {'colsample_bytree': 0.7, 'subsample': 0.8}                3   \n",
       "17  {'colsample_bytree': 0.7, 'subsample': 0.9}               10   \n",
       "18  {'colsample_bytree': 0.8, 'subsample': 0.4}               28   \n",
       "19  {'colsample_bytree': 0.8, 'subsample': 0.5}               19   \n",
       "20  {'colsample_bytree': 0.8, 'subsample': 0.6}               14   \n",
       "21  {'colsample_bytree': 0.8, 'subsample': 0.7}                4   \n",
       "22  {'colsample_bytree': 0.8, 'subsample': 0.8}                1   \n",
       "23  {'colsample_bytree': 0.8, 'subsample': 0.9}                6   \n",
       "24  {'colsample_bytree': 0.9, 'subsample': 0.4}               25   \n",
       "25  {'colsample_bytree': 0.9, 'subsample': 0.5}               18   \n",
       "26  {'colsample_bytree': 0.9, 'subsample': 0.6}               15   \n",
       "27  {'colsample_bytree': 0.9, 'subsample': 0.7}                8   \n",
       "28  {'colsample_bytree': 0.9, 'subsample': 0.8}                2   \n",
       "29  {'colsample_bytree': 0.9, 'subsample': 0.9}                7   \n",
       "\n",
       "    split0_test_score  split0_train_score  split1_test_score  \\\n",
       "0           -0.586561           -0.524816          -0.595289   \n",
       "1           -0.585935           -0.522621          -0.593692   \n",
       "2           -0.585184           -0.520676          -0.591215   \n",
       "3           -0.584294           -0.518159          -0.591018   \n",
       "4           -0.583541           -0.518615          -0.589462   \n",
       "5           -0.583318           -0.516821          -0.589832   \n",
       "6           -0.585634           -0.518937          -0.593124   \n",
       "7           -0.585587           -0.517520          -0.591258   \n",
       "8           -0.583893           -0.516682          -0.590819   \n",
       "9           -0.584421           -0.516528          -0.590395   \n",
       "10          -0.583954           -0.514940          -0.588873   \n",
       "11          -0.583329           -0.515677          -0.589195   \n",
       "12          -0.586760           -0.518161          -0.593476   \n",
       "13          -0.584554           -0.514984          -0.592318   \n",
       "14          -0.585051           -0.513526          -0.590442   \n",
       "15          -0.582859           -0.513095          -0.588574   \n",
       "16          -0.583205           -0.512063          -0.588367   \n",
       "17          -0.583778           -0.514390          -0.588713   \n",
       "18          -0.587030           -0.516277          -0.593300   \n",
       "19          -0.585103           -0.512504          -0.590557   \n",
       "20          -0.583982           -0.511361          -0.590071   \n",
       "21          -0.582829           -0.510417          -0.589334   \n",
       "22          -0.582447           -0.509414          -0.588102   \n",
       "23          -0.583083           -0.511427          -0.588333   \n",
       "24          -0.586719           -0.512963          -0.591736   \n",
       "25          -0.584143           -0.511093          -0.590964   \n",
       "26          -0.584220           -0.510471          -0.590221   \n",
       "27          -0.583192           -0.507960          -0.589158   \n",
       "28          -0.582958           -0.509167          -0.588256   \n",
       "29          -0.583583           -0.511219          -0.588609   \n",
       "\n",
       "    split1_train_score  split2_test_score  split2_train_score  std_fit_time  \\\n",
       "0            -0.521162          -0.594556           -0.520464      7.803396   \n",
       "1            -0.518491          -0.593684           -0.517780     35.766181   \n",
       "2            -0.514633          -0.592468           -0.515338     31.127118   \n",
       "3            -0.513941          -0.592667           -0.515956     29.325226   \n",
       "4            -0.512571          -0.591366           -0.515225      8.874768   \n",
       "5            -0.515521          -0.590502           -0.515401     32.353289   \n",
       "6            -0.515653          -0.594440           -0.517034     10.328690   \n",
       "7            -0.512951          -0.592811           -0.514552     36.599875   \n",
       "8            -0.512205          -0.593125           -0.513948     11.038396   \n",
       "9            -0.510976          -0.591397           -0.512283     33.028455   \n",
       "10           -0.511263          -0.591199           -0.513417     42.277474   \n",
       "11           -0.511208          -0.590968           -0.513400      8.248604   \n",
       "12           -0.511877          -0.593963           -0.514810     27.119144   \n",
       "13           -0.510281          -0.592251           -0.513058     11.346934   \n",
       "14           -0.508610          -0.591477           -0.512564     42.165272   \n",
       "15           -0.507916          -0.590837           -0.510938     26.532031   \n",
       "16           -0.508269          -0.590597           -0.509660     12.690308   \n",
       "17           -0.508413          -0.591071           -0.512278     38.399058   \n",
       "18           -0.511764          -0.593807           -0.512572     41.055340   \n",
       "19           -0.507558          -0.591879           -0.510071     12.565007   \n",
       "20           -0.506903          -0.590676           -0.507896     37.572580   \n",
       "21           -0.506683          -0.590052           -0.507763     13.259700   \n",
       "22           -0.506329          -0.589876           -0.508337     48.983323   \n",
       "23           -0.506369          -0.591128           -0.510444     33.706019   \n",
       "24           -0.510671          -0.592858           -0.509470     11.298871   \n",
       "25           -0.506753          -0.592090           -0.508002     46.298370   \n",
       "26           -0.505106          -0.590831           -0.507045     16.131373   \n",
       "27           -0.502744          -0.590817           -0.506880     47.285013   \n",
       "28           -0.504561          -0.590527           -0.506533     46.257670   \n",
       "29           -0.505398          -0.590419           -0.507280     30.504428   \n",
       "\n",
       "    std_score_time  std_test_score  std_train_score  \n",
       "0         0.042900        0.003953         0.001908  \n",
       "1         0.170638        0.003655         0.002134  \n",
       "2         0.178018        0.003180         0.002698  \n",
       "3         0.153328        0.003622         0.001723  \n",
       "4         0.069143        0.003332         0.002473  \n",
       "5         0.175373        0.003240         0.000643  \n",
       "6         0.050289        0.003878         0.001346  \n",
       "7         0.175347        0.003105         0.001893  \n",
       "8         0.059316        0.003923         0.001843  \n",
       "9         0.224677        0.003079         0.002370  \n",
       "10        0.147526        0.003020         0.001509  \n",
       "11        0.082786        0.003264         0.001825  \n",
       "12        0.214588        0.003287         0.002567  \n",
       "13        0.040436        0.003644         0.001930  \n",
       "14        0.184695        0.002817         0.002127  \n",
       "15        0.154940        0.003357         0.002124  \n",
       "16        0.066809        0.003096         0.001567  \n",
       "17        0.161007        0.003039         0.002475  \n",
       "18        0.143684        0.003082         0.001965  \n",
       "19        0.050342        0.002933         0.002019  \n",
       "20        0.138018        0.003023         0.001911  \n",
       "21        0.039754        0.003249         0.001569  \n",
       "22        0.155809        0.003168         0.001278  \n",
       "23        0.141887        0.003335         0.002190  \n",
       "24        0.054067        0.002669         0.001449  \n",
       "25        0.027184        0.003511         0.001824  \n",
       "26        0.046886        0.002983         0.002218  \n",
       "27        0.181376        0.003274         0.002248  \n",
       "28        0.123997        0.003171         0.001887  \n",
       "29        0.079827        0.002892         0.002425  "
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "pd.DataFrame(gsearch4_1.cv_results_)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Best: -0.586808 using {'colsample_bytree': 0.8, 'subsample': 0.8}\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('mean_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('std_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split0_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split1_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n",
      "D:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\utils\\deprecation.py:122: FutureWarning: You are accessing a training score ('split2_train_score'), which will not be available by default any more in 0.21. If you need training scores, please set return_train_score=True\n",
      "  warnings.warn(*warn_args, **warn_kwargs)\n"
     ]
    },
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEKCAYAAADjDHn2AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzsvXl4FFXa9/85Vd3ZCHsAwyI7jCwh\n7DDKEmJYFDMSFV8QIUFERhjg9QeK44LyAyeOPjwIqFw8KGETmcAIPoIKKMsIwZBIgyxCABmCIEYQ\nBrL1Uuf9oztFN+lOQkjYPJ/rqquqznp3Eepb55yq+xZSShQKhUKhKAntZhugUCgUilsfJRYKhUKh\nKBUlFgqFQqEoFSUWCoVCoSgVJRYKhUKhKBUlFgqFQqEoFSUWCoVCoSgVJRYKhUKhKBUlFgqFQqEo\nFcvNNqCiiIiIkE2aNLnZZigUCsVtRWZm5q9SyjqllbtjxKJJkyZkZGTcbDMUCoXitkII8e+ylFPT\nUAqFQqEoFSUWCoVCoSgVJRYKhUKhKBUlFgqFQqEoFSUWCoVCoSgVJRYKhUKhKBUlFgqFQqEolTvm\nO4vyUuBwMf/ro4QG6YRYdUKtOqFBGqFW7/MrxyGe81Crjq6Jm22+QqFQ3BB+92JxqcDJe1uPYpQj\nFHmQrhFi1Uzx8BaSUKtOiNexr+BoxdICtRFs0dCUKCkUipvM714swl0Gn1ENl0XDZfVsQTrOYB1n\niIY9WKcwRKcgVKPQInBoggJNUgDkOw0KHC7y7S4KnO59vsNFgcPFfwoc7nSHQb7jSl55MMXFjwAF\nW4rERStRoPyJU3iwhaohFoItGkIoQVIoFIH53YvFhUsXOCcvUMURTJjDSgg6lgA3TpeUFEqwG1Ao\nJXYkdl3isEhcVoERIiBUR4RbsdYMJbRaCFWqhhEeHkpImJWgUAsiSMOwCBwaFDgN8u0uCp0u8u0e\nUXG4KPAIS5HIFHgdF4lRvsNFbqGTc5ftV/I9ZQqdxjVdA4smCA9xC0d4sJWqwRavc/dxtRCr+9gr\nr2qw1aecEh2F4s7ldy8W4XWsXIj/kBwsFEpBoSFwOAUuu47MD0bLC0HPD8OaX4XggnBC86sTXliN\nKkYYVWUIYTKIIMMChbi3iwAOwIFLXsIuId+Ai6bQuPeF0iBfd1KgOyiwOrEHuXCFGOihAkuIwBqq\nEx5qoXYVK6FVgggLCyE8PJTwquFUrxpOlZAwqlirYNWtxX6TYchiIx1vMXKnO7lc6OJygZPLhQ4u\nFTi5XODkUqGTSwUOfrlUwPEcJ5cLnfynwIm9DAJk1YVntGL1EpkrAhMebHWLTFGap2zRedUQd7lg\ni17B/8oKheJ6+d2LRbAGtTiH05mLy3UZKZ3uDKtnq+a/Xr4Wgl2vwm96KBACMhhpBIErGBxWpCMI\nURCMKAxGzw/FUhhKSGEo1QurEOSoguYKQXOGoLlCEIYVUShwFUgKfzOwS7AbArv0jGAMyJP5XJD5\nFBpgl5I8nFzW87BbCnBaCnEG2ZFWBzLIBcEGIsRADxZYQgVBoTpBYTrBoVbCqgQTGhZMWLVQ6upB\nNNCsWDUrFs2CVbNi1a1YtTD3sefcIixIqWF3ahTaBfl2yLNLj7g4TJG5XOB0i05h0d7BmYsFXPbK\ns7tKF50gXfMd2VwlKldGNp5jzwinSHCKxCrIol72Uygqit+9WAQFRXDvH7cDIKXEMOy4XJdxuXJx\nOnNxui7jcnrOXbm4nJd9965cnJ58lzMXp+s8Tv0yhjUXI6SgTDYILGiEoctQNFeoW0DswQh7MGH2\nYMLtIejOEIQr1BQY3RkKzmBcRjBOZzAOZ1XshcEUXgrG6dKxG8IUmkLPqMYhIRf35tAKcWl2XCIP\nl+bEJZy4NCeGZ++d5hJOjKvOpWYgNQ
MsEjSJ0CXooFlA6GCxQC2rICJUoFncm27R0CwCKdyboYEL\nDQOBS2q4XBpOz+ZwCuxO9/6CXfBzrqDwAhQ4BPmF4HRpIC1IqYP0PtYBHSl1gnQr4dZgqoYEUzUk\niLAgCxZNoGsCTRTtuXKsCXRxJV8TlCnd3Avcxz7p7rJCXJWu4WnL157S0q/YECDd7AN0IbDoGmFB\nOlZdCafi+vjdi4U3Qgh0PRhdDwZqX3d7huHE5crD5bpsCorTFJVLuJy5ftOuiNA5HF5iBGV4ZUsK\nLK5ggpwhVHOG+o5gnKFoRihShoARijQsGFJ37w0L0tAxDB2XoWO43HuXMwyXS8fltOB06bgcOi6n\nFZfD6k53WXAW1aH86xUuURbBcnmd2zG0vCvnXpu7vsu3rt0gz+nCodux6w7P3olDc4LQTJHBIzhS\nalftdVOYDEMzz6WhYXjK4FWmqC3pETKK2jH70HzLSwuSK+0gNR/xc7dZ/ht+kO5+a69KkE5YsIWw\nIN2zuY+rBFnc+cH+00KtFr95avT2+0GJRSWiaRY0rRpWa4C5rGtASukRHt/RjLPYqOcyTsdlnAWX\ncNrdxy7nJZyui7iMM7hkHobIQwpnqX16blNlNFAgDB0hLQhpAc9eGFYEV6Vh9dxAi/YWJBYwrEjp\nFqwi8XIZOi6XW7ycLh2X07N3WHC4gjCcOoZTw3BYMAosSJcVaehgWJCeNqTXsb8brmFx4Apy4LI4\ncFntOC12HJZCz1aAXc+n0NzyKLDmUaDnkicukaflkq/lYlC+N92uBYGGRVjQhAVd6F57q3uPO03D\ngiZ0z4i1SJCKHgbcDwa5hsZ/nDpOu4YzV8fh0rA7NAqdGoZL9/y76Miifx/Pubsdi5lv0ayEWkMI\nswZRJchqikmYR5SqeAlSkThVCdYJDXLnhXqE5+q0IF29LHGrocTiNkEIgcVSBYulSoW0Zxh2DMOO\nlA4Mw71J6U4zXHZcDjuGvQDDacdwFOJyFrqPizaX59zlacflwBB2DOnAkHak4cCQDnf7RXvsSJxI\n8pHCidScGDhAc3rOXWa61JwgJIIrf6RBFfHDpY6QVoSXUGG4j91CY8Fw6UinxT1ycugYLot7MyxI\nlwWXsyrSXtMtQC63wGlaEJoejG4JQteD0KxWNIsVzeo+FlYLWpCOZrVAkAWCBFg1ZLDAsIIryIlL\nOnEaVzaH4fB7bqZL/+WuLmt35WE37Nhddhwuh3ls1+04DScEX7k8wYGvXOBLintqMw/PwwIegSnQ\nkXlXRqyGtIBRJD76lQcFbyGSFjB0NNzrZUFaEMGWIIL1IIItwYRaggi1hBBqDSLMGkyVoBCqBLn3\nVYNCCA8OISwoGF1oWDSBpumeYx1dgCZ0LLqGJtz/p8wpRSEQXsdF05NFW7G8QMfiquM76BspJRa/\nUzQtCE2rkNvvdSOlBKeBdFzZDIcL6XDgshdgOOy4HB5xcrjPDafdLWAuO9Jpx+WyI11FYue4IobG\nVeKFZy8cSM3hESiHKVDuNCfSmo8M9pwLl5luaE63uGmlj8wC4vRsgMA9etNcFqxS9wiXexNYACtC\nuP+thGZF0zyCZAl2b0HBWKzBWKzhWK3B5r+ru3wQmrAiNAvCMyIRRZsnDXRcSFwSnIbEhcRpSByG\ngQMDh2HglO693XDhMFxee4dbdAxfESp0FXpEyu6TX+AqpNBpp8Bpp9CVh91Tx+Gy4zQcOKV7855u\ntXu2S97Xz+DK24flRErhufoCZNENvej4ymaWkxqa1NEMDU1a3Mdem5A6mmFByKJ8T3lz70nDXV73\nyfPs0cz2dKkh0MxyQmroFJX13QQaRjj8/eVR5b8gZUCJheKmI4QAq46w3rhXZqUhkU7DLVJFQuU0\nkE7pe+7wKuOVbjhdGM5CDIdbsMzNWYhhFHoEq9AULXPkJt2jOfcI6iqR8hIlU8i8y4hcpHYRl+bA\n6b
eOE7TKnQ6zcOWmYa6pXLXe4z62+KTJojShud9+0IOBML/lpfRaE5LCvUd4RigaEuFZNxIYUsMw\nBC5D4PTsDUMipURICRggJQIDuJImpHSPXGXRSptEuFtGCHklvyhdSIQwQBjuvSZBGGie9CubRAiH\n17FRrK7QPLb4pLvbw7uPojS8jsVVx0V5/7kbUGKhUFQ4QhOIIB2Cbs43HdIliwkQPqJV/NhV6MKZ\n73RvuU6MQhcuu4Fhd2HYXe4RmdOJdHkESbq/99GEC4QThMt9I8KF0Fyec5dHcNx5UjiRwvCc+8/D\nkyY9+QjDI2ZFx1fypCfP8ClXeFVdl09/aFfyimxEu7YPTSvmH+nKqEJI7coe4TkXZvqVtKIyvvkC\nrzwpEEUvLRjiSpvuMSZC6O7yQkMIDYGO0Dxpmu5OE/qVTdMIqdWg0i+HEguF4iYgdIHQdQi+eR8g\nSindD94uA4pGWi7pFjKXv2MD6ZLglEjDk+aUXvV9y/pty3HlGE970ump7/KywfBMTZr9upCGCymd\nJQubcLmftIvd2DWE9Nz4NR2he27AusV9rFvQNB0sOppmQVh0NIsOuoawCISuISwamMcCLO40YRHu\nPP1KOTNfd+cJXfOU96rvXdbzevWtjBILheJ3ihACdNyidZsgDWkKiyk23mLkNEBy5QZ+9Q1bF4g7\naNH5RqLEQqFQ3DYIzfMUboFreLFbUQGoL2oUCoVCUSpKLBQKhUJRKkosFAqFQlEqas1CobhOHA4H\np06doqCgbI4jFYqbQUhICA0bNsRqLR7WoCwosVAorpNTp05RtWpVmjRpcsu//qj4fSKl5Ny5c5w6\ndYqmTZuWqw01DaVQXCcFBQXUrl1bCYXilkUIQe3ata9r9KvEQqGoAJRQKG51rvdvVImFQqFQKEpF\niYVCcZtz4cIF3nvvvXLVnTNnDnl5eRVmS0pKChMmTKiw9srK1q1bGTx48DXXu1Z7T5w4wUcffXTN\n/ZSV8+fPExcXR8uWLYmLi+O3337zW07XdaKjo4mOjiY+Pr7S7PGmUsVCCDFQCHFYCHFUCDHNT36i\nECJHCGHzbGO88v4uhDgghDgkhJgr1DhfofDLrSQWdzoliYXTeR1u6z0kJycTGxtLVlYWsbGxJCcn\n+y0XGhqKzWbDZrPx6aefXne/ZaHSxEIIoQPvAoOANsAwIUQbP0VXSSmjPdsiT90/AvcCUUA7oCvQ\np7JsVShuZ6ZNm8axY8eIjo5m6tSpvPXWW3Tt2pWoqCimT58OQG5uLg8++CAdOnSgXbt2rFq1irlz\n53L69GliYmKIiYkJ2P4XX3xBp06d6NChA7GxsYD7Cfjhhx8mKiqKHj16sG/fvmL1UlNTadeuHR06\ndKB3796A+2bbq1cvOnXqRKdOndi5cyfgHhn06dOHoUOH0qpVK6ZNm8aKFSvo1q0b7du359ixYwAk\nJiYybtw4evXqRatWrfjss8+K9Zubm8vo0aPp2rUrHTt2ZN26dSVev+zsbAYOHEjr1q15/fXXAXjl\nlVd45513zDIvvfQSc+fOZdq0afzrX/8iOjqa//7v/yYlJYXHHnuMhx56iP79+wP4vf4Ay5cvp1u3\nbkRHR/PMM8/gchV3J79u3TpGjXK7Gh81ahRr164t0fYbSWW+OtsNOCqlPA4ghPgY+BNwsAx1JRCC\nOziaAKzA2UqyU6GoMF7/3wMcPP2fCm2zTf1qTH+obcD85ORk9u/fj81mY+PGjaxevZr09HSklMTH\nx7N9+3ZycnKoX78+69evB+DixYtUr16d2bNns2XLFiIiIvy2nZOTw9NPP8327dtp2rQp58+fB2D6\n9Ol07NiRtWvX8vXXXzNy5EhsNptP3RkzZvDll1/SoEEDLly4AEDdunXZtGkTISEhZGVlMWzYMDIy\nMgDYu3cvhw4dolatWjRr1owxY8aQnp7OO++8w7x585gzZw7gFpxt
27Zx7NgxYmJiOHr0qE+/s2bN\nol+/fnz44YdcuHCBbt26cf/991Oliv8ok+np6ezfv5+wsDC6du3Kgw8+yFNPPUVCQgKTJk3CMAw+\n/vhj0tPTiYqK4u233zZFKiUlhbS0NPbt20etWrXYuHEjWVlZxa5/nTp1WLVqFTt27MBqtfLss8+y\nYsUKRo4cyZgxYxg3bhxdunTh7NmzREZGAhAZGckvv/zi1+aCggK6dOmCxWJh2rRpPPzwwwH/PiqK\nyhSLBkC21/kpoLufco8IIXoDR4D/K6XMllKmCSG2AGdwi8V8KeWhSrRVobgj2LhxIxs3bqRjx44A\nXL58maysLHr16sWUKVN44YUXGDx4ML169SpTe7t27aJ3797mu/m1atUC4JtvvmHNmjUA9OvXj3Pn\nznHx4kWfuvfeey+JiYkMHTqUhIQEwP0B44QJE7DZbOi6zpEjR8zyXbt2NW+UzZs3N5/U27dvz5Yt\nW8xyQ4cORdM0WrZsSbNmzfjhhx+KXYNPP/2Ut99+G3DfWE+ePMk999zj9zfGxcVRu3ZtABISEvjm\nm2+YPHkytWvXZs+ePZw9e5aOHTuaZfzVL7ouga7/vn37yMzMpGvXrgDk5+dTt25dABYtWuS33ZI4\nefIk9evX5/jx4/Tr14/27dvTvHnza27nWqhMsfC3xiCvOv9fYKWUslAIMQ5YAvQTQrQA7gEaespt\nEkL0llJu9+lAiLHAWIC77767Qo1XKMpDSSOAG4GUkhdffJFnnnmmWF5mZiYbNmzgxRdfpH///rz6\n6qtlas/fcqGUV/9XLv5q5oIFC/j2229Zv3490dHR2Gw25s2bR7169di7dy+GYRASEmKWDw6+EgFc\n0zTzXNM0n/WAq/u5+lxKyZo1a2jdunWpv6+k9saMGUNKSgo///wzo0ePDljfe8QS6PrPmzePUaNG\n8be//a1EW+rVq8eZM2eIjIzkzJkzpqBcTf369QFo1qwZffv2Zc+ePZUuFpW5wH0KaOR13hA47V1A\nSnlOSlkUSfd/gM6e4yHALinlZSnlZeBzoMfVHUgpF0opu0gpu9SpU6fCf4BCcTtQtWpVLl1yR6ke\nMGAAH374IZcvXwbgp59+4pdffuH06dOEhYUxYsQIpkyZwnfffVesrj969uzJtm3b+PHHHwHMaaje\nvXuzYsUKwL3eEBERQbVq1XzqHjt2jO7duzNjxgwiIiLIzs7m4sWLREZGomkay5Yt8ztvXxqpqakY\nhsGxY8c4fvx4MVEYMGAA8+bNMwVtz549Jba3adMmzp8/T35+PmvXruXee+8FYMiQIXzxxRfs3r2b\nAQMGAKVfr0DXPzY2ltWrV5vTSufPn+ff//53sfrx8fEsWbIEgCVLlvCnP/2pWJnffvuNwkL3bfPX\nX39lx44dtGnjbzm4YqnMkcVuoKUQoinwE/B/gOHeBYQQkVLKM57TeKBoqukk8LQQ4m+4Ryh9gDmV\naKtCcdtSu3Zt7r33Xtq1a8egQYMYPnw4PXv2BCA8PJzly5dz9OhRpk6diqZpWK1W3n//fQDGjh3L\noEGDiIyM9JnqKaJOnTosXLiQhIQEDMMw1xxee+01kpKSiIqKIiwszLzBeTN16lSysrKQUhIbG0uH\nDh149tlneeSRR0hNTSUmJibgOkJJtG7dmj59+nD27FkWLFjgMzoB9+L05MmTiYqKQkpJkyZN/C6E\nF3Hffffx5JNPcvToUYYPH06XLl0ACAoKIiYmhho1aqB7AkRFRUVhsVjo0KEDiYmJ1KxZ06et/v37\nc+jQoWLXv02bNsycOZP+/ftjGAZWq5V3332Xxo0b+6xZTJs2jaFDh/LBBx9w9913k5qaCkBGRgYL\nFixg0aJFHDp0iGeeeQZN0zAMg2nTpt0QsRD+hpMV1rgQD+C+yevAh1LKWUKIGUCGlPJTjxjEA07g\nPPBnKeUPnjep3gN64566+kJK
+VxJfXXp0kUWLZQpFDeSQ4cOBZwPV1QsiYmJDB48mEcffbTS+zIM\ng06dOpGamkrLli0rvb8bgb+/VSFEppSyS2l1K9WRoJRyA7DhqrRXvY5fBF70U88FFJ90VSgUihvA\nwYMHGTx4MEOGDLljhOJ6UV5nFQoFAN27dzfnwotYtmwZ7du3v0kWFSclJaVc9b788kteeOEFn7Sm\nTZvyySef+C3fpk0bjh8/Xq6+7lSUWCgUCgC+/fbbm21CpTFgwABzkVpRPpRvKIVCoVCUihILhUKh\nUJSKEguFQqFQlIoSC4VCoVCUihILheI2p7wuyh944AHTwV9lceLECdq1a1epfQQiPDz8muuUx943\n3njjmvspK1JKJk6cSIsWLYiKijK/vL+avn370rp1azPGRSAHhNeDEguF4jYnkFiU5kpjw4YN1KhR\no7LM+t0QSCyklBiGcV1tf/7552RlZZGVlcXChQv585//HLDsihUrzBgXgXxKXQ/q1VmFoiL5fBr8\n/H3FtnlXexjkPwgO+MazsFqthIeHExkZic1m4+DBgzz88MNkZ2dTUFDApEmTGDt2LABNmjQhIyOD\ny5cvM2jQIO677z527txJgwYNWLduHaGhoX77O3r0KOPGjSMnJwdd10lNTaVZs2Y8//zzfP755wgh\nePnll3n88cd96h04cICkpCTsdjuGYbBmzRpatmwZ0L7w8HDGjx/P5s2bqVmzJm+88QbPP/88J0+e\nZM6cOcTHx5OSksInn3xCYWEhP/74I8OHD/eJIVHEW2+9xT/+8Q8KCwsZMmSIGbfCH06nk1GjRrFn\nzx5atWrF0qVLSUtLY/78+eZ3GZs2beL999+nVatW5OfnEx0dTdu2bZk1axaDBg0iJiaGtLQ01q5d\ny+HDh5k+fTqFhYU0b96cxYsXEx4eTmZmJs899xyXL18mIiKClJQU0+tuEevWrWPkyJEIIejRowcX\nLlwwHQ3ecKSUd8TWuXNnqVDcDA4ePHjlZMMLUn74QMVuG14osf8ff/xRtm3bVkop5ZYtW2RYWJg8\nfvy4mX/u3DkppZR5eXmybdu28tdff5VSStm4cWOZk5Mjf/zxR6nrutyzZ4+UUsrHHntMLlu2LGB/\n3bp1k//85z+llFLm5+fL3NxcuXr1ann//fdLp9Mpf/75Z9moUSN5+vRpH9smTJggly9fLqWUsrCw\nUObl5ZVoHyA3bNggpZTy4YcflnFxcdJut0ubzSY7dOggpZRy8eLF8q677pK//vqrWX/37t1SSimr\nVKkipZTyyy+/lE8//bQ0DEO6XC754IMPym3btgW8loD85ptvpJRSJiUlybfeeksahiFbt24tf/nl\nFymllMOGDZOffvqpTz9F9YUQMi0tTUopZU5OjuzVq5e8fPmylFLK5ORk+frrr0u73S579uxptvfx\nxx/LpKQkKaWU77//vnz//fellFI++OCD8l//+pfZfr9+/czf502fPn1ku3btZIcOHeSMGTOkYRh+\nf5/P36oH3O6XSr3HqpGFQlGRlDACuFF069bNjD8BMHfuXPOJODs7m6ysrGKxGZo2bUp0dDQAnTt3\n5sSJE37bvnTpEj/99BNDhgwBMJ34ffPNNwwbNgxd16lXrx59+vRh9+7dREVFmXV79uzJrFmzOHXq\nFAkJCaYbjUD2BQUFMXDgQMAd0yI4OBir1Ur79u197PMXj6LIGSAEjjFRFL3vaho1amR6nh0xYgRz\n585lypQpPPnkkyxfvpykpCTS0tJYunSp3/qNGzemRw+3k+xdu3Zx8OBBsz273U7Pnj05fPgw+/fv\nJy4uDnBPGRaNFsaNG2e2JcvgCh7cU1ANGjTg0qVLPPLIIyxbtoyRI0f6ta+8KLFQKO4wvD25bt26\nlc2bN5OWlkZYWBh9+/aloKCgWB3vWBK6rpOfn++3bX83r5LSvRk+fDjdu3dn/fr1DBgwgEWLFq
Fp\nWkD7rFareWO83vgWgWJ8+CNQe0lJSTz00EOEhITw2GOPYbH4v31eHd8iLi6OlStX+pT5/vvvadu2\nLWlpaSXa0rBhQ7Kzr8SQO3XqlBnLwpsGDRoAbhfqw4cPJz09vcLFQi1wKxS3OSXFWLh48SI1a9Yk\nLCyMH374gV27dl1XX9WqVaNhw4ZmbOjCwkLy8vLo3bs3q1atwuVykZOTw/bt2+nWrZtP3ePHj9Os\nWTMmTpxIfHw8+/btqxD7AsWjKCJQjIlAnDx50ryJr1y5kvvuuw9wBxyqX78+M2fOJDEx0SxvtVpx\nOBx+2+rRowc7duwwQ7/m5eVx5MgRWrduTU5OjtmPw+HgwIEDxerHx8ezdOlSpJTs2rWL6tWrF1uv\ncDqd/Prrr2Y7n332WaW8gabEQqG4zfGOZzF16lSfvIEDB+J0OomKiuKVV14xp0euh2XLljF37lyi\noqL44x//yM8//8yQIUOIioqiQ4cO9OvXj7///e/cddddPvVWrVpFu3btiI6O5ocffmDkyJEVYl9R\nPIro6GgeeeQRnykocMeYKIrx0b59ex599NESAxjdc889LFmyhKioKM6fP+/zBtITTzxBo0aNfOJH\njB07lqioKJ544olibdWpU4eUlBSGDRtGVFQUPXr04IcffiAoKIjVq1fzwgsv0KFDB6Kjo9m5cyfg\njjC4YMECwP16c7NmzWjRogVPP/20z1tvRdOGhYWFDBgwgKioKKKjo2nQoAFPP/30NV/H0qjUeBY3\nEhXPQnGzUPEsbh4pKSlkZGQwf/78G9LfhAkT6NixI0899dQN6a+iuWXjWSgUCsWdQufOnalSpQr/\n9V//dbNNuSkosVAoFH4ZP348O3bs8EmbNGkSSUlJN8mi4iQmJvqsH5SVc+fOERsbWyz9q6++Kvam\nWBGZmZnX3M+dhBILhULhl3ffffdmm1Bp1K5dG5vNdrPNuK1QC9wKhUKhKBUlFgqFQqEoFSUWCoVC\noSgVJRYKhUKhKBUlFgrFbU5541kAzJkzh7y8vAqzJSUlhQkTJlRYe2Vl69atDB48+JrrXau9J06c\n4KOPPrrmfsrK+fPniYuLo2XLlsTFxfHbb7/5LXfy5En69+/PPffcQ5s2bQL68qpIlFgoFLc5t5JY\n3OmUJBbe/qrKS3JyMrGxsWRlZREbG0tysn/HlCNHjmTq1KkcOnSI9PT0SolfcTXq1VmFogJ5M/1N\nfjj/Q4W2+Ydaf+CFbi8EzPeOZxEXF0fdunWLxW7Izc1l6NChnDp1CpfLxSuvvMLZs2c5ffo0MTEx\nREREsGXLFr/tf/HFF/z1r3/F5XIRERHBV199xfnz5xk9ejTHjx8nLCyMhQsX+niYBUhNTeX1119H\n13WqV6/O9u3bOXHiBE8++SToaT1WAAAgAElEQVS5ubkAzJ8/nz/+8Y9s3bqV6dOnU69ePWw2GwkJ\nCbRv35533nnH9PnUvHlzEhMTCQkJ4cCBA5w9e5bZs2cXG1Hk5ubyl7/8he+//x6n08lrr73Gn/70\np4DXLzs7m4EDB/rEw3jllVeIiIhg0qRJALz00kvUq1ePjz76iEOHDhEdHc2oUaOoWbMm69evp6Cg\ngNzcXL7++uuAsTOWL1/O3LlzsdvtdO/enffeew9d131sWbduHVu3bgVg1KhR9O3blzfffNOnzMGD\nB3E6nabH2vJEBCwPSiwUituc5ORk9u/fj81mY+PGjaxevZr09HSklMTHx7N9+3ZycnKoX78+69ev\nB9wOBqtXr87s2bPZsmULERERftvOycnh6aefZvv27TRt2pTz588DMH36dDp27MjatWv5+uuvGTly\nZLHvFmbMmMGXX35JgwYNzPCtdevWZdOmTYSEhJCVlcWwYcMoctOzd+9eDh06RK1atWjWrBljxowh\nPT2dd955h3nz5jFnzhzA/XS/bds2jh07RkxMjOmkr4hZs2
bRr18/PvzwQy5cuEC3bt24//77fbzB\nepOens7+/fsJCwuja9euPPjggzz11FMkJCQwadIkDMPg448/Jj09naioKN5++20+++wzwD2NlZaW\nxr59+6hVqxYbN24kKyur2PWvU6cOq1atYseOHVitVp599llWrFjByJEjGTNmDOPGjaNLly6cPXvW\ndBQYGRnp1+HhkSNHqFGjBgkJCfz444/cf//9JCcnFxOeikaJhUJRgZQ0ArgRBIrd0KtXL6ZMmcIL\nL7zA4MGD6dWrV5na27VrF7179zbjY9SqVQtwx69Ys2YNAP369ePcuXNcvHjRp+69995LYmIiQ4cO\nJSEhAXB7RZ0wYQI2mw1d1zly5IhZvmvXruaNsnnz5vTv3x9wx7LwHvUMHToUTdNo2bIlzZo144cf\nfEdyGzdu5NNPP+Xtt98GoKCggJMnTwb03+UvHsbkyZOpXbs2e/bs4ezZs3Ts2DHgl91xcXHmdQl0\n/fft20dmZiZdu3YFID8/35w6WrRokd92A+F0OvnXv/7Fnj17uPvuu3n88cdJSUmpdH9VSiwUijuI\nkmI3ZGZmsmHDBl588UX69+/Pq6++Wqb2/AXbKUtQngULFvDtt9+yfv16oqOjsdlszJs3j3r16rF3\n714MwzCDJ4FvTI3rjV+xZs0aWrduXervK6m9MWPGkJKSws8//8zo0aMD1r86foW/6z9v3jxGjRrF\n3/72txJtqVevnhk29cyZM37XIho2bEjHjh1p1qwZAA8//DC7du2qdLFQC9wKxW2OdzyLQLEbTp8+\nTVhYGCNGjGDKlCl89913xer6o2fPnmzbto0ff/wRwJyG6t27NytWrADcbyJFRERQrVo1n7rHjh2j\ne/fuzJgxg4iICLKzs7l48SKRkZFomsayZctwuVzX/HtTU1MxDINjx45x/PjxYqIwYMAA5s2bZwra\nnj17SmwvUDyMIUOG8MUXX7B7924GDBgAlH69Al3/2NhYVq9ebU4rnT9/nn//+9/F6sfHx7NkyRIA\nlixZ4netpWvXrvz222/k5OQA8PXXX/u4TK8s1MhCobjN8Y5nMWjQIDN2A7gXP5cvX87Ro0eZOnUq\nmqZhtVp5//33AXcshkGDBhEZGel3gbtOnTosXLiQhIQEDMMw1xxee+01kpKSiIqKIiwszLzBeTN1\n6lSysrKQUhIbG0uHDh149tlneeSRR0hNTSUmJibgOkJJtG7dmj59+nD27FkWLFjgMzoBeOWVV5g8\neTJRUVFIKWnSpIm5xuCPongYR48eZfjw4WY8jKCgIGJiYqhRo4a5HhAVFYXFYqFDhw4kJiZSs2ZN\nn7b69+/PoUOHil3/Nm3aMHPmTPr3749hGFitVt59910aN27ss2Yxbdo0hg4dygcffMDdd99Namoq\nABkZGSxYsIBFixah6zpvv/02sbGxSCnp3LlzpcSvuBoVz0KhuE5UPIsbR2JiIoMHD+bRRx+t9L4M\nw6BTp06kpqaa8cJvd64nnoWahlIoFIqrOHjwIC1atCA2NvaOEYrrRU1DKRQKALp3705hYaFP2rJl\ny2jfvv1Nsqg4KSkp5ar35Zdf8sILvm+qNW3alE8++cRv+TZt2nD8+PFy9XWnosRCoVAA8O23395s\nEyqNAQMGmIvUivKhpqEUCoVCUSpKLBQKhUJRKkosFAqFQlEqlSoWQoiBQojDQoijQohpfvIThRA5\nQgibZxvjlXe3EGKjEOKQEOKgEKJJZdqqUCgUisBUmlgIIXTgXWAQ0AYYJoTw95nhKilltGfzdpKy\nFHhLSnkP0A0o7lFLoVCU20X5Aw88YDr4qyxOnDhBu3btKrWPQJTHG2t57H3jjTeuuZ+yIqVk4sSJ\ntGjRgqioKPPL+6ux2+2MHTuWVq1a8Yc//MH021WRVObIohtwVEp5XEppBz4GAvsJ9sIjKhYp5SYA\nKeVlKaVyuq9Q+CGQWJ
TmSmPDhg3UqFGjssz63RBILKSUGIZxXW1//vnnZGVlkZWVxcKFC/nzn//s\nt9ysWbOoW7cuR44c4eDBg/Tp0+e6+vVHZb462wDI9jo/BXT3U+4RIURv4Ajwf6WU2UAr4IIQ4p9A\nU2AzME1Kee2OZBSKG8jPb7xB4aGKjWcRfM8fuOuvfw2Y7x3Pwmq1Eh4eTmRkJDabjYMHD/Lwww+T\nnZ1NQUEBkyZNYuzYsQA0adKEjIwMLl++zKBBg7jvvvvYuXMnDRo0YN26dYSGhvrt7+jRo4wbN46c\nnBx0XSc1NZVmzZrx/PPP8/nnnyOE4OWXX+bxxx/3qXfgwAGSkpKw2+0YhsGaNWto2bJlQPvCw8MZ\nP348mzdvpmbNmrzxxhs8//zznDx5kjlz5hAfH09KSgqffPIJhYWFPvEoriZQjAl/OJ1ORo0axZ49\ne2jVqhVLly4lLS2N+fPnm99lbNq0iffff59WrVqRn59PdHQ0bdu2ZdasWQwaNIiYmBjS0tJYu3Yt\nhw8fZvr06RQWFtK8eXMWL15MeHg4mZmZPPfcc1y+fJmIiAhSUlJMr7tFrFu3jpEjRyKEoEePHly4\ncMF0NOjNhx9+aHrf1TQtoMv566EyRxbFXVXC1b5F/hdoIqWMwi0IRQ5mLEAvYArQFWgGJBbrQIix\nQogMIURGkVMtheL3RnJyMs2bN8dms/HWW2+Rnp7OrFmzOHjwIOC+kWRmZpKRkcHcuXM5d+5csTay\nsrIYP348Bw4coEaNGiVOYzzxxBOMHz+evXv3snPnTiIjI/nnP/+JzWZj7969bN68malTp3LmzBmf\negsWLGDSpEnYbDYyMjJo2LBhifbl5ubSt29fMjMzqVq1Ki+//DKbNm3ik08+8fGYm56ezooVK7DZ\nbKSmpnK12x/vGBM2m43MzEy2b98e8PcdPnyYsWPHsm/fPqpVq8Z7771Hv379OHTokOm8b/HixSQl\nJZGcnExoaCg2m810rHj48GFGjhzJnj17qFKlCjNnzmTz5s189913dOnShdmzZ+NwOPjLX/7C6tWr\nyczMZPTo0bz00kvmdVqwYAHgdkTYqFEj07aGDRvy008/+dhbNJX4yiuv0KlTJx577DHOnj0b8PeV\nl8ocWZwCGnmdNwROexeQUnr/1f4PUBQS6hSwR0p5HEAIsRboAXxwVf2FwEJw+4aqSOMVivJQ0gjg\nRtGtWzcz/gTA3LlzzSfi7OxssrKyisVmaNq0KdHR0QB07tw5YEznS5cu8dNPPzFkyBAA04nfN998\nw7Bhw9B1nXr16tGnTx92797tEz2vZ8+ezJo1i1OnTpGQkGC60QhkX1BQEAMHDgTcMS2Cg4OxWq20\nb9/exz5/8SiKnAFC4BgTvXv39vsbGzVqZHqeHTFiBHPnzmXKlCk8+eSTLF++nKSkJNLS0li6dKnf\n+o0bN6ZHjx6AOx7IwYMHzfbsdjs9e/bk8OHD7N+/34x253K5zNHCuHHjzLbK4gre6XRy6tQp7r33\nXmbPns3s2bOZMmUKy5Yt82tfealMsdgNtBRCNAV+Av4PMNy7gBAiUkpZ9PgRDxzyqltTCFFHSpkD\n9AOUl0CFogx4e3LdunUrmzdvJi0tjbCwMPr27UtBQUGxOt6xJHRdJz8/32/bgRyPlsUh6fDhw+ne\nvTvr169nwIABLFq0CE3TAtpntVrNG+P1xrcIFOPDH4HaS0pK4qGHHiIkJITHHnsMi8X/7fPq+BZx\ncXGsXLnSp8z3339P27ZtSUtLK9GWhg0bkp19ZTb/1KlT1K9f36dM7dq1CQsLMwX8scce44MPfJ6r\nK4RKm4aSUjqBCcCXuEXgH1LKA0KIGUKIeE+xiUKIA0KIvcBEPFNNnrWJKcBXQojvcU9p/U9l2apQ\n3M6UFGPh4sWL1KxZk7CwMH744Qd27dp1XX1Vq1aNhg0bsnbtWgAKCwvJy8ujd+/erFq1
CpfLRU5O\nDtu3b6dbt24+dY8fP06zZs2YOHEi8fHx7Nu3r0LsCxSPoohAMSYCcfLkSfMmvnLlSu677z4A6tev\nT/369Zk5cyaJiYlmeavVisPh8NtWjx492LFjhxn6NS8vjyNHjtC6dWtycnLMfhwOBwcOHChWPz4+\nnqVLlyKlZNeuXVSvXr3YeoUQgoceesiM3f3VV19VSnyLSvUNJaXcAGy4Ku1Vr+MXgRcD1N0ERPnL\nUygUV/COZxEaGkq9evXMvIEDB7JgwQKioqJo3bq1OT1yPSxbtoxnnnmGV199FavVSmpqKkOGDCEt\nLY0OHToghODvf/87d911l8900apVq1i+fDlWq5W77rqLV199lSpVqly3fYHiURQRKMaEvyh0APfc\ncw9LlizhmWeeoWXLlj5vID3xxBPk5OT43IzHjh1LVFQUnTp1YtasWT5t1alTh5SUFIYNG2Y6aZw5\ncyatWrVi9erVTJw4kYsXL+J0Opk8eTJt27Y11yvGjRvHAw88wIYNG2jRogVhYWEsXrzYbLso+iDA\nm2++yZNPPsnkyZOpU6eOT7mKotR4FkKI5sApKWWhEKIv7hv4Uill5b6gfY2oeBaKm4WKZ3HzSElJ\nISMjg/nz59+Q/iZMmEDHjh0rPYRpZVHZ8SzWAC4hRAvcC8xNgY/KY6hCoVDcrnTu3Jl9+/YxYsSI\nm23KTaEs01CGlNIphBgCzJFSzhNClBzUVqFQ3PaMHz+eHTt2+KRNmjSJpKSkm2RRcRITE33WD8rK\nuXPniI2NLZb+1VdfFXtTrIjMzMxr7udOoixi4RBCDANGAQ950qyVZ5JCobgVePfdd2+2CZVG7dq1\nzfl+RdkoyzRUEtATmCWl/NHzKuzyyjVLoVAoFLcSpY4spJQHcb/WihCiJlBVSplc2YYpFAqF4tah\n1JGFEGKrEKKaEKIWsBdYLISYXfmmKRQKheJWoSzTUNWllP8BEoDFUsrOwP2Va5ZCoVAobiXKIhYW\nIUQkMBT4rJLtUSgU10h541kAzJkzh7y8ivP+n5KSwoQJEyqsvbKydetWBg8efM31rtXeEydO8NFH\nlfflwPnz54mLi6Nly5bExcXx22+/FSuzZcsWoqOjzS0kJMT8or4yKYtYzMDtsuOYlHK3EKIZkFW5\nZikUirJyK4nFnU5JYuHtr6q8JCcnExsbS1ZWFrGxsSQnF18ejomJwWazYbPZ+PrrrwkLC6N///7X\n3XdplGWBOxVI9To/DjxSmUYpFLcr//rHEX7NvlyhbUY0CqfX0FYB873jWcTFxVG3bt1isRtyc3MZ\nOnQop06dwuVy8corr3D27FlOnz5NTEwMERERbNmyxW/7X3zxBX/9619xuVxERETw1Vdfcf78eUaP\nHs3x48cJCwtj4cKFPh5mAVJTU3n99dfRdZ3q1auzfft2Tpw4wZNPPklubi4A8+fP549//CNbt25l\n+vTp1KtXD5vNRkJCAu3bt+edd94xfT41b96cxMREQkJCOHDgAGfPnmX27NnFRhS5ubn85S9/4fvv\nv8fpdPLaa6/xpz8FjruWnZ3NwIEDfeJhvPLKK0RERDBp0iQAXnrpJerVq8dHH33EoUOHiI6OZtSo\nUdSsWZP169dTUFBAbm4uX3/9dcDYGcuXL2fu3LnY7Xa6d+/Oe++9h67rPrasW7fO9PE0atQo+vbt\ny5tvvkkgVq9ezaBBgwgLCwtYpqIoVSyEEA2BecC9uONRfANMklKeqmTbFApFGUhOTmb//v3YbDY2\nbtzI6tWrSU9PR0pJfHw827dvJycnh/r167N+/XrA7WCwevXqzJ49my1btgQMlpOTk8PTTz/N9u3b\nadq0KefPnwdg+vTpdOzYkbVr1/L1118zcuTIYt8tzJgxgy+//JIGDRqYMRfq1q3Lpk2bCAkJISsr\ni2HDhpnxJ/bu3cuhQ4eoVasWzZo1Y8yYMaSnp/PO
O+8wb9485syZA7if7rdt28axY8eIiYkxnfQV\nMWvWLPr168eHH37IhQsX6NatG/fff7+PN1hv0tPT2b9/P2FhYXTt2pUHH3yQp556ioSEBCZNmoRh\nGHz88cekp6cTFRXF22+/zWefuWfkU1JSSEtLY9++fdSqVcsndob39a9Tpw6rVq1ix44dWK1Wnn32\nWVasWMHIkSMZM2YM48aNo0uXLpw9e9Z0FBgZGVmiw0OAjz/+mOeee67EMhVFWT7KW4zbvcdjnvMR\nnrS4yjJKobhdKWkEcCMIFLuhV69eTJkyhRdeeIHBgwfTq1evMrW3a9cuevfubcbHqFWrFuCOX1EU\nIKlfv36cO3eOixcv+tS99957SUxMZOjQoSQkJABu76oTJkzAZrOh6zpHjhwxy3ft2tW8UTZv3tyc\nWmnfvr3PqGfo0KFomkbLli1p1qyZGSHO+xp8+umnvP322wAUFBRw8uTJgP67/MXDmDx5MrVr12bP\nnj2cPXuWjh07BvyyOy4uzrwuga7/vn37yMzMpGvXrgDk5+ebjgwXLVrkt93SOHPmDN9//z0DBgwo\nV/1rpSxiUUdK6e3CMEUIMbmyDFIoFOWnpNgNmZmZbNiwgRdffJH+/fv7RJsrqb2r4zsUpV/N1eUW\nLFjAt99+y/r1600PqfPmzaNevXrs3bsXwzDM4EngG1PjeuNXrFmzhtatW5f6+0pqb8yYMaSkpPDz\nzz8zevTogPWvjl/h7/rPmzePUaNG8be//a1EW+rVq2eGTT1z5kxAz7gA//jHPxgyZAhW641xqFGW\nBe5fhRAjhBC6ZxsBFI/LqFAobgre8SwCxW44ffo0YWFhjBgxgilTpvDdd98Vq+uPnj17sm3bNn78\n8UcAcxqqd+/eZhjRrVu3EhERQbVq1XzqHjt2jO7duzNjxgwiIiLIzs7m4sWLREZGomkay5Ytw+Vy\nXfPvTU1NxTAMjh07xvHjx4uJwoABA5g3b54paHv2lOzKLlA8jCFDhvDFF1+we/du8+m9tOsV6PrH\nxsayevVqc1rp/Pnz/Pvf/y5WPz4+niVL3NGllyxZUuJay8qVKxk2bFiJv60iKcvIYjQwH/hv3GsW\nO3G7AFEoFLcA3vEsBg0axPDhw4vFbjh69ChTp05F0zSsVivvv/8+4I7FMGjQICIjI/0ucNepU4eF\nCxeSkJCAYRjmmsNrr71GUlISUVFRhIWFmTc4b6ZOnUpWVhZSSmJjY+nQoQPPPvssjzzyCKmpqcTE\nxARcRyiJ1q1b06dPH86ePcuCBQt8RifgjkU9efJkoqKikFLSpEkTc43BH4HiYQQFBRETE0ONGjXM\nheioqCgsFgsdOnQgMTGRmjVr+rQVKHZGmzZtmDlzJv3798cwDKxWK++++y6NGzf2WbOYNm0aQ4cO\n5YMPPuDuu+8mNdX9blFGRgYLFiwwp6xOnDhBdnY2ffr0uebrV15KjWfht5IQk6WUcyrBnnKj4lko\nbhYqnsWNIzExkcGDB/Poo49Wel+GYdCpUydSU1PNeOG3O5Udz8IfN2b5XaFQKG4CBw8epEWLFsTG\nxt4xQnG9lDesavEVL4VCcVvTvXt3M/RnEcuWLaN9+/Y3yaLipKSklKvel19+yQsvvOCT1rRpUz75\n5BO/5du0acPx48fL1dedSnnF4trnrhQKxS3Nt99+e7NNqDQGDBhww14xvVMJKBZCiEv4FwUBhFaa\nRQqFQqG45QgoFlLKqjfSEIVCoVDcupR3gVuhUCgUvyOUWCgUCoWiVJRYKBS3OeV1Uf7AAw+YDv4q\nixMnTtCuXbtK7SMQ4eHh11ynPPa+8cYb19xPWZFSMnHiRFq0aEFUVJT55b03ly5d8olvERERweTJ\nFe+RSYmFQnGbE0gsSnOlsWHDBmrUqFFZZv1uCCQWUkoMw7iutj///HOysrLIyspi4cKF/PnPfy5W\npmrVqmZ8C5vN
RuPGjU3HjRVJWVyU+3sr6iKQAfx/nvgWCoUC2JKykF/+XbH/Jeo2bkZM4tiA+d7x\nLKxWK+Hh4URGRmKz2Th48CAPP/ww2dnZFBQUMGnSJMaOdbfVpEkTMjIyuHz5MoMGDeK+++5j586d\nNGjQgHXr1hEa6v+lx6NHjzJu3DhycnLQdZ3U1FSaNWvG888/z+eff44QgpdffpnHH3/cp96BAwdI\nSkrCbrdjGAZr1qyhZcuWAe0LDw9n/PjxbN68mZo1a/LGG2/w/PPPc/LkSebMmUN8fDwpKSl88skn\nFBYW+sSjuJpAMSb84XQ6GTVqFHv27KFVq1YsXbqUtLQ05s+fb36XsWnTJt5//31atWpFfn4+0dHR\ntG3bllmzZjFo0CBiYmJIS0tj7dq1HD58mOnTp1NYWEjz5s1ZvHgx4eHhZGZm8txzz3H58mUiIiJI\nSUkxve4WsW7dOkaOHIkQgh49enDhwgXT0aA/srKy+OWXX8rsVfhaKMvIYjYwFWgANASmAP8DfAx8\nWOEWKRSKayI5OZnmzZtjs9l46623SE9PZ9asWRw8eBCADz/8kMzMTDIyMpg7dy7nzhX3A5qVlcX4\n8eM5cOAANWrUMN2P++OJJ55g/Pjx7N27l507dxIZGck///lPbDYbe/fuZfPmzUydOpUzZ8741Fuw\nYAGTJk3CZrORkZFBw4YNS7QvNzeXvn37kpmZSdWqVXn55ZfZtGkTn3zyiY/H3PT0dFasWIHNZiM1\nNZWr3f54x5iw2WxkZmayffv2gL/v8OHDjB07ln379lGtWjXee+89+vXrx6FDh8jJyQFg8eLFJCUl\nkZycTGhoKDabzXSsePjwYUaOHMmePXuoUqUKM2fOZPPmzXz33Xd06dKF2bNn43A4+Mtf/sLq1avJ\nzMxk9OjRvPTSS+Z1WrBgAeB2RNioUSPTtoYNG/LTTz8FtH3lypU8/vjjfj0FXy9l+ShvoJSyu9f5\nQiHELinlDCHEXyvcIoXiNqakEcCNolu3bmb8CYC5c+eaT8TZ2dlkZWUVi83QtGlToqOjAejcuTMn\nTpzw2/alS5f46aefGDJkCIDpxO+bb75h2LBh6LpOvXr16NOnD7t37/aJntezZ09mzZrFqVOnSEhI\nMN1oBLIvKCiIgQMHAu6YFsHBwVitVtq3b+9jn794FEXOACFwjInevXv7/Y2NGjUyPc+OGDGCuXPn\nMmXKFJ588kmWL19OUlISaWlpLF261G/9xo0b06NHD8AdD+TgwYNme3a7nZ49e3L48GH2799PXJw7\nLJDL5TJHC+PGjTPbKosreG8+/vhjli1bFjD/eiiLWBhCiKHAas+5twcv9SW3QnGL4e3JdevWrWze\nvJm0tDTCwsLo27cvBQUFxep4x5LQdZ38/Hy/bQdyPFoWh6TDhw+ne/furF+/ngEDBrBo0SI0TQto\nn9VqNW+M1xvfIlCMD38Eai8pKYmHHnqIkJAQHnvsMSwW/7fPq+NbxMXFsXLlSp8y33//PW3btiUt\nLa1EWxo2bEh2drZ5furUKerXr++37N69e3E6nXTu3LnENstLWaahngCeBH7xbE8CI4QQocCESrFK\noVCUmZJiLFy8eJGaNWsSFhbGDz/8wK5du66rr2rVqtGwYUPWrl0LQGFhIXl5efTu3ZtVq1bhcrnI\nyclh+/btdOvWzafu8ePHadasGRMnTiQ+Pp59+/ZViH2B4lEUESjGRCBOnjxp3sRXrlzJfffdB0D9\n+vWpX78+M2fOJDEx0SxvtVpxOBx+2+rRowc7duwwQ7/m5eVx5MgRWrduTU5OjtmPw+HgwIEDxerH\nx8ezdOlSpJTs2rWL6tWrB1yvqOz4FqWOLDwL2A8FyP6mYs1RKBTXinc8i9DQUOrVq2fmDRw4kAUL\nFhAVFUXr1q3N6ZHrYdmyZTzzzDO8+uqrWK1WUlNTGTJkCGlpaXTo0AEhBH//+9
+56667fKaLVq1a\nxfLly7Fardx11128+uqrVKlS5brtCxSPoohAMSYCRaG75557WLJkCc888wwtW7b0eQPpiSeeICcn\nhzZt2phpY8eOJSoqik6dOjFr1iyfturUqUNKSgrDhg0znTTOnDmTVq1asXr1aiZOnMjFixdxOp1M\nnjyZtm3bmusV48aN44EHHmDDhg20aNGCsLAwFi++ErS0KPpgEf/4xz/YsGHDNV+/slJqPAshRENg\nHnAv7mmnb4BJUspTlWZVOVDxLBQ3CxXP4uaRkpJCRkYG8+fPvyH9TZgwgY4dO/LUU0/dkP4qmsqO\nZ7EY+BSoj/uNqP/1pCkUCsXvhs6dO7Nv3z5GjBhxs025KZRlgbuOlNJbHFKEEBX/eaBCobilGD9+\nPDt27PBJmzRpEklJt05U5cTERJ/1g7Jy7tw5YmNji6V/9dVXxd4UKyIzM/Oa+7mTKItY/CqEGAEU\nLecPA4q/qK1QKO4o3n333ZttQqVRu3Ztn/l+RemUZRpqNDAU+Bk4g/vV2Vvn0UKhUCgUlU6pYiGl\nPCmljJdS1pFS1pVSPgyUyfGIEGKgEOKwEOKoEGKan/xEIUSOEMLm2cZclV9NCPGTEOLGrF4pFAqF\nwi/ldST4XGkFhBA68K2LmRoAACAASURBVC4wCGgDDBNCtPFTdJWUMtqzLboq7/8HtpXTRoVCoVBU\nEOUVi7I4HukGHJVSHpdS2nH7kvpTmTsQojNQD9hYPhMVCoVCUVGUVyzK4uajAZDtdX7Kk3Y1jwgh\n9gkhVgshGgEIITTgv3A7MFQoFCVQ3ngWAHPmzCEvL6/CbElJSWHChBvv2GHr1q0MHjz4mutdq70n\nTpzgo48+uuZ+ysr58+eJi4ujZcuWxMXF8dtvv/kt9/zzz9O2bVvuueceJk6cWCZ3K9dLQLEQQlwS\nQvzHz3YJ9zcXpeFv9HH1L/pfoImUMgrYDCzxpD8LbJBSZlMCQoixQogMIURGkTdIheL3xq0kFnc6\nJYmFt7+q8pKcnExsbCxZWVnExsaSnJxcrMzOnTvZsWMH+/btY//+/ezevZtt2yp/tj7gq7NSyqrX\n2fYpoJHXeUPg9FV9eL+C+z/Am57jnkAvIcSzQDgQJP5fe28eH0WR//8/a64kk5CQkBACKAICP0EC\nXhyuIodciqyg4oIKCaKioPjbL+i6iqgfUFZdvwoofPiwGoRV2eACfhYUUTlWN5wSbiEQERANkYRA\nzrnq+0fPdGYyM5lcE656Ph5Nd1dXVVd1mHp1Hf1+C1EspfxTlfQLgYWgfcFdz/IqFPXmzP8ewXay\npEHztLSMpuld7YNe9/ZnMXDgQJo3b+7nu6GkpIRRo0Zx4sQJnE4n06dPJy8vj5MnT9KvXz8SExNZ\nv359wPy/+OIL/vznP+N0OklMTOTrr7+moKCA8ePHk5ubi9VqZeHChT4WZgEyMzN5+eWXMRqNxMXF\nsWnTJo4ePcpDDz1ESYn2jObNm8fNN9/Mhg0bmDFjBsnJyWRnZzNy5Ei6du3KO++8o9t8at++PWlp\naURGRrJv3z7y8vJ46623/HoUJSUlPPnkk+zZsweHw8FLL73E738ffAT8+PHjDBkyxMcfxvTp00lM\nTGTKlCkAPP/88yQnJ/PRRx9x4MABunfvzrhx44iPj2f16tWUl5dTUlLCN998E9R3xtKlS5kzZw42\nm42ePXvy3nvvYTQafcqyatUqNmzYAMC4cePo27cvf/nLX3ziCCEoLy/HZrMhpcRut/uYeAkXNfnO\noq5sAzoIIdoCPwN/AMZ4RxBCpEgpPUbvhwMHAKSUD3jFSQNurCoUCoVCY/bs2ezdu5fs7Gy+/PJL\nli9fztatW5FSMnz4cDZt2kR+fj4tW7Zk9erVgGZgMC4ujrfeeov169eTmJgYMO/8/HweeeQRNm3a\nRNu2bSkoKABgxowZXHfddaxcuZJvvvmGsW
PH+n238Morr7B27VpatWqlu29t3rw569atIzIykpyc\nHEaPHq37n9i1axcHDhwgISGBdu3aMWHCBLZu3co777zD3LlzefvttwHt7X7jxo0cOXKEfv366Ub6\nPMyaNYv+/fvz/vvvc+bMGXr06MHtt9/uYw3Wm61bt7J3716sVis33XQTd955Jw8//DAjR45kypQp\nuFwuPvnkE7Zu3Upqaipvvvkm//rXvwBtGCsrK4vdu3eTkJDg4zvD+/knJSWxbNkyvvvuO8xmM088\n8QR///vfGTt2LBMmTGDixInceOON5OXl6YYCU1JSAho87N27N/369SMlJQUpJZMnT24UczNhEwsp\npUMIMRlYCxiB96WU+4QQrwDbpZSfAU8JIYYDDqAASAtXeRSKxqC6HkBjEMx3w6233srUqVN59tln\nGTZsWI09qW3evJk+ffro/jESEhIAzX+Fx0FS//79OX36NEVFRT5pf/e735GWlsaoUaN0N592u53J\nkyeTnZ2N0Wjk0KFDevybbrpJbyjbt2/PoEGDAM2XhXevZ9SoURgMBjp06EC7du344Ycf/J7BZ599\nxptvvglAeXk5x44dC9qgBvKH8fTTT9OsWTN27txJXl4e1113XdAvuwcOHKg/l2DPf/fu3ezYsYOb\nbroJgLKyMt2Q4aJFVReBVs/hw4c5cOAAJ06c0O+/adOmoP45Gopw9iyQUq4B1lQJe9Hr+DnguRB5\nZAAZYSieQnHJUZ3vhh07drBmzRqee+45Bg0a5ONtrrr8AjnbqYlTngULFrBlyxZWr16tW0idO3cu\nycnJ7Nq1C5fLpTtPAl+fGvX1X/Hpp5/SqVOnkPWrLr8JEyaQkZHBr7/+yvjx44Omr+q/ItDznzt3\nLuPGjeO1116rtizJycm629RffvkloGXcFStW0KtXL2JiYgAYOnSoLurhpK6roRQKxQWCtz+LYL4b\nTp48idVq5cEHH2Tq1Kl8//33fmkD0bt3bzZu3MiPP/4IoA9D9enTR3cjumHDBhITE4mNjfVJe+TI\nEXr27Mkrr7xCYmIix48fp6ioiJSUFAwGA0uWLMHpdNa6vpmZmbhcLo4cOUJubq6fKAwePJi5c+fq\ngrZz585q8wvmD2PEiBF88cUXbNu2jcGDBwOhn1ew5z9gwACWL1+uDysVFBTw008/+aUfPnw4ixdr\n63wWL14ccK7lyiuvZOPGjTgcDux2Oxs3bry4h6EUCkXj4O3PYujQoYwZM8bPd8Phw4eZNm0aBoMB\ns9nM/PnzAc0Xw9ChQ0lJSQk4wZ2UlMTChQsZOXIkLpdLn3N46aWXSE9PJzU1FavVqjdw3kybNo2c\nnByklAwYMIBu3brxxBNPcM8995CZmUm/fv2CziNUR6dOnbjtttvIy8tjwYIFPr0TgOnTp/P000+T\nmpqKlJKrrrpKn2MIRDB/GBaLhX79+tG0aVN9Ijo1NRWTyUS3bt1IS0sjPj7eJ69gvjM6d+7MzJkz\nGTRoEC6XC7PZzLvvvkubNm185iz+9Kc/MWrUKP72t79x5ZVXkpmZCcD27dtZsGABixYt4t577+Wb\nb76ha9euCCEYMmQId90VzOVQwxHSn8XFgvJnoThfKH8WjUdaWhrDhg3j3nvvDR25nrhcLq6//noy\nMzN1f+EXO+H2Z6FQKBSXFfv37+fqq69mwIABl4xQ1Bc1DKVQKADo2bOn7vrTw5IlS+jatet5KpE/\nGRkZdUq3du1ann32WZ+wtm3bsmLFioDxO3fuTG5ubp3udamixEKhUACwZcuW812EsDF48GB9klpR\nN9QwlEKhUChCosRCoVAoFCFRYqFQKBSKkCixUCgUCkVIlFgoFBc5dTVRfscdd+gG/sLF0aNHufba\na8N6j2B4zGHUhrqU99VXX631fWqKlJKnnnqKq6++mtTUVP3L+6p8/PHHdO3aldTUVIYMGcJvv/3W\n4GVRYq
FQXOQEE4tQpjTWrFlD06ZNw1Wsy4ZgYiGlxOVy1Svvzz//nJycHHJycli4cCGPP/64XxyH\nw8GUKVNYv349u3fvJjU1lXnz5tXrvoFQS2cVigbk888/59dff23QPFu0aMHQoUODXvf2Z2E2m4mJ\niSElJYXs7Gz279/P3XffzfHjxykvL2fKlCk8+uijAFx11VVs376d4uJihg4dyi233MJ//vMfWrVq\nxapVq4iKigp4v8OHDzNx4kTy8/MxGo1kZmbSrl07nnnmGT7//HOEELzwwgvcf//9Pun27dtHeno6\nNpsNl8vFp59+SocOHYKWLyYmhkmTJvHVV18RHx/Pq6++yjPPPMOxY8d4++23GT58OBkZGaxYsYKK\nigoffxRVCeZjIhAOh4Nx48axc+dOOnbsyIcffkhWVhbz5s3Tv8tYt24d8+fPp2PHjpSVldG9e3e6\ndOnCrFmzGDp0KP369SMrK4uVK1dy8OBBZsyYQUVFBe3bt+eDDz4gJiaGHTt28Mc//pHi4mISExPJ\nyMjQre56WLVqFWPHjkUIQa9evThz5oxuaNCDlBIpJSUlJTRr1oyzZ89y9dVXB61fXVE9C4XiImf2\n7Nm0b9+e7Oxs3njjDbZu3cqsWbPYv38/AO+//z47duxg+/btzJkzh9OnT/vlkZOTw6RJk9i3bx9N\nmzbVzY8H4oEHHmDSpEns2rWL//znP6SkpPDPf/6T7Oxsdu3axVdffcW0adP45ZdffNItWLCAKVOm\nkJ2dzfbt22ndunW15SspKaFv377s2LGDJk2a8MILL7Bu3TpWrFjhYzF369at/P3vfyc7O5vMzEyq\nmv3x9jGRnZ3Njh072LRpU9D6HTx4kEcffZTdu3cTGxvLe++9R//+/Tlw4AAej5wffPAB6enpzJ49\nm6ioKLKzs3XDigcPHmTs2LHs3LmT6OhoZs6cyVdffcX333/PjTfeyFtvvYXdbufJJ59k+fLl7Nix\ng/Hjx/P888/rz2nBggWAZojwiisqfci1bt2an3/+2ae8HltfXbt2pWXLluzfv5+HH344aP3qiupZ\nKBQNSHU9gMaiR48euv8JgDlz5uhvxMePHycnJ8fPN0Pbtm3p3r07ADfccANHjx4NmPe5c+f4+eef\nGTFiBIBuxO/bb79l9OjRGI1GkpOTue2229i2bZuP97zevXsza9YsTpw4wciRI3UzGsHKZ7FYGDJk\nCKD5tIiIiMBsNtO1a1ef8gXyR+ExBgjBfUwEM+l9xRVX6JZnH3zwQebMmcPUqVN56KGHWLp0Kenp\n6WRlZfHhhx8GTN+mTRt69eoFaP5A9u/fr+dns9no3bs3Bw8eZO/evQwcOBDQhgw9vYWJEyfqedXE\nFLzdbmf+/Pns3LmTdu3a8eSTT/Laa6/xwgsvBCxfXVFioVBcYnhbct2wYQNfffUVWVlZWK1W+vbt\nS3l5uV8ab18SRqORsrKygHkHMzxaE4OkY8aMoWfPnqxevZrBgwezaNEiDAZD0PKZzWa9Yayvf4tg\nPj4CESy/9PR07rrrLiIjI7nvvvswmQI3n1X9WwwcOJCPP/7YJ86ePXvo0qULWVlZ1ZaldevWHD9+\nXD8/ceIELVu29Inj8VDYvr3meGvUqFEBfXfXFzUMpVBc5FTnY6GoqIj4+HisVis//PADmzdvrte9\nYmNjad26NStXrgSgoqKC0tJS+vTpw7Jly3A6neTn57Np0yZ69OjhkzY3N5d27drx1FNPMXz4cHbv\n3t0g5Qvmj8JDMB8TwTh27JjeiH/88cfccsstALRs2ZKWLVsyc+ZM0tLS9Phmsxm73R4wr169evHd\nd9/prl9LS0s5dOgQnTp1Ij8/X7+P3W5n3759fumHDx/Ohx9+iJSSzZs3ExcX5zev0apVK/bv368P\nka1bty4sVpBVz0KhuMjx9mcRFRVFcnKyfm3IkCEsWLCA1NRUOnXqpA+P
1IclS5bw2GOP8eKLL2I2\nm8nMzGTEiBFkZWXRrVs3hBC8/vrrtGjRwme4aNmyZSxduhSz2UyLFi148cUXiY6Ornf5gvmj8BDM\nx0QgL3QA11xzDYsXL+axxx6jQ4cOPiuQHnjgAfLz8+ncubMe9uijj5Kamsr111/PrFmzfPJKSkoi\nIyOD0aNH60YaZ86cSceOHVm+fDlPPfUURUVFOBwOnn76abp06aLPV0ycOJE77riDNWvWcPXVV2O1\nWvnggw/0vD3eB1u2bMmMGTPo06cPZrOZNm3a1NngYnUofxYKRT1R/izOHxkZGWzfvj0sS0UDMXny\nZK677rqwTCA3BvXxZ6F6FgqFQlEDbrjhBqKjo/nrX/96votyXlBioVAoAjJp0iS+++47n7ApU6aQ\nnp5+nkrkT1pams/8QU05ffo0AwYM8Av/+uuv/VaKedixY0et73MpocRCoVAE5N133z3fRQgbzZo1\n01cRKWqGWg2lUCgUipAosVAoFApFSJRYKBQKhSIkSiwUCoVCERIlFgrFRU5d/VkAvP3225SWljZY\nWTIyMpg8eXKD5VdTNmzYwLBhw2qdrrblPXr0KB999FGt71NTCgoKGDhwIB06dGDgwIEUFhYGjPfs\ns89y7bXXcu2117Js2bKwlccbJRYKxUXOhSQWlzrViYW3vaq6Mnv2bAYMGEBOTg4DBgwIaONp9erV\nfP/992RnZ7NlyxbeeOMNzp49W+97h0ItnVUoGpBDh/6Lc8UHGjTPJjHX0LHj9KDXvf1ZDBw4kObN\nm/v5bigpKWHUqFGcOHECp9PJ9OnTycvL4+TJk/Tr14/ExETWr18fMP8vvviCP//5zzidThITE/n6\n668pKChg/Pjx5ObmYrVaWbhwoY+FWYDMzExefvlljEYjcXFxbNq0iaNHj/LQQw9RUlICwLx587j5\n5pvZsGEDM2bMIDk5mezsbEaOHEnXrl155513dJtP7du3Jy0tjcjISPbt20deXh5vvfWWX4+ipKSE\nJ598kj179uBwOHjppZf4/e9/H/T5HT9+nCFDhvj4w5g+fTqJiYlMmTIFgOeff57k5GQ++ugjDhw4\nQPfu3Rk3bhzx8fGsXr2a8vJySkpK+Oabb4L6zli6dClz5szBZrPRs2dP3nvvPYxGo09ZVq1axYYN\nGwAYN24cffv25S9/+YtPnP3793PbbbdhMpkwmUx069aNL774glGjRgWtY0OgxEKhuMiZPXs2e/fu\nJTs7my+//JLly5ezdetWpJQMHz6cTZs2kZ+fT8uWLVm9ejWgGRiMi4vjrbfeYv369SQmJgbMOz8/\nn0ceeYRNmzbRtm1bCgoKAJgxYwbXXXcdK1eu5JtvvmHs2LF+3y288sorrF27llatWunuW5s3b866\ndeuIjIwkJyeH0aNH6/4ndu3axYEDB0hISKBdu3ZMmDCBrVu38s477zB37lzefvttQHu737hxI0eO\nHKFfv366kT4Ps2bNon///rz//vucOXOGHj16cPvtt/tYg/Vm69at7N27F6vVyk033cSdd97Jww8/\nzMiRI5kyZQoul4tPPvmErVu3kpqayptvvsm//vUvQBvGysrKYvfu3SQkJPj4zvB+/klJSSxbtozv\nvvsOs9nME088wd///nfGjh3LhAkTmDhxIjfeeCN5eXm6ocCUlJSABg+7devGyy+/zB//+EdKS0tZ\nv369j62qcKHEQqFoQKrrATQGwXw33HrrrUydOpVnn32WYcOGceutt9Yov82bN9OnTx/dP0ZCQgKg\n+a/wOEjq378/p0+fpqioyCft7373O9LS0hg1ahQjR44ENOuqkydPJjs7G6PRyKFDh/T4N910k95Q\ntm/fnkGDBgGaLwvvXs+oUaMwGAx06NCBdu3a8cMPP/g9g88++4w333wTgPLyco4dOxbUflcgfxhP\nP/00zZo1Y+fOneTl5XHdddcF/bJ7
4MCB+nMJ9vx3797Njh07uOmmmwAoKyvTDRkuWrQoYL7BGDRo\nENu2bePmm28mKSmJ3r17BzWX3pAosVAoLiGq892wY8cO1qxZw3PPPcegQYN8vM1Vl19V/w6e8KpU\njbdgwQK2bNnC6tWrdQupc+fOJTk5mV27duFyuXTnSeDrU6O+/is+/fRTOnXqFLJ+1eU3YcIEMjIy\n+PXXXxk/fnzQ9FX9VwR6/nPnzmXcuHG89tpr1ZYlOTlZd5v6yy+/BLWM+/zzz+ue9caMGaM7kgon\naoJbobjI8fZnEcx3w8mTJ7FarTz44INMnTqV77//3i9tIHr37s3GjRv58ccfAfRhqD59+uhuRDds\n2EBiYiKxsbE+aY8cOULPnj155ZVXSExM5Pjx4xQVFZGSkoLBYGDJkiU4nc5a1zczMxOXy8WRI0fI\nzc31E4XBgwczd+5cXdB27txZbX7B/GGMGDGCL774gm3btjF48GAg9PMK9vwHDBjA8uXL9WGlgoIC\nfvrpJ7/0w4cPZ/HixQAsXrw44FyL0+nUXc/u3r2b3bt3672wcKJ6FgrFRY63P4uhQ4cyZswYP98N\nhw8fZtq0aRgMBt1nM2i+GIYOHUpKSkrACe6kpCQWLlzIyJEjcblc+pzDSy+9RHp6OqmpqVitVr2B\n82batGnk5OQgpWTAgAF069aNJ554gnvuuYfMzEz69esXdB6hOjp16sRtt91GXl4eCxYs8OmdAEyf\nPp2nn36a1NRUpJRcddVV+hxDIIL5w7BYLPTr14+mTZvqE9Gpqan6pHJaWhrx8fE+eQXzndG5c2dm\nzpzJoEGDcLlcmM1m3n33Xdq0aeMzZ/GnP/2JUaNG8be//Y0rr7ySzMxMALZv386CBQtYtGgRdrtd\nH0aMjY1l6dKljTIMpfxZuFzw20GISYaoeAjQ5VYoqkP5s2g80tLSGDZsGPfee2/Y7+Vyubj++uvJ\nzMxslGGexkD5s6gPZYXwnts7l8EMMc014YhJrjxukuwfZo46v+VWKBRhY//+/QwbNowRI0ZcMkJR\nX5RYmKPg3g+gOM+9ndL2RSfg5x1Qkg8E6H1FxAUQE2+hcW/WZmBQU0OKC5+ePXvqrj89LFmyhK5d\nu56nEvlTV3eha9eu5dlnn/UJa9u2LStWrAgYv3PnzuTm5tbpXpcqYRULIcQQ4B3ACCySUs6ucj0N\neAP42R00T0q5SAjRHZgPxAJOYJaUMjzftFuscO3I4NedDig9DcW/VgpJcR6c8xKXk9nasa3YP70w\nukWkuZegtAjcc7HUfvxWoWgotmzZcr6LEDYGDx6sT1Ir6kbYxEIIYQTeBQYCJ4BtQojPpJT7q0Rd\nJqWsapylFBgrpcwRQrQEdggh1kopz4SrvEExmrTGvEly6LgVxVByShOQc1XExbP9ukcLlwFWgVhi\nvMTES1yaVBEXa6JWLoVCoWgkwtni9AAOSylzAYQQnwC/B6qKhR9SykNexyeFEKeAJKDxxaI2RMRo\nW0K76uO5XO7eivfQl7e4nIK8fXBkPVQU+acXBk0wAs6reETGLTgRTdSkvUKhqDfhFItWwHGv8xNA\nzwDx7hFC9AEOAf+/lNI7DUKIHoAFOBKugjY6BgPEJGkb11Yf114WoIdSpeeSf1Dbu+z+6U1RXr2T\nKnMqTVK08NiWEJWg5lYUCkVQwikWgV5nq84U/y/wsZSyQggxEVgM9NczECIFWAKMk1K6/G4gxKPA\nowBXXnllQ5X7wsIcBfFttK06XC4oPxNAULwm7X/LgaPfaivAqmIwe4lHivvYvennLbSeiuKC4syZ\nM3z00Uc88cQTtUp3xx138NFHH9G0adMwlUyz4zRs2DD27t0btnsEIyYmRv84rqbUpbyvvvoqf/7z\nn2tbvBohpWTKlCmsWbMGq9VKRkYG119/vV+8ZcuWMWvWLJxOJ3feeSevv/56g5clnGJxArjC67w1\n
cNI7gpTytNfp/wC6eUUhRCywGnhBSrk50A2klAuBhaB9Z9Ewxb5IMRjAmqBtzUOs+XdUVIrJuZPa\n/uzJyvNTB+DwN2AL8KWqpUkIQUnRei0mS3jqqfDDY6K8qlg4nU4/q6berFmzJtxFuywIJhZSSqSU\nGOrRY//888/JyckhJyeHLVu28Pjjj/stRDh9+jTTpk1jx44dJCUlMW7cOL7++msGDBhQ5/sGIpxi\nsQ3oIIRoi7ba6Q/AGO8IQogUKeUv7tPhwAF3uAVYAXwopcwMYxkvT0wR0PQKbauOinNuAfnFX1DO\n/Qo/ZWnXAg1/RSdpotKkZeVQl/d5kxS1rLiB8DZRbjabiYmJISUlhezsbPbv38/dd9/N8ePHKS8v\nZ8qUKTz66KMAXHXVVWzfvp3i4mKGDh3KLbfcwn/+8x9atWrFqlWriIoK/C3R4cOHmThxIvn5+RiN\nRjIzM2nXrh3PPPMMn3/+OUIIXnjhBe6//36fdPv27SM9PR2bzYbL5eLTTz+lQ4cOQcsXExPDpEmT\n+Oqrr4iPj+fVV1/lmWee4dixY7z99tsMHz6cjIwMVqxYQUVFhY+J8aoEMxseCIfDwbhx49i5cycd\nO3bkww8/JCsri3nz5ulLbdetW8f8+fPp2LEjZWVldO/enS5dujBr1iyGDh1Kv379yMrKYuXKlRw8\neJAZM2ZQUVFB+/bt+eCDD4iJiWHHjh388Y9/pLi4mMTERDIyMnRDih5WrVrF2LFjEULQq1cvzpw5\no9uO8pCbm0vHjh1JSkoC4Pbbb+fTTz9tcLHQ1S8cG3AH2lzEEeB5d9grwHD38WvAPmAXsB74/9zh\nDwJ2INtr617dvW644QapOA84nVIW50v5y24pD66VcnuGlOtfk/Kzp6Rcep+U82+R8vX2Us6I9d9e\nbiblW12k/J/bpfzkQSnXPCPlv9+SMvsTKY9skDL/kJTlZ893DUOyf/9+/fiFQ8fl3d8fatDthUPH\nq73/jz/+KLt06SKllHL9+vXSarXK3Nxc/frp06ellFKWlpbKLl26yN9++01KKWWbNm1kfn6+/PHH\nH6XRaJQ7d+6UUkp53333ySVLlgS9X48ePeQ///lPKaWUZWVlsqSkRC5fvlzefvvt0uFwyF9//VVe\nccUV8uTJkz5lmzx5sly6dKmUUsqKigpZWlpabfkAuWbNGimllHfffbccOHCgtNlsMjs7W3br1k1K\nKeUHH3wgW7RoIX/77Tc9/bZt26SUUkZHR0sppVy7dq185JFHpMvlkk6nU955551y48aNQZ8lIL/9\n9lsppZTp6enyjTfekC6XS3bq1EmeOnVKSinl6NGj5WeffeZzH096IYTMysqSUkqZn58vb731Vllc\nXCyllHL27Nny5ZdfljabTfbu3VvP75NPPpHp6elSSinnz58v58+fL6WU8s4775T//ve/9fz79++v\n189DQUGBbNWqlfzxxx+l3W6XI0eOlMOGDQtYP+//qx6A7bIG7XlY119KKdcAa6qEveh1/BzwXIB0\nS4Gl4SybooEwGCA6UdtaVPPxlsPm/j4lyNBX/g/a6q86DX210FZ/qaEvAHr06KGbFAeYM2eO/kZ8\n/PhxcnJy/Mxtt23blu7duwNwww03cPTo0YB5nzt3jp9//pkRI0YA6HaZvv32W0aPHo3RaCQ5OZnb\nbruNbdu2+ThE6t27N7NmzeLEiROMHDlS/zI6WPksFgtDhgwBNDPlERERmM1munbt6lO+QCbGPfad\nILjZ8D59+gSs4xVXXKEbE3zwwQeZM2cOU6dO5aGHHmLp0qWkp6eTlZXFhx9+GDB9mzZt6NVLswqx\nefNm9u/fr+dnxRGNKwAAG0hJREFUs9no3bs3Bw8eZO/evQwcOBDQhgw9vYWJEyfqeckaWPeNj49n\n/vz53H///RgMBm6++eawfFB42S/WdxaXcPSeezAmJGBMSMCk7+
P9w+LjERbVINUJk6X2Q19nf3EP\ngf1SeV7d0Jc1MfRcijkKTJFgNIdlSfF/dWjd4HnWFm/jfBs2bOCrr74iKysLq9VK3759KS8v90vj\nbR7caDRSVlYWMO9AjVd14d6MGTOGnj17snr1agYPHsyiRYswGAxBy2c2m/WGsb4my4OZbQ9EsPzS\n09O56667iIyM5L777gtqvK+qyfKBAwfy8ccf+8TZs2cPXbp0ISsrq9qytG7dmuPHKxeInjhxgpYt\nW/rFu+uuu7jrrrsAWLhwYbVzVXXlshcLabcR2aUzjtMF2I8do2zXLpyFhRDEdLIhJgZjswRM8Qlu\nMYnXjpu5RSXeHeYWGIPXj1BRAyKaaFtiNfZ4XC4oK/CfQ9HPf4GTO92mWoIgDJpomCK05cWmCO3c\nHFn7cHOqe4WZQRMgUWWPIUBYw1Gd2eyioiLi4+OxWq388MMPbN4ccK1IjYmNjaV169asXLmSu+++\nm4qKCpxOJ3369OG///u/GTduHAUFBWzatIk33njDR5hyc3Np164dTz31FLm5uezevZu2bdvWu3we\nE+NRUVGsXLmS999/3+f64MGDmT59Og888AAxMTH8/PPPmM3moL4ijh07RlZWFr179+bjjz/mlltu\nAaBly5a0bNmSmTNnsm7dOj2+2WzGbrdjNpv98urVqxeTJk3i8OHDXH311ZSWlnLixAk6depEfn6+\nfh+73c6hQ4fo0qWLT/rhw4czb948/vCHP7Blyxbi4uL85jUATp06RfPmzSksLOS9997jH//4R62f\nYygue7EwxcfT6q23fMKky4Xr7FkcBQU4Cwrc+0KchQU4TrvDCguw//wz5Xv24CgshCDO2g3R0TUQ\nlWZ6T8ZQxdyyIgDeQ18pqcHjVR36Kj4FjnJts7v3jgpwlLn3XuG2Uu3DSZ9w97HT134Sg/8BhbV5\nkxOVwoFHQAIJTHXXKsOaWQ38rlcPru3SmaioKJKbN9e+zxEGhgzsz4L580lN7Uqnjp204ZF6Wppe\nsmQJjz32GC+++CJms5nMzExGjBhBVlYW3bp1QwjB66+/TosWLXyGi5YtW8bSpUsxm820aNGCF198\nkejoaBYsWEBqaiqdOnXSh29qQzAT4x6CmQ0PJhbXXHMNixcv5rHHHqNDhw48/vjj+rUHHniA/Px8\nHzemjz76KKmpqVx//fXMmjXLJ6+kpCQyMjIYPXq0bndr5syZdOzYkeXLl/PUU09RVFSEw+Hg6aef\npkuXLixYsADQhqPuuOMO1qxZw9VXX43VauWDDz7Q8/Y4lAKYMmUKu3btAuDFF1+kY8eOtX6OoVAm\nyhsAKWWluBQWegmMl9AUFOAoLMR5+rQmLvYAwyiAsFoxxcdjbNZM2ycEFhVjvDZUZrBaG7m2Clwu\nTTDcInLg+G9c0/FqrRGWLsC9l67KMCmBAGGhrlXNK5BRyzohqBQtr2PwEjHQRQu8hMor3CdtTfd4\nCV8d83Dnk5GRwfbt25k3b17tqu95lvoe3/NAYVIy+en/w3XdUnk47SG/a+6MQ+QpA98/YJmqhFeX\n3hwV2nIEykT5eUcIgTEuDmNcHHhNLAZDSomruLgaUdGO7fmnKD94EGdBAdJmC3zvqChfUYlP0IQm\nId639+IWH2G1BnSTqagFBgMYorQfaBRgONN4Juv9xKQG4uNpVAI2RrLmcaQLpCN0Hg0maKEQcOYY\nlPym2VzzFiPPswrW6NaBG4aMIdoaxV+fGQ8F9TEoUVU4q5wHFEhPuCFAmNCGRcPMZS8WdlsFn8/9\nK1GxsVhj44hyb9YmcV5hsRhN/uORdUUIgbFJE4xNmmBpE+LLbNziUlKKs7BA65l4hsR0gXGHnS6g\n4vBhnAWFyACTmAAiIsLdM2mKISISERGBsJgRFgsGiwVhtiAsVTev696b2TeO33W/+GYlVPVFCM2S\ncSMwadIkvvvuO5+wKVOmkJ
6eXn3CmohSQHHCqzdVszzS0seTFqiXILwa0wA9m9MFhQwY7vsdCMDX\n//onzRKboRug8Eq3Y+uWyvMq13wa74CNfFVBuPi47MXCVlpKwckTlP5QRPm5cwSwKgJAhDWaqNhY\noprEamKii0qVc7e4mCMabu5BCIExJhpjTDRcEWI1kRtXaWm1cy7OM2dwVZQjbXZcJSVIm81vc9nt\nWo8myHxMnepi1oRHE6kqYhRQqAKIlV+8IGLmE9eMISICERWFIdItkuqDwGp5991365bQp6G8MGkW\n25Ls3Y1vguRi5rIXi+im8aT99T0AXC4n5cXFlJ09S9nZIkrPFWn7s0WUnT3r3hdxLv8UebmHKTt7\nFpczcENqiojQhKNJHNbY2MoeS6xXj6VJpchYoqIa9K3bYLVisVqhdf2Xckqn019IbDakTRMTaQ8g\nNPqx3feavcr1Cv+0zuJz/ul88gg831NbRESEJhxRUb5CEhWJITIKERmBITIKQ1QkIjIKQ2SEe+8b\nRyYm4iguxmA0ag2lwaAJkef4In6bVFw61Hd++rIXC5fLxebNm2natCnx8fHEx8fTrPUV+Jq1CoyU\nkorSEregnKXs3FkvcXELzTlNZH47cYyys2dx2CoC5mU0mfRei4+oNPE99xxHRsc02puxMBoRUVEQ\nxPxDYyOlRNqrERObDVeFLaCQucorkOVluMrKcZWXIcvKtR5WWTmu8nLtWnkF9qKiyrCyMlwVFcgg\n3x44n5nGKSlpGmyYTYgq4mEAg7+o6MJi0FY7CUOA8FDHigse6TOHQrXHNY4rRLXL9KWUnD59Wv+I\nsi5c9mJx7tw5vvzyS5+wqKgo4uPjfQTEs8XFxekfvAghiIyOITI6hviUVjW6n728XBcTredS2WOp\nFJmz/Hoqj9KzRdjKSgPmIwwGTVyaVJlr8RKUKO95lyax2pvvJYAQQvs4spE/kJRSIisqcJWVVe7L\ny7GVlnEKKHQ4KsfhPSZvqmw1DasznpVCQmjiUWsBET67epWjwahHmQI9y6phUv8nYBwZICxgukD5\nBCtDAyPMZkxu21DBiIyMpHU9RhrU0lmgrKyMwsJCCgsLOXPmjH7sOXe5KucxhBDExsb6iYhHXKKj\noxv0Dc9ht1NWRVT0noxHYM5VnpcXB/44CyAyOkYXlagmsURERWGOjMIcGYk5IhJzZCSWyEgtLCIC\nc2SUdh4R6XNsiohQb7FhRO85lZVV9oQ8PZxyTw+oHFdZZU8oaG/J05OVsrIdq/q26r3sE++32Wri\nuveSOsbVy1T/uIHKLdy9LYyenpzWm/M5NhjdvTv3KiPvOEajT7jvsXce7m9ePMcGY4D7uONULYtP\n2iBlMXjd07ssnt6nUdsbmzYlulcgd0GhUUtna0FUVBRRUVEBP6N3uVycPXvWT0QKCwvJycnxs5dv\nNpsDiohnb6nl27DJbKZJQiJNEhJrFN/ldHoNh1UZGvMSlaJTv2IvL8NWXo69vDzo8FhAhMBsiXCL\nS6WweERHFxwlQnXCu+dkjDvfpVEoNJRYhMBgMNC0aVOaNm3KVVdd5XfdZrMF7I0UFhaSm5uLvcpk\nbExMjJ+IeLYmTZrUy/Y9gMFoJLppPNFN42uVzuVy4qio0MSjQhMQW3kZjvJybO5zT5i9ogJ7eZkW\nVlHuFpwybGWllBQW+OTR8CLkEZ3QImSOiMASGaVESHHJIaXEYaugoqSEitISAJq1Dq8DOCUW9cRi\nsdC8efOApgOklJSUlAQc3vrpp5/Ys2ePzwoFo9GoC1Og3kkw/wINgcFgxBJlxRLVsF+E106EAotS\nQ4qQyWLBZLZgskRox57N7Dn2Do/AaDbrYeYq140B0pgtERgtZgyGS2N+SBE+nA4HFaUlVJQUU1FS\nQnlJsfvcfew+Ly+uEu5O4/Ra0p7SoRNjZv41rOW97MWizOliwt6jJFlMNLeYSLKYSbKY3Jt2
3NRk\nrNObqRCCmJgYYmJiuCLA9xEOh4OzZ8/6DW8VFhZy8uRJP8ufkZGRQYe44uLiglrBPJ80igi5hcXm\n1dsJJkIOWwUOmw2H3abtbTbKi4u1cK8wh80WdFl0TTAYTT6i4ytI1YX7ipXJS6y8rxu90pjdYeq7\nkcZFulzYyst8Gvfy0hIq3I17eYlHCNzh3qJQUoK9IvCHsx4MRiMR7gU0EdHRREbHEJvUXD+OiI4h\nwhpNZEwMMfHNqs2rIbjwWpdGptjpJN9mZ39JGfk2O44A8/1mIUiymEi0mEgym2keYSLJXH9hMZlM\nJCQkkJCQEPB6eXl5wOGtvLw8Dh48iNPLMm7VifeqvZOGnng/34RLhLxxuZxe4lGBw2avFBubDYdd\nO3babNh94mli5AwQ5jn2Fygt//oIlNFk8u/tmC0YzSaMJjMGkwmT2YzBaHLHNWM0mTCYzJjM2t5o\nMmE0+l7zpNfOTZjceVXm4U7nHe6VTlygy3qllDjstuCNe7FvI19RWkx5sbbXzkuDfsQLgBBEWK1E\nWCsb/KYtWhIZozXyfo2+O44n3GS5sIZP1WooL1xScsbhJN/mIN9m1/enbA79+Debg1M2B7/ZAwuL\nRQgS3cLS3CMmAYSlucVEXB17LKBNvJ87dy7gEFdhYWHQiXfvyfbIyEgiIiKwWCz63vv4QuypXOq4\nnM4qAuLb2/EIlO/mK0ZOb1Gz23A6HDgddpwOBy6HA6fd7g5z4HLYcbj3Tve1BkeIAALjJU76Xrvm\nIzheIuVJ551Gz8PrmpTSfyinxOvNvrRSFJwhrBOYIiK0Rtwa7X7Lj/Z5o/dp9K0xPmERUdaLoren\nVkPVAYMQJJhNJJhNdIqu/uMVj7CccgtIvs3BKV1gtOO8Cjt7z5WFFBbvnkmS2UTzCDOJZpN7aEwL\nryosBoOBuLg44uICL5ex2+0BReTMmTMcPXoUWxDDhN4YjUY/AfE+DhRW3XUlPqExGI1YjFFYIs/P\nB5BSSlxOpyYqushUCokuOA47TrsDp7PyWmUaT1x7ZVyfcC9xqnLNUVLid18/gatF76tyKKeykfcM\n5ehDPD5v+dFejb61QW3ChRMpZdh7IerXW0e8hYXo6uO6pKTQ7iTfXtkz8fRcPAKTV2Fnz7lSfrM7\ncAYRFn0ozC0gHiFJNHv1YtzCYjabSUpK0p24eyOlpKysjIqKCioqKrDZbNhsNv04VNi5c+d8rnt/\nh1LtMzMYQopNbQTIZDJdUN30SwEhhP6GfqE2k1JKX8Gp0lMCiIiJIdIac0GvhHO5XD6/L+/fY22P\nW7Rowfjx48NaXiUWjYBBCJpZTDSz1FxY9B6L3XsoTBOWX2spLN4T9x5hSbSYiImKJiqmCfFGAxbP\nF791wOFw1Fh0Al0vLi72Caut+NRGbAwGg89mNBr9wqrbQsVXhB8hBCazGQJ4pgs3TqezXo2693HV\nZfXB8LiU9WwWiwWr1Up8fLz+f7uqT/VwoMTiAsNHWELgkpICu9NrLsU9DGZ36GLza4Wd3edKOR1E\nWPT7AlFGA1ajgSiDQTt276MM7nCjwGo0EmUQvtfdx1ajAavFSlRkDFFGA9FGA4leeZkMNRMjh8NR\nox5OsLDi4mKfcGcQF7nhIJxCVNe4Qgh979kutPNw4nmZqW8D73EhWxNMJpPekHsa+JiYGJo1a+YX\nHujY+/xCGb69MEqhqBMGr8n0UHgLS77NwW92B6VOF2VOl7Z3uY9d/mH5NjtlLt+wMlftF0aYhdAE\nx2B0C4+XMFUrUlFYY6J9rscYDSQZhR7Xk94QoOHxiI/NZsPlclW7OZ3OkHHqGj9UXLvdXue8L3Ya\nUnxcLpdPI1/T52M2m/0a79jY2Gob9WANvLEOdtiklJS5JCVOJ2fdv8GSkgpKnGWUOp2UOF2UeMKd\nLkqcTv24daSFP7Xz983dkCixuEzwFpZrQkcPiUtKt3DI
SiFxi0mgY48IBb4uKbTbfUXL6cJWh5V6\nkQYRVHiiTQaaGI3EmY3EmYzEmozEmSzEWTzHlZvVeGEu9wyEdBsjDCYunuvex7U9r0/acJ9XvSaE\nqHUDb7FYatzAexr1UneDXeJ0ke9pwEtslJ4tD9ywu/+/lzi034MnbalX3Nr8j/e8TEUbDXX6rdQW\nJRaKOmEQgmijkegwfqjscEnKvXo0VYWnUoBktb2kMqeLs04nv9jsnHM4KXJoP9LqMAq8BMXoc6yH\nmU1+4U3d+0hD+IdXPHi/XSsqkVJS7pLuhriyYT7t3YCX2CkpqtCuuXzf2Et9Gnzfhr12jbpwD8sa\niTZWNvAJZgtWrzCrQQuPNhkrj92btUr6KKMBYyO/zCixUFywmAyCGIORGFPDK5LDJTnrdFJk18Tj\nrMN3X+R9btf2v1SUc9YdFmoYziKEr4iY/cWmam8m1qvHE3GJNfxSShwSyl0u9yYpd7qo8BwHCCtz\nuagIGE87rnC5KHf6h5V5NfK1GaDz9Eo9DbOnwY6PNGM1RPg01tYAjXigRt16Hhr1cKHEQnFZYjII\nEgzupc91oMLlqhQYexVxcQQWoGNlNv2aPcSwQZRBE5va9GY857EmI+ZqFhM4PI2qp8H1NLTuXlmF\nd+Ptcrkb6+Bhnn1ZgDBvEajPzIpJQKTBQKTBQIT7TT3CIPSwZmaTO8xApEEQYzT6NOhVG/HKt3aj\n/lZf0wUYlytKLBSKOhBhMJBkMZBkqf3yTc/wSMBejFt8zjgcPmJz2u7gx7IKPW51K9sArEYDTU1G\nIg0G/a3cIwyBPhCtKQYg0qg1yJ6GOtIgtEbaKGhqNhJpMHuFuRt3n3iVYRFV8ok0+odFqIb8gkCJ\nhULRyAj3qrAoo4HkiLqJTanTFVBofI7tTspcLr3x1RtnrzfwmoZ5Gu/qeiyKSxslFgrFRYYQgmiT\nkWiTEX93XQpFeLi0ZtEUCoVCERaUWCgUCoUiJEosFAqFQhESJRYKhUKhCIkSC4VCoVCERImFQqFQ\nKEKixEKhUCgUIVFioVAoFIqQCNkIpm0bAyFEPvBTPbJIBH5roOJcLFxudb7c6guqzpcL9alzGyml\nv//lKlwyYlFfhBDbpZQ3nu9yNCaXW50vt/qCqvPlQmPUWQ1DKRQKhSIkSiwUCoVCERIlFpUsPN8F\nOA9cbnW+3OoLqs6XC2Gvs5qzUCgUCkVIVM9CoVAoFCG5rMRCCDFECHFQCHFYCPGnauLdK4SQQoiL\nfkVFqDoLIdKEEPlCiGz3NuF8lLMhqcnfWQgxSgixXwixTwjxUWOXsaGpwd/5/3r9jQ8JIc6cj3I2\nJDWo85VCiPVCiJ1CiN1CiDvORzkbkhrUuY0Q4mt3fTcIIVo32M2llJfFBhiBI0A7wALsAjoHiNcE\n2ARsBm483+UOd52BNGDe+S5rI9e5A7ATiHefNz/f5Q53navEfxJ4/3yXuxH+zguBx93HnYGj57vc\njVDnTGCc+7g/sKSh7n859Sx6AIellLlSShvwCfD7APH+C3gdKG/MwoWJmtb5UqImdX4EeFdKWQgg\npTzVyGVsaGr7dx4NfNwoJQsfNamzBGLdx3HAyUYsXzioSZ07A1+7j9cHuF5nLiexaAUc9zo/4Q7T\nEUJcB1whpfxXYxYsjISss5t73N3W5UKIKxqnaGGjJnXuCHQUQnwnhNgshBjSaKULDzX9OyOEaAO0\nBb5phHKFk5rU+SXgQSHECWANWo/qYqYmdd4F3OM+HgE0EUI0a4ibX05iEcjTvL4UTAhhAP4v8H8a\nrUThp9o6u/lf4CopZSrwFbA47KUKLzWpswltKKov2lv2IiFE0zCXK5zUpM4e/gAsl1I6w1iexqAm\ndR4NZEgpWwN3AEvcv/OLlZrUeSpwmxBiJ3Ab8DPgaIibX8wPrracALzfmlvj2y1tAlwLbBBCHAV6\nAZ9d5JPcoeqMlPK0
lLLCffo/wA2NVLZwEbLO7jirpJR2KeWPwEE08bhYqUmdPfyBi38ICmpW54eB\nfwBIKbOASDQbShcrNfk9n5RSjpRSXgc87w4raoibX05isQ3oIIRoK4SwoP1oPvNclFIWSSkTpZRX\nSSmvQpvgHi6l3H5+itsgVFtnACFEitfpcOBAI5YvHISsM7AS6AcghEhEG5bKbdRSNiw1qTNCiE5A\nPJDVyOULBzWp8zFgAIAQ4ho0schv1FI2LDX5PSd69Z6eA95vqJtfNmIhpXQAk4G1aA3iP6SU+4QQ\nrwghhp/f0oWHGtb5Kffy0V3AU2iroy5aaljntcBpIcR+tEnAaVLK0+enxPWnFv+3RwOfSPdSmYuZ\nGtb5/wCPuP9vfwykXcx1r2Gd+wIHhRCHgGRgVkPdX33BrVAoFIqQXDY9C4VCoVDUHSUWCoVCoQiJ\nEguFQqFQhESJhUKhUChCosRCoVAoFCFRYqFQ1AAhxEtCiKkXQDmOur8NUSgaFSUWCoVCoQiJEgvF\nZYsQIloIsVoIsUsIsVcIcb/3m7sQ4kYhxAavJN2EEN8IIXKEEI+446QIITa5/UTsFULc6g6fL4TY\n7v7g8WWvex4VQrwqhMhyX79eCLFWCHFECDHRHaevO88Vbp8bCwLZNBJCPCiE2Oq+938LIYzhfF6K\nyxslForLmSHASSllNynltcAXIeKnAncCvYEXhRAtgTHAWilld6AbkO2O+7yU8kZ3mtuEEKle+RyX\nUvYG/g1kAPei2SJ7xStOD7QvkLsC7YGR3gVxm6+4H/id+95O4IFa1F2hqBWm810AheI8sgd4Uwjx\nF+BfUsp/CxHIsKfOKillGVAmhFiP1qBvA94XQpiBlVJKj1iMEkI8ivYbS0HzM7Dbfc1jz2cPECOl\nPAecE0KUe1m/3SqlzAUQQnwM3AIs9yrLADSjj9vcZY4CLna/HIoLGCUWissWKeUhIcQNaOarXxNC\nfIlmztnT446smsQ/C7lJCNEHrcexRAjxBlqPYSpwk5SyUAiRUSUvj5Vfl9ex59zzm/S7V5VzASyW\nUj4XopoKRYOghqEUly3uYaRSKeVS4E3geuAolWba76mS5PdCiEi3M5m+aG/1bYBTUsr/Af7mziMW\nKAGKhBDJwNA6FK+H27qoAW246dsq178G7hVCNHfXJcFdFoUiLKieheJypivwhhDCBdiBx9GGc/4m\nhPgzsKVK/K3AauBK4L+klCeFEOOAaUIIO1AMjJVS/uh2PrMPzfT5d3UoWxYw213GTcAK74tSyv1C\niBeAL92CYgcmAT/V4V4KRUiU1VmF4gJDCNEXmCqlHHa+y6JQeFDDUAqFQqEIiepZKBQKhSIkqmeh\nUCgUipAosVAoFApFSJRYKBQKhSIkSiwUCoVCERIlFgqFQqEIiRILhUKhUITk/wHHbw4fEydtEQAA\nAABJRU5ErkJggg==\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0xc6eb400>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# Visualize the GridSearchCV results for subsample / colsample_bytree.\n",
    "# NOTE(review): assumes gsearch4_1 (a fitted GridSearchCV) and the lists\n",
    "# subsample and colsample_bytree were defined in earlier cells -- confirm.\n",
    "# Print the best parameter combination and its cross-validation score.\n",
    "print(\"Best: %f using %s\" % (gsearch4_1.best_score_, gsearch4_1.best_params_))\n",
    "# Mean/std test and train scores for every parameter combination.\n",
    "# (train scores are only present if the search was run with return_train_score=True)\n",
    "test_means = gsearch4_1.cv_results_[ 'mean_test_score' ]\n",
    "test_stds = gsearch4_1.cv_results_[ 'std_test_score' ]\n",
    "train_means = gsearch4_1.cv_results_[ 'mean_train_score' ]\n",
    "train_stds = gsearch4_1.cv_results_[ 'std_train_score' ]\n",
    "\n",
    "# Persist the full CV results table for later inspection.\n",
    "pd.DataFrame(gsearch4_1.cv_results_).to_csv('Preds_for_subsample_and_colsamplebytree.csv')\n",
    "\n",
    "# plot results: reshape the flat score arrays into a\n",
    "# (len(colsample_bytree), len(subsample)) grid -- assumes subsample varies\n",
    "# fastest in the parameter grid ordering; TODO confirm against the grid setup.\n",
    "test_scores = np.array(test_means).reshape(len(colsample_bytree), len(subsample))\n",
    "train_scores = np.array(train_means).reshape(len(colsample_bytree), len(subsample))\n",
    "\n",
    "# One test/train curve per colsample_bytree value. Scores are negated --\n",
    "# presumably scoring='neg_log_loss', so -score is the log loss (verify).\n",
    "for i, value in enumerate(colsample_bytree):\n",
    "    pyplot.plot(subsample, -test_scores[i], label= 'test_colsample_bytree:'   + str(value))\n",
    "    pyplot.plot(subsample, -train_scores[i], label= 'train_colsample_bytree:'   + str(value))\n",
    "    \n",
    "pyplot.legend()\n",
    "pyplot.xlabel( 'subsample' )                                                                                                      \n",
    "pyplot.ylabel( 'Log Loss' )\n",
    "pyplot.savefig( 'subsample_and_colsamplebytree1_vs_logloss.png' )"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "从.best_params_可知最佳的subsample与colsample_bytree值均为0.8"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
