{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#1. import environment"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from xgboost import XGBClassifier\n",
    "import xgboost as xgb\n",
    "import pandas as pd \n",
    "import numpy as np\n",
    "from sklearn.metrics import log_loss\n",
    "from matplotlib import pyplot\n",
    "import seaborn as sns\n",
    "%matplotlib inline"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "#2. read data\n",
     "#use the last 1000 rows for convenience in calculation"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>bathrooms</th>\n",
       "      <th>bedrooms</th>\n",
       "      <th>price</th>\n",
       "      <th>price_bathrooms</th>\n",
       "      <th>price_bedrooms</th>\n",
       "      <th>room_diff</th>\n",
       "      <th>room_num</th>\n",
       "      <th>Year</th>\n",
       "      <th>Month</th>\n",
       "      <th>Day</th>\n",
       "      <th>...</th>\n",
       "      <th>walk</th>\n",
       "      <th>walls</th>\n",
       "      <th>war</th>\n",
       "      <th>washer</th>\n",
       "      <th>water</th>\n",
       "      <th>wheelchair</th>\n",
       "      <th>wifi</th>\n",
       "      <th>windows</th>\n",
       "      <th>work</th>\n",
       "      <th>interest_level</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>48352</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>3050</td>\n",
       "      <td>1525.0</td>\n",
       "      <td>1525.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>13</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48353</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>3095</td>\n",
       "      <td>1547.5</td>\n",
       "      <td>1547.500000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>2</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48354</th>\n",
       "      <td>1.0</td>\n",
       "      <td>2</td>\n",
       "      <td>1900</td>\n",
       "      <td>950.0</td>\n",
       "      <td>633.333333</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>27</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48355</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>2800</td>\n",
       "      <td>1400.0</td>\n",
       "      <td>1400.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>15</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>48356</th>\n",
       "      <td>1.0</td>\n",
       "      <td>2</td>\n",
       "      <td>3795</td>\n",
       "      <td>1897.5</td>\n",
       "      <td>1265.000000</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>19</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>2</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 228 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "       bathrooms  bedrooms  price  price_bathrooms  price_bedrooms  room_diff  \\\n",
       "48352        1.0         1   3050           1525.0     1525.000000        0.0   \n",
       "48353        1.0         1   3095           1547.5     1547.500000        0.0   \n",
       "48354        1.0         2   1900            950.0      633.333333       -1.0   \n",
       "48355        1.0         1   2800           1400.0     1400.000000        0.0   \n",
       "48356        1.0         2   3795           1897.5     1265.000000       -1.0   \n",
       "\n",
       "       room_num  Year  Month  Day       ...        walk  walls  war  washer  \\\n",
       "48352       2.0  2016      4   13       ...           0      0    0       0   \n",
       "48353       2.0  2016      4    2       ...           0      0    1       0   \n",
       "48354       3.0  2016      4   27       ...           0      0    0       0   \n",
       "48355       2.0  2016      4   15       ...           0      0    0       0   \n",
       "48356       3.0  2016      4   19       ...           0      0    0       0   \n",
       "\n",
       "       water  wheelchair  wifi  windows  work  interest_level  \n",
       "48352      0           0     0        0     0               2  \n",
       "48353      0           0     0        0     0               2  \n",
       "48354      0           0     0        0     0               2  \n",
       "48355      0           0     0        0     0               2  \n",
       "48356      0           0     0        0     0               2  \n",
       "\n",
       "[5 rows x 228 columns]"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "train_data = pd.read_csv(\"RentListingInquries_FE_train.csv\")\n",
    "train_data = train_data.tail(1000)\n",
    "train_data.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<class 'pandas.core.frame.DataFrame'>\n",
      "RangeIndex: 1000 entries, 48352 to 49351\n",
      "Columns: 228 entries, bathrooms to interest_level\n",
      "dtypes: float64(9), int64(219)\n",
      "memory usage: 1.7 MB\n"
     ]
    }
   ],
   "source": [
    "train_data.info()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "y_train = train_data['interest_level']\n",
    "train_data = train_data.drop([ \"interest_level\"], axis=1)\n",
    "X_train = train_data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "from sklearn.model_selection import train_test_split\n",
     "X_train_part, X_val, y_train_part, y_val = train_test_split(X_train, y_train, test_size=0.33, random_state=0)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "def modelfit(alg, X_train, y_train, cv_folds=5, early_stopping_rounds=10):\n",
    "    xgb_param = alg.get_xgb_params()\n",
     "    xgb_param['num_class'] = 3  # interest_level has 3 classes (labels 0/1/2)\n",
    "    \n",
    "    #直接调用xgboost，而非sklarn的wrapper类\n",
    "    xgtrain = xgb.DMatrix(X_train, label = y_train)\n",
    "        \n",
     "    cvresult = xgb.cv(xgb_param, xgtrain, num_boost_round=alg.get_params()['n_estimators'], nfold=cv_folds,\n",
    "             metrics='mlogloss', early_stopping_rounds=early_stopping_rounds)\n",
    "  \n",
    "    cvresult.to_csv('1_nestimators.csv', index_label = 'n_estimators')\n",
    "    \n",
    "    #最佳参数n_estimators\n",
    "    n_estimators = cvresult.shape[0]\n",
    "    \n",
    "    # 采用交叉验证得到的最佳参数n_estimators，训练模型\n",
    "    alg.set_params(n_estimators = n_estimators)\n",
    "    alg.fit(X_train, y_train, eval_metric='mlogloss')\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {
    "collapsed": true,
    "scrolled": true
   },
   "outputs": [],
   "source": [
    "#params = {\"objective\": \"multi:softprob\", \"eval_metric\":\"mlogloss\", \"num_class\": 9}\n",
    "xgb1 = XGBClassifier(\n",
    "        learning_rate =0.1,\n",
    "        n_estimators=1000,  #数值大没关系，cv会自动返回合适的n_estimators\n",
    "        max_depth=5,\n",
    "        min_child_weight=1,\n",
    "        gamma=0,\n",
    "        subsample = 0.5,\n",
    "        colsample_bytree=0.8,\n",
    "        colsample_bylevel=0.7,\n",
    "        objective= 'multi:softprob',\n",
    "        seed=3)\n",
    "\n",
    "modelfit(xgb1, X_train, y_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYUAAAETCAYAAADZHBoWAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xd4HNXVwOHfzO6qd1nFvXPdC+4GjCGYbjAtCaTRw0dJ\nQgIECJBAgITeCYQaCIQYCBAwBgO2AXfccL9ylWzLKpbVrL678/0xY0WyJatYq11J530egXbqOdr1\nnJ17Z+YalmUhhBBCAJjBDkAIIUTokKIghBCilhQFIYQQtaQoCCGEqCVFQQghRC0pCkIIIWq5gx2A\naB9KqeOBBcAPtNYrnWndgOXAr7TWc5xpVwHXAnFAGLADuEtrvdyZvxDoCxQDhrPMv7TW9wUg5gnA\nVVrr69p62y2I4WogTGv9vFLqOiBBa/3XNtr2POAyrfX+ttheW1FKXQ5crLU+NwDb7gds0FrHtPW2\nRduQotBFaK1XK6VuA951CkQpMBt4tU5BeBCYBvxQa53pTDsV+EQpNU5rneVs7lat9XvO/ARgk1Lq\nK6314jYOezjQq4232VInAhsAtNYvtPG2Z7Tx9oQ4ZlIUuhCt9YtKqROBV4HtQCHwIIBSKg34DTBQ\na72vzjrzlVK/BaIb2Wys8//9znaGA88CyYAFPKa1fsOZdy3wK8AH5AI3aq0znJgeB1zOOn8BVgD3\nAfFKqde01lfU3alSahfwOvADoA/wb631bUfLXykVBjwEnOzsaw32WVKJUur/gOuAaqAS+CWggPOA\nGUqpCiAF6Ka1vtHZ/9vAOU6ufwROAMYBNcB5WutspdS5wJ3YZ1SpwD+01ncrpV5zwlqglDob+8zs\niL+bUmo68BRQhv0eTANeBgYDfmAV8Euttb9Onqc76490XicAO4EBwI8Pz1Nrvelof7fD/oZHe39v\nB67C/sLxDTBLa92vBduOB54DxjjbngvcqbX2KqXuBS5w4i4ALtda72tsenP3KY4kfQpdz3XACOBi\n4Bda60O3tE8BNjf0D0pr/abWenOdSY8opdYqpTZiF5cvgQyllBv4L/CM1noUcBbwoFJqinPGcRtw\nitZ6NPYB9UOllAHcCzyutR4HXAmcqrXeDdwDfHt4QagjRmt9EjAVuEkp1b+J3G8HvMA4J4Zs4K9K\nKRfwJHCm1noC8HfgRK31B04+T2itn2tgexHOdn7nrPOU83o3cLmT2++w/87jgcnAHUqpbnVyOgXY\n19jfzVlmBHCps+3zgFit9RhggjN/wGFxfQHEKKXGO68vBeYAJQ3l2cTfrFYT7+8ZwOVOTOP435eF\nlnga+8A+EhgPjAZuUUr1xv7CMsH5O84DJjU2vRX7FXVIUeh6FPY/2ATsf7yHGNjfzuyFlIp1Dvxr\nlVLbnKalQ27VWo/RWg8H0oB+2Afc47APlP8B0FpnA+8DZzo//9Za5zvzXgd6OuvOBp5TSr3lxHRn\nM3P5yNnWXiAPSGpi+XOB84E1Sqm1wCxgmNbaB7wLLFFKPYvdX/JKM/b/vvP/7UCO1vr7Oq+TnII7\nExinlPoj9tmQwZFnXUf7uwHsPtScBywChjt9O7cDT2qtt9XdmLPfV7AP0gBXAC8fQ57NifNs4F2t\ndZGz/4aKaFPOAp7VWlta6yrgBWfaXuB7YLVS6lFgrdb6w6NMF8dAikIX4nQs/we42fl5RymV7sxe\nDgxRSiUDaK1LnQP/GOCf2M0bR9BaFwLvYDdrNPR5MgFPI/MMwKO1fhH72+EXwBnAOqcpoSkVdX63\nnO0djQv4dZ28JmKfMaG1/in2AXwb8Hvsv1NTqur8XnP4TKVUNHYT1fHAauBWZ7nD4zza3w3g4KGJ\nWuudwCDsJrY44Eul1MUNrP8a8EOl1BjszvGFzvqtybM5cXqpn5evBdttbPsm9ufDj93kdzn2mcQT\nSqmnGpveiv2KOqQodBFOE8ls4GOt9b
+01q8Bn2MXBpfzre8p7I7oPnXW64PdVt7gP3KllAf7G/gK\nQAPVSqkLnXk9gIuwD/afAz9SSqU4867A/oe8TSm1BBjrnD1ci30Wk4h9oPHQdj4HblRKhSmlTOAl\n4C9KqW5Kqd1Agdb6SeAu7KYLjjGGwdgH7ru01h9jH8DCsYsT2H9TD0f/u9Xj9H28BszTWv/eyWnE\n4cs5Z0/LgRex+yBoIs/mOFqcc4CL6hTzq6hz5tlMnwM3KKUMpVQ49mfhC6XUaOzO/s1a678ATwCj\nG5vewn2Kw0hHc9fxCBCF3cZ9yPXYB44Hgd9rrf+glPoJ8JZSKgb7gFUJ/Jv6zQGPKKXuwv5HHw18\nBTygta5RSs0CnlZK/Qn783Wf1noBgFLqCWC+c0DOB87VWvudq6KeUkrdj915eq/WepdTyB5QSn2g\ntb6gDf4GfwYexf727gLWAr9zOprvB75yOpS9wNXOOnOBZ5VSrdnfOuATYItSqgj72/km7G/627G/\npS/CbtJq8O/mdDTX9QYwHfuKrzIgC7uYN+Ql4D3sfgi01vuPkufhzlRKHazzukhr3auJ9/clYKlS\nqhzYCJQ3su3ow7YNdp/Wr4BngPXYHfOfYX+uqpVSs4GVznoV2BcIfN/Q9Eb2KZrJkEdnCyGOldOp\nPVVr/bTz+rfAJK31j4IbmWgpOVMQnYayv87/u5HZWg5QAZUB/N657NjCPoO5NrghidaQMwUhhBC1\npKNZCCFELSkKQgghanX4PoX8/NJWt38lJkZRWNjYBRIdh+QRWiSP0NIZ8ghEDikpsQ3e19OlzxTc\nblfTC3UAkkdokTxCS2fIoz1z6NJFQQghRH1SFIQQQtSSoiCEEKKWFAUhhBC1AnL1kfOQtFexH4sc\nDtyvtf5vnfkzsZ+V78Ue+esl53k4z2M/0KoKuPrwRwILIYQIrECdKfwU+0mMJ2E/a/3ZQzOcgvEE\ncDr2UyOvVfaoX7Own9U+Bfs58Y8FKDYhhBCNCNR9Cu9iP50R7Gese+vMGwpsc57Dj1JqEfaz+Kdg\nPxURrfWyOqNGHVViYtQxXa6VktKaAaJCj+QRWiSP0NIZ8mivHAJSFLTWB8EevQu7ONxVZ3Yc9ohP\nh5QC8Q1M9yml3FrrugXlCK29oWNLZiHRMRH0To5s1fqhJCUllvz80mCHccwkj9AieYSOQOTQWJEJ\nWEezM37qAuBNrfXbdWaVUH/81ligqIHpZlMF4Vh88O0OHnh9BTVef9MLCyFEFxGQouD0EczDHrjl\n1cNmbwYGK6WSlFJh2E1HS4HF2OO8opSajD3QRsCkpUGNWcKO7OKmFxZCiC4iUH0Kd2IPp3i3Uupu\nZ9pLQLTW+u/OAByfYxelV7XWe5VSHwAznKEZDezBxgNmDR8RPtRi485hqD6JgdyVEEJ0GIHqU/g1\n8OujzP8Y+PiwaX7gukDE05Ax6UNZnb+G9ft2cSED22u3QggR0rrszWtDkgYAkF2xm8rqgHVdCCFE\nh9Jli8LAhP72LzGFZOwuCm4wQggRIrpsUUiLSiHKHY0ZU8imzAPBDkcIIUJCly0KhmEwLHUQZngl\nG/fsDXY4QggRErpsUQAYljoYgJyqPRysqAlyNEIIEXxduigMTRkEgBlbyJbMwiBHI4QQwdeli0K/\nhF54jDDM2EI2S1EQQoiuXRRcposBCX0xI8vYsDsn2OEIIUTQdemiADDYuTS1wJfNgZLKIEcjhBDB\n1eWLwqH7FVyxB6QJSQjR5XX5otAvrg+mYWLGSL+CEEJ0+aIQ5vLQN7YXZnQpm7LysSwr2CEJIUTQ\ndPmiADAoYQAYFiXkkltYEexwhBAiaKQoAAMT+gHIpalCiC5PigIwML4f4BSFXfIcJCFE1yVFAYjy\nRNEjOh0zuohVGbl4fTJEpxCia5Ki4BiU0B/D5YeoEnSWPEpbCNE1SVFw/O9+hUJW6bwgRyOEEMEh\nRc
FR268QX8DqjHz8frk0VQjR9UhRcCRGJGAaJq7YQkrKq9i6R5qQhBBdjzuQG1dKTQIe0lpPrzMt\nHXinzmJjgNu11i8opVYDJc70nVrrKwIZ3+E8pocqqwoztoiVW/JRfRLbc/dCCBF0ASsKSqnbgJ8B\nZXWna61zgOnOMlOAB4CXlFIRgFG3gLS3q0b8hOe/f5XwbvtZlZHHpTMGYxpGsMIRQoh2F8jmo+3A\nhY3NVEoZwDPA/2mtfcBoIEopNU8pNV8pNTmAsTXouISBhJkeIpILKDpYzY69JU2vJIQQnUjAzhS0\n1u8rpfodZZGZwEattXZelwOPAi8Dg4G5SimltfYebT+JiVG43a5Wx5mSElvv9aj0oazMXocRXsam\n3UVMGdur1dtuT4fn0VFJHqFF8ggd7ZVDQPsUmvBT4Kk6rzOAbVprC8hQShUA3YHdR9tIYWF5qwNI\nSYklP7+03rTjYgezknVEdCvg2zV7mDm5D0aINyE1lEdHJHmEFskjdAQih8aKTDCvPhoPLKnz+krg\nMQClVA8gDtjX3kEN7zYEgJj0QgpKqtiV07E/TEII0RLtVhSUUpcppa51fk8BSpyzgkNeARKUUouA\nfwNXNtV0FAgJ4fH0ie1JuSsXTC8r5UY2IUQXEtDmI631LmCy8/vbdabnY1+KWnfZauCyQMbTXCOS\nh5JVupfw5AOs2hLLxScPDPkmJCGEaAty81oDRnQbCkBSr2LyiirYnXcwyBEJIUT7kKLQgN6xPYkL\ni6UyPBuwWKnzgx2SEEK0CykKDTANkxHJQ6n0V2BEF/PpskwZplMI0SVIUWjEoSakPoPL8PstMnbL\ns5CEEJ2fFIVGqMRBuE03/phcAL5emx3kiIQQIvCkKDQiwh3OcQkD2V+dR1oarNR5lJZXBzssIYQI\nKCkKR3GoCanfcZV4fRaL1+cEOSIhhAgsKQpHMSLZLgqbqhbjdpl8vXYvfulwFkJ0YlIUjiI5MhGX\n4cJLDccPjSe3sIItmYXBDksIIQJGikITwlxhAKT1KwZgoXQ4CyE6MSkKTbhz4m8A2FW9hV4p0azJ\nyKe4TDqchRCdkxSFJiRFJDI4YQDbinYwYVQcPr/FonVytiCE6JykKDTDhLSxAJiJ2YR5TL5emy0d\nzkKITkmKQjOMTR2J23CxtuB7Jg1NY39xJZt2Hgh2WEII0eakKDRDlCeK4d2Gkl2Ww/Ch9tPGF6zZ\nG+SohBCi7UlRaKaJThPSXm8GLtNgzdb95B7DUKBCCBGKpCg00/DkIUS6I1iZt5arz7Vvavt8eVaQ\noxJCiLYlRaGZPC4PY1NGUVRVTFxaKakJkSxan0PxwapghyaEEG1GikILTEi3m5BW563ljEl98Pr8\nfLlqT5CjEkKItiNFoQUGJfQnITye1XnrmTgsmbgoD/NX76Wiyhvs0IQQok1IUWgB0zCZkDaWSl8l\nGcVbOW18byqqvCxcK1ciCSE6B3cgN66UmgQ8pLWeftj0m4GrgUODH/8S2Ao8D4wGqoCrtdbbAhlf\na0xIH8sXWQv5x6Z3eGDyn5izLJN53+3mtHG98bilxgohOraAHcWUUrcBLwMRDcweB/xcaz3d+dHA\nLCBCaz0FuB14LFCxHYueMd1xGS5q/DV4jUqmj+lB8cFqlm6UsRaEEB1fIL/abgcubGTeOOAOpdQi\npdQdzrQTgc8AtNbLgPEBjO2YXDz4PACWZK/g9Al9cJkGny3PkkdfCCE6vIA1H2mt31dK9Wtk9jvA\nc0AJ8IFS6lwgDiius4xPKeXWWh+1FzcxMQq329XqOFNSYlu8ztkJ0/hox6cszV3BT8efxynjevPl\nd1nsyD3IlJE9Wh3LsWhNHqFI8ggtkkfoaK8cAtqn0BCllAE8qbUudl7PAcZiF4i6WZtNFQSAwmO4\nqzglJZb8/NJWrTs+bSyL9i5jwZYVTB/djy+/y+KhN1by4q3TMQ2j
1TG1xrHkEUokj9AieYSOQOTQ\nWJEJRs9oHLBBKRXjFIhTgVXAYuBsAKXUZGB9EGJrtmk9pwDwzd6l9OgWzcShqfj8Fss35QY5MiGE\naL12KwpKqcuUUtc6Zwh3AguAb4GNWutPgQ+ASqXUEuAJ4Ob2iq01esZ0Z0B8PzYfyCC/vICLTx6I\n22Xy3sLtVFX7gh2eEEK0SkCbj7TWu4DJzu9v15n+JvDmYcv6gesCGU9bO6nnZHYU72JR9jIuGHQO\nZ0zszZylmcxdnsmskwYEOzwhhGgxubD+GIxNHUWMJ5ql+76jxlfDOVP6Eh8dxmfLszhQUhns8IQQ\nosWkKBwDj+lmSvcJlNWUszpvHRFhbi46eSDVXj/vfb092OEJIUSLSVE4Rif2nIyBwbd7lwEwdWQ6\nfdNjWbYxl+17i5tYWwghQosUhWPULTKJYcmKnSWZ7C7NxjQMLv3BYAD+9dVWuaFNCNGhSFFoAyf1\nnAzAE6ufB+C43glMGJLKjuwSuURVCNGhSFFoA8OTh2AaJlW+ag5WlwFwySkDAXj5k02UV9YEMzwh\nhGg2KQptwDRMIlzhACzcsxiAbvGRXDBtAJYF78wPuYe9CiFEg6QotJEHTvgDMZ5ovt6zmEqvPUTn\nWZP60Cc1hkXr9rFx54EgRyiEEE2TotBGwlxhTO91AuXeCpZkLwfA7TK58pyhuEyD1+dukRHahBAh\nT4pCG5rWayphrjC+2v0tXr9dAPqkxXLW5D4UlFTyvty7IIQIcVIU2lC0J4oTe0yiqKqY73LW1E6f\nObU/3ZOjmL96Lxm7i4IYoRBCHJ0UhTZ2au+TcBkuvshaiN/yA+Bx281IhgGvfrqZqhp5YJ4QIjRJ\nUWhjiREJTEw/ntzyfNbt31Q7fWCPeGaM701eYQU3P7MoiBEKIUTjpCgEwGl9TsbAYF7mAqw6dzRf\nMG0ApmlQWe1j0y65GkkIEXqkKARAenQqo1OGk1mym61F/+tcDve4+MPPxuEyDV76eBMlZdVBjFII\nIY4kRSFAZvSdDsBz379ab3r/7nFcdPJAisuqeWXOZnk2khAipEhRCJB+cX1wm268fi+bD2TUm3f6\nxN6M6J/E+h0FfPnd7iBFKIQQR5KiEEC3jLsRA4MPts2pvRIJwDQMrjp3GHHRYby7cDu7ckqCGKUQ\nQvxPk0VBKZWklDrN+f0OpdS7SqlhgQ+t4+sd24OJ6cez9+A+VuSsrjcvPjqMq88dis9v8cJHG+Vu\nZyFESGjOmcK/gCFOYbgE+C/wQkCj6kRmDjgDj+nm4x2fU+2r/7TUEf2TOXNSH/sy1WcX1btSSQgh\ngqE5RSFRa/0scD7wutb6TSAqsGF1HokRCZzS+ySKqopZsPvbI+ZfOG0AbpdBdY2fz1dI/4IQIria\nUxRMpdQ4YBbwiVJqDOBuzsaVUpOUUgsbmH6pUmq5UmqxUuoFpZTpTF+tlFro/LzWgjxC2ul9pxPj\niWZe5gJKqw/Wm+d2mTx03VQSYsJ4d+E21m0vCFKUQgjRvKLwe+AR4FGt9Q7spqObm1pJKXUb8DIQ\ncdj0SOB+4BSt9QlAPHCuUioCMLTW052fK1qWSuiKdEdyVr/TqPRVMXfXl0fMT4wN56aLRuF2mbz4\n3w1k7y8LQpRCCAFGc9qxlVLhWusqpdQgQAFztdb+Jta5CFgHvKm1nlxnugmkaK1zndfvAi8BxcAb\nQCb2mcidWutlTcXm9fost9vVZA7B5vV5+e1n95FfVsBjZ91Dj9i0I5ZZuHoPj721iu7donn819OI\niQoLQqRCiC7CaHBiU0VBKXU3MBi4C1gGbAR2aa2vaWqPSql+wDt1i8Jh828CznZ+RgCTsc8uBgNz\nAaW1PuplOfn5pa3unU1JiSU/v7S1q7fYmrz1vLzhTTymhyenP9DgMu9/vZ05SzMZ1i+Rm384GpfZ\n9Mlce+cRKJJHaJE8QkcgckhJ
iW2wKDSn+eh84BrgMuCfWusZwNhjCUYpZSqlHgVmABdprS0gw9m+\npbXOAAqA7seyn1AzJmUEbsNNjb+GzQUZDS5zwbQBjBnUjU27CrnxiW/liiQhRLtqTlFwaa2rgHOB\nT53mn+hj3O+L2H0Ns7TW5c60K4HHAJRSPYA4YN8x7iekGIbBrePtG9pmb/2QGv+RJ0GmYXDNzGG4\nTIOqGh+zF2yTwiCEaDfNKQpfKaU2AGHAN8DX2PcqtIhS6jKl1LVKqeOBq4CRwHznSqMLgFeABKXU\nIuDfwJVNNR11RL1iezCt11TyyvczP+ubBpeJDHfz6A0n0D05is9X7Oa9hdulMAgh2kVzO5r7AHu0\n1n6l1Bit9drAh9Y8HalP4ZDymgruW/YIlb4q7pl8C0kRiQ0uV3SwiofeXkPugXLOmdKXC6cNwDCO\nbAbsDG2mIHmEGskjdIRUn4JSKgV4FMhTShUBf1RKHXnpjGi2KE8kFww6hxp/De9v/bjR5RJiwrnt\n0rGkJUYyZ2kmH367sx2jFEJ0Rc1pPnoRWAEMAPoBS7GbesQxmJh+PAPj+7E2fwObCnSjyyXGhnPb\nZceTmhDJx0t2ccPjX0tTkhAiYJpTFAZorR/VWpdorYu01g8DfQMdWGdnGAY/UhdgGibvZnzUYKfz\nIXZhGItpGFRU+3jnq20yDoMQIiCaUxQspVTvQy+c/oWaoywvmqlnTHem9ZxCXsV+vsr6+qjLJsVF\n8Mj1U+nRLZovVu7mtU834/Mf9f5BIYRoseYUhbuBpUqp95VS/8FuPronsGF1HecOOB0Dg493fE5O\nWd5Rl02MDef2nxxP/+6xLF6fw98+3EiNVwqDEKLtNFkUtNafYN+s9irwGjDWmSbaQKQ7kqtH/gyA\nNzb/G5/fd9TlYyI93PLjsQzpk8DqjHyeeu97GYtBCNFmmjXymtY6X2s9R2v9sdY6Tym1PtCBdSVj\nUkYwPm0MmSW7+Wp3w/cu1BUZ7ubmH45m7GD7zufL7v6UwtKqdohUCNHZtXY4zn5tGYSAS447n7iw\nWObsmMe+stwml/e4XVx/wQjCPS68Pov7/vEdO7JlWE8hxLFpbVGQS1/aWIwnmkvVhXgtH29saroZ\nCcBlmjz/22lcdd4ISsqq+etbq1m6IacdohVCdFatLQoiAEalDGdi+vFkle7hiyauRjrEMAxmnTyQ\nmy8Zjcdt8tInm5i9YBt+v9RtIUTLNTqCmlLKT8NnBEYj00UbuGTweegDW/l05xeM7DaUnjHNe1Ds\niAHJ3PXzcTz9/no+W57FVyv38MgNU4mTMRmEEC3Q6JmC1trUWrsa+DG11qE/qk0HFeWJ4rIhF+Oz\nfDz03dPU+Jp/S0j35Gju/vk4PG6TGp+fP726Ap1VGMBohRCdjTQfhaAR3YYS7grDZ/n4z7aWXf0b\nFeHhb787mUumD6SkrIaH/7WG/y7aKc1JQohmkaIQov564h/pEZ3ON3uXsir3+xataxoGZ03uy+0/\nPZ6k2HA+XLST6x77mt89uzhA0QohOgspCiEqzOXhqhE/JcwVxttb3iOvfH+LtzGoZzx/vGIiYwd3\nw+vzU1RWxbrtLd+OEKLraLSj+RCl1OGPtLCACmCz1npOQKISAKRHp3KpupB/bHqHVzf8k9+NuwGP\ny9OibcREerjxwpHc9NS3lFd6efLddZw2vheXTB+Exy3fCYQQ9TXnqDAIOAsocn5OA04GrlFKPRzA\n2AT2I7andp/A7oPZLe5fOMQwDJ79zTT+dMUEuidH8eXKPdz/xkqy95e1cbRCiI6uOUVBAdO11k9r\nrZ8GZgDdtNazgDMCGp0A7LudW9u/UFeftFjuuXwCJ4/pwe68g9z18nJuevIbGZ9BCFGrOUUhkfrN\nTGFATAvWF8cozBXGVSN+CsCrG99iT2l2q7cV7nHxizOHcP2sERhAWaWXR99ZS35RRRtFK4ToyJ
pz\nUH8WWKmUekQp9TjwHfA3pdRvgHUBjU7USo9O5eoR9tNUX1j3OiXVxzZe6/ghqTx6wwmMHpjM5sxC\n7nllBV+t2iOD9wjRxTXn0dlPAz8EsoGdwMVa6+eBOcAVgQ1P1DU2dSQzB5xBYVURf1/3RotubGtI\nYmw4v7p4FNfMHIbbZfDWFxlc9+hCfvvMojaKWAjR0TTn6iMDONH5cQGmUmqz1nprM9adBDyktZ5+\n2PSZ2AP1eIFXtdYvKaVM4HlgNFAFXK213tbCfDq9M/qeyr6yXFbmruWtLe/zi2E/OqbtGYbBlOHp\nDOubyD/nZbAqI5+ismpmz9/GuVP7ERXR5EdECNGJNKf56GHsDuV/YA+ycwrweFMrKaVuA14GIg6b\n7gGeAE7HvorpWqVUGjALiNBaTwFuBx5rfhpdh2EY/GTIJfSL68N3uauZl7mgTbYbHxPODReOJCbS\ng2kYfLYiizv/vpRvvs+Wu6GF6EKMpq48UUp9jz3amt957QbWa62HNrHeRdh9Dm9qrSfXmT4KeFhr\nfabz+glgCTAFWKG1fseZvldr3bOpBLxen+V2d71HMRVVFHPHlw9RUF7ILSf8kom9xrTZtqtqfHz4\n9Tbe/WorVdU+3C6DmEgPb957VpvtQwgRdEZDE5vTNuB2fqrrvG7yYf9a6/eVUv0amBUHFNd5XQrE\nNzDdp5Rya62POtZkYWF5U6E0KiUllvz8Y+uwDR6Ta4b/gr9+9ySPLn6R34y9jsGJA9ps66eO7sHY\nAcm8t3A7SzfmUHSwmnteWMwPTxlEWlJUm+2nro79fvyP5BFaOkMegcghJSW2wenNaT56C1iolLpJ\nKXUTMB/41zHEUgLUjSYW+6a4w6ebTRWErq53bA+uH30VLsPkhXWvs7t0b5tuPzE2nGtmDiMuKgy3\ny2DN1v3c9fJy3v4ig4MVx9bJLYQITc25+uhB4M9AH+xhOB/QWj9wDPvcDAxWSiUppcKAacBSYDFw\nNoBSajIg40A3w/BkxY2TL6fKV8Wza18mtzy/zffx5K9O5MVbpnP9rBEkx0Xw5ao9/Pqpb7nxiW8o\nr5S6LURn0qybz7TWc7XWt2qtf6e1nqOUer6lO1JKXaaUulZrXQP8Fvgcuxi8qrXeC3wAVCqllmB3\nRN/c0n10VSf0mcCP1CwO1pTxzJqXKKwsavN9GIbB+CGp3H/NJH78g8EAlFd5ufVvi3lv4XaKD1a1\n+T6FEO2vyY7mhiilSrTWcQGIp8Xy80tbfWlMZ2hrhP/l8dmu+Xy84zPSolL57fH/R0xYdMD2+bvn\nFtud0G5uUxavAAAgAElEQVSTkrJq3C4Tl2kQGebi8ZtObNU2O9v70dFJHqEjQH0Kre5obkiDGxPB\ndUbfUyivKeer3d9w5+L7efCEuwJWGB674QQAqmt8LN6Qw9xlmewvrqSqxseDb67ixFHdmTAklchw\nuc9BiI6ktc8ukgvXQ5BhGFww6JzaUdueXPMCxVUlAd1nmMfFKWN78pdfTiYm0oPHZbJ9bzGvz93C\nzc8s4rrHFvKbpxfJQ/eE6CAa/RqnlFpAwwd/A4gMWETimBiGwWPT/sz7Wz9mwZ5FPLH6b/xq7LUk\nRSQGdL8u0+TpX58EQEFxJUs27GPx+hzyiiqorqnmwTdXceakPowdnIJpyommEKHqaOf2f2qvIETb\nMgyDiwbPJNwVxmeZ83l8lV0YUqO6tcv+k+MjmHlCf86d2o+bn1lERbWP7dklPPfBBlITIymrqCHc\n4+JRpwlKCBE6Gi0KWuuv2zMQ0bYMw2DmwDPxuML4eMdnPLn6b9w09lq6R6e1awxP/so+e8jeX8bn\nK7JYujEHr8+irNLLCx9tYMKQNEYOSCLM0/XuShciFLXq6qNQIlcfNZ3Hgt2LeG/rfzEwuG3CTfSJ\n7dWO0dVXfLCKO/++jKoaf+1jusPDXFh+i8hwNw9dN6XDF4
iu8rnqKDpDHu159ZEUhQ7+YYHm5bEk\newVvb3mfcFc4/zf6CgYl9G+n6BpmWRZZuQf5bkseKzbnsr+4ErAHARo5MJnxKoXZ87dhGAaPXD81\nqLG2VFf6XHUEnSGP9iwKMnJaFzG1x0SuGH4Z1f5qnl37MhsLdFDjMQyDvumxXDx9IA9dN4X46DCi\nItzEx4SxckseL3y0kQOlVZSUV7Nw7V4KS+XmOCHag5wpdPBvENCyPDbs38zLG97Eb1lcPvxSjk8d\nFeDomi8lJZa8vBL25JexSucxZ2kmvjqP7e6bFkteYTket4vHbpyKywzN7zRd8XMVyjpDHh3h5jXR\nQY3oNpQbRl/NC+te49UNb1Guyjmx5+SmV2wnhmHQOzWG3qkxLF6fg8/v56zJffl+2350VhE+v0VF\ntY9fPfUtqnciw/olMmdpJi7TkKuZhGgDcqbQwb9BQOvyyCrZw8Mrn8HC4pTeJ3LBwHNwmcHt4G0q\nj4oqL79/YSk1Xj/x0WHkFVXUzjMMmDQ0jSF9ExnWN5FuCcG7laYrf65CUWfIQzqaW0CKQuvzyC8v\n4IV1r5FTnsfQpOO4cvhPiPJ0nIPp/qIKNmUW8vYXGdT4/NT9KJuGgcdt8MNTBjGgRzy9UqPbrbmp\nq3+uQk1nyEOKQgtIUTi2PCq8lby+8W02FGwhLSqFX466nLSolDaOsHmOJQ/LssguKGdLZiGbdh1g\n7db99W7HD/OY+P0WbpfJz89Q9E2PJS0xKiB3V8vnKrR0hjykKLSAFIVjz8Nv+flo+1y+zPqaSHcE\nVw7/CcOSVRtG2Dxt+X74LYt9BeXs2FvM9uwStmcXsze/rN4y4R4XPr8fl2nyw1MG0jMlhl4p0fzx\n1e8AWn0prHyuQktnyEOKQgtIUWi7PJbvW8Ubm/8NwMwBZ3J63+mYRvtd4RPo9+N3zy3G57M4Z0pf\nMnNLycwtPaJQAJiG/Syn6WN70r1bFD2So3nxo42YZvPumZDPVWjpDHnI1UciKCZ1H0dadAovr/8n\nH+/4jF0lWfx86I+C2s/Qlh5r4OqkW55bjM9vcfH0gezNL2PP/oNs2nmAGp+fL1burresYcBDb62m\ne7doeiRH8cmSXZimyWM3TMUw5CF/onOQM4UO/g0C2j6P0uqDvLbxbXThNlIik7lm5M/pGdO9zbbf\nmFB5P259fgl+y+L6C0awb3852QVlfLVqDz6fhWVZRzw6OMxjkpYYRVpSFBt3FOB2m1x59lASY8NJ\niAnnvte/kzuzg6gz5CHNRy0gRSEwefj8Pj7ZOY95mQvwmB4uVRcyqfu4Nt3H4UL5/bj1+SUAPHDN\nJHIOlJO9v4w3P9f4/BbpSVHkFJZTXeNvdH3TNBjWL5GUhEhSEyL5dGkmpmlwz+UTiIv24DLN2n2E\nSvEI5fejJTpDHlIUWkCKQmDz+D5/Ay+tfwMLGJ82hh+rC4h0B6Y5qSO/H5ZlUXSwmntfW4EFnD6h\nN0Wl1RQerOL7bfvxWxaN/VMzgNgoD2WVXkzTYPqYnqQkRJCSEMnrc7dgmgaPXv+/Jqr2Kh4d+f2o\nqzPkIUWhBaQoBD6P/RUFvL7xX+wsySIpIpFfDPtxQB6o11nfj0MH8fuumkh+UQV5hRW8PncLfsti\n1MBkig9WU1RWTe6B8ka36XGbxEV5iIsOY09+GaYBJ43uQVS4m6gIDx8v3olpGPz2R2OIiw4jLtrD\n7S8sA+oXj4YKSmNFprO+Hx1RpygKSikTeB4YDVQBV2uttznz0oF36iw+Brhda/2CUmo1cGgMyZ1a\n6yuOth8pCu2Th8/vY+6uL/ls13wAzux3Kmf1O61N74Lu6u/HLc8txrLgxotGkl9UQX5RBZ8sycRv\nWfTsFk1JeTUlZdV4fU1/5A3nP6Zh0K97LNERHqIi3KzN2I9hwMwT+hMV4SYq3M2b8zSmYXD3L8YT\nE+khzOPi1ueX4HIZ/P
WXU2q32dyCIs1gba+zXH00C4jQWk9RSk0GHgPOB9Ba5wDTAZRSU4AHgJeU\nUhGAobWeHsC4RCu4TBfnDjiDIUnH8Y9N7zB311fMy1zIHyb9Nmg3u3U2dZ/d1L97HADnTOlXbxnL\nsrjl+SVYlsWvLx5NeZWX8kovr8/djGXB1BHpFJdVU1xWzba9xfgti53ZpbVjVxwye8G2I/Z/i3Mw\nD/OYeL1+DMPggTdXEhHmJjLMxcGKGgwD5i7PJC4qjNgoD16f3Y+SX1RBmNvE4zaxLOuIq7GOpaB0\n1GLUUQWyKJwIfAagtV6mlBp/+AJKKQN4BviJ1trnLBOllJrnxHan1npZAGMULTQooT93TvwNdy56\ngGp/NX9Z8SQXDjqHk3pOkcsy24FhGA1eWjtOHVmYDx0kH/6/KVRW+yiv9HL/GyuxLLj8rCGUV9VQ\nUeXjP9/swLIsxgzuxsHyGkoratidaxeSXftK6z2pFuDdBduP2NfvX1h6xLRfPfUtUeFuIsJdFJdV\nYRgGz3+4gXC3SViYi7LKGgzD4LPlWUSEuQgPc1Fd4wNg/Y4CTMPANA1qvHbhydhdhGGAgT3NMGB3\n3kHcLgOXaeDzWxhAcVk1pmF37t/zygrcLoMHr52MaRgYhtGsgtKVC1Ygm49eBt7XWs91XmcBA7TW\n3jrLnAdcpLX+hfN6JDAZeBkYDMwFVN11Duf1+iy3u2OP1NVRLd29ipdW/ouD1WWMTh/K/034OUlR\nCcEOS7TQVffPA+CVu04/YtrLf5hBtddPRaWXXz++wG7eumQMRQerKD5YxbtfbQXLYsqoHlTX+Kiu\n8bMmIw/LsujeLZqyCi8VTvEJBbXFw4D46HDcLgO32yS/sALDgEG9EgjzuNiy64D9kMUR3XG77DOg\nb9bsAeDMKf0xDbtAf7pkJwCzTh5kb8tlMvvLDDDg52cPw3Ta8f4xZyMA114wCo+zvadnr8EAbvvZ\nhNr4Hn7Tvpv+D1dMwu2ced3z4hIMA564eToetwu3y+D6h77CMIx671krtHufwuPAMq31bOf1Hq11\nr8OWmQ08pbVe7LwOB0ytdYXzegV20ah/F1Ed0qcQ3DyKq0r455Z32VSgMTCI8kTy0Il/bNVZg7wf\noaWxDvPWfCu+5bnFANx9+QSqa3xU1fh49F9rAYtfnDWEqmoflTU+3nPOQs6a3Ae/38Lnt/jiO/uf\n/6nj7MOHZcH81fYB+oQR3fH5/Xh9Fiu25IIFowZ1w/Jb+C2LDTsPYAADe8bj81v4/H527bNzSk2M\nxOuz1y06WAWWfXZx+JlRqDIMeOiXU1r9ROBg9CksBmYCs50+hfUNLDMeWFLn9ZXASOB6pVQPIA7Y\nF8AYxTGKD4/j+lFXsih7Oe/o/1BWU84Tq//GJcfNondsj2CHJ9pQQ00fzZ126EtCfHRY7TSP236E\nytjB/2v6mrMkE6jfl7J4fQ4AF508sHba8k25APzk9ONqp23OLATg+lkjaqcd6jC/9dKx9aYBPHDN\n5COmPXL9VPx+i9v+tgTLgrsvH4/X68frt3jordUA/OriUbWXGD/7vn1Yu/a8YXh9dtF57dMtAFw2\nYzCWZfcDvfPlNiwsLjp5YG0h+njJLrAsZkzoXRvHoQI4/fieeL0WXp+fJRtywICRA5Lx+fzU+Pxs\nybSb0iIj2v4Q3h5XH43CPk25AjgeiNFa/10plQJ8obUeU2edMOB1oA9gAb/XWi85fNt1yZlC6ORR\nUFHIf7Z9zNr8DRgYnNRzCjMHnE6UJ6pZ64dKHsdK8ggtoZDHsfZHNOdKsJaS+xQaEAoflrYQanls\nLsjg3a0fkVuebzcpuSP560n3NPlwvVDLo7Ukj9DSGfJoz0tSQ3OQW9GhDU0+jjsn3sysgWdjYVHm\nLefxVc+zu3RvsEMTQjRBioIICLfpZkbf6Txwwh8YlzqanSVZPPTd08zO+JDymoqmNyCE
CAp5dLYI\nqITweK4c8ROmHpjI7IwP+XrPEr7Zs5RIdyR/OfEu3KZ8BIUIJXKmINrFkKTB3DnxZs4fcBYWFuXe\ncv609GG+3buUGn+jt6EIIdqZfE0T7cZtujm93ylM6j6OL7IWsmjvMt7RH/DZrvnM6Dud85N+EOwQ\nhejy5ExBtLv48DguHnwe9065gx/0nkZ5TTnvZnzEz977NfMyF1DhlT4HIYJFioIImvjwWC4cfC73\nTb2DCFc4YPHR9rnctfgvfLR9LsVVHfsyQiE6Imk+EkEXGxbDYyf/mah4Fx9+/yXzd3/LvMwFzMtc\nQLgrjDsm3ExKVHKwwxSiS5AzBREyosOiOL3fKdw39Q5+rC7ANEyqfNXcu+xhXtnwT7JK9wQ7RCE6\nPTlTECEnzOXhpJ5TmNp9Imvy1/NF5kJW561jdd46hiQO5rS+JzMkcbA8qluIAJCiIEKWy3QxPm0M\n41JHs+XAVuZlLWRL4Va2FG6lR3Q6p/Q+kQlpY/G4PMEOVYhOQ4qCCHmGYTA0+TiGJh9HZslu5u/+\nltV563hry3t8tH0uJ/Wcwkk9pxAfHhvsUIXo8KQoiA6lb1xvrhh+GbMGns3Xe5awKHs5c3d9ydxd\nXzI2ZSQn9pzMcYkDm3z4nhCiYVIURIeUGJHArEFnc1b/01i+bxXf7l3Kmvz1rMlfT2pkN07oOYnJ\n6eOJCYsOdqhCdChSFESHFu4KY1qvKZzUczI7SzJZtHc5q/K+54Ntc/hg2xyOTx3F1B4TUYmD5OxB\niGaQoiA6BcMwGBDfjwHx/bhw8Lncu/RhqnxVtVctJUUkMrX7BCZ1H0dSRGKwwxUiZElREJ1OjCea\nR6bdi2VZ7CzJYmn2Clbmfc8nO+fxyc55DE4YwIT0sYxNGUWUp3Xj2wrRWUlREJ2WffbQlwHxfblo\n8EzuXvJXqn3VbC3awdaiHczO+AgDg3BXGPdNvYNwV1jTGxWik5OiILqECHcEj0z7E2CPJb0ydw0r\nclaTU55Hjb+G3397L8OSFWNSRjAieaicQYguS4qC6HKSIxM5o9+pnN73FPYczGZt3nrW5G/ge+cH\n7Md8nz/wLEYkDyU1qluQIxai/UhREF2WYRj0ju1J79iezBx4JjlluazN38CnO7/E6/fy/taPeX/r\nx6RGdaO4qpQwl4cHT7hLrmISnVrAioJSygSeB0YDVcDVWuttdebfDFwN5DuTfglsPdo6QgRSenQa\nZ0ancWa/H1BUVczGgi1s3L+FzYVbqfZVU+Wr4s7F9zMmZSTHp45kYHx/XKYr2GEL0aYCeaYwC4jQ\nWk9RSk0GHgPOrzN/HPBzrfWqQxOUUhc2sY4Q7SIhPJ4TekzihB6TqPF7uWvxg9T4q7Esi2/3LuXb\nvUsxMPC4PPxs6A8ZkjhY+iFEpxDIonAi8BmA1nqZUmr8YfPHAXcopdKBOVrrvzRjHSHancd089BJ\n9wDg8/vYVrST1fnrWLx3OdW+al7Z8E9Mw6R/XB+yy3IId4fx5yl3SjOT6JAMy7ICsmGl1MvA+1rr\nuc7rLGCA1trrvP4j8BxQAnwA/A377KLRdRri9fost1tO4UX781t+dhbuZs2+jazZt4FtBbuwsP89\nxYfHMjJ9KGPShzE6fSjxEXFBjlaIIzT47PlAnimUAHUfW2nWKQgG8KTWuth5PQcYe7R1GlNYWN7q\nAFNSYsnP7/hDPkoewRNHEiennsTJqSdxsKaM+5Y9itdfg2XBoswVLMpcAYDLcDGjz8kMSx5Cv7je\nHaIvoiO+Hw3pDHkEIoeUlIafKhzIorAYmAnMdvoH1teZFwdsUEoNBcqAU4FXgcijrCNESIvxRPPw\nSX8kJSWWvLwSssty2FSg+WTnPLx+L59lzuezzPlEuiPx+b14TA93TPwNiREJwQ5diFqBLAofADOU\nUkuwT1OuUEpdBsRorf+ulLoTWIB9ldFXWutPnSuW
6q0TwPiECBjDMOgZ052eMd2Z0Xc6ld4qMgq3\nsfGAZlOB5kBlIdX+Gu5a8iDdo9MYmnQcQ5OOY2BCf7mzWgRVwPoU2kt+fmmrE+gMp5UgeYSapvKw\nLIvc8jw2Hchg84EMthbuoMZfUzu/X1wfBiX0Z1BCfwbG9yPKE9UeYR+hq7wfHUGAmo/avU9BCNEA\nwzBIj04jPTqNU3ufRI2vhu3Fu3hp/RvU+L1kle5hV0kWX2Z9DYDLMJmUPp7+znOcUqO6yZVNImCk\nKAgRZB6XhyFJg3ns5D8DUOWrZmdxJtuKdvJF1kK8fi9L9q1gyT6709rAwG26Oaf/DAYlDKBPbM8O\n0XEtOgYpCkKEmHBXGEOSBjMkaTDnDjgdn99HdlkOO4sz2VGcxaq8tdT4a/hw+6cAhJke/Fi4DTdX\njriMfnF9iA5Sk5Po+KQoCBHiXKar9hlN03pN5fLhP6aoqpjtRTvZ5vxkl+Xgxcvz378KQGpUN4oq\nS3CbLq4ffRW9YrrjcXmCnInoCKQoCNEBJYTHMy5tDOPSxgBQVlPOrpIsdhZnsrM4i10lu6n2V1Pt\nh0dXPYtpmPSMTievYj8uw82NY66iR3S6FApxBCkKQnQC0Z4ohicPYXjyEMC+2zqnLI+s0j1kle4h\ns2QPew5m4/V7gWoeXvkMpmGSHpXK/ooDuE0X1426gl6xPeSS2C5OioIQnZBpmPSISadHTDqTu9uP\nELP7JnLZXbqXPQf32v8vzabaX0O1Hx5f/TwGBmnRqfSJ7clxaf2II4Ee0ekkhMdjGA1ewSg6GSkK\nQnQRdt9ED3rH9gAmAPYZRV55Plmle+2zihK7YOSU5bIiZ3XtuhGuCGr8NbhMF+f0n0H36DTSo1JJ\njEiQy2M7GSkKQnRhpmHW3jMxMf14wC4U+eX7OegqRu/bxb6yXLLLcskpy8Xn8/HBtjn1tuEyXIxP\nG0P36DTnJ52kiAQ5s+igpCgIIeoxDZO06FRGpAxkYMTg2ulev5f8igJyyvLIKctlnzNSnc/ysTxn\nVb1tGED/+L70iOlOz+ju9IhJJy0qhRhPtBSLECdFQQjRLG7TXXs2ACNrp/v8PvZXFLDPKRTzMhfg\ntXzsKtnNjuLMetuIcEWQGpVMSmQ3NhZoXKaLm8ZcQ1pUCmFyJVRIkKIghDgmLtNFWnQqadGpjGEk\nZ/U/DYAaXw055XlkH8xhdsaH+Cw/iRHxZJflklW6117ZB3/97kkMDJIjkyiuKsE0TM4feBbdIpPp\nFplEckQiblMOVe1F/tJCiIDwuDy1N91N6j6udrrf8lNcVUJueT455Xn1mqMOPRhwdsaH9bZlYjI4\ncQDdIpNJiUx2CkYyKZFJRLgj2jWvzk6KghCiXZmGSWJEAokRCQxJGlxv3sHqMvIrCthf+3OAlblr\n8Vk+dOE2dOG2I7YX44kmJTKZvQdzMA2TWYPOIjE8gaSIRJIiEqg/bpdoihQFIUTIiAmLJiYsmv7x\nfWqn/WzYDwGo9lWzv+IA+RUFvLl5Nn7Lz4D4vhRUHCCrdC8+ywfAO/qDets0MOgd25PkyCS6RSSR\nHJlEUkQiyREJJEYkys16h5GiIIToEMJcYbU35I1OubfePL/lp7CymMKqIg5UFnKgsojCykKW56zC\nb/nJPriPrNI9DW432hNFpbcK0zCZ2mMCieEJJIbHkxiRSEJ4PAnhcV3qKbRSFIQQHZ5pmCRHJpIc\nmQj0r51+6ZCLSEmJJTevmJLqUvZXHKCg4gAHKu3iYReRIspqyvFZPr7es+SIbRsYxIbFUFZT7hSO\niSSGxzsFI57EiATiw+PwdJLO8M6RhRBCHIVpmLUH8UEJ/Y+Yb1kWZd5y+2yjspDCqmIKK4soqiqm\nqKqYwiq7qNiFY3GD+4jxRJMYHk9OeT6mYXJ63+nEh8WREB5PfLj9/0h3RMjfpyFFQQjR5RmGQYwn\nmhhPtPMYkCNZ
lsXBmrL/FYrKYj7aPhe/5adfXG+KqorJKc+vvYLq4x2fH7GNMNNDQkQ8CWHx7CrZ\njWkYzBxwJgnhccQ7TVVxYbFBba6SoiCEEM1gGHYzUmxYDL1jewIwrdeUestYlkW5t8IpHCUUOwXk\ny6yv8VsWqVEpFFUVk1e+v3add7d+dMS+4sNiiXcKRUbhNlymiwsHnktCRHxt01WgLsWVoiCEEG3E\nMAyiPVFEe6LoGdO9dvrZ/WfUW67G76W4qoSiqmKncNi/L9q7DL9l4TE97DvsJr9/bnm33jZMDO6b\negeJEQltmkPAioJSygSeB0YDVcDVWuttdeZfCvwG8ALrgeu11n6l1GqgxFlsp9b6ikDFKIQQweAx\n3XSLTKJbZFK96RcNnln7u2VZVHgrKKoqwYqoITNvH4VVxRRVFvNd7moswGO2/aNBAnmmMAuI0FpP\nUUpNBh4DzgdQSkUC9wMjtdblSql/AecqpeYBhtZ6egDjEkKIkGcYBlGeKKI8UaSkxNLT3bt23k+G\nXhyw/QayKJwIfAagtV6mlBpfZ14VMFVrXV4njkrss4oopzi4gTu11suOtpPExCjc7tZ3yqSkdI67\nHSWP0CJ5hJbOkEd75RDIohAHFNd57VNKubXWXq21H8gFUErdBMQAXwAjgEeBl4HBwFyllNJaexvb\nSWFheWOzmpSSEkt+fmmr1w8VkkdokTxCS2fIIxA5NFZkAlkUSqj/0BGz7sHd6XN4GDgOuEhrbSml\nMoBtWmsLyFBKFQDdgd0BjFMIIYQjkOPoLQbOBnD6FNYfNv9FIAKYVacZ6UrsvgeUUj2wzzb2BTBG\nIYQQdQTyTOEDYIZSagn2QExXKKUuw24qWglcBXwLzFdKATwFvAK8rpRaBFjAlUdrOhJCCNG2AlYU\nnH6D6w6bvKXO742dpVwWmIiEEEI0JZDNR0IIIToYKQpCCCFqSVEQQghRy7AsK9gxCCGECBFypiCE\nEKKWFAUhhBC1pCgIIYSoJUVBCCFELSkKQgghaklREEIIUUuKghBCiFpdcozmpoYKDXVKqUnAQ1rr\n6UqpQcDr2A8Q3ADc4Dx3KqQppTzAq0A/IBx7JL5NdLBclFIu4CVAYcd9HfaAUa/TgfIAUEqlAquA\nGdjD5L5Ox8uh3nC+wAN0zDzuAM4DwrCPVV/TTnl01TOF2qFCgdtxHtfdESilbsMehCjCmfQ4cJfW\n+iTsp9GeH6zYWuinQIET95nAs3TMXGYCaK1PAO7CPgh1uDycIv0iUOFM6og5ROAM5+v8XEHHzGM6\nMBU4ATgZ6E075tFVi0K9oUKB8UdfPKRsBy6s83oc9rcIgLnAae0eUeu8C9zt/G5gfzPtcLlorT8E\nrnVe9gWK6IB5YI94+AKQ7bzuiDnUDuerlJrvjOPSEfM4A3v8mQ+Aj4FPaMc8umpRaHCo0GAF0xJa\n6/eBmjqTDGekOoBSIL79o2o5rfVBrXWpUioWeA/7W3ZHzcWrlPoH8AzwFh0sD6XU5UC+1vrzOpM7\nVA6OcuzidgZ2M16Hey8c3bC/qF7C//Iw2yuPrloUjjpUaAdTt10xFvubaoeglOoNLADe1Fq/TQfO\nRWv9C+yhZV8CIuvM6gh5XIk9INZCYAzwBpBaZ35HyAEgA/in1trSWmcABUBanfkdJY8C4HOtdbXW\nWmP3UdUtAgHNo6sWhaaGCu1I1jhtkABnYY9mF/KUUmnAPOD3WutXnckdLhel/r+9uwmtowrDOP4v\nQfGjLlTcKi7SBz/QVmyrLmps0VYUQXSj4sdCoaCgElfRRikGm6q0ihtxoejKLCwNQrGgYomUFrVV\nC+GBgi1UcVHRlYpQ6+Kd3F6TGwJJbLje57dJQm7OzLlJ5p05M+c5eri5KQh1pvo38FU39cP2Otu3\n2h4ADgOPAHu6qQ+NTsv57u3CfkwAmyQta/pxIfDp2epHVwyZ/AdmLBW6xPuzEI
PAO5LOBSapoZhu\nMARcDGyRNHVv4WngzS7ry0fAu5L2AecAz1D73o2/k3bd+Hc1Yzlf4CRd1g/bH0taBxykTtyfpJ6k\nOiv9SHR2RES09OrwUUREdJCiEBERLSkKERHRkqIQEREtKQoREdGSohAxT5LWSBptPr9H0tbFbDNi\nKfTqPIWIxXA1zYxZ2+PA+GK2GbEUMk8h/teaWaBD1Gzjq6jZ6w/a/muW128CtlIT0X4AnrD9i6TX\nqEjpU8Bu4A3gO2A5NYv2R2DA9mOSjgEfAndTQX9D1GSwfmDQ9pika6mspOVUpMTrVLxEe5uvADuB\nDdRkrA9sjzZ92g70UTHK7zdfnwZ+BR6wfXJh71z0qgwfRS+4BXiKKgqXU4FpM0i6DNgGbLS9CvgE\nGJV0BXCn7eubtvqpPJphYNz2SIfmfrJ9DfANFc9+BxUXPhWJ8Tjwsu3VwG3AiO3fprW5mYpNvg5Y\nA9wn6a7m51cA65vMpReAzbZvpFI1b5jHexQBpChEbzhi+0SzKMkkcMksr1tLFY3PJR2mCkk/dRXw\nh8yvmzcAAAFoSURBVKQvgWepXPs/59jmnubjceCLJnDxOBXtAXXlcF6TmzRCXR1Mtx54z/Yp279T\naZkbmu/Z9lTS7ziwS9JbwKTtvXPsW8SsUhSiF7QfwE9TeVed9AETtlfaXgmsBu5vDuhrqfUfLgX2\nS1oxxzbbh6c6JfCOAfdSq80NzdLG9P/PZZy5Dzi1GA62dwADwFFgu6Tn59i3iFmlKESccQC4ue2A\nvwV4VdIqaoGTfbafow7kog72831Y43Zg2PZuanWtqaU929v8DHhUUp+kC4CHqKjxf5F0ALjI9k5g\nBxk+igVIUYho2P6ZStYck/Q9dXAdtH0I2A8cadYAPkYNDx0EbpK0bR6bewmYaNrb2LR55bQ23wZO\nAN8Ch6h7Dbs6tDVEpYN+Ta0C9+I89icCyNNHERHRJvMUoqdIOp866+9kuJlvENGzcqUQEREtuacQ\nEREtKQoREdGSohARES0pChER0ZKiEBERLf8AlC75LQiv0GMAAAAASUVORK5CYII=\n",
      "text/plain": [
       "<matplotlib.figure.Figure at 0x111352610>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "cvresult = pd.read_csv('1_nestimators.csv', index_col=0)  # pd.DataFrame.from_csv is deprecated (removed in pandas 1.0)\n",
    "        \n",
    "# plot\n",
    "test_means = cvresult['test-mlogloss-mean']\n",
    "test_stds = cvresult['test-mlogloss-std'] \n",
    "        \n",
    "train_means = cvresult['train-mlogloss-mean']\n",
    "train_stds = cvresult['train-mlogloss-std'] \n",
    "\n",
    "x_axis = range(0, cvresult.shape[0])\n",
    "        \n",
    "pyplot.errorbar(x_axis, test_means, yerr=test_stds ,label='Test')\n",
    "pyplot.errorbar(x_axis, train_means, yerr=train_stds ,label='Train')\n",
    "pyplot.title(\"XGBoost n_estimators vs Log Loss\")\n",
    "pyplot.xlabel( 'n_estimators' )\n",
    "pyplot.ylabel( 'Log Loss' )\n",
    "pyplot.savefig( 'n_estimators4_1.png' )\n",
    "\n",
    "pyplot.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 11,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# With this 1000-row sample, the best n_estimators is 50\n",
    "# Next: tune the tree depth parameters"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "from sklearn.model_selection import GridSearchCV\n",
    "from sklearn.model_selection import StratifiedKFold"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "metadata": {},
   "outputs": [],
   "source": [
    "train_data = pd.read_csv(\"RentListingInquries_FE_train.csv\")\n",
    "train_data = train_data.tail(1000)\n",
    "y_train = train_data['interest_level']\n",
    "train_data = train_data.drop([ \"interest_level\"], axis=1)\n",
    "X_train = train_data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'max_depth': [3, 5, 7, 9], 'min_child_weight': [2, 4, 6, 8]}"
      ]
     },
     "execution_count": 14,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "max_depth = range(3,10,2)\n",
    "min_child_weight = range(2,10,2)\n",
    "param_test2_1 = dict(max_depth=max_depth, min_child_weight=min_child_weight)\n",
    "param_test2_1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Users/lihuixiang/anaconda3/envs/py27/lib/python2.7/site-packages/sklearn/model_selection/_search.py:667: DeprecationWarning: The grid_scores_ attribute was deprecated in version 0.18 in favor of the more elaborate cv_results_ attribute. The grid_scores_ attribute will not be available from 0.20\n",
      "  DeprecationWarning)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "([mean: -0.69356, std: 0.01167, params: {'max_depth': 3, 'min_child_weight': 2},\n",
       "  mean: -0.69494, std: 0.00989, params: {'max_depth': 3, 'min_child_weight': 4},\n",
       "  mean: -0.69717, std: 0.00718, params: {'max_depth': 3, 'min_child_weight': 6},\n",
       "  mean: -0.69294, std: 0.01375, params: {'max_depth': 3, 'min_child_weight': 8},\n",
       "  mean: -0.70037, std: 0.01969, params: {'max_depth': 5, 'min_child_weight': 2},\n",
       "  mean: -0.69129, std: 0.01462, params: {'max_depth': 5, 'min_child_weight': 4},\n",
       "  mean: -0.68898, std: 0.00571, params: {'max_depth': 5, 'min_child_weight': 6},\n",
       "  mean: -0.68687, std: 0.00639, params: {'max_depth': 5, 'min_child_weight': 8},\n",
       "  mean: -0.69707, std: 0.02497, params: {'max_depth': 7, 'min_child_weight': 2},\n",
       "  mean: -0.69588, std: 0.01746, params: {'max_depth': 7, 'min_child_weight': 4},\n",
       "  mean: -0.69311, std: 0.01938, params: {'max_depth': 7, 'min_child_weight': 6},\n",
       "  mean: -0.69114, std: 0.00945, params: {'max_depth': 7, 'min_child_weight': 8},\n",
       "  mean: -0.69575, std: 0.02045, params: {'max_depth': 9, 'min_child_weight': 2},\n",
       "  mean: -0.70181, std: 0.01125, params: {'max_depth': 9, 'min_child_weight': 4},\n",
       "  mean: -0.68721, std: 0.01954, params: {'max_depth': 9, 'min_child_weight': 6},\n",
       "  mean: -0.69114, std: 0.00945, params: {'max_depth': 9, 'min_child_weight': 8}],\n",
       " {'max_depth': 5, 'min_child_weight': 8},\n",
       " -0.68686884910799562)"
      ]
     },
     "execution_count": 15,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xgb2_1 = XGBClassifier(\n",
    "        learning_rate =0.1,\n",
    "        n_estimators=50,  # best n_estimators from the first tuning round\n",
    "        max_depth=5,\n",
    "        min_child_weight=1,\n",
    "        gamma=0,\n",
    "        subsample=0.3,\n",
    "        colsample_bytree=0.8,\n",
    "        colsample_bylevel = 0.7,\n",
    "        objective= 'multi:softprob',\n",
    "        seed=3)\n",
    "\n",
    "\n",
    "gsearch2_1 = GridSearchCV(xgb2_1, param_grid = param_test2_1, scoring='neg_log_loss',n_jobs=-1, cv=3)\n",
    "gsearch2_1.fit(X_train , y_train)\n",
    "\n",
    "gsearch2_1.cv_results_, gsearch2_1.best_params_, gsearch2_1.best_score_  # grid_scores_ is deprecated, removed in sklearn 0.20"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# With the current sample size, best params: 'max_depth': 5, 'min_child_weight': 8"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Tune L1/L2 regularization (reg_alpha / reg_lambda)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 18,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'reg_alpha': [1.5, 2], 'reg_lambda': [0.5, 1, 2]}"
      ]
     },
     "execution_count": 18,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "reg_alpha = [ 1.5, 2]    # default = 0; candidates tried: 0.1, 1, 1.5, 2\n",
    "reg_lambda = [0.5, 1, 2]      # default = 1; candidates tried: 0.1, 0.5, 1, 2\n",
    "\n",
    "param_test5_1 = dict(reg_alpha=reg_alpha, reg_lambda=reg_lambda)\n",
    "param_test5_1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Users/lihuixiang/anaconda3/envs/py27/lib/python2.7/site-packages/sklearn/model_selection/_search.py:667: DeprecationWarning: The grid_scores_ attribute was deprecated in version 0.18 in favor of the more elaborate cv_results_ attribute. The grid_scores_ attribute will not be available from 0.20\n",
      "  DeprecationWarning)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "([mean: -0.68324, std: 0.01442, params: {'reg_alpha': 1.5, 'reg_lambda': 0.5},\n",
       "  mean: -0.68894, std: 0.01269, params: {'reg_alpha': 1.5, 'reg_lambda': 1},\n",
       "  mean: -0.68404, std: 0.00932, params: {'reg_alpha': 1.5, 'reg_lambda': 2},\n",
       "  mean: -0.68625, std: 0.01069, params: {'reg_alpha': 2, 'reg_lambda': 0.5},\n",
       "  mean: -0.68532, std: 0.01165, params: {'reg_alpha': 2, 'reg_lambda': 1},\n",
       "  mean: -0.68704, std: 0.01347, params: {'reg_alpha': 2, 'reg_lambda': 2}],\n",
       " {'reg_alpha': 1.5, 'reg_lambda': 0.5},\n",
       " -0.68323984869569543)"
      ]
     },
     "execution_count": 19,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xgb5_1 = XGBClassifier(\n",
    "        learning_rate =0.1,\n",
    "        n_estimators=50,  # best n_estimators from the second tuning round\n",
    "        max_depth=5,\n",
    "        min_child_weight=8,\n",
    "        gamma=0,\n",
    "        subsample=0.7,\n",
    "        colsample_bytree=0.8,\n",
    "        colsample_bylevel = 0.7,\n",
    "        objective= 'multi:softprob',\n",
    "        seed=3)\n",
    "\n",
    "\n",
    "gsearch5_1 = GridSearchCV(xgb5_1, param_grid = param_test5_1, scoring='neg_log_loss',n_jobs=-1, cv=3)\n",
    "gsearch5_1.fit(X_train , y_train)\n",
    "\n",
    "gsearch5_1.cv_results_, gsearch5_1.best_params_, gsearch5_1.best_score_  # grid_scores_ is deprecated, removed in sklearn 0.20"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# With the current sample size, best params: 'reg_alpha': 1.5, 'reg_lambda': 0.5"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Tune subsample / colsample_bytree"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'colsample_bytree': [0.6, 0.7, 0.8, 0.9],\n",
       " 'subsample': [0.3, 0.4, 0.5, 0.6, 0.7, 0.8]}"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "subsample = [i/10.0 for i in range(3,9)]\n",
    "colsample_bytree = [i/10.0 for i in range(6,10)]\n",
    "param_test3_1 = dict(subsample=subsample, colsample_bytree=colsample_bytree)\n",
    "param_test3_1"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "/Users/lihuixiang/anaconda3/envs/py27/lib/python2.7/site-packages/sklearn/model_selection/_search.py:667: DeprecationWarning: The grid_scores_ attribute was deprecated in version 0.18 in favor of the more elaborate cv_results_ attribute. The grid_scores_ attribute will not be available from 0.20\n",
      "  DeprecationWarning)\n"
     ]
    },
    {
     "data": {
      "text/plain": [
       "([mean: -0.69135, std: 0.01231, params: {'subsample': 0.3, 'colsample_bytree': 0.6},\n",
       "  mean: -0.69358, std: 0.01469, params: {'subsample': 0.4, 'colsample_bytree': 0.6},\n",
       "  mean: -0.68970, std: 0.01225, params: {'subsample': 0.5, 'colsample_bytree': 0.6},\n",
       "  mean: -0.69124, std: 0.00597, params: {'subsample': 0.6, 'colsample_bytree': 0.6},\n",
       "  mean: -0.68843, std: 0.01446, params: {'subsample': 0.7, 'colsample_bytree': 0.6},\n",
       "  mean: -0.69074, std: 0.01909, params: {'subsample': 0.8, 'colsample_bytree': 0.6},\n",
       "  mean: -0.69343, std: 0.01239, params: {'subsample': 0.3, 'colsample_bytree': 0.7},\n",
       "  mean: -0.68429, std: 0.01272, params: {'subsample': 0.4, 'colsample_bytree': 0.7},\n",
       "  mean: -0.69105, std: 0.01349, params: {'subsample': 0.5, 'colsample_bytree': 0.7},\n",
       "  mean: -0.69077, std: 0.01397, params: {'subsample': 0.6, 'colsample_bytree': 0.7},\n",
       "  mean: -0.68793, std: 0.01153, params: {'subsample': 0.7, 'colsample_bytree': 0.7},\n",
       "  mean: -0.69306, std: 0.01456, params: {'subsample': 0.8, 'colsample_bytree': 0.7},\n",
       "  mean: -0.68687, std: 0.00639, params: {'subsample': 0.3, 'colsample_bytree': 0.8},\n",
       "  mean: -0.68366, std: 0.00541, params: {'subsample': 0.4, 'colsample_bytree': 0.8},\n",
       "  mean: -0.68638, std: 0.01099, params: {'subsample': 0.5, 'colsample_bytree': 0.8},\n",
       "  mean: -0.68235, std: 0.01684, params: {'subsample': 0.6, 'colsample_bytree': 0.8},\n",
       "  mean: -0.68874, std: 0.01424, params: {'subsample': 0.7, 'colsample_bytree': 0.8},\n",
       "  mean: -0.68741, std: 0.01676, params: {'subsample': 0.8, 'colsample_bytree': 0.8},\n",
       "  mean: -0.68077, std: 0.00166, params: {'subsample': 0.3, 'colsample_bytree': 0.9},\n",
       "  mean: -0.68829, std: 0.00925, params: {'subsample': 0.4, 'colsample_bytree': 0.9},\n",
       "  mean: -0.68444, std: 0.01562, params: {'subsample': 0.5, 'colsample_bytree': 0.9},\n",
       "  mean: -0.68725, std: 0.01255, params: {'subsample': 0.6, 'colsample_bytree': 0.9},\n",
       "  mean: -0.68406, std: 0.00887, params: {'subsample': 0.7, 'colsample_bytree': 0.9},\n",
       "  mean: -0.68613, std: 0.01734, params: {'subsample': 0.8, 'colsample_bytree': 0.9}],\n",
       " {'colsample_bytree': 0.9, 'subsample': 0.3},\n",
       " -0.68076797717809678)"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xgb3_1 = XGBClassifier(\n",
    "        learning_rate =0.1,\n",
    "        n_estimators=50,  # best n_estimators from the second tuning round\n",
    "        max_depth=5,\n",
    "        min_child_weight=8,\n",
    "        gamma=0,\n",
    "        subsample=0.3,\n",
    "        colsample_bytree=0.8,\n",
    "        colsample_bylevel = 0.7,\n",
    "        objective= 'multi:softprob',\n",
    "        seed=3)\n",
    "\n",
    "\n",
    "gsearch3_1 = GridSearchCV(xgb3_1, param_grid = param_test3_1, scoring='neg_log_loss',n_jobs=-1, cv=3)\n",
    "gsearch3_1.fit(X_train , y_train)\n",
    "\n",
    "gsearch3_1.cv_results_, gsearch3_1.best_params_, gsearch3_1.best_score_  # grid_scores_ is deprecated, removed in sklearn 0.20"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# With the current sample size, best params: 'colsample_bytree': 0.9, 'subsample': 0.3"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "metadata": {},
   "outputs": [],
   "source": [
    "def modelfit(alg, X_train, y_train, useTrainCV=True, cv_folds=3, early_stopping_rounds=100):\n",
    "    \"\"\"Optionally tune n_estimators via xgb.cv with early stopping, then fit alg and print train log loss.\"\"\"\n",
    "    \n",
    "    if useTrainCV:\n",
    "        xgb_param = alg.get_xgb_params()\n",
    "        xgb_param['num_class'] = 3\n",
    "        \n",
    "        xgtrain = xgb.DMatrix(X_train, label = y_train)\n",
    "        \n",
    "        cvresult = xgb.cv(xgb_param, xgtrain, num_boost_round=alg.get_params()['n_estimators'], nfold=cv_folds,\n",
    "                         metrics='mlogloss', early_stopping_rounds=early_stopping_rounds)\n",
    "        \n",
    "        n_estimators = cvresult.shape[0]\n",
    "        alg.set_params(n_estimators = n_estimators)\n",
    "        \n",
    "        print (cvresult)\n",
    "        cvresult.to_csv('my_preds4_2_3_699.csv', index_label = 'n_estimators')\n",
    "        \n",
    "        \n",
    "    \n",
    "    #Fit the algorithm on the data\n",
    "    alg.fit(X_train, y_train, eval_metric='mlogloss')\n",
    "        \n",
    "    #Predict training set:\n",
    "    train_predprob = alg.predict_proba(X_train)\n",
    "    logloss = log_loss(y_train, train_predprob)\n",
    "\n",
    "        \n",
    "    #Print model report:\n",
    "    print (\"logloss of train :\" )\n",
    "    print (logloss)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 26,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
      "0              1.088776           0.000425             1.088231   \n",
      "1              1.079556           0.000972             1.078168   \n",
      "2              1.070156           0.001429             1.068219   \n",
      "3              1.060966           0.001719             1.058253   \n",
      "4              1.052368           0.002028             1.049217   \n",
      "5              1.043623           0.001792             1.040060   \n",
      "6              1.035394           0.002010             1.031331   \n",
      "7              1.027033           0.002556             1.022462   \n",
      "8              1.019095           0.003193             1.014082   \n",
      "9              1.011131           0.003076             1.005565   \n",
      "10             1.003988           0.003516             0.997695   \n",
      "11             0.996483           0.003115             0.989869   \n",
      "12             0.989228           0.002981             0.982157   \n",
      "13             0.982530           0.002571             0.974889   \n",
      "14             0.976246           0.002712             0.967481   \n",
      "15             0.969812           0.002747             0.960275   \n",
      "16             0.963182           0.002600             0.953017   \n",
      "17             0.956435           0.002546             0.945834   \n",
      "18             0.950274           0.002610             0.939222   \n",
      "19             0.944162           0.002817             0.932693   \n",
      "20             0.938189           0.003095             0.926253   \n",
      "21             0.932408           0.003376             0.919931   \n",
      "22             0.926678           0.003651             0.913712   \n",
      "23             0.921245           0.003824             0.907542   \n",
      "24             0.916192           0.004279             0.901998   \n",
      "25             0.911142           0.004226             0.896407   \n",
      "26             0.905626           0.004315             0.890587   \n",
      "27             0.900547           0.004246             0.885023   \n",
      "28             0.896043           0.004266             0.879867   \n",
      "29             0.891316           0.004062             0.874636   \n",
      "..                  ...                ...                  ...   \n",
      "170            0.692345           0.005821             0.610309   \n",
      "171            0.692155           0.005881             0.609653   \n",
      "172            0.691949           0.005622             0.608934   \n",
      "173            0.691681           0.005809             0.608318   \n",
      "174            0.691417           0.005970             0.607753   \n",
      "175            0.691158           0.006130             0.607140   \n",
      "176            0.690987           0.006161             0.606424   \n",
      "177            0.690659           0.006385             0.605812   \n",
      "178            0.690381           0.006555             0.605207   \n",
      "179            0.690048           0.006678             0.604617   \n",
      "180            0.689964           0.006674             0.603935   \n",
      "181            0.689525           0.006627             0.603315   \n",
      "182            0.689095           0.006560             0.602663   \n",
      "183            0.689007           0.006674             0.602110   \n",
      "184            0.688718           0.006860             0.601502   \n",
      "185            0.688428           0.006808             0.601004   \n",
      "186            0.688389           0.006892             0.600474   \n",
      "187            0.688131           0.007065             0.599839   \n",
      "188            0.687902           0.007057             0.599338   \n",
      "189            0.687568           0.007254             0.598761   \n",
      "190            0.687367           0.007168             0.598185   \n",
      "191            0.687014           0.007243             0.597625   \n",
      "192            0.686814           0.007185             0.596991   \n",
      "193            0.686734           0.007166             0.596390   \n",
      "194            0.686634           0.007174             0.595827   \n",
      "195            0.686492           0.007444             0.595337   \n",
      "196            0.686335           0.007666             0.594846   \n",
      "197            0.686234           0.007753             0.594354   \n",
      "198            0.686028           0.007895             0.593880   \n",
      "199            0.685873           0.007721             0.593319   \n",
      "\n",
      "     train-mlogloss-std  \n",
      "0              0.000663  \n",
      "1              0.001032  \n",
      "2              0.001476  \n",
      "3              0.001776  \n",
      "4              0.001946  \n",
      "5              0.001734  \n",
      "6              0.001800  \n",
      "7              0.002031  \n",
      "8              0.002352  \n",
      "9              0.002356  \n",
      "10             0.002257  \n",
      "11             0.002018  \n",
      "12             0.001717  \n",
      "13             0.001688  \n",
      "14             0.001943  \n",
      "15             0.001943  \n",
      "16             0.001833  \n",
      "17             0.001969  \n",
      "18             0.002144  \n",
      "19             0.002035  \n",
      "20             0.002330  \n",
      "21             0.002886  \n",
      "22             0.003113  \n",
      "23             0.003198  \n",
      "24             0.003585  \n",
      "25             0.003386  \n",
      "26             0.003400  \n",
      "27             0.003673  \n",
      "28             0.003838  \n",
      "29             0.003543  \n",
      "..                  ...  \n",
      "170            0.007886  \n",
      "171            0.007958  \n",
      "172            0.008092  \n",
      "173            0.008135  \n",
      "174            0.007960  \n",
      "175            0.007938  \n",
      "176            0.007954  \n",
      "177            0.007950  \n",
      "178            0.007986  \n",
      "179            0.008034  \n",
      "180            0.007999  \n",
      "181            0.008069  \n",
      "182            0.008122  \n",
      "183            0.008033  \n",
      "184            0.008155  \n",
      "185            0.008134  \n",
      "186            0.008186  \n",
      "187            0.008202  \n",
      "188            0.008223  \n",
      "189            0.008129  \n",
      "190            0.008225  \n",
      "191            0.008253  \n",
      "192            0.008375  \n",
      "193            0.008385  \n",
      "194            0.008436  \n",
      "195            0.008509  \n",
      "196            0.008384  \n",
      "197            0.008418  \n",
      "198            0.008530  \n",
      "199            0.008466  \n",
      "\n",
      "[200 rows x 4 columns]\n",
      "logloss of train :\n",
      "0.577460527264\n"
     ]
    }
   ],
   "source": [
    "#params = {\"objective\": \"multi:softprob\", \"eval_metric\":\"mlogloss\", \"num_class\": 9}\n",
    "xgb44 = XGBClassifier(\n",
    "        learning_rate =0.02,\n",
    "        n_estimators=200,  # upper bound is fine; xgb.cv returns a suitable n_estimators automatically\n",
    "        max_depth=5,\n",
    "        min_child_weight=8,\n",
    "        gamma=0,\n",
    "        subsample = 0.3,\n",
    "        colsample_bytree=0.9,\n",
    "        colsample_bylevel=0.7,\n",
    "        reg_alpha = 0,\n",
    "        reg_lambda = 3,\n",
    "        objective= 'multi:softprob',\n",
    "        seed=3)\n",
    "\n",
    "modelfit(xgb44, X_train, y_train)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "{'base_score': 0.5,\n",
       " 'booster': 'gbtree',\n",
       " 'colsample_bylevel': 0.7,\n",
       " 'colsample_bytree': 0.9,\n",
       " 'gamma': 0,\n",
       " 'learning_rate': 0.02,\n",
       " 'max_delta_step': 0,\n",
       " 'max_depth': 5,\n",
       " 'min_child_weight': 8,\n",
       " 'missing': None,\n",
       " 'n_estimators': 200,\n",
       " 'nthread': 1,\n",
       " 'objective': 'multi:softprob',\n",
       " 'reg_alpha': 0,\n",
       " 'reg_lambda': 3,\n",
       " 'scale_pos_weight': 1,\n",
       " 'seed': 3,\n",
       " 'silent': 1,\n",
       " 'subsample': 0.3}"
      ]
     },
     "execution_count": 27,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "xgb44.get_xgb_params()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# [The cells below were written after consulting the reference solution]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 28,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "('logloss of train is:', 0.57746052726358177)\n"
     ]
    }
   ],
   "source": [
    "# Save the fitted model, reload it, and sanity-check the train log loss\n",
    "import pandas as pd\n",
    "import cPickle\n",
    "\n",
    "with open(\"xgb_model.pkl\", 'wb') as f:\n",
    "    cPickle.dump(xgb44, f)\n",
    "\n",
    "# Use a distinct name: 'xgb' would shadow the xgboost module alias imported above\n",
    "with open(\"xgb_model.pkl\", 'rb') as f:\n",
    "    xgb_loaded = cPickle.load(f)\n",
    "\n",
    "train_predprob = xgb_loaded.predict_proba(X_train)\n",
    "logloss = log_loss(y_train, train_predprob)\n",
    "\n",
    "#Print model report:\n",
    "print ('logloss of train is:', logloss)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/html": [
       "<div>\n",
       "<style>\n",
       "    .dataframe thead tr:only-child th {\n",
       "        text-align: right;\n",
       "    }\n",
       "\n",
       "    .dataframe thead th {\n",
       "        text-align: left;\n",
       "    }\n",
       "\n",
       "    .dataframe tbody tr th {\n",
       "        vertical-align: top;\n",
       "    }\n",
       "</style>\n",
       "<table border=\"1\" class=\"dataframe\">\n",
       "  <thead>\n",
       "    <tr style=\"text-align: right;\">\n",
       "      <th></th>\n",
       "      <th>bathrooms</th>\n",
       "      <th>bedrooms</th>\n",
       "      <th>price</th>\n",
       "      <th>price_bathrooms</th>\n",
       "      <th>price_bedrooms</th>\n",
       "      <th>room_diff</th>\n",
       "      <th>room_num</th>\n",
       "      <th>Year</th>\n",
       "      <th>Month</th>\n",
       "      <th>Day</th>\n",
       "      <th>...</th>\n",
       "      <th>virtual</th>\n",
       "      <th>walk</th>\n",
       "      <th>walls</th>\n",
       "      <th>war</th>\n",
       "      <th>washer</th>\n",
       "      <th>water</th>\n",
       "      <th>wheelchair</th>\n",
       "      <th>wifi</th>\n",
       "      <th>windows</th>\n",
       "      <th>work</th>\n",
       "    </tr>\n",
       "  </thead>\n",
       "  <tbody>\n",
       "    <tr>\n",
       "      <th>0</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>2950</td>\n",
       "      <td>1475.000000</td>\n",
       "      <td>1475.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>6</td>\n",
       "      <td>11</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>1</th>\n",
       "      <td>1.0</td>\n",
       "      <td>2</td>\n",
       "      <td>2850</td>\n",
       "      <td>1425.000000</td>\n",
       "      <td>950.000000</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>6</td>\n",
       "      <td>24</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>2</th>\n",
       "      <td>1.0</td>\n",
       "      <td>1</td>\n",
       "      <td>3758</td>\n",
       "      <td>1879.000000</td>\n",
       "      <td>1879.000000</td>\n",
       "      <td>0.0</td>\n",
       "      <td>2.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>6</td>\n",
       "      <td>3</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>3</th>\n",
       "      <td>1.0</td>\n",
       "      <td>2</td>\n",
       "      <td>3300</td>\n",
       "      <td>1650.000000</td>\n",
       "      <td>1100.000000</td>\n",
       "      <td>-1.0</td>\n",
       "      <td>3.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>6</td>\n",
       "      <td>11</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "    <tr>\n",
       "      <th>4</th>\n",
       "      <td>2.0</td>\n",
       "      <td>2</td>\n",
       "      <td>4900</td>\n",
       "      <td>1633.333333</td>\n",
       "      <td>1633.333333</td>\n",
       "      <td>0.0</td>\n",
       "      <td>4.0</td>\n",
       "      <td>2016</td>\n",
       "      <td>4</td>\n",
       "      <td>12</td>\n",
       "      <td>...</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>1</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "      <td>0</td>\n",
       "    </tr>\n",
       "  </tbody>\n",
       "</table>\n",
       "<p>5 rows × 227 columns</p>\n",
       "</div>"
      ],
      "text/plain": [
       "   bathrooms  bedrooms  price  price_bathrooms  price_bedrooms  room_diff  \\\n",
       "0        1.0         1   2950      1475.000000     1475.000000        0.0   \n",
       "1        1.0         2   2850      1425.000000      950.000000       -1.0   \n",
       "2        1.0         1   3758      1879.000000     1879.000000        0.0   \n",
       "3        1.0         2   3300      1650.000000     1100.000000       -1.0   \n",
       "4        2.0         2   4900      1633.333333     1633.333333        0.0   \n",
       "\n",
       "   room_num  Year  Month  Day  ...   virtual  walk  walls  war  washer  water  \\\n",
       "0       2.0  2016      6   11  ...         0     0      0    0       0      0   \n",
       "1       3.0  2016      6   24  ...         0     0      0    1       0      0   \n",
       "2       2.0  2016      6    3  ...         0     0      0    0       0      0   \n",
       "3       3.0  2016      6   11  ...         0     0      0    0       0      0   \n",
       "4       4.0  2016      4   12  ...         0     0      0    1       0      0   \n",
       "\n",
       "   wheelchair  wifi  windows  work  \n",
       "0           0     0        0     0  \n",
       "1           0     0        0     0  \n",
       "2           0     0        0     0  \n",
       "3           1     0        0     0  \n",
       "4           0     0        0     0  \n",
       "\n",
       "[5 rows x 227 columns]"
      ]
     },
     "execution_count": 29,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# Load the feature-engineered test set (CSV expected in the working directory).\n",
    "# NOTE(review): filename spelling \"RentListingInquries\" looks like a typo, but it\n",
    "# must match the file produced by the FE notebook — do not \"fix\" it here.\n",
    "test_data = pd.read_csv(\"RentListingInquries_FE_test.csv\")\n",
    "test_data.head()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<class 'pandas.core.frame.DataFrame'>\n",
      "RangeIndex: 74659 entries, 0 to 74658\n",
      "Columns: 227 entries, bathrooms to work\n",
      "dtypes: float64(9), int64(218)\n",
      "memory usage: 129.3 MB\n"
     ]
    }
   ],
   "source": [
    "# Structural sanity check: per the captured output, 74659 rows x 227 columns,\n",
    "# all numeric (9 float64 + 218 int64), ~129 MB in memory.\n",
    "test_data.info()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Keep only the last 1000 rows so prediction below is cheap.\n",
    "# NOTE(review): `test_data` is overwritten in place, so the full frame is no\n",
    "# longer reachable after this cell; re-running the cell is idempotent, though\n",
    "# (tail(1000) of a 1000-row frame is itself). `X_test` aliases the same object.\n",
    "test_data = test_data.tail(1000)\n",
    "X_test  = test_data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": [
    "# Load the trained XGBoost model from disk.\n",
    "# Fixes vs. previous version:\n",
    "#  - `cPickle` was used without ever being imported (NameError on a fresh\n",
    "#    kernel); plain `pickle` reads the same format and is imported here.\n",
    "#  - the result was bound to `xgb`, shadowing the `import xgboost as xgb`\n",
    "#    module alias from the imports cell; use a distinct name instead.\n",
    "#  - the file handle was never closed; a context manager handles that.\n",
    "import pickle\n",
    "\n",
    "with open(\"xgb_model.pkl\", 'rb') as model_file:\n",
    "    xgb_model = pickle.load(model_file)  # only load pickles you created yourself"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Predict class probabilities for the 1000-row test slice and write them out.\n",
    "# NOTE(review): naming the columns [high, medium, low] assumes that is the\n",
    "# model's class order — confirm against xgb_model.classes_ before trusting\n",
    "# the submission file.\n",
    "y_test_pred = xgb_model.predict_proba(X_test)\n",
    "\n",
    "out_df1 = pd.DataFrame(y_test_pred)\n",
    "out_df1.columns = [\"high\", \"medium\", \"low\"]\n",
    "\n",
    "out_df1.to_csv(\"xgb_Rent11.csv\", index=False)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {
    "collapsed": true
   },
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
