{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "from xgboost import XGBClassifier\n",
    "import xgboost as xgb\n",
    "\n",
    "import pandas as pd \n",
    "import numpy as np\n",
    "\n",
    "from matplotlib import pyplot\n",
    "\n",
    "from sklearn.model_selection import GridSearchCV\n",
    "from sklearn.model_selection import StratifiedKFold\n",
    "from sklearn.metrics import log_loss"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Read in the data. The raw file ships in xgboost's demo directory and has\n",
    "# been copied into the ./data directory next to this notebook.\n",
    "dpath = './data/'\n",
    "train = pd.read_csv(dpath + \"RentListingInquries_FE_train.csv\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Split off the target: interest_level is the label, everything else is a feature.\n",
    "y_train = train[\"interest_level\"]\n",
    "X_train = train.drop(\"interest_level\", axis = 1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Prepare stratified 5-fold cross validation; fixed random_state for reproducibility.\n",
    "kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=3)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "def modelfit(alg, X_train, y_train, useTrainCV=True, cv_folds=None, early_stopping_rounds=50):\n",
    "    \"\"\"Tune n_estimators via xgb.cv with early stopping, then fit alg on the full data.\n",
    "\n",
    "    alg: an XGBClassifier; its n_estimators serves as the upper bound of boosting rounds.\n",
    "    cv_folds: a CV splitter (e.g. StratifiedKFold) forwarded to xgb.cv's `folds`.\n",
    "    Side effects: prints the cv table, saves it to csv, and writes a logloss plot to png.\n",
    "    \"\"\"\n",
    "    if useTrainCV:\n",
    "        xgb_param = alg.get_xgb_params()\n",
    "        # Derive the class count from the labels instead of hard-coding it\n",
    "        # (the previous literal 9 came from a different, 9-class dataset).\n",
    "        xgb_param['num_class'] = len(np.unique(y_train))\n",
    "\n",
    "        xgtrain = xgb.DMatrix(X_train, label=y_train)\n",
    "\n",
    "        # Early stopping halts cv once test mlogloss stops improving; the number\n",
    "        # of rows in the returned DataFrame is the best boosting-round count.\n",
    "        cvresult = xgb.cv(xgb_param, xgtrain, num_boost_round=alg.get_params()['n_estimators'],\n",
    "                          folds=cv_folds, metrics='mlogloss',\n",
    "                          early_stopping_rounds=early_stopping_rounds)\n",
    "\n",
    "        n_estimators = cvresult.shape[0]\n",
    "        alg.set_params(n_estimators=n_estimators)\n",
    "\n",
    "        print(cvresult)\n",
    "        # xgb.cv returns a DataFrame by default; persist it for later inspection.\n",
    "        cvresult.to_csv('my_preds4_2_3_699.csv', index_label='n_estimators')\n",
    "\n",
    "        # Plot train/test mlogloss (with std error bars) against boosting rounds.\n",
    "        test_means = cvresult['test-mlogloss-mean']\n",
    "        test_stds = cvresult['test-mlogloss-std']\n",
    "\n",
    "        train_means = cvresult['train-mlogloss-mean']\n",
    "        train_stds = cvresult['train-mlogloss-std']\n",
    "\n",
    "        x_axis = range(0, n_estimators)\n",
    "        pyplot.errorbar(x_axis, test_means, yerr=test_stds, label='Test')\n",
    "        pyplot.errorbar(x_axis, train_means, yerr=train_stds, label='Train')\n",
    "        pyplot.title(\"XGBoost n_estimators vs Log Loss\")\n",
    "        pyplot.xlabel('n_estimators')\n",
    "        pyplot.ylabel('Log Loss')\n",
    "        pyplot.savefig('n_estimators4_2_3_699.png')\n",
    "\n",
    "    # Fit the algorithm on the full training data with the tuned n_estimators.\n",
    "    alg.fit(X_train, y_train, eval_metric='mlogloss')\n",
    "\n",
    "    # Report logloss on the training set (optimistic; for sanity checking only).\n",
    "    train_predprob = alg.predict_proba(X_train)\n",
    "    logloss = log_loss(y_train, train_predprob)\n",
    "\n",
    "    print(\"logloss of train :\")\n",
    "    print(logloss)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "     test-mlogloss-mean  test-mlogloss-std  train-mlogloss-mean  \\\n",
      "0              1.956905           0.002108             1.955550   \n",
      "1              1.783103           0.002314             1.780366   \n",
      "2              1.647337           0.002711             1.643171   \n",
      "3              1.535061           0.002034             1.529705   \n",
      "4              1.441140           0.002101             1.434535   \n",
      "5              1.361072           0.001695             1.353339   \n",
      "6              1.291905           0.002134             1.282833   \n",
      "7              1.230470           0.002368             1.220421   \n",
      "8              1.176693           0.002464             1.165596   \n",
      "9              1.127639           0.002650             1.115523   \n",
      "10             1.084953           0.002251             1.071945   \n",
      "11             1.046696           0.002425             1.032704   \n",
      "12             1.012287           0.002613             0.997453   \n",
      "13             0.980969           0.002816             0.965417   \n",
      "14             0.952929           0.002697             0.936548   \n",
      "15             0.927170           0.002578             0.910142   \n",
      "16             0.903855           0.002403             0.886091   \n",
      "17             0.882359           0.002407             0.863983   \n",
      "18             0.863032           0.002489             0.843936   \n",
      "19             0.844974           0.002433             0.825183   \n",
      "20             0.829080           0.002498             0.808416   \n",
      "21             0.814143           0.002218             0.792718   \n",
      "22             0.800196           0.002165             0.778173   \n",
      "23             0.787490           0.002375             0.764778   \n",
      "24             0.775846           0.002422             0.752477   \n",
      "25             0.764887           0.002403             0.740891   \n",
      "26             0.754845           0.002229             0.730178   \n",
      "27             0.745553           0.002088             0.720304   \n",
      "28             0.736744           0.002001             0.710790   \n",
      "29             0.728850           0.001940             0.702341   \n",
      "..                  ...                ...                  ...   \n",
      "175            0.593333           0.003000             0.491279   \n",
      "176            0.593357           0.003022             0.490926   \n",
      "177            0.593310           0.002961             0.490483   \n",
      "178            0.593290           0.002947             0.489954   \n",
      "179            0.593325           0.002940             0.489549   \n",
      "180            0.593383           0.003025             0.489061   \n",
      "181            0.593407           0.003014             0.488622   \n",
      "182            0.593329           0.002908             0.488025   \n",
      "183            0.593286           0.002896             0.487646   \n",
      "184            0.593262           0.002938             0.487084   \n",
      "185            0.593190           0.002931             0.486649   \n",
      "186            0.593292           0.003033             0.486150   \n",
      "187            0.593189           0.002975             0.485666   \n",
      "188            0.593173           0.002884             0.485252   \n",
      "189            0.593213           0.002933             0.484815   \n",
      "190            0.593191           0.002877             0.484250   \n",
      "191            0.593144           0.002834             0.483815   \n",
      "192            0.593107           0.002734             0.483427   \n",
      "193            0.593070           0.002720             0.482928   \n",
      "194            0.593067           0.002664             0.482573   \n",
      "195            0.593074           0.002628             0.482024   \n",
      "196            0.593060           0.002646             0.481654   \n",
      "197            0.593018           0.002716             0.481177   \n",
      "198            0.592989           0.002775             0.480792   \n",
      "199            0.593014           0.002730             0.480401   \n",
      "200            0.592961           0.002737             0.480039   \n",
      "201            0.592891           0.002748             0.479577   \n",
      "202            0.592840           0.002732             0.479104   \n",
      "203            0.592788           0.002707             0.478674   \n",
      "204            0.592764           0.002720             0.478296   \n",
      "\n",
      "     train-mlogloss-std  \n",
      "0              0.001749  \n",
      "1              0.002457  \n",
      "2              0.003100  \n",
      "3              0.002450  \n",
      "4              0.002215  \n",
      "5              0.001711  \n",
      "6              0.002142  \n",
      "7              0.002220  \n",
      "8              0.002199  \n",
      "9              0.002302  \n",
      "10             0.002067  \n",
      "11             0.001703  \n",
      "12             0.001853  \n",
      "13             0.001945  \n",
      "14             0.001745  \n",
      "15             0.001659  \n",
      "16             0.001548  \n",
      "17             0.001514  \n",
      "18             0.001481  \n",
      "19             0.001364  \n",
      "20             0.001409  \n",
      "21             0.001545  \n",
      "22             0.001507  \n",
      "23             0.001529  \n",
      "24             0.001425  \n",
      "25             0.001427  \n",
      "26             0.001531  \n",
      "27             0.001653  \n",
      "28             0.001784  \n",
      "29             0.001957  \n",
      "..                  ...  \n",
      "175            0.001083  \n",
      "176            0.001121  \n",
      "177            0.001123  \n",
      "178            0.001087  \n",
      "179            0.001022  \n",
      "180            0.001079  \n",
      "181            0.001011  \n",
      "182            0.001077  \n",
      "183            0.000991  \n",
      "184            0.000978  \n",
      "185            0.000915  \n",
      "186            0.000993  \n",
      "187            0.000987  \n",
      "188            0.000992  \n",
      "189            0.001036  \n",
      "190            0.001065  \n",
      "191            0.001044  \n",
      "192            0.001098  \n",
      "193            0.001116  \n",
      "194            0.001113  \n",
      "195            0.001153  \n",
      "196            0.001097  \n",
      "197            0.001129  \n",
      "198            0.001118  \n",
      "199            0.001102  \n",
      "200            0.001031  \n",
      "201            0.000944  \n",
      "202            0.001064  \n",
      "203            0.000964  \n",
      "204            0.000983  \n",
      "\n",
      "[205 rows x 4 columns]\n"
     ]
    },
    {
     "ename": "NameError",
     "evalue": "name 'pyplot' is not defined",
     "output_type": "error",
     "traceback": [
      "\u001b[0;31m-----------------------------------------------------------\u001b[0m",
      "\u001b[0;31mNameError\u001b[0m                 Traceback (most recent call last)",
      "\u001b[0;32m<ipython-input-9-f49079fba596>\u001b[0m in \u001b[0;36m<module>\u001b[0;34m()\u001b[0m\n\u001b[1;32m     12\u001b[0m         seed=3)\n\u001b[1;32m     13\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 14\u001b[0;31m \u001b[0mmodelfit\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mxgb2_3\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mX_train\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my_train\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcv_folds\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mkfold\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
      "\u001b[0;32m<ipython-input-8-812933f52223>\u001b[0m in \u001b[0;36mmodelfit\u001b[0;34m(alg, X_train, y_train, useTrainCV, cv_folds, early_stopping_rounds)\u001b[0m\n\u001b[1;32m     26\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     27\u001b[0m         \u001b[0mx_axis\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mn_estimators\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 28\u001b[0;31m         \u001b[0mpyplot\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0merrorbar\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_axis\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_means\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0myerr\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtest_stds\u001b[0m \u001b[0;34m,\u001b[0m\u001b[0mlabel\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'Test'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m     29\u001b[0m         \u001b[0mpyplot\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0merrorbar\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mx_axis\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtrain_means\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0myerr\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtrain_stds\u001b[0m \u001b[0;34m,\u001b[0m\u001b[0mlabel\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'Train'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m     30\u001b[0m         \u001b[0mpyplot\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtitle\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"XGBoost n_estimators vs Log Loss\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
      "\u001b[0;31mNameError\u001b[0m: name 'pyplot' is not defined"
     ]
    }
   ],
   "source": [
    "#调整max_depth和min_child_weight之后再次调整n_estimators(6,4)\n",
    "xgb2_3 = XGBClassifier(\n",
    "        learning_rate =0.1,\n",
    "        n_estimators=1000,  #数值大没关系，cv会自动返回合适的n_estimators\n",
    "        max_depth=6,\n",
    "        min_child_weight=5,\n",
    "        gamma=0,\n",
    "        subsample=0.3,\n",
    "        colsample_bytree=0.8,\n",
    "        colsample_bylevel=0.7,\n",
    "        objective= 'multi:softprob',\n",
    "        seed=3)\n",
    "\n",
    "modelfit(xgb2_3, X_train, y_train, cv_folds = kfold)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.4"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
