# -*- coding: utf-8 -*-

# Import the required packages.
# XGBoost may be called standalone or through its sklearn wrapper.
# 1. Model training: hyper-parameter tuning
#     1. Roughly fix the number of weak learners: 20 pts
#     2. Tune the max tree depth (optional) and min_child_weight (optional): 20 pts
#     3. Tune the regularisation parameters: 20 pts
#     4. Re-tune the number of weak learners: 10 pts
#     5. Tune the row/column subsampling parameters: 10 pts
# 2. Run the model on the test set: 10 pts
# 3. Produce the test-result file: 10 pts

import xgboost as xgb
from xgboost import XGBClassifier

from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold

from sklearn.metrics import log_loss

import pandas as pd
import numpy as np

# Load the feature-engineered train/test sets produced by an earlier step.
dpath = './data/'
train = pd.read_csv(dpath + 'RentListingInquries_FE_train_sample.csv')
test = pd.read_csv(dpath + 'RentListingInquries_FE_test.csv')

# Separate the features from the target column.
train_X = train.drop("interest_level", axis=1)
train_y = train["interest_level"]

# The classes are imbalanced, so cross-validation uses StratifiedKFold:
# each fold samples every class proportionally.
# (StratifiedKFold is already imported at the top of the file; the
# redundant re-import that was here has been removed.)
kfold = StratifiedKFold(n_splits=5, shuffle=True, random_state=3)

# NOTE(review): the tuning-log comments are inconsistent with the values
# below — an earlier round reportedly gave n_estimators=277 and
# {'max_depth': 3, 'min_child_weight': 3}, yet the code uses 50 / 4 / 1.
# Confirm which settings are intended.
estimators = 50
max_depth4 = 4
min_child_weight4 = 1

# Base learner: tree shape and row/column subsampling are held fixed here;
# the regularisation terms are what the grid search below tunes.
xgb5_params = dict(
    learning_rate=0.1,
    n_estimators=estimators,
    max_depth=max_depth4,
    min_child_weight=min_child_weight4,
    gamma=0,
    subsample=0.3,
    colsample_bytree=0.8,
    colsample_bylevel=0.7,
    objective='multi:softmax',
    nthread=-1,
    seed=3,
)
xgb5 = XGBClassifier(**xgb5_params)


# 第一轮参数调整 {'reg_alpha': 0.1, 'reg_lambda': 0.1}
reg_alpha = [0.01, 0.025,0.05, 0.1, 0.2]    #default = 0
reg_lambda = [0.0005, 0.001, 0.002]   #default = 1
param_test_5 =  dict(reg_alpha=reg_alpha, reg_lambda=reg_lambda)

gsearch5 = GridSearchCV(xgb5, param_grid = param_test_5, scoring='neg_log_loss',n_jobs=-1, cv=kfold)
gsearch5.fit(train_X , train_y)

print gsearch5.grid_scores_
print gsearch5.best_params_
print gsearch5.best_score_

# FIXME: the logged results below came from a DIFFERENT grid (reg_lambda in
# {0.05, 0.1, 0.2, 0.4}, reg_alpha up to 0.4) than the one currently defined
# above — re-run the search and refresh this log.
# [mean: -0.64310, std: 0.01339, params: {'reg_alpha': 0.05, 'reg_lambda': 0.05}, mean: -0.64399, std: 0.01334, params: {'reg_alpha': 0.05, 'reg_lambda': 0.1}, mean: -0.64351, std: 0.01323, params: {'reg_alpha': 0.05, 'reg_lambda': 0.2}, mean: -0.64376, std: 0.01332, params: {'reg_alpha': 0.05, 'reg_lambda': 0.4}, mean: -0.64371, std: 0.01420, params: {'reg_alpha': 0.1, 'reg_lambda': 0.05}, mean: -0.64259, std: 0.01349, params: {'reg_alpha': 0.1, 'reg_lambda': 0.1}, mean: -0.64382, std: 0.01405, params: {'reg_alpha': 0.1, 'reg_lambda': 0.2}, mean: -0.64323, std: 0.01416, params: {'reg_alpha': 0.1, 'reg_lambda': 0.4}, mean: -0.64362, std: 0.01433, params: {'reg_alpha': 0.2, 'reg_lambda': 0.05}, mean: -0.64296, std: 0.01398, params: {'reg_alpha': 0.2, 'reg_lambda': 0.1}, mean: -0.64338, std: 0.01415, params: {'reg_alpha': 0.2, 'reg_lambda': 0.2}, mean: -0.64405, std: 0.01305, params: {'reg_alpha': 0.2, 'reg_lambda': 0.4}, mean: -0.64305, std: 0.01256, params: {'reg_alpha': 0.4, 'reg_lambda': 0.05}, mean: -0.64218, std: 0.01308, params: {'reg_alpha': 0.4, 'reg_lambda': 0.1}, mean: -0.64277, std: 0.01328, params: {'reg_alpha': 0.4, 'reg_lambda': 0.2}, mean: -0.64347, std: 0.01385, params: {'reg_alpha': 0.4, 'reg_lambda': 0.4}]
# {'reg_alpha': 0.4, 'reg_lambda': 0.1}
# -0.642177192162


