"""
XGBoost的三个场景
1、回归
2、分类
   1）二分类
   2）多分类
"""
import time
import numpy as np
import xgboost as xgb
from sklearn.metrics import r2_score, accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.datasets import load_boston, load_digits, load_iris


class TrainEvalModel:
    """
    Train and evaluate XGBoost models with both the native (``xgb.train``)
    and the scikit-learn wrapper interfaces, plus a grid-search helper.
    """

    def __init__(self, x, y):
        # Hold out 20% of the samples for evaluation; fixed seed so the
        # demo runs are reproducible.
        self.x_train, self.x_test, self.y_train, self.y_test = \
            train_test_split(x, y, test_size=0.2, random_state=0)
        self.x = x
        self.y = y
        # Parallelism used by GridSearchCV / xgboost.
        # NOTE: attribute name kept as "threds" (sic) — callers read it.
        self.threds = 2

    def best_params_score(self, model, params):
        """
        Grid-search candidate hyper-parameters and report the best result.

        :param model: estimator implementing the scikit-learn API
        :param params: dict mapping parameter names to candidate value lists
        :return: formatted string with the best CV score and parameters
        """
        # NOTE(review): the search is fit on the FULL data (self.x/self.y),
        # i.e. it also sees the held-out test split.
        clf = GridSearchCV(model, params, verbose=1, n_jobs=self.threds)
        clf.fit(self.x, self.y)
        # score & parameters of the best candidate
        return 'best scores: {}\nbest params: {}'.format(clf.best_score_,
                                                         clf.best_params_)

    def xbg_train_scores(self, params, reg=True):
        """
        Train with the native xgboost API and score on the test split.

        :param params: parameter dict passed to ``xgb.train``
        :param reg: True -> regression (R^2); False -> classification (accuracy)
        :return: tuple (score, training time in seconds)
        """
        dtrain = xgb.DMatrix(self.x_train, label=self.y_train)
        dtest = xgb.DMatrix(self.x_test, label=self.y_test)
        evallist = [(dtrain, 'train'), (dtest, 'test')]

        xgb_start = time.time()
        model = xgb.train(params=params, dtrain=dtrain, num_boost_round=10,
                          evals=evallist)
        xgb_end = time.time()

        y_pred = model.predict(xgb.DMatrix(self.x_test))
        if reg:
            scores = r2_score(self.y_test, y_pred)
        else:
            if y_pred.ndim > 1:
                # multi:softprob yields one probability row per sample;
                # pick the most probable class. (np.argmax replaces the
                # fragile per-row tolist().index(max) scan and also avoids
                # indexing y_pred[0] on an empty result.)
                y_pred = np.argmax(y_pred, axis=1)
            else:
                # binary:logistic yields P(class 1); threshold at 0.5.
                y_pred = [0 if value < 0.5 else 1 for value in y_pred]
            scores = accuracy_score(self.y_test, y_pred)
        return scores, xgb_end - xgb_start

    def sklearn_xgb_train_scores(self, clf, reg=True):
        """
        Train with the scikit-learn wrapper and score on the test split.

        :param clf: XGBRegressor / XGBClassifier instance
        :param reg: True -> regression (R^2); False -> classification (accuracy)
        :return: tuple (score, training time in seconds)
        """
        begin = time.time()
        if reg:
            clf.fit(self.x_train, self.y_train)
        else:
            # xgboost >= 2.0 removed the eval_metric keyword from fit()
            # (it must be passed to the constructor instead), so fall back
            # to a plain fit when the keyword is rejected.
            try:
                clf.fit(self.x_train, self.y_train, eval_metric='auc')
            except TypeError:
                clf.fit(self.x_train, self.y_train)
        end = time.time()

        clf_scores = clf.score(self.x_test, self.y_test)
        print('clf scores: {}'.format(clf_scores))

        y_pred = clf.predict(self.x_test)
        if reg:
            scores = r2_score(self.y_test, y_pred)
        else:
            scores = accuracy_score(self.y_test, y_pred)

        return scores, end - begin


def xgb_reg():
    """
    Regression demo on the Boston housing data.

    NOTE(review): load_boston was removed in scikit-learn 1.2, so this
    demo requires an older scikit-learn release.
    :return:
    """
    boston = load_boston()
    features, target = boston['data'], boston['target']

    reg_model = TrainEvalModel(x=features, y=target)

    # Candidate grid for the hyper-parameter search.
    candidate_params = {
        'max_depth': [2, 4, 6],
        'n_estimators': [50, 100, 200]
    }

    # Fixed parameters for the actual training runs.
    params = {
        'booster': 'gbtree',
        'nthread': reg_model.threds,
        'disable_default_eval_metric': 0,
        'min_child_weight': 1,
        'subsample': 0.8,
        'gamma': 2,
        'eta': 0.7,
        'max_depth': 2,
        'num_parallel_tree': 100,
        'objective': 'reg:squarederror'
    }

    print(reg_model.best_params_score(model=xgb.XGBRegressor(),
                                      params=candidate_params))
    print('*' * 100)

    r2, elapsed = reg_model.xbg_train_scores(params)
    print('xgb: r2_scores: {}, cost: {}'.format(r2, elapsed))
    print('*' * 100)

    r2, elapsed = reg_model.sklearn_xgb_train_scores(
        clf=xgb.XGBRegressor(**params))
    print('sklearn xgb: r2_scores: {}, cost: {}'.format(r2, elapsed))

def xgb_binary_classifier():
    """
    Binary-classification demo on the digits data restricted to two classes.
    :return:
    """
    digits = load_digits(n_class=2)
    class_model = TrainEvalModel(x=digits['data'], y=digits['target'])

    # Training parameters shared by both interfaces.
    params = {
        'booster': 'gbtree',
        'nthread': class_model.threds,
        'disable_default_eval_metric': 0,
        'min_child_weight': 1,
        'subsample': 0.8,
        'gamma': 2,
        'eta': 0.3,
        'max_depth': 3,
        'num_parallel_tree': 10,
        'objective': 'binary:logistic'
    }

    acc, elapsed = class_model.xbg_train_scores(params, reg=False)
    print('xgb: accuracy_score: {}, cost: {}'.format(acc, elapsed))
    print('*' * 100)

    acc, elapsed = class_model.sklearn_xgb_train_scores(
        clf=xgb.XGBClassifier(**params), reg=False)
    print('sklearn xgb: accuracy_score: {}, cost: {}'.format(acc, elapsed))


def xgb_multiple_classifier():
    """
    Multi-class classification demo on the iris data (3 classes).
    :return:
    """
    iris = load_iris()
    features, target = iris['data'], iris['target']

    class_model = TrainEvalModel(x=features, y=target)

    # Training parameters shared by both interfaces; num_class is derived
    # from the labels actually present.
    params = {
        'booster': 'gbtree',
        'nthread': class_model.threds,
        'disable_default_eval_metric': 0,
        'min_child_weight': 1,
        'subsample': 0.8,
        'gamma': 2,
        'eta': 0.3,
        'max_depth': 3,
        'num_parallel_tree': 10,
        'objective': 'multi:softprob',
        'num_class': len(set(target))
    }

    acc, elapsed = class_model.xbg_train_scores(params, reg=False)
    print('xgb: accuracy_score: {}, cost: {}'.format(acc, elapsed))
    print('*' * 100)

    acc, elapsed = class_model.sklearn_xgb_train_scores(
        clf=xgb.XGBClassifier(**params), reg=False)
    print('sklearn xgb: accuracy_score: {}, cost: {}'.format(acc, elapsed))


def xgb_classifier():
    """
    Run the classification demos: binary first, then multi-class.
    :return:
    """
    demos = (('二分类', xgb_binary_classifier),
             ('多分类', xgb_multiple_classifier))
    for title, demo in demos:
        print('{} {} {}'.format('=' * 50, title, '=' * 50))
        demo()


def run():
    """
    Entry point: run the regression demo, then the classification demos.
    :return:
    """
    print('{} 回归 {}'.format('=' * 50, '=' * 50))
    xgb_reg()

    xgb_classifier()


# Run every demo (regression + classification) when executed as a script.
if __name__ == '__main__':
    run()
