# -*- coding: UTF-8 -*-

import numpy as np
import xgboost as xgb
from sklearn.metrics import mean_absolute_error


def logregobj(preds, dtrain):
    """Custom logistic-loss objective for ``xgb.train(..., obj=logregobj)``.

    Parameters
    ----------
    preds : np.ndarray
        Raw margin predictions from the booster (pre-sigmoid).
    dtrain : xgb.DMatrix
        Training matrix; labels are expected to be 0/1.

    Returns
    -------
    tuple of np.ndarray
        ``(grad, hess)`` — first- and second-order gradients of the
        logistic loss w.r.t. the margins.
    """
    labels = dtrain.get_label()
    preds = np.asarray(preds)
    # Numerically stable sigmoid: sigmoid(x) = exp(min(x, 0)) / (1 + exp(-|x|)).
    # The naive 1/(1+exp(-x)) overflows in exp() for large negative margins.
    probs = np.exp(np.minimum(preds, 0)) / (1.0 + np.exp(-np.abs(preds)))
    grad = probs - labels
    hess = probs * (1.0 - probs)
    return grad, hess


def mae_error(preds, dtrain):
    """Custom eval metric for ``xgb.train(..., feval=mae_error)``.

    Returns the pair ``('my-mae-error', mae)`` where ``mae`` is the mean
    absolute error between the DMatrix labels and ``preds``.
    """
    truth = dtrain.get_label()
    score = np.mean(np.abs(truth - preds))
    return 'my-mae-error', score


def xgboost_customized(train_features, train_labels, test_features, test_labels,
                       num_round=2):
    """Train a gradient-boosted regression tree and predict on the test set.

    Parameters
    ----------
    train_features, train_labels : array-like
        Training data and regression targets.
    test_features, test_labels : array-like
        Test data and targets; the test labels are only used so the
        watchlist can report an eval score each round.
    num_round : int, optional
        Number of boosting rounds (default 2, matching the original script).

    Returns
    -------
    np.ndarray
        Predictions for ``test_features``.
    """
    dtrain = xgb.DMatrix(train_features, train_labels)
    dtest = xgb.DMatrix(test_features, test_labels)
    params = {
        "booster": "gbtree",
        "learning_rate": 0.1,
        "max_depth": 5,
        "min_child_weight": 1,
        "gamma": 0,
        "subsample": 0.8,
        "colsample_bytree": 0.8,
        # "reg:linear" is deprecated; "reg:squarederror" is the same loss
        # under its current name.
        "objective": "reg:squarederror",
        "nthread": -1,
        "scale_pos_weight": 1,
        "seed": 27,
    }
    # NOTE: "n_estimators" was removed from params — it is a scikit-learn
    # wrapper argument that xgb.train ignores (with a warning); the number
    # of boosting rounds is controlled by ``num_round`` instead.
    watchlist = [(dtest, 'eval'), (dtrain, 'train')]
    booster = xgb.train(params, dtrain, num_round, watchlist)
    return booster.predict(dtest)

# result_dict['xgboost'] = xgboost_customized(train_features=train_features,
#                                             train_labels=train_labels[labels_column_name],
#                                             test_features=test_features,
#                                             test_labels=test_labels[labels_column_name])
