import xgboost as xgb
from sklearn import metrics
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.metrics import recall_score
from sklearn.metrics import precision_score
import matplotlib.pyplot as plt


def xgboost_classify(train_features, train_labels, test_features, test_labels):
    """
    Train a binary XGBoost classifier and evaluate it on the test set.

    :param train_features: training feature matrix
    :param train_labels: training labels (assumed binary 0/1 — binary:logistic objective)
    :param test_features: test feature matrix
    :param test_labels: test labels, used only for the printed metrics report
    :return: tuple of (predicted labels, positive-class probabilities)
    """
    # eval_metric belongs in the constructor on xgboost >= 1.6; passing it to
    # fit() is deprecated there and removed in 2.0 (raises TypeError).
    xgboost_classify_model = xgb.XGBClassifier(objective='binary:logistic',
                                               eval_metric='error')
    xgboost_classify_model.fit(train_features, train_labels)

    pred_y_list = xgboost_classify_model.predict(test_features)
    pred_nclass_proba_list = xgboost_classify_model.predict_proba(test_features)

    # Print the full evaluation report (ROC/AUC, accuracy, precision/recall, confusion matrix).
    metrics_info(pred_nclass_proba_list, pred_y_list, test_labels)
    # Column 1 of predict_proba is the positive-class probability.
    return pred_y_list, [e[1] for e in pred_nclass_proba_list]


def metrics_info(nclass_proba, pred_y, true_y):
    """
    Print a classification evaluation report: ROC points and AUC, accuracy,
    precision and recall (binary / macro / micro / weighted averaging), and
    the confusion matrix.

    :param nclass_proba: per-sample class-probability rows; column 1 is taken
        as the positive-class probability for the ROC curve
    :param pred_y: predicted labels
    :param true_y: ground-truth labels
    """
    print('test_labels: ', true_y)
    print('pred_y_list: ', pred_y)

    fpr, tpr, thresholds = metrics.roc_curve(true_y, [e[1] for e in nclass_proba], pos_label=1)
    print('fpr: ', fpr)
    print('tpr: ', tpr)
    print('thresholds: ', thresholds)
    roc_auc = metrics.auc(fpr, tpr)
    print('metrics.auc(fpr, tpr): ', roc_auc)

    accu = accuracy_score(true_y, pred_y)
    print('accuracy_score: ', accu)

    precision = precision_score(true_y, pred_y)
    macro_precision = precision_score(true_y, pred_y, average='macro')
    micro_precision = precision_score(true_y, pred_y, average='micro')
    weighted_precision = precision_score(true_y, pred_y, average='weighted')
    print('binary, macro, micro, weighted precision score: ', precision, macro_precision, micro_precision,
          weighted_precision)

    recall = recall_score(true_y, pred_y)
    macro_recall = recall_score(true_y, pred_y, average='macro')
    micro_recall = recall_score(true_y, pred_y, average='micro')
    weighted_recall = recall_score(true_y, pred_y, average='weighted')
    # Bug fix: values were printed as macro, micro, weighted, binary, which
    # contradicted the label; the order now matches the label (and the
    # precision print above).
    print('binary, macro, micro, weighted recall: ', recall, macro_recall, micro_recall, weighted_recall)

    matrix = confusion_matrix(true_y, pred_y)  # rows = true class, cols = predicted class
    print(matrix)


def roc_curve_plot(true_y_list, proba_list):
    """
    Draw the ROC curve for the given labels and positive-class probabilities,
    printing the raw curve points and showing the figure with the AUC in the
    legend.

    :param true_y_list: ground-truth labels
    :param proba_list: predicted positive-class probabilities
    """
    false_pos_rates, true_pos_rates, cut_points = metrics.roc_curve(true_y_list, proba_list)
    print(false_pos_rates)
    print(true_pos_rates)
    print(cut_points)

    area = metrics.auc(false_pos_rates, true_pos_rates)
    line_width = 2

    plt.figure(figsize=(10, 8))
    plt.plot(false_pos_rates, true_pos_rates, color='darkorange', lw=line_width,
             label='ROC curve (area = %0.2f)' % area)
    # Diagonal reference line: the expected curve of a random classifier.
    plt.plot([0, 1], [0, 1], color='navy', lw=line_width, linestyle='--')
    plt.xlim([0.0, 1.0])
    plt.ylim([0.0, 1.05])
    plt.xlabel('False Positive Rate')
    plt.ylabel('True Positive Rate')
    plt.title('Roc Curve')
    plt.legend(loc="lower right")
    plt.show()
