# Kaggle binary-classification experiment: XGBoost + GridSearchCV, model
# persisted with joblib, evaluated with an ROC curve on a held-out split.
import time

import matplotlib.pylab as plt
import numpy as np
import pandas as pd
import xgboost as xgb

# NOTE: `sklearn.externals.joblib` was deprecated in scikit-learn 0.21 and
# removed in 0.23 — the standalone joblib package is the supported import.
import joblib
from sklearn.metrics import roc_auc_score, roc_curve
from sklearn.model_selection import GridSearchCV, KFold, train_test_split

# File name under which the fitted GridSearchCV object is dumped/reloaded.
save_model_name = 'XgbModel_kaggle'

# Base estimator: GPU histogram tree construction, everything else default.
model = xgb.XGBClassifier()
model.set_params(tree_method='gpu_hist')
print(model.get_xgb_params())

# Load the Kaggle training file; features are every column except the row id
# and the label.
dfp_train = pd.read_csv('D:\\data\\kaggle_test_data\\train.csv')
print(dfp_train.columns)
X = dfp_train.drop(labels=['target', 'ID_code'], axis=1).values
# Single-bracket selection gives a 1-D label vector. The previous
# `[['target']]` produced a (n, 1) column array, which makes scikit-learn
# emit DataConversionWarning and ravel it implicitly at fit time.
y = dfp_train['target'].values

# 50/50 split so the held-out half can be scored after reloading the model.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=.5,
                                                    random_state=0)


# (Disabled) alternative evaluation on the separate Kaggle test file instead
# of the held-out split:
#   dfp_test = pd.read_csv('D:\\data\\kaggle_test_data\\test.csv')
#   print(dfp_test)
#   X_test = dfp_test.drop(labels=['target', 'ID_code'], axis=1).values
#   y_test = dfp_test[['target']].values


# Grid-search space. Extra candidates are kept commented out to bound the
# search time; re-enable them for a fuller sweep.
cv_params = {
    'max_depth': [3, 4],
    # further depths tried previously: 5, 6, 7, 8, 9, 10, ..., 16
    # 'colsample_bytree': [0.6, 0.7, 0.8, 0.9, 1.0],
}

# 2-fold shuffled CV over the parameter grid, candidates ranked by ROC AUC.
cv_splitter = KFold(n_splits=2, shuffle=True, random_state=1)

optimized_GBM = GridSearchCV(
    estimator=model,
    param_grid=cv_params,
    scoring='roc_auc',
    cv=cv_splitter,
    verbose=1,
    # This n_jobs parallelizes the grid search itself; it is independent of
    # any threading configured on the xgboost estimator's own parameters.
    n_jobs=1,
)

optimized_GBM.fit(X_train, y_train)

# Persist the fitted search object and time the dump.
start_time = time.time()
joblib.dump(optimized_GBM, save_model_name)
cost_time = time.time() - start_time
print("xgboost evaluate and save model successfully! cost time:", cost_time, "(s)")


# Per-candidate cross-validation results and the winning configuration.
evalute_result = optimized_GBM.cv_results_
print('echo round\' result: {0}'.format(evalute_result))
print('best parameter{0}'.format(optimized_GBM.best_params_))
print('best model score:{0}'.format(optimized_GBM.best_score_))
dfp = pd.DataFrame(evalute_result)
# ExcelWriter.save() was removed in pandas 2.0 and positional sheet names for
# to_excel are disallowed; the context manager writes and closes the workbook.
with pd.ExcelWriter('evalute_result.xlsx') as writer:
    dfp.to_excel(writer, sheet_name='Sheet1')

# Reload the persisted model (round-trip check) and time the load.
start_time = time.time()
print('===============================================================')
print('|                        Start Loading Model                  |')
print('===============================================================')
optimized_GBM = joblib.load(save_model_name)
cost_time = time.time() - start_time
print("load model success! cost time:", cost_time, "(s)")

# Positive-class probabilities drive both the ROC curve and the AUC score.
proba_pos = optimized_GBM.predict_proba(X_test)[:, 1]
fpr_rf, tpr_rf, thresholds_rf = roc_curve(y_test, proba_pos)
# Index of the ROC threshold closest to the default 0.5 decision boundary.
close_default_rf = np.argmin(np.abs(thresholds_rf - 0.5))

plt.plot(fpr_rf, tpr_rf, label="ROC Curve xgb")
plt.xlabel('FPR')
plt.ylabel('TPR (recall)')
plt.plot(fpr_rf[close_default_rf], tpr_rf[close_default_rf], 'o', markersize=10, label='xgb threshold 0.5 RF', fillstyle='none', c='k', mew=2)
# AUC must be computed from continuous scores: passing predict()'s hard 0/1
# labels collapses the ranking and systematically understates the true AUC.
print('auc score', roc_auc_score(y_test, proba_pos))
plt.legend(loc=4)
plt.show()
plt.close()

# start_time = time.time()
# predict_y = optimized_GBM.predict(X_test)
# cost_time = time.time() - start_time
# print("predict success! cost time:", cost_time, "(s)")
