import numpy as np
import pandas as pd
import os
import joblib
from collections import Counter
from datetime import datetime
import matplotlib.pyplot as plt
import seaborn as sns
from scipy import stats
import gc
import warnings
from pylab import mpl
from sklearn.model_selection import *
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score
from sklearn.metrics import roc_curve, roc_auc_score
from xgboost import XGBClassifier
from sklearn.model_selection import GridSearchCV
# Configure matplotlib fonts so CJK (Chinese) glyphs render in plot labels.
plt.rcParams['font.sans-serif'] = ['PingFang SC', 'SimHei', 'Songti SC']
plt.rcParams['axes.unicode_minus'] = False
# The line above keeps the minus sign rendering correctly with non-default fonts.
warnings.filterwarnings('ignore')

# Load the engineered training set and carve out a hold-out evaluation fold.
df_train = pd.read_csv('../../data/processed/train_v1.csv')
# Target is the `label` column; the two identifier columns carry no signal.
Y = df_train['label']
X = df_train.drop(columns=['user_id', 'merchant_id', 'label'])
# 75/25 split with a fixed seed so results are reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.25, random_state=10)

# --- XGBoost model ---
# Hyper-parameters were tuned offline with GridSearchCV (scoring='roc_auc',
# cv=3) over: n_estimators=100, max_depth=2, learning_rate=0.1, subsample=0.5,
# colsample_bytree=0.8, min_child_weight=4. The winning estimator was persisted
# with joblib; here we simply reload it instead of re-fitting.
# NOTE(review): joblib.load unpickles arbitrary objects — only load trusted files.
best_xgb_model = joblib.load('../../model/xgb_best_model.pkl')

# Positive-class probabilities and hard class predictions on the hold-out fold.
Predict_proba = best_xgb_model.predict_proba(X_test)[:, 1]
Predict = best_xgb_model.predict(X_test)
# To persist again after retraining:
# joblib.dump(best_xgb_model, '../../model/xgb_best_model.pkl')


# --- Hold-out evaluation ---
# AUC is computed from the positive-class probabilities, accuracy from the
# hard predictions. Reuse `Predict_proba` computed above instead of calling
# predict_proba(X_test) a second time (the original recomputed it needlessly).
auc = roc_auc_score(y_test, Predict_proba)
score = accuracy_score(y_test, Predict)
# ROC points for plotting; the thresholds array is not needed downstream.
fpr, tpr, _ = roc_curve(y_test, Predict_proba)
print(f'Xgboost准确率为: {score:.4f}')
print(f'Xgboost AUC 值为: {auc:.4f}')
print('*' * 100)

# Visualize discrimination: ROC curve with the chance diagonal for reference.
fig, ax = plt.subplots(figsize=(8, 6))
ax.plot(fpr, tpr, color='blue', label=f'ROC Curve (AUC = {auc:.4f})')
ax.plot([0, 1], [0, 1], 'k--')  # dashed diagonal = random-classifier baseline
ax.set_xlabel('False Positive Rate')
ax.set_ylabel('True Positive Rate')
ax.set_title('ROC Curve for Xgboost')
ax.legend(loc='lower right')
ax.grid(True)
plt.show()