import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from sklearn.metrics import accuracy_score, classification_report
import joblib
from src.utils.common import data_processing, auc_plot
from src.utils.log import Logger
import xgboost as xgb
from imblearn.over_sampling import SMOTE
import warnings
warnings.filterwarnings("ignore")
# XGBOOST
def smote_model(x_train, y_train, sampling_strategy=0.2, random_state=44):
    """Oversample the minority class of the TRAINING set with SMOTE.

    Only the training split is resampled — the test split must keep the
    original class distribution so evaluation remains unbiased.

    Args:
        x_train: training features (array-like / DataFrame).
        y_train: training labels (array-like / Series).
        sampling_strategy: target minority/majority ratio after resampling.
            Default 0.2 means 1:5, more balanced than the original ~1:20.
        random_state: seed for reproducible synthetic samples (default 44,
            the value previously hard-coded here).

    Returns:
        Tuple ``(x_train_smote, y_train_smote)`` — the resampled training set.
    """
    smote = SMOTE(
        sampling_strategy=sampling_strategy,
        random_state=random_state,
    )
    # Generate the balanced training set.
    x_train_smote, y_train_smote = smote.fit_resample(x_train, y_train)

    # Print class counts before/after so the resampling effect is visible.
    print("原始训练集：", pd.Series(y_train).value_counts())
    print("SMOTE后训练集：", pd.Series(y_train_smote).value_counts())
    return x_train_smote, y_train_smote
def xgboost_model(x_train, x_test, y_train, y_test, ls):
    """Grid-search an XGBoost classifier, persist the best model, report metrics.

    Fits a ``GridSearchCV`` over a mostly single-valued parameter grid (only
    ``max_delta_step`` and ``scale_pos_weight`` are actually searched), saves
    the best estimator to ``../../model/xgboost.pkl``, prints accuracy, the
    classification report and the winning parameters, and draws the ROC curve.

    Args:
        x_train, y_train: (resampled) training split.
        x_test, y_test: held-out evaluation split.
        ls: accepted for signature compatibility — not used in the body.

    Returns:
        An empty list (placeholder return value).
    """
    base_clf = xgb.XGBClassifier()
    grid = {
        'max_depth': [4],
        'min_child_weight': [5],
        'gamma': [0],
        'max_delta_step': [0, 1, 3],
        'scale_pos_weight': [15, 20, 25],
        'subsample': [0.7000000000000001],
        'colsample_bytree': [0.6000000000000001],
        'learning_rate': [0.13],
        'objective': ['binary:logistic'],
        'eval_metric': ['auc'],
        'n_estimators': [40],
        'alpha': [40],
        'lambda': [2],
    }
    search = GridSearchCV(base_clf, param_grid=grid, cv=7)
    search.fit(x_train, y_train)

    # Persist only the refit best estimator, not the whole search object.
    joblib.dump(search.best_estimator_, "../../model/xgboost.pkl")

    label_pred = search.predict(x_test)
    proba_pred = search.predict_proba(x_test)[:, 1]  # P(class==1) for ROC

    print(f'准确率：{accuracy_score(y_test, label_pred)}')
    print(f'分类报告：{classification_report(y_test, label_pred)}')
    print(f"最佳参数:{search.best_estimator_}")
    print(f'最优超参：{search.best_params_}')

    # Plot the ROC curve from the positive-class probabilities.
    auc_plot(y_test, proba_pred)
    return []
def grid_search(randomnum):
    """Run one full train/evaluate cycle for a given split seed.

    Loads data via ``data_processing(1)``, makes a stratified 70/30 split
    seeded by *randomnum*, SMOTE-oversamples the training split, then trains
    and evaluates the XGBoost grid search.

    Args:
        randomnum: random seed for the train/test split.
    """
    gap_lists = [list(range(3, 11)), list(range(3, 11))]
    x, y = data_processing(1)
    # Full-dataset class counts (for reference): 0 -> 922, 1 -> 178.
    x_train, x_test, y_train, y_test = train_test_split(
        x, y, test_size=0.3, random_state=randomnum, stratify=y
    )
    x_resampled, y_resampled = smote_model(x_train, y_train)
    ret = xgboost_model(x_resampled, x_test, y_resampled, y_test, gap_lists)
    # NOTE(review): appended to a local list that is never returned or read
    # afterwards — kept for behavioral parity, but effectively a no-op.
    gap_lists.append([randomnum, ret])

if __name__ == '__main__':
    # Entry point: run one train/evaluate cycle with split seed 269.
    grid_search(269)