# train_xgb.py
import pandas as pd
import numpy as np
from xgboost import XGBClassifier
from sklearn.model_selection import StratifiedKFold, GridSearchCV
from sklearn.metrics import roc_auc_score, accuracy_score, precision_score, recall_score, f1_score, log_loss
from utils.log import Logger
import datetime

class XGBTrainer:
    """Train an XGBoost binary classifier with GridSearchCV hyper-parameter tuning.

    Loads a preprocessed feature CSV whose target column is ``Attrition`` (0/1),
    then runs a stratified cross-validated grid search refit on AUC while also
    reporting accuracy / precision / recall / F1 / log-loss. By design nothing
    (model, predictions) is persisted to disk.
    """

    def __init__(self, data_path, log_dir='../log'):
        """
        :param data_path: path to the preprocessed feature CSV; must contain an
            ``Attrition`` target column convertible to int (0/1).
        :param log_dir: directory the timestamped log file is written to.
        """
        self.data_path = data_path
        # Timestamp the log name so repeated runs never overwrite each other.
        log_name = 'xgb_grid_train_' + datetime.datetime.now().strftime('%Y%m%d%H%M%S')
        self.logger = Logger(log_dir, log_name).get_logger()
        self.load_data()

    def load_data(self):
        """Load the processed dataset and derive the class-imbalance weight.

        Sets ``self.feature_columns``, ``self.X``, ``self.y`` and
        ``self.scale_pos_weight`` (negative/positive count ratio, 1.0 when
        there are no positive samples).
        """
        self.logger.info(f'正在加载数据: {self.data_path}')
        df = pd.read_csv(self.data_path)
        # Every column except the target is treated as a feature.
        self.feature_columns = [col for col in df.columns if col != 'Attrition']
        self.X = df[self.feature_columns]
        self.y = df['Attrition'].astype(int)
        self.logger.info(f'数据加载完成，特征数: {self.X.shape[1]}, 样本数: {self.X.shape[0]}')

        # Negative/positive ratio for XGBoost's scale_pos_weight. Count each
        # class once (vectorized) instead of re-scanning the Series three
        # times with the builtin sum() as before.
        pos_count = int((self.y == 1).sum())
        neg_count = int((self.y == 0).sum())
        self.scale_pos_weight = neg_count / pos_count if pos_count > 0 else 1.0
        self.logger.info(f'正负样本比例 (scale_pos_weight): {self.scale_pos_weight:.4f}')

    def train_with_gridsearch(self, n_splits=5, random_state=42):
        """Tune hyper-parameters with GridSearchCV and report multiple metrics.

        No files are saved (per project requirement).

        :param n_splits: number of stratified CV folds.
        :param random_state: seed shared by the model and the CV splitter for
            reproducibility.
        :return: mean cross-validated AUC of the best parameter combination.
        """
        self.logger.info('🚀 开始使用 GridSearchCV 调参，优化目标：AUC + 多指标评估')

        # Parameter grid: compact but covering the key regularization /
        # capacity knobs of XGBoost.
        param_grid = {
            'max_depth': [3, 4, 5],
            'learning_rate': [0.05, 0.1, 0.2],
            'min_child_weight': [5, 8, 12],
            'gamma': [0.1, 0.2, 0.3],
            'subsample': [0.8, 0.9],
            'colsample_bytree': [0.8, 0.9],
            'reg_alpha': [0.0, 0.1, 0.5],
            'reg_lambda': [1.0, 1.5, 2.0],
        }

        # Base estimator with the fixed (non-searched) parameters.
        xgb_model = XGBClassifier(
            objective='binary:logistic',
            eval_metric='auc',
            scale_pos_weight=self.scale_pos_weight,
            random_state=random_state,
            n_jobs=-1,
            verbosity=0
        )

        # Stratified folds keep the class ratio stable across splits.
        cv = StratifiedKFold(n_splits=n_splits, shuffle=True, random_state=random_state)

        # Multi-metric scoring; note GridSearchCV reports log-loss negated
        # (greater is better convention).
        scoring = {
            'auc': 'roc_auc',
            'accuracy': 'accuracy',
            'precision': 'precision',
            'recall': 'recall',
            'f1': 'f1',
            'log_loss': 'neg_log_loss'
        }

        # Run the grid search; the best model is refit on AUC.
        self.logger.info('正在进行网格搜索，请耐心等待...')
        grid_search = GridSearchCV(
            estimator=xgb_model,
            param_grid=param_grid,
            scoring=scoring,
            refit='auc',  # AUC selects the best candidate
            cv=cv,
            return_train_score=False,  # skip train scores to save memory
            verbose=0,
            n_jobs=-1
        )

        grid_search.fit(self.X, self.y)

        best_params = grid_search.best_params_
        best_index = grid_search.best_index_
        self.logger.info(f'✅ 最佳参数: {best_params}')

        # Mean validation scores of the winning candidate for every metric.
        auc_cv = grid_search.cv_results_['mean_test_auc'][best_index]
        acc_cv = grid_search.cv_results_['mean_test_accuracy'][best_index]
        pre_cv = grid_search.cv_results_['mean_test_precision'][best_index]
        rec_cv = grid_search.cv_results_['mean_test_recall'][best_index]
        f1_cv = grid_search.cv_results_['mean_test_f1'][best_index]
        logloss_cv = -grid_search.cv_results_['mean_test_log_loss'][best_index]  # back to positive

        # Human-readable summary on stdout.
        print(f"\n{'='*60}")
        print(f"📊 网格搜索最佳模型 {n_splits}-Fold CV 评估结果:")
        print(f"   AUC       : {auc_cv:.6f}")
        print(f"   Accuracy  : {acc_cv:.6f}")
        print(f"   Precision : {pre_cv:.6f}")
        print(f"   Recall    : {rec_cv:.6f}")
        print(f"   F1-Score  : {f1_cv:.6f}")
        print(f"   LogLoss   : {logloss_cv:.6f}")
        print(f"{'='*60}\n")

        self.logger.info(f'🎯 AUC: {auc_cv:.6f}, Accuracy: {acc_cv:.6f}, Precision: {pre_cv:.6f}, '
                         f'Recall: {rec_cv:.6f}, F1: {f1_cv:.6f}, LogLoss: {logloss_cv:.6f}')

        # No model/prediction files are written — only the best AUC is returned.
        self.logger.info('训练与调参完成，未保存任何文件（按需求配置）')
        return auc_cv

def main():
    """Script entry point: run the grid-search training on the feature file.

    :return: best cross-validated AUC reported by the trainer.
    """
    # NOTE: replace with the actual path of your processed feature file.
    processed_data_path = '../data/feature_selection_data_20250912_154855.csv'

    trainer = XGBTrainer(processed_data_path)
    final_auc = trainer.train_with_gridsearch(n_splits=5)
    return final_auc


if __name__ == '__main__':
    main()