"""模型优化模块."""

from __future__ import annotations

import logging
from dataclasses import dataclass
from typing import Any, Dict, List, Optional

import numpy as np
import pandas as pd
from sklearn.model_selection import GridSearchCV, RandomizedSearchCV

from ..backtest import BacktestConfig, WalkForwardBacktester, calculate_backtest_metrics
from .base import BaseQuantModel, ModelConfig
from .factory import ModelFactory
from .trainer import ModelTrainer

logger = logging.getLogger(__name__)


@dataclass
class OptimizationResult:
    """Result of a hyperparameter or strategy-threshold optimization run."""

    # Parameter combination that achieved the best score during the search.
    best_params: Dict[str, Any]
    # Score of ``best_params`` under the chosen scoring metric / objective.
    best_score: float
    # One dict per evaluated candidate, each holding "params" and "score"
    # (some producers also record an "iteration" index).
    optimization_history: List[Dict[str, Any]]

class HyperparameterOptimizer:
    """Hyperparameter optimizer built on scikit-learn CV search strategies."""

    def __init__(self, random_seed: int = 42) -> None:
        # Seed applies only to randomized search; grid search enumerates the
        # grid exhaustively and is deterministic.
        self.random_seed = random_seed

    def optimize(
        self,
        model_name: str,
        X_train: pd.DataFrame,
        y_train: pd.Series,
        X_val: pd.DataFrame | None = None,
        y_val: pd.Series | None = None,
        param_grid: Dict[str, List[Any]] | None = None,
        method: str = "grid_search",
        scoring: str = "roc_auc",
        cv: int = 5,
    ) -> OptimizationResult:
        """Optimize hyperparameters for the named model.

        Parameters
        ----------
        model_name:
            Model name understood by ``ModelFactory``.
        X_train:
            Training features.
        y_train:
            Training labels.
        X_val:
            Optional validation features (currently unused; cross-validation
            on the training data is used instead).
        y_val:
            Optional validation labels (currently unused).
        param_grid:
            Candidate grid; if None a built-in default per model is used.
        method:
            Search strategy: 'grid_search' or 'random_search'.
        scoring:
            scikit-learn scoring string.
        cv:
            Number of cross-validation folds.

        Returns
        -------
        OptimizationResult
            Best parameters, best score, and the full search history.

        Raises
        ------
        ValueError
            If ``method`` is not a supported strategy.
        """
        if param_grid is None:
            param_grid = self._get_default_param_grid(model_name)

        base_model = ModelFactory.create(model_name)
        # Unwrap the underlying sklearn-compatible estimator when the project
        # model wraps one in a ``.model`` attribute.
        estimator = base_model.model if hasattr(base_model, "model") else base_model

        if method == "grid_search":
            # BUGFIX: GridSearchCV takes no ``random_state`` argument (the
            # grid is enumerated exhaustively, nothing is sampled); passing
            # one here raised TypeError on every grid-search call.
            search = GridSearchCV(
                estimator,
                param_grid,
                scoring=scoring,
                cv=cv,
                n_jobs=-1,
            )
        elif method == "random_search":
            search = RandomizedSearchCV(
                estimator,
                param_grid,
                n_iter=20,
                scoring=scoring,
                cv=cv,
                n_jobs=-1,
                random_state=self.random_seed,
            )
        else:
            raise ValueError(f"不支持的优化方法: {method}")

        # sklearn accepts DataFrames too, but plain arrays avoid
        # feature-name validation differences between fit and predict.
        X_array = X_train.values if isinstance(X_train, pd.DataFrame) else X_train
        y_array = y_train.values if isinstance(y_train, pd.Series) else y_train

        search.fit(X_array, y_array)

        # Flatten cv_results_ into one record per evaluated candidate.
        optimization_history = [
            {"iteration": i, "params": params, "score": float(score)}
            for i, (params, score) in enumerate(
                zip(search.cv_results_["params"], search.cv_results_["mean_test_score"])
            )
        ]

        return OptimizationResult(
            best_params=search.best_params_,
            best_score=float(search.best_score_),
            optimization_history=optimization_history,
        )

    def _get_default_param_grid(self, model_name: str) -> Dict[str, List[Any]]:
        """Return the built-in parameter grid for ``model_name`` ({} if unknown)."""
        grids = {
            "xgboost": {
                "max_depth": [3, 5, 7],
                "n_estimators": [50, 100, 150],
                "learning_rate": [0.01, 0.05, 0.1],
            },
            "lightgbm": {
                "num_leaves": [15, 31, 63],
                "n_estimators": [50, 100, 150],
                "learning_rate": [0.01, 0.05, 0.1],
            },
            "logistic_regression": {
                "C": [0.1, 1.0, 10.0],
                "penalty": ["l1", "l2"],
                # BUGFIX: sklearn's default lbfgs solver rejects the l1
                # penalty, so every l1 candidate errored out during search;
                # liblinear supports both penalties.
                "solver": ["liblinear"],
            },
        }
        return grids.get(model_name, {})


class ModelSelector:
    """Picks the best model from a table of evaluation metrics."""

    def __init__(self) -> None:
        pass

    def select_best_model(
        self,
        model_metrics: Dict[str, Dict[str, float]],
        weights: Dict[str, float] | None = None,
    ) -> tuple[str, float]:
        """Select the model with the highest weighted composite score.

        Parameters
        ----------
        model_metrics:
            Mapping of model name -> metric-name -> metric value.
        weights:
            Metric weights; defaults favor Sharpe and information ratio and
            penalize drawdown (negative weight, since smaller is better).

        Returns
        -------
        (best model name, composite score)
        """
        effective_weights = weights
        if effective_weights is None:
            effective_weights = {
                "sharpe_ratio": 0.4,
                "information_ratio": 0.3,
                "max_drawdown": -0.3,
            }

        def composite(metrics: Dict[str, float]) -> float:
            # Accumulate weight * value over the metrics this model reports;
            # drawdown is folded in as a magnitude so its negative weight
            # penalizes larger drawdowns.
            total = 0.0
            for metric_name, weight in effective_weights.items():
                if metric_name not in metrics:
                    continue
                value = metrics[metric_name]
                if metric_name == "max_drawdown":
                    value = abs(value)
                total += weight * value
            return total

        scores = {name: composite(metrics) for name, metrics in model_metrics.items()}
        winner, winner_score = max(scores.items(), key=lambda item: item[1])
        return winner, winner_score

class ModelEnsemble:
    """Weighted soft-voting ensemble over a set of trained models."""

    def __init__(self, models: Dict[str, BaseQuantModel], weights: Dict[str, float] | None = None) -> None:
        """Build the ensemble.

        Parameters
        ----------
        models:
            Mapping of model name -> trained model; must be non-empty.
        weights:
            Per-model weights; defaults to equal weights. Models missing
            from ``weights`` contribute nothing to the prediction.

        Raises
        ------
        ValueError
            If ``models`` is empty.
        """
        if not models:
            # ROBUSTNESS: the original raised an opaque ZeroDivisionError
            # from the equal-weight computation when ``models`` was empty.
            raise ValueError("models must not be empty")
        self.models = models
        if weights is None:
            # Default: equal weighting across all members.
            equal_weight = 1.0 / len(models)
            weights = {name: equal_weight for name in models}
        self.weights = weights

    def predict_proba(self, X: pd.DataFrame | np.ndarray) -> np.ndarray:
        """Return the weighted sum of each member's predicted probabilities.

        Parameters
        ----------
        X:
            Feature data, passed through to every member model.

        Returns
        -------
        Ensemble probabilities (weighted element-wise sum of member outputs).
        """
        weighted = [
            model.predict_proba(X) * self.weights.get(name, 0.0)
            for name, model in self.models.items()
        ]
        return np.sum(weighted, axis=0)


class StrategyOptimizer:
    """Signal-threshold optimizer driven by walk-forward backtests."""

    def __init__(self, base_config: BacktestConfig) -> None:
        # All non-threshold settings are copied from this base configuration.
        self.base_config = base_config

    def optimize_thresholds(
        self,
        model: BaseQuantModel,
        X: pd.DataFrame,
        prices: pd.DataFrame,
        buy_threshold_range: tuple[float, float] = (0.5, 0.7),
        sell_threshold_range: tuple[float, float] = (0.3, 0.5),
        n_trials: int = 10,
        objective: str = "sharpe_ratio",
    ) -> OptimizationResult:
        """Grid-search buy/sell signal thresholds via walk-forward backtests.

        Parameters
        ----------
        model:
            Trained model to backtest.
        X:
            Feature data.
        prices:
            Price data.
        buy_threshold_range:
            (low, high) range scanned for the buy threshold.
        sell_threshold_range:
            (low, high) range scanned for the sell threshold.
        n_trials:
            Number of grid points per threshold axis.
        objective:
            'sharpe_ratio' or 'information_ratio' (anything else falls back
            to the Sharpe ratio).

        Returns
        -------
        Optimization result with the best threshold pair and full history.
        """
        buy_grid = np.linspace(*buy_threshold_range, n_trials)
        sell_grid = np.linspace(*sell_threshold_range, n_trials)

        best_score = float("-inf")
        best_params = {}
        history = []

        for buy_level in buy_grid:
            for sell_level in sell_grid:
                # Only consistent pairs make sense: sell strictly below buy.
                if not (sell_level < buy_level):
                    continue

                score = self._evaluate_pair(model, X, prices, buy_level, sell_level, objective)
                if score is None:
                    # Backtest produced no usable results for this pair.
                    continue

                trial_params = {"buy_threshold": buy_level, "sell_threshold": sell_level}
                history.append({"params": trial_params, "score": score})
                if score > best_score:
                    best_score = score
                    best_params = trial_params

        return OptimizationResult(
            best_params=best_params,
            best_score=best_score,
            optimization_history=history,
        )

    def _evaluate_pair(self, model, X, prices, buy_level, sell_level, objective):
        """Backtest one threshold pair; return the objective score, or None
        when the backtest yields no results for this configuration."""
        config = BacktestConfig(
            window_length=self.base_config.window_length,
            step_size=self.base_config.step_size,
            buy_threshold=buy_level,
            sell_threshold=sell_level,
            stop_loss_pct=self.base_config.stop_loss_pct,
            take_profit_pct=self.base_config.take_profit_pct,
            commission_rate=self.base_config.commission_rate,
            slippage_rate=self.base_config.slippage_rate,
            initial_capital=self.base_config.initial_capital,
            max_position=self.base_config.max_position,
            risk_free_rate=self.base_config.risk_free_rate,
            signal_lag=self.base_config.signal_lag,
        )

        backtester = WalkForwardBacktester(config)
        results = backtester.walk_forward_backtest({"model": model}, X, prices)

        if not results or "model" not in results:
            return None
        result_list = results["model"]
        if not result_list:
            return None

        metrics = calculate_backtest_metrics(result_list, config)
        if objective == "information_ratio":
            return metrics.information_ratio
        # 'sharpe_ratio' is both the named option and the fallback objective.
        return metrics.sharpe_ratio

