"""模型持久化存储."""

from __future__ import annotations

import json
import logging
import shutil
from dataclasses import asdict, dataclass, fields
from pathlib import Path
from typing import Any, Dict, Optional

from ..utils import now_shanghai
from .base import BaseQuantModel, ModelConfig
from .factory import ModelFactory

logger = logging.getLogger(__name__)


@dataclass
class ModelMetadata:
    """Metadata persisted as JSON alongside each saved model."""

    model_name: str  # factory name of the model, e.g. "lstm", "xgboost", "lightgbm"
    symbol: str  # ETF code the model was trained on
    training_date: str  # ISO-8601 timestamp of the training run
    feature_names: list[str] | None  # input feature ordering; None if not recorded
    performance_metrics: Dict[str, Any] | None  # e.g. sharpe_ratio / information_ratio / max_drawdown
    config: Dict[str, Any]  # asdict() of the ModelConfig used to build the model
    version: str = "1.0"  # metadata schema version


class ModelStorage:
    """Filesystem persistence for trained models.

    Artifacts are laid out as ``<base_dir>/<symbol>/<model_name>/`` holding
    one weight file (extension depends on the model type) and a
    ``metadata.json`` describing how the model was trained.
    """

    def __init__(self, base_dir: str | Path = "models") -> None:
        """Root the storage at *base_dir*, creating it if it does not exist."""
        self.base_dir = Path(base_dir)
        self.base_dir.mkdir(parents=True, exist_ok=True)

    def _get_model_dir(self, symbol: str, model_name: str) -> Path:
        """Return the directory for one (symbol, model_name) pair."""
        return self.base_dir / symbol / model_name

    def _get_model_path(self, symbol: str, model_name: str) -> Path:
        """Return the weight-file path; the suffix encodes the model type."""
        model_dir = self._get_model_dir(symbol, model_name)
        # PyTorch checkpoints use .pt, gradient-boosting native dumps use
        # .txt, everything else falls back to pickle.
        if model_name == "lstm":
            return model_dir / "model.pt"
        if model_name in ("xgboost", "lightgbm"):
            return model_dir / "model.txt"
        return model_dir / "model.pkl"

    def _get_metadata_path(self, symbol: str, model_name: str) -> Path:
        """Return the metadata.json path for one (symbol, model_name) pair."""
        return self._get_model_dir(symbol, model_name) / "metadata.json"

    def save(
        self,
        model: BaseQuantModel,
        symbol: str,
        metadata: ModelMetadata | None = None,
    ) -> Path:
        """Persist *model* and its metadata.

        Parameters
        ----------
        model:
            Trained model to save; ``model.config.name`` selects the subdir
            and file extension.
        symbol:
            ETF code the model belongs to.
        metadata:
            Model metadata; generated automatically when None.

        Returns
        -------
        Path of the written model weight file.
        """
        model_name = model.config.name
        model_dir = self._get_model_dir(symbol, model_name)
        model_dir.mkdir(parents=True, exist_ok=True)

        # Write the weights first so metadata never describes a missing file.
        model_path = self._get_model_path(symbol, model_name)
        model.save(model_path)

        if metadata is None:
            metadata = ModelMetadata(
                model_name=model_name,
                symbol=symbol,
                training_date=now_shanghai().isoformat(),
                feature_names=model.feature_names,
                performance_metrics=None,
                config=asdict(model.config),
            )

        metadata_path = self._get_metadata_path(symbol, model_name)
        with metadata_path.open("w", encoding="utf-8") as f:
            json.dump(asdict(metadata), f, ensure_ascii=False, indent=2)

        logger.info("模型已保存: %s", model_path)
        return model_path

    def load(
        self,
        symbol: str,
        model_name: str,
        version: str | None = None,
    ) -> tuple[BaseQuantModel, ModelMetadata]:
        """Load a previously saved model together with its metadata.

        Parameters
        ----------
        symbol:
            ETF code.
        model_name:
            Model name (subdirectory under the symbol).
        version:
            Reserved for future version management; currently ignored.

        Returns
        -------
        ``(model instance, metadata)``

        Raises
        ------
        FileNotFoundError
            If the model file or the metadata file is missing.
        """
        model_path = self._get_model_path(symbol, model_name)
        metadata_path = self._get_metadata_path(symbol, model_name)

        if not model_path.exists():
            raise FileNotFoundError(f"模型文件不存在: {model_path}")
        if not metadata_path.exists():
            raise FileNotFoundError(f"元数据文件不存在: {metadata_path}")

        with metadata_path.open("r", encoding="utf-8") as f:
            metadata_dict = json.load(f)
        # Drop keys an unknown schema version may have written so old
        # artifacts and new code (or vice versa) stay mutually readable;
        # previously any extra key made ModelMetadata(**...) raise TypeError.
        known = {field.name for field in fields(ModelMetadata)}
        metadata = ModelMetadata(
            **{k: v for k, v in metadata_dict.items() if k in known}
        )

        # Rebuild the concrete model from its stored config, then restore
        # the trained weights.
        config = ModelConfig(**metadata.config)
        model = ModelFactory.create(model_name, config)
        model.load(model_path)

        # Restore the feature ordering used at training time, if recorded.
        if metadata.feature_names is not None:
            model._feature_names = metadata.feature_names

        logger.info("模型已加载: %s", model_path)
        return model, metadata

    def list_models(self, symbol: str | None = None) -> list[Dict[str, Any]]:
        """List saved models.

        Parameters
        ----------
        symbol:
            ETF code; when None every symbol under ``base_dir`` is scanned.

        Returns
        -------
        One dict per model with ``symbol``, ``model_name``,
        ``training_date`` and ``version`` keys.
        """
        if symbol is None:
            symbols = [d.name for d in self.base_dir.iterdir() if d.is_dir()]
        else:
            symbols = [symbol]

        models: list[Dict[str, Any]] = []
        for sym in symbols:
            symbol_dir = self.base_dir / sym
            if not symbol_dir.exists():
                continue

            for model_dir in symbol_dir.iterdir():
                metadata_path = model_dir / "metadata.json"
                if not model_dir.is_dir() or not metadata_path.exists():
                    continue
                try:
                    with metadata_path.open("r", encoding="utf-8") as f:
                        metadata_dict = json.load(f)
                    models.append(
                        {
                            "symbol": sym,
                            "model_name": metadata_dict["model_name"],
                            "training_date": metadata_dict["training_date"],
                            "version": metadata_dict.get("version", "1.0"),
                        }
                    )
                # Narrowed from bare Exception: unreadable or corrupt
                # metadata is skipped, any unrelated bug should surface.
                except (OSError, ValueError, KeyError) as e:
                    logger.warning("读取模型元数据失败: %s, %s", metadata_path, e)

        return models

    def delete(self, symbol: str, model_name: str) -> None:
        """Remove one saved model directory (weights and metadata).

        Parameters
        ----------
        symbol:
            ETF code.
        model_name:
            Model name.
        """
        model_dir = self._get_model_dir(symbol, model_name)
        if model_dir.exists():
            shutil.rmtree(model_dir)
            logger.info("模型已删除: %s", model_dir)
        else:
            logger.warning("模型目录不存在: %s", model_dir)


class ModelLoader:
    """Convenience facade for selecting and loading persisted models."""

    def __init__(self, storage: ModelStorage | None = None) -> None:
        """Wrap *storage*, defaulting to a ``ModelStorage()`` rooted at "models"."""
        self.storage = storage or ModelStorage()

    @staticmethod
    def _composite_score(metadata: ModelMetadata) -> float:
        """Composite rating: sharpe + information ratio - |max drawdown|.

        Models without recorded performance metrics score 0.0.
        """
        metrics = metadata.performance_metrics
        if not metrics:
            return 0.0
        return (
            metrics.get("sharpe_ratio", 0.0)
            + metrics.get("information_ratio", 0.0)
            - abs(metrics.get("max_drawdown", 0.0))
        )

    def load_best_model(
        self,
        symbol: str,
        model_names: list[str] | None = None,
    ) -> tuple[BaseQuantModel, ModelMetadata]:
        """Load the best-scoring model for *symbol*.

        Parameters
        ----------
        symbol:
            ETF code.
        model_names:
            Candidate model names; when None, every model stored for
            *symbol* is considered.

        Returns
        -------
        ``(best model instance, its metadata)``

        Raises
        ------
        ValueError
            If no candidate could be loaded.
        """
        if model_names is None:
            model_names = [
                entry["model_name"] for entry in self.storage.list_models(symbol)
            ]

        winner: tuple[BaseQuantModel, ModelMetadata] | None = None
        winner_score = float("-inf")

        for candidate in model_names:
            # Best-effort: a candidate that fails to load or score is
            # skipped with a warning rather than aborting the search.
            try:
                loaded = self.storage.load(symbol, candidate)
                score = self._composite_score(loaded[1])
                if score > winner_score:
                    winner_score = score
                    winner = loaded
            except Exception as exc:
                logger.warning(f"加载模型 {candidate} 失败: {exc}")

        if winner is None:
            raise ValueError(f"未找到可用模型: symbol={symbol}, models={model_names}")

        return winner

