"""
Strongly typed configuration models for continual learning.

This module provides Pydantic models for type-safe configuration of
continual learning components, optimizers, and other settings.
"""

from typing import Dict, Optional, Any, Literal
import inspect
import torch.optim as optim
import torch.optim.lr_scheduler as lr_scheduler
from pydantic import BaseModel, Field, field_validator, model_validator


class OptimizerConfig(BaseModel):
    """
    Strongly typed optimizer configuration.

    Attributes:
        type: The optimizer type (e.g., 'Adam', 'SGD')
        lr: Learning rate
        weight_decay: Weight decay (L2 penalty)
        momentum: Momentum factor (SGD-family optimizers only)
        betas: Running-average coefficients (Adam-family optimizers only)
        eps: Numerical-stability term added to the denominator
    """

    type: str = Field("Adam", description="Optimizer type (e.g., 'Adam', 'SGD')")
    lr: float = Field(0.001, description="Learning rate")
    weight_decay: float = Field(0.0, description="Weight decay (L2 penalty)")

    # Optimizer-specific knobs; None means "not set" and is dropped by to_dict().
    momentum: Optional[float] = Field(None, description="Momentum factor (for SGD)")
    betas: Optional[tuple] = Field(
        None, description="Coefficients for computing running averages (for Adam)"
    )
    eps: Optional[float] = Field(
        None, description="Term added to denominator for numerical stability"
    )

    @field_validator("type")
    @classmethod
    def validate_optimizer_type(cls, v):
        """Validate that optimizer type is available in torch.optim."""

        def _is_concrete_optimizer(name, obj):
            # Accept concrete Optimizer subclasses only; skip the abstract base.
            return (
                inspect.isclass(obj)
                and issubclass(obj, optim.Optimizer)
                and name != "Optimizer"
            )

        available_optimizers = [
            name
            for name, obj in inspect.getmembers(optim)
            if _is_concrete_optimizer(name, obj)
        ]

        if v not in available_optimizers:
            raise ValueError(
                f"Optimizer type '{v}' not found in torch.optim. "
                f"Available optimizers: {', '.join(available_optimizers)}"
            )
        return v

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary, excluding None values."""
        return self.model_dump(exclude_none=True)


class SchedulerConfig(BaseModel):
    """
    Strongly typed configuration for learning rate schedulers.

    Only the parameters relevant to the chosen ``type`` need to be set; the
    cross-field validator enforces the required ones per scheduler.

    Attributes:
        type: The scheduler type (e.g., 'StepLR', 'CosineAnnealingLR')
    """

    type: str = Field("StepLR", description="Scheduler type")

    # Fields for StepLR
    step_size: Optional[int] = Field(None, description="Period of learning rate decay")
    gamma: Optional[float] = Field(
        None, description="Multiplicative factor of learning rate decay"
    )

    # Fields for CosineAnnealingLR
    T_max: Optional[int] = Field(None, description="Maximum number of iterations")
    eta_min: Optional[float] = Field(None, description="Minimum learning rate")

    # Fields for ReduceLROnPlateau
    mode: Optional[str] = Field(
        "min", description="One of 'min', 'max' for ReduceLROnPlateau"
    )
    factor: Optional[float] = Field(
        None, description="Factor by which to reduce the learning rate"
    )
    patience: Optional[int] = Field(
        None,
        description="Number of epochs with no improvement after which LR will be reduced",
    )
    threshold: Optional[float] = Field(
        None, description="Threshold for measuring the new optimum"
    )

    @field_validator("type")
    @classmethod  # explicit, matching OptimizerConfig's validator style
    def validate_scheduler_type(cls, v):
        """Validate that scheduler type is available in torch.optim.lr_scheduler."""
        available_schedulers = [
            name
            for name, obj in inspect.getmembers(lr_scheduler)
            if inspect.isclass(obj) and name[0].isupper() and not name.startswith("_")
        ]

        if v not in available_schedulers:
            raise ValueError(
                f"Scheduler type '{v}' not found in torch.optim.lr_scheduler. "
                f"Available schedulers: {', '.join(available_schedulers)}"
            )
        return v

    @model_validator(mode="after")
    def validate_scheduler_params(self):
        """Validate that required parameters for the chosen scheduler are provided.

        FIX: a Pydantic v2 ``mode="after"`` model validator receives the model
        *instance*, not a dict. The previous implementation called
        ``values.get(...)`` and assigned via ``values[...] = ...`` on the
        instance, which raised AttributeError on every construction. Attribute
        access/assignment is used instead.

        Raises:
            ValueError: If a required parameter for the chosen scheduler type
                is missing ('step_size' for StepLR, 'T_max' for
                CosineAnnealingLR).
        """
        if self.type == "StepLR" and self.step_size is None:
            raise ValueError("'step_size' parameter is required for StepLR scheduler")

        if self.type == "CosineAnnealingLR" and self.T_max is None:
            raise ValueError(
                "'T_max' parameter is required for CosineAnnealingLR scheduler"
            )

        if self.type == "ReduceLROnPlateau":
            if self.factor is None:
                self.factor = 0.1  # default mirrors torch's ReduceLROnPlateau
            if self.patience is None:
                self.patience = 10  # default mirrors torch's ReduceLROnPlateau

        return self

    def to_dict(self) -> Dict[str, Any]:
        """Convert to dictionary, excluding None values."""
        return self.model_dump(exclude_none=True)


class EWCConfig(BaseModel):
    """
    Settings for the Elastic Weight Consolidation (EWC) method.

    Attributes:
        regularization_weight: Weight of the regularization loss
        fisher_samples: Number of samples to use for Fisher estimation
        fisher_alpha: EMA coefficient for Fisher matrices
    """

    regularization_weight: float = Field(
        5000.0,
        description="Weight of the regularization loss",
    )
    fisher_samples: int = Field(
        200,
        description="Number of samples to use for Fisher estimation",
    )
    fisher_alpha: float = Field(
        0.95,
        description="EMA coefficient for Fisher matrices",
    )


class ReplayConfig(BaseModel):
    """
    Settings for the Experience Replay method.

    Attributes:
        memory_size: Maximum number of examples to store
        replay_ratio: Ratio of replay samples to current task samples
    """

    memory_size: int = Field(
        200,
        description="Maximum number of examples to store",
    )
    replay_ratio: float = Field(
        0.5,
        description="Ratio of replay samples to current task samples",
    )


class ContinualLearningMethodConfig(BaseModel):
    """
    Configuration selecting and parameterizing a continual learning method.

    Attributes:
        method_type: Type of continual learning method
        ewc: Configuration for EWC (when method_type is 'ewc')
        replay: Configuration for Experience Replay (when method_type is 'replay')
    """

    method_type: Literal["ewc", "replay", "none"] = Field(
        "none", description="Type of continual learning method"
    )
    ewc: Optional[EWCConfig] = None
    replay: Optional[ReplayConfig] = None

    @model_validator(mode="after")
    def validate_method_config(self):
        """Fill in a default sub-config matching the selected method_type."""
        # Build instances via model_validate so every field default is applied.
        if self.method_type == "ewc" and self.ewc is None:
            self.ewc = EWCConfig.model_validate({})
        if self.method_type == "replay" and self.replay is None:
            self.replay = ReplayConfig.model_validate({})
        return self


class TrainingConfig(BaseModel):
    """
    Top-level training configuration.

    Attributes:
        max_epochs: Maximum number of epochs for each task
        batch_size: Batch size for training
        optimizer: Optimizer configuration
        scheduler: Scheduler configuration (optional)
        cl_method: Continual learning method configuration
    """

    max_epochs: int = Field(5, description="Maximum number of epochs for each task")
    batch_size: int = Field(32, description="Batch size for training")
    # Defaults are built via model_validate so nested defaults apply correctly.
    optimizer: OptimizerConfig = Field(
        default_factory=lambda: OptimizerConfig.model_validate({})
    )
    scheduler: Optional[SchedulerConfig] = None
    cl_method: ContinualLearningMethodConfig = Field(
        default_factory=lambda: ContinualLearningMethodConfig.model_validate({})
    )