"""
Builder utility for constructing continual learning experiments.

This module provides a fluent builder interface for easily assembling continual learning
components in a type-safe manner with proper error handling and component integration.
"""

import logging
from typing import Any, Dict, List, Optional, Type, Union, cast
import torch
import torch.nn as nn
import pytorch_lightning as pl

from ..module.continual_module import ContinualModule
from ..data.continual_datamodule import ContinualDataModule
from ..config.typed_config import (
    OptimizerConfig,
    SchedulerConfig,
    ContinualLearningMethodConfig,
    TrainingConfig,
)
from ..methods.factory import create_continual_learning_strategy
from ..callbacks.task_boundary_callback import TaskBoundaryCallback
from continuallearning.registry import MethodInterface, TaskAdaptProtocol
from ..events.dispatcher import EventDispatcher, global_dispatcher


class ContinualLearningBuilder:
    """
    Builder for setting up continual learning experiments with proper component integration.

    This builder provides a fluent interface for configuring and connecting the various
    components needed for a continual learning experiment, ensuring they work together
    correctly and follow best practices.

    Example:
        ```python
        # Create a builder
        builder = ContinualLearningBuilder()

        # Configure and build components (parentheses allow the multi-line chain)
        module, datamodule, trainer = (
            builder.with_model(model)
            .with_datamodule(datamodule)
            .with_optimizer(optimizer_cfg)
            .with_scheduler(scheduler_cfg)
            .with_method("ewc", regularization_weight=5000.0)
            .build()
        )

        # Run experiment
        trainer.fit(module, datamodule)
        ```
    """

    def __init__(self):
        """Initialize the builder with default settings."""
        self._model: Optional[nn.Module] = None
        self._datamodule: Optional[ContinualDataModule] = None
        self._optimizer_cfg: Optional[OptimizerConfig] = None
        self._scheduler_cfg: Optional[SchedulerConfig] = None
        self._cl_method: Optional[MethodInterface] = None
        self._criterion: Optional[nn.Module] = None
        self._callbacks: List[pl.Callback] = []
        # Default to the process-wide dispatcher; replaceable via with_event_dispatcher()
        self._event_dispatcher = global_dispatcher
        self._logger = logging.getLogger(__name__)
        self._max_epochs: int = 10
        self._devices: Union[int, List[int], str] = 1
        self._accelerator: Optional[str] = None
        self._verbose: bool = True

    def with_model(self, model: nn.Module) -> "ContinualLearningBuilder":
        """
        Set the model to be used for training.

        Args:
            model: PyTorch neural network model

        Returns:
            Self for method chaining
        """
        self._model = model
        return self

    def with_datamodule(
        self, datamodule: ContinualDataModule
    ) -> "ContinualLearningBuilder":
        """
        Set the data module to be used for training.

        Args:
            datamodule: Lightning data module for continual learning

        Returns:
            Self for method chaining
        """
        self._datamodule = datamodule
        return self

    def with_optimizer(
        self, config: Union[OptimizerConfig, Dict[str, Any]]
    ) -> "ContinualLearningBuilder":
        """
        Configure the optimizer.

        Args:
            config: Optimizer configuration or dictionary

        Returns:
            Self for method chaining
        """
        # Coerce plain dicts into the typed config (OptimizerConfig is already
        # imported at module level; no local re-import needed).
        if isinstance(config, dict):
            config = OptimizerConfig(**config)

        self._optimizer_cfg = config
        return self

    def with_scheduler(
        self, config: Union[SchedulerConfig, Dict[str, Any], None]
    ) -> "ContinualLearningBuilder":
        """
        Configure the learning rate scheduler (optional).

        Args:
            config: Scheduler configuration or dictionary, or None to disable

        Returns:
            Self for method chaining
        """
        # Coerce plain dicts into the typed config; None passes through and
        # clears the scheduler. (Single assignment replaces the previous
        # redundant double-assignment for the None case.)
        if isinstance(config, dict):
            config = SchedulerConfig(**config)

        self._scheduler_cfg = config
        return self

    def with_method(self, method_type: str, **kwargs) -> "ContinualLearningBuilder":
        """
        Configure a continual learning method.

        Args:
            method_type: Type of method ('ewc', 'replay', 'none', etc.)
            **kwargs: Method-specific parameters

        Returns:
            Self for method chaining

        Raises:
            ValueError: If method_type is unknown
        """
        # Build a typed method config from the method name; EWC/Replay configs
        # are imported lazily to avoid import cycles at module load time.
        if method_type == "ewc":
            from ..config.typed_config import EWCConfig

            method_config = ContinualLearningMethodConfig(
                method_type=method_type, ewc=EWCConfig(**kwargs)
            )
        elif method_type == "replay":
            from ..config.typed_config import ReplayConfig

            method_config = ContinualLearningMethodConfig(
                method_type=method_type, replay=ReplayConfig(**kwargs)
            )
        elif method_type == "none":
            method_config = ContinualLearningMethodConfig(method_type="none")
        else:
            raise ValueError(f"Unknown method type: {method_type}")

        # Create the method instance
        self._cl_method = create_continual_learning_strategy(method_config)
        return self

    def with_criterion(self, criterion: nn.Module) -> "ContinualLearningBuilder":
        """
        Set a custom loss criterion.

        Args:
            criterion: Loss function module

        Returns:
            Self for method chaining
        """
        self._criterion = criterion
        return self

    def with_callback(self, callback: pl.Callback) -> "ContinualLearningBuilder":
        """
        Add a Lightning callback.

        Args:
            callback: PyTorch Lightning callback

        Returns:
            Self for method chaining
        """
        self._callbacks.append(callback)
        return self

    def with_event_dispatcher(
        self, dispatcher: EventDispatcher
    ) -> "ContinualLearningBuilder":
        """
        Set a custom event dispatcher.

        Args:
            dispatcher: Event dispatcher for component communication

        Returns:
            Self for method chaining
        """
        self._event_dispatcher = dispatcher
        return self

    def with_max_epochs(self, max_epochs: int) -> "ContinualLearningBuilder":
        """
        Set the maximum number of epochs for training.

        Args:
            max_epochs: Maximum number of epochs

        Returns:
            Self for method chaining

        Raises:
            ValueError: If max_epochs is not a positive integer
        """
        if max_epochs <= 0:
            raise ValueError("max_epochs must be a positive integer")

        self._max_epochs = max_epochs
        return self

    def with_devices(
        self, devices: Union[int, List[int], str]
    ) -> "ContinualLearningBuilder":
        """
        Configure the devices to use for training.

        Args:
            devices: Device specification (e.g., 1, [0, 1], "auto")

        Returns:
            Self for method chaining
        """
        self._devices = devices
        return self

    def with_accelerator(
        self, accelerator: Optional[str]
    ) -> "ContinualLearningBuilder":
        """
        Set the accelerator type for training.

        Args:
            accelerator: Accelerator type (e.g., "gpu", "cpu", "tpu"), or None
                to let the Trainer use its own default

        Returns:
            Self for method chaining
        """
        self._accelerator = accelerator
        return self

    def from_config(self, config: TrainingConfig) -> "ContinualLearningBuilder":
        """
        Configure the builder from a comprehensive training configuration.

        Args:
            config: Training configuration object

        Returns:
            Self for method chaining
        """
        self._optimizer_cfg = config.optimizer
        self._scheduler_cfg = config.scheduler
        self._cl_method = create_continual_learning_strategy(config.cl_method)
        self._max_epochs = config.max_epochs
        return self

    def build(
        self,
    ) -> tuple[ContinualModule, Optional[ContinualDataModule], pl.Trainer]:
        """
        Build and connect all components for a continual learning experiment.

        Returns:
            Tuple containing:
            - The configured continual module
            - The data module (if provided)
            - A PyTorch Lightning trainer

        Raises:
            ValueError: If required components are missing or misconfigured
        """
        # Validate required components
        if self._model is None:
            raise ValueError("Model must be provided using with_model()")

        if self._optimizer_cfg is None:
            self._logger.warning(
                "No optimizer configured, using default Adam with lr=0.001"
            )
            self._optimizer_cfg = OptimizerConfig()

        # Create continual module
        module = ContinualModule(
            model=self._model,
            optimizer_cfg=self._optimizer_cfg,
            scheduler_cfg=self._scheduler_cfg,
            cl_strategy=self._cl_method,
            criterion=self._criterion,
        )

        # Create task boundary callback with event dispatcher; it goes first
        # so task-boundary events fire before user-supplied callbacks.
        task_boundary_callback = TaskBoundaryCallback(
            verbose=self._verbose, event_dispatcher=self._event_dispatcher
        )
        callbacks = [task_boundary_callback] + self._callbacks

        # Create trainer. Lightning 2.x defaults accelerator to "auto" and
        # rejects an explicit None, so only forward it when one was set.
        trainer_kwargs: Dict[str, Any] = {
            "max_epochs": self._max_epochs,
            "callbacks": callbacks,
            "devices": self._devices,
        }
        if self._accelerator is not None:
            trainer_kwargs["accelerator"] = self._accelerator

        trainer = pl.Trainer(**trainer_kwargs)

        self._logger.info("Successfully built continual learning components")

        return module, self._datamodule, trainer
