from abc import abstractmethod
from typing import Any, Dict, List, Optional

import torch
import torch.nn as nn

from continuallearning.interfaces import (
    TaskAwareHookManagerInterface,
    HookBasedAdapterInterface,
    AdapterType,
    AdapterOutput,
    AdapterInterface,
)

# Import the output-standardization helper from its dedicated module
from continuallearning.models.core.output_utils import standardize_adapter_output
from continuallearning.models.backbones.base import BaseBackbone

# Use the project's custom logging system instead of the standard logging library
from continuallearning.utils.logging import get_logger

# Create a module-level logger via the custom logging system
logger = get_logger(__name__)


class BaseAdapter(nn.Module, AdapterInterface):
    """
    Common foundation for every adapter module in continual learning.

    Adapters perform parameter-efficient fine-tuning of a backbone model.
    This base class stores the backbone and the adapter's placement
    (``AdapterType``) and declares the ``forward`` contract that every
    concrete adapter must implement.

    Args:
        backbone: The backbone model this adapter adapts.
        adapter_type: Placement of the adapter (PRE_BACKBONE or POST_BACKBONE).
    """

    # Class-level default; -1 presumably marks "no task selected yet" —
    # confirm against the AdapterInterface contract.
    _current_task: int = -1

    def __init__(
        self,
        backbone: BaseBackbone,
        adapter_type: AdapterType = AdapterType.POST_BACKBONE,
    ):
        super().__init__()
        self.backbone = backbone
        self._adapter_type = adapter_type

    @property
    def adapter_type(self) -> AdapterType:
        """Placement of this adapter: PRE_BACKBONE or POST_BACKBONE."""
        return self._adapter_type

    def forward(
        self, x: torch.Tensor, task_id: Optional[int] = None, **kwargs
    ) -> AdapterOutput:
        """
        Apply the adapter to ``x``.

        ``x`` is backbone features for POST_BACKBONE adapters and raw model
        input for PRE_BACKBONE ones. Concrete subclasses must override this.

        Args:
            x: Input tensor.
            task_id: Optional task identifier; ``None`` means "use the
                current task".
            **kwargs: Additional arguments supplied by callers.

        Returns:
            AdapterOutput: Standardized output with adapted features.

        Raises:
            NotImplementedError: Always, in this base class.
        """
        raise NotImplementedError("Subclasses must implement forward()")


class BaseHookAdapter(BaseAdapter, HookBasedAdapterInterface):
    """
    Base class for hook-based adapters.

    Extends BaseAdapter with hook management. Shared (non task-specific)
    adapters use a single lazily-created hook manager; task-specific
    adapters keep one hook manager per task in ``_task_hook_managers`` and
    swap between them in ``prepare_task()``.

    Args:
        backbone: Backbone model to adapt
        adapter_type: Type of adapter (PRE_BACKBONE or POST_BACKBONE)
        num_tasks: Number of tasks to support (used when task_specific=True)
    """

    def __init__(
        self,
        backbone: BaseBackbone,
        adapter_type: AdapterType = AdapterType.PRE_BACKBONE,
        num_tasks: int = 1,
    ):
        super().__init__(backbone=backbone, adapter_type=adapter_type)

        # Bug fix: the original accepted ``num_tasks`` but never stored it,
        # even though ``self.num_tasks`` is read in prepare_task() and
        # get_trainable_parameters(). Prefer the public attribute; fall back
        # to the conventional backing name if the interface exposes
        # ``num_tasks`` as a read-only property.
        try:
            self.num_tasks = num_tasks
        except AttributeError:
            self._num_tasks = num_tasks

        # Hook-management state.
        self._hook_manager: Optional[TaskAwareHookManagerInterface] = None
        self._attached_model: Optional[nn.Module] = None
        # NOTE(review): presumably populated by subclasses' hooks — confirm.
        self._cached_features = None
        self._task_hook_managers: Dict[int, TaskAwareHookManagerInterface] = {}

    @abstractmethod
    def _create_hook_manager(
        self, model: nn.Module, task_id: int
    ) -> TaskAwareHookManagerInterface:
        """
        Create a hook manager for a specific task.

        Subclasses must implement this method to create the appropriate
        hook manager for their adapter type.

        Args:
            model: Model to apply hooks to
            task_id: Task identifier

        Returns:
            TaskAwareHookManagerInterface: Hook manager for the adapter
        """
        pass

    def attach(self, model: nn.Module) -> TaskAwareHookManagerInterface:
        """
        Attach this adapter to a model using hooks.

        Handles both task-specific adapters (per-task hook manager from
        ``get_hook_manager_for_task``) and shared adapters (one lazily
        created manager).

        Args:
            model: Model to apply hooks to

        Returns:
            TaskAwareHookManagerInterface: The hook manager now in use
        """
        if self.task_specific:
            # Resolve the hook manager for the current task.
            hook_manager = self.get_hook_manager_for_task(self.current_task)
        else:
            # Create a shared hook manager if none exists.
            if self._hook_manager is None:
                logger.debug(
                    f"Creating shared hook manager for {self.__class__.__name__}"
                )
                self._hook_manager = self._create_hook_manager(model, 0)
            hook_manager = self._hook_manager

        # Attach hooks if not already attached.
        if not hook_manager.is_attached:
            hook_manager.attach()

        # Bug fix: track the active manager on BOTH paths. The original
        # never set self._hook_manager on the task-specific path, so after a
        # direct attach() call prepare_task() could not detach the old
        # task's hooks and two tasks' hooks could stay attached at once.
        self._hook_manager = hook_manager
        self._attached_model = model
        return hook_manager

    def detach(self) -> None:
        """
        Detach this adapter from the model.

        For task-specific adapters, detaches the hook managers of ALL
        tasks; for shared adapters, detaches and drops the single manager.
        """
        if self.task_specific:
            # Detach all task-specific hook managers.
            for task_id, hook_manager in self._task_hook_managers.items():
                if hook_manager.is_attached:
                    logger.debug(f"Detaching hooks for task {task_id}")
                    hook_manager.detach()
            # Consistency fix: drop the stale active-manager reference, as
            # the shared branch already did.
            self._hook_manager = None
        elif self._hook_manager is not None and self._hook_manager.is_attached:
            self._hook_manager.detach()
            self._hook_manager = None

    def prepare_task(self, task_id: int, **kwargs) -> None:
        """
        Configure the adapter for a specific task.

        Updates the current task and, for task-specific adapters that are
        already attached to a model, swaps in that task's hook manager.

        Args:
            task_id: Task identifier
            **kwargs: Accepted for interface compatibility; unused here
        """
        # Validate and normalize the task ID. ``task_specific`` is checked
        # first (matching get_trainable_parameters) so shared adapters never
        # need to read self.num_tasks.
        if self.task_specific and task_id >= self.num_tasks:
            logger.warning(
                f"Task ID {task_id} exceeds max tasks {self.num_tasks}, using modulo"
            )
            task_id = task_id % self.num_tasks

        # Bug fix: capture the outgoing task BEFORE super() updates it; the
        # original read self.current_task after the update, so the switch
        # log below always reported from == to.
        previous_task = self.current_task

        # Update current task in the base class.
        super().prepare_task(task_id)

        # If task-specific and already attached, activate this task's hooks.
        if self.task_specific and self._attached_model is not None:
            # Get or create the hook manager but don't attach yet.
            hook_manager = self.get_hook_manager_for_task(task_id)

            if self._hook_manager is not None and self._hook_manager.is_attached:
                # Don't detach/reattach if it's already the correct manager.
                if self._hook_manager != hook_manager:
                    logger.debug(
                        f"Switching hooks from task {previous_task} to {task_id}"
                    )
                    self._hook_manager.detach()
                    hook_manager.attach()
                    self._hook_manager = hook_manager
            elif not hook_manager.is_attached:
                # The hook manager exists but isn't attached: attach it.
                hook_manager.attach()
                self._hook_manager = hook_manager

    def get_trainable_parameters(
        self, task_id: Optional[int] = None
    ) -> List[nn.Parameter]:
        """
        Get trainable parameters for this adapter.

        Args:
            task_id: Optional task identifier for task-specific adapters;
                defaults to the current task.

        Returns:
            List[nn.Parameter]: Trainable parameters (empty list when none
            can be resolved).
        """
        # Use current task if none specified.
        task_id = self.current_task if task_id is None else task_id

        # Normalize out-of-range task IDs the same way prepare_task() does.
        if self.task_specific and task_id >= self.num_tasks:
            task_id = task_id % self.num_tasks

        # Shared adapters: delegate to the single hook manager, if any.
        if not self.task_specific:
            if self._hook_manager and hasattr(
                self._hook_manager, "get_trainable_parameters"
            ):
                return self._hook_manager.get_trainable_parameters()
            return []

        # Task-specific adapters: resolve this task's hook manager.
        hook_manager = self.get_hook_manager_for_task(task_id)

        # Already attached (or it already has hooks): read directly.
        if hook_manager.is_attached or (
            hasattr(hook_manager, "hooks") and hook_manager.hooks
        ):
            return hook_manager.get_trainable_parameters()

        # Not attached but a backbone is available: temporarily attach via
        # the manager's context manager so hooks (and their parameters)
        # exist while we read them, with automatic cleanup.
        if getattr(self, "backbone", None) is not None:
            logger.debug(
                f"Temporarily attaching hooks for task {task_id} to get parameters"
            )
            with hook_manager:
                return hook_manager.get_trainable_parameters()

        # No way to materialize parameters: warn and return an empty list.
        logger.warning(
            f"Cannot get parameters for task {task_id}: no backbone available"
        )
        return []

    def forward(
        self, x: torch.Tensor, task_id: Optional[int] = None, **kwargs
    ) -> AdapterOutput:
        """
        Process input through the backbone with this adapter's hooks active.

        Args:
            x: Input tensor
            task_id: Optional task identifier; switches tasks when given
            **kwargs: Additional arguments forwarded to the backbone

        Returns:
            AdapterOutput: Standardized output from the adapter

        Raises:
            RuntimeError: Wrapping any exception raised by the backbone call.
        """
        # Switch to the requested task if one is specified.
        if task_id is not None:
            self.prepare_task(task_id)

        # Context-manager pattern: hooks are attached on entry and always
        # detached on exit, even when the backbone call raises.
        with self:
            try:
                outputs = self.backbone(x, **kwargs)
                # standardize_adapter_output (rather than a generic
                # standardizer) guarantees the declared AdapterOutput type.
                return standardize_adapter_output(outputs)
            except Exception as e:
                logger.error(f"Error during adapter forward pass: {e}")
                # Re-raise with additional context, preserving the cause.
                raise RuntimeError(f"Adapter forward failed: {str(e)}") from e

    def __enter__(self) -> "BaseHookAdapter":
        """
        Enter the context manager, attaching hooks.

        Attaches to the previously attached model when one exists,
        otherwise to the backbone.

        Returns:
            BaseHookAdapter: self, for use in ``with`` statements

        Raises:
            ValueError: If neither a backbone nor an attached model exists.
        """
        if self._attached_model is None:
            if hasattr(self, "backbone"):
                self._hook_manager = self.attach(self.backbone)
            else:
                raise ValueError(
                    "Cannot enter context: No backbone or attached model available"
                )
        elif self._hook_manager is None or not self._hook_manager.is_attached:
            self._hook_manager = self.attach(self._attached_model)
        return self

    def __exit__(
        self, exc_type: Optional[Any], exc_val: Optional[Any], exc_tb: Optional[Any]
    ) -> None:
        """
        Exit the context manager, detaching hooks.

        Detach failures are logged but never mask an exception that is
        already propagating out of the ``with`` body.

        Args:
            exc_type: Exception type if an exception was raised
            exc_val: Exception value if an exception was raised
            exc_tb: Exception traceback if an exception was raised
        """
        try:
            self.detach()
        except Exception as e:
            # Log but do not suppress the original in-flight exception.
            logger.error(f"Error while detaching hooks in __exit__: {e}")

        # Returning None (falsy) never suppresses the caller's exception.
        return None
