"""
Custom exceptions for Parameter-Efficient Fine-Tuning (PEFT) methods.

This module defines specific exception types for PEFT-related operations,
improving error handling and debugging.
"""

from typing import Any, Dict, List, Optional, Type, Union

import torch


class PEFTError(Exception):
    """
    Base exception class for all PEFT-related errors.

    Carries an optional context dictionary whose entries are appended to
    the rendered message, giving callers structured debugging information.
    """

    def __init__(self, message: str, context: Optional[Dict[str, Any]] = None):
        """
        Initialize PEFT error with optional context.

        Args:
            message: Error message
            context: Optional dictionary containing error context
        """
        super().__init__(message)
        self.context = context if context else {}
        # Guards against the same error being processed twice by handlers.
        self._handled = False

    def __str__(self) -> str:
        """Render the base message, appending any context entries."""
        base = super().__str__()
        if not self.context:
            return base
        entries = "".join(f"\n  {key}: {value}" for key, value in self.context.items())
        return base + "\nError context:" + entries

    def add_context(self, **kwargs: Any) -> "PEFTError":
        """
        Merge additional key/value pairs into the error context.

        Returns:
            Self for method chaining
        """
        for key, value in kwargs.items():
            self.context[key] = value
        return self


class ConfigurationError(PEFTError):
    """
    Raised when a PEFT method configuration is invalid.

    Covers invalid parameter values, incompatible configurations, and
    missing required settings.
    """

    def __init__(
        self,
        message: str,
        config: Optional[Any] = None,
        method: Optional[str] = None,
        invalid_params: Optional[List[str]] = None,
    ):
        """
        Initialize configuration error.

        Args:
            message: Error description
            config: The configuration object that caused the error
            method: PEFT method name (e.g., "lora", "adapter")
            invalid_params: List of invalid parameter names
        """
        ctx: Dict[str, Any] = {}
        if method:
            ctx["method"] = method
        if config is not None:
            ctx["config_type"] = type(config).__name__
            # Snapshot the config's public values to aid debugging.
            if hasattr(config, "to_dict"):
                ctx["config_values"] = config.to_dict()
            elif hasattr(config, "__dict__"):
                ctx["config_values"] = {
                    name: val
                    for name, val in vars(config).items()
                    if not name.startswith("_")
                }
        if invalid_params:
            ctx["invalid_params"] = invalid_params

        super().__init__(message, ctx)
        self.config = config
        self.method = method
        self.invalid_params = invalid_params


class AdapterError(PEFTError):
    """
    Raised for failures in adapter operations.

    Covers adapter initialization, forward pass, and management errors.
    """

    def __init__(
        self,
        message: str,
        adapter_type: Optional[str] = None,
        task_id: Optional[int] = None,
        operation: Optional[str] = None,
    ):
        """
        Initialize adapter error.

        Args:
            message: Error description
            adapter_type: Type of adapter (e.g., "lora", "adapter")
            task_id: Task ID associated with the adapter
            operation: Operation that failed (e.g., "forward", "init", "switch")
        """
        details: Dict[str, Any] = {}
        if adapter_type:
            details["adapter_type"] = adapter_type
        # task_id of 0 is valid, so test against None rather than truthiness.
        if task_id is not None:
            details["task_id"] = task_id
        if operation:
            details["operation"] = operation

        super().__init__(message, details)
        self.adapter_type = adapter_type
        self.task_id = task_id
        self.operation = operation


class HookError(PEFTError):
    """
    Raised for failures in hook operations.

    Base class for hook-related errors, including attachment and detachment.
    """

    def __init__(
        self,
        message: str,
        module_type: Optional[Union[str, Type]] = None,
        module_path: Optional[str] = None,
        hook_type: Optional[str] = None,
    ):
        """
        Initialize hook error.

        Args:
            message: Error description
            module_type: Type of the module
            module_path: Path to the module in the model
            hook_type: Type of hook (e.g., "forward", "backward")
        """
        details: Dict[str, Any] = {}
        if module_type:
            # Accept either a class object or its name; store the name.
            if isinstance(module_type, str):
                details["module_type"] = module_type
            else:
                details["module_type"] = module_type.__name__
        if module_path:
            details["module_path"] = module_path
        if hook_type:
            details["hook_type"] = hook_type

        super().__init__(message, details)
        self.module_type = module_type
        self.module_path = module_path
        self.hook_type = hook_type


class HookAttachError(HookError):
    """
    Raised when a hook cannot be attached to a module.

    Common causes include incompatible module types or existing hooks.
    """

    def __init__(
        self,
        message: str,
        module_type: Optional[Union[str, Type]] = None,
        module_path: Optional[str] = None,
        reason: Optional[str] = None,
    ):
        """
        Initialize hook attach error.

        Args:
            message: Error description
            module_type: Type of the module
            module_path: Path to the module
            reason: Specific reason for attachment failure
        """
        super().__init__(message, module_type, module_path)
        self.reason = reason
        if reason:
            self.add_context(reason=reason)


class HookDetachError(HookError):
    """
    Raised when a hook cannot be detached from a module.

    This typically occurs when trying to detach a non-existent hook.
    """

    def __init__(
        self,
        message: str,
        module_type: Optional[Union[str, Type]] = None,
        module_path: Optional[str] = None,
        hook_id: Optional[str] = None,
    ):
        """
        Initialize hook detach error.

        Args:
            message: Error description
            module_type: Type of the module
            module_path: Path to the module
            hook_id: Identifier of the hook to detach
        """
        super().__init__(message, module_type, module_path)
        self.hook_id = hook_id
        if hook_id:
            self.add_context(hook_id=hook_id)


class ForwardPassError(PEFTError):
    """
    Raised during the forward pass of a PEFT adapter.

    Covers shape mismatches, device mismatches, and computation errors.
    """

    def __init__(
        self,
        message: str,
        original_error: Optional[Exception] = None,
        adapter_type: Optional[str] = None,
        input_info: Optional[Dict[str, Any]] = None,
    ):
        """
        Initialize forward pass error.

        Args:
            message: Error description
            original_error: The underlying exception that caused this error
            adapter_type: Type of adapter where error occurred
            input_info: Information about the input that caused the error
        """
        details: Dict[str, Any] = {}
        if adapter_type:
            details["adapter_type"] = adapter_type
        if original_error:
            # Record the root cause as "TypeName: message" for readability.
            details["original_error"] = (
                f"{type(original_error).__name__}: {str(original_error)}"
            )
        if input_info:
            for key, value in input_info.items():
                details[key] = value

        super().__init__(message, details)
        self.original_error = original_error
        self.adapter_type = adapter_type
        self.input_info = input_info


class ShapeMismatchError(ForwardPassError):
    """
    Raised when tensor shapes are incompatible.

    Provides detailed information about the shapes involved.
    """

    def __init__(
        self,
        message: str,
        expected_shape: Optional[Union[List[int], torch.Size]] = None,
        actual_shape: Optional[Union[List[int], torch.Size]] = None,
        operation: Optional[str] = None,
    ):
        """
        Initialize shape mismatch error.

        Args:
            message: Error description
            expected_shape: Expected tensor shape
            actual_shape: Actual tensor shape
            operation: Operation where mismatch occurred
        """
        details: Dict[str, Any] = {}
        # Normalize torch.Size to a plain list so the context is printable.
        if expected_shape is not None:
            details["expected_shape"] = list(expected_shape)
        if actual_shape is not None:
            details["actual_shape"] = list(actual_shape)
        if operation:
            details["operation"] = operation

        super().__init__(message, input_info=details)
        self.expected_shape = expected_shape
        self.actual_shape = actual_shape
        self.operation = operation


class DeviceMismatchError(ForwardPassError):
    """
    Raised when tensors are on different devices.

    Common in multi-GPU setups when tensors aren't properly transferred.
    """

    def __init__(
        self,
        message: str,
        tensor_devices: Optional[Dict[str, str]] = None,
        expected_device: Optional[str] = None,
    ):
        """
        Initialize device mismatch error.

        Args:
            message: Error description
            tensor_devices: Mapping of tensor names to their devices
            expected_device: Expected device for all tensors
        """
        details: Dict[str, Any] = {}
        if tensor_devices:
            details["tensor_devices"] = tensor_devices
        if expected_device:
            details["expected_device"] = expected_device

        super().__init__(message, input_info=details)
        self.tensor_devices = tensor_devices
        self.expected_device = expected_device


class NumericalInstabilityError(ForwardPassError):
    """
    Raised when numerical instability is detected.

    Covers NaN, Inf values, or extreme gradients.
    """

    def __init__(
        self,
        message: str,
        tensor_name: Optional[str] = None,
        issues: Optional[List[str]] = None,
        stats: Optional[Dict[str, float]] = None,
    ):
        """
        Initialize numerical instability error.

        Args:
            message: Error description
            tensor_name: Name of the problematic tensor
            issues: List of detected issues (e.g., ["contains NaN", "max value > 1e6"])
            stats: Statistics about the tensor (min, max, mean, etc.)
        """
        details: Dict[str, Any] = {}
        if tensor_name:
            details["tensor_name"] = tensor_name
        if issues:
            details["issues"] = issues
        if stats:
            details["tensor_stats"] = stats

        super().__init__(message, input_info=details)
        self.tensor_name = tensor_name
        self.issues = issues
        self.stats = stats


class RegistrationError(PEFTError):
    """
    Raised when component registration fails.

    Occurs when registering hooks, adapters, or factories.
    """

    def __init__(
        self,
        message: str,
        component_name: Optional[str] = None,
        registry_type: Optional[str] = None,
        existing_names: Optional[List[str]] = None,
    ):
        """
        Initialize registration error.

        Args:
            message: Error description
            component_name: Name of the component being registered
            registry_type: Type of registry (e.g., "hook", "adapter", "factory")
            existing_names: List of already registered names
        """
        details: Dict[str, Any] = {}
        if component_name:
            details["component_name"] = component_name
        if registry_type:
            details["registry_type"] = registry_type
        if existing_names:
            details["existing_names"] = existing_names

        super().__init__(message, details)
        self.component_name = component_name
        self.registry_type = registry_type
        self.existing_names = existing_names


# Utility functions for creating informative error messages


def format_shape_error(operation: str, shapes: Dict[str, List[int]]) -> str:
    """
    Build a shape-mismatch error message listing every tensor's shape.

    Args:
        operation: The operation that failed
        shapes: Dictionary mapping tensor names to their shapes

    Returns:
        Formatted error message
    """
    parts = [f"{name}: {shape}" for name, shape in shapes.items()]
    return f"Shape mismatch in {operation}. Tensor shapes: {', '.join(parts)}"


def format_device_error(tensors: Dict[str, torch.device]) -> str:
    """
    Build a device-mismatch error message listing every tensor's device.

    Args:
        tensors: Dictionary mapping tensor names to their devices

    Returns:
        Formatted error message
    """
    parts = [f"{name}: {device}" for name, device in tensors.items()]
    return f"Device mismatch detected. Tensor devices: {', '.join(parts)}"
