"""
Global exception handler for Parameter-Efficient Fine-Tuning (PEFT).

This module provides decorators and utilities for consistent error handling
across all PEFT methods, improving error reporting and debugging.
"""

import functools
from typing import Any, Callable, Dict, Optional, Type, TypeVar, Union, cast

import torch
import torch.nn as nn

from continuallearning.models.pefts.common.utils.exceptions import (
    ConfigurationError,
    ForwardPassError,
    HookError,
    PEFTError,
)
from continuallearning.utils.logging import get_logger

logger = get_logger(__name__)

F = TypeVar("F", bound=Callable[..., Any])


class ErrorContext:
    """Static helpers that collect diagnostic context for PEFT errors.

    All collectors are best-effort: they typically run while an exception
    is already being handled, so they must never raise and mask the
    original error.
    """

    @staticmethod
    def collect_tensor_info(tensor: torch.Tensor) -> Dict[str, Any]:
        """Collect information about a tensor for error reporting.

        Args:
            tensor: The tensor to describe.

        Returns:
            Dict with shape/dtype/device/layout metadata and, when they
            can be computed, value statistics (min/max, NaN/Inf presence).
            Statistics default to ``None``/``False`` when unavailable.
        """
        info: Dict[str, Any] = {
            "shape": list(tensor.shape),
            "dtype": str(tensor.dtype),
            "device": str(tensor.device),
            "requires_grad": tensor.requires_grad,
            "is_contiguous": tensor.is_contiguous(),
            "min_value": None,
            "max_value": None,
            "has_nan": False,
            "has_inf": False,
        }
        # Value statistics can themselves fail (min/max are undefined for
        # complex dtypes, meta tensors carry no data). Since this helper
        # runs inside error handling, swallow such failures rather than
        # letting a secondary exception hide the original one.
        try:
            if tensor.numel() > 0:
                info["min_value"] = float(tensor.min().item())
                info["max_value"] = float(tensor.max().item())
            info["has_nan"] = bool(torch.isnan(tensor).any().item())
            info["has_inf"] = bool(torch.isinf(tensor).any().item())
        except (RuntimeError, TypeError, ValueError):
            pass  # keep the structural metadata collected above
        return info

    @staticmethod
    def collect_module_info(module: nn.Module) -> Dict[str, Any]:
        """Collect information about a module for error reporting.

        Args:
            module: The module to describe.

        Returns:
            Dict with the module type, training flag, common layer
            attributes (tensors reduced to their shape), weight statistics
            and, when present, the framework-assigned module path.
        """
        info: Dict[str, Any] = {
            "type": type(module).__name__,
            "training": module.training,
        }

        # Common layer attributes; tensor-valued ones are reduced to shape.
        for attr in ["in_features", "out_features", "bias", "padding", "stride"]:
            if hasattr(module, attr):
                value = getattr(module, attr)
                if isinstance(value, torch.Tensor):
                    info[f"{attr}_shape"] = list(value.shape)
                else:
                    info[attr] = value

        # Weight statistics if the module has a non-None weight tensor.
        if hasattr(module, "weight") and module.weight is not None:
            info["weight_info"] = ErrorContext.collect_tensor_info(module.weight)

        # Dotted path assigned elsewhere by the PEFT framework, if present.
        if hasattr(module, "_module_path"):
            info["module_path"] = module._module_path

        return info

    @staticmethod
    def collect_peft_context(obj: Any) -> Dict[str, Any]:
        """Collect PEFT-specific context information from ``obj``.

        Inspects the object for a hook configuration, a wrapped module and
        LoRA matrices; anything absent is simply skipped.
        """
        context: Dict[str, Any] = {}

        # Hook configuration summary, if the object carries one.
        if hasattr(obj, "hook_config"):
            config = obj.hook_config
            context["hook_config"] = {
                "type": type(config).__name__,
                "rank": getattr(config, "rank", None),
                "alpha": getattr(config, "alpha", None),
                "dropout": getattr(config, "dropout", None),
                "target_modules": getattr(config, "target_modules", None),
            }

        # Wrapped module details.
        if hasattr(obj, "module") and isinstance(obj.module, nn.Module):
            context["module_info"] = ErrorContext.collect_module_info(obj.module)

        # LoRA low-rank matrices and their scaling factor.
        if hasattr(obj, "lora_A") and hasattr(obj, "lora_B"):
            context["lora_matrices"] = {
                "lora_A": ErrorContext.collect_tensor_info(obj.lora_A),
                "lora_B": ErrorContext.collect_tensor_info(obj.lora_B),
                "scale": getattr(obj, "scale", None),
            }

        return context


def handle_peft_errors(
    error_type: str = "general",
    fallback_behavior: Optional[str] = None,
    collect_context: bool = True,
) -> Callable[[F], F]:
    """
    Decorator that standardizes error handling in PEFT code.

    Wrapped functions have their exceptions caught, logged with optional
    diagnostic context, and re-raised as the PEFT-specific exception type
    matching ``error_type``.

    Args:
        error_type: The type of operation being performed (e.g., "config", "forward", "hook")
        fallback_behavior: What to do on error ("raise", "return_none", "return_input")
        collect_context: Whether to collect detailed context information

    Returns:
        Callable: A decorator function
    """

    def decorator(func: F) -> F:
        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            try:
                return func(*args, **kwargs)
            except Exception as exc:
                # Errors already processed by this decorator pass through
                # untouched so nested decoration does not double-wrap them.
                if isinstance(exc, PEFTError) and hasattr(exc, "_handled"):
                    raise

                # Identify where the failure happened: "<module>.<function>".
                qualifier = "unknown"
                if func.__module__:
                    qualifier = func.__module__.split(".")[-1]
                location = f"{qualifier}.{func.__name__}"

                # Best-effort diagnostic context from the receiver (assumed
                # to be `self`) and the first tensor-typed positional arg.
                details: Dict[str, Any] = {}
                if collect_context and args:
                    receiver = args[0]
                    try:
                        details = ErrorContext.collect_peft_context(receiver)
                        if len(args) > 1 and isinstance(args[1], torch.Tensor):
                            details["input_info"] = (
                                ErrorContext.collect_tensor_info(args[1])
                            )
                    except Exception as ctx_error:
                        logger.debug(f"Failed to collect error context: {ctx_error}")

                # Lazy lookup keeps the exception classes out of the happy
                # path; unknown error types fall back to the PEFTError base.
                error_class: Type[PEFTError] = {
                    "config": ConfigurationError,
                    "hook": HookError,
                    "forward": ForwardPassError,
                }.get(error_type, PEFTError)

                error_message = f"{error_type.title()} error in {location}: {str(exc)}"

                logger.error(
                    error_message,
                    exc_info=True,
                    extra={"error_context": details} if details else {},
                )

                # Optional soft-failure modes requested by the caller.
                if fallback_behavior == "return_none":
                    logger.warning(f"Returning None due to error: {error_message}")
                    return None
                if fallback_behavior == "return_input" and len(args) > 1:
                    logger.warning(
                        f"Returning input unchanged due to error: {error_message}"
                    )
                    return args[1]  # first non-self positional argument

                wrapped_error = error_class(error_message)
                if isinstance(wrapped_error, ForwardPassError):
                    wrapped_error.original_error = exc
                wrapped_error._handled = True  # prevent re-processing upstream
                raise wrapped_error from exc

        return cast(F, wrapper)

    return decorator


def validate_tensor_operation(
    operation_name: str,
    expected_dims: Optional[int] = None,
    expected_device: Optional[Union[str, torch.device]] = None,
) -> Callable[[F], F]:
    """
    Decorator validating the tensor argument of a PEFT method.

    Checks dimensionality and device type before delegating to the wrapped
    function; non-finite values are logged but do not abort the call.

    Args:
        operation_name: Name of the operation for error messages
        expected_dims: Expected number of dimensions for input tensors
        expected_device: Expected device for tensors

    Returns:
        Callable: A decorator function

    Raises (from the wrapped call):
        ValueError: If the tensor rank or device type does not match.
    """

    def decorator(func: F) -> F:
        @functools.wraps(func)
        def wrapper(self: Any, x: torch.Tensor, *args: Any, **kwargs: Any) -> Any:
            # Rank check.
            if expected_dims is not None and x.dim() != expected_dims:
                raise ValueError(
                    f"{operation_name} expects {expected_dims}D tensor, "
                    f"got {x.dim()}D tensor with shape {x.shape}"
                )

            # Device check — compares only the device *type* (cpu/cuda/...),
            # deliberately ignoring the device index.
            if expected_device is not None:
                target = expected_device
                if isinstance(target, str):
                    target = torch.device(target)
                if x.device.type != target.type:
                    raise ValueError(
                        f"{operation_name} expects tensor on {target}, "
                        f"got tensor on {x.device}"
                    )

            # NaN/Inf is reported but not fatal; downstream code decides.
            if torch.isnan(x).any() or torch.isinf(x).any():
                logger.warning(
                    f"{operation_name} received tensor with NaN or Inf values"
                )

            return func(self, x, *args, **kwargs)

        return cast(F, wrapper)

    return decorator


def deprecated(
    old_name: str, new_name: str, remove_version: str = "2.0.0"
) -> Callable[[F], F]:
    """
    Decorator to mark functions as deprecated.

    Each call of the wrapped function emits a ``DeprecationWarning`` and a
    log-level warning pointing at the replacement API.

    Args:
        old_name: The deprecated API name
        new_name: The new API name to use instead
        remove_version: Version when the deprecated API will be removed

    Returns:
        Callable: A decorator function
    """
    # Import once at decoration time instead of on every call of the
    # wrapped function (the original re-imported inside the wrapper).
    import warnings

    # The message depends only on the decorator arguments, so build it once.
    message = (
        f"'{old_name}' is deprecated and will be removed in "
        f"version {remove_version}. Use '{new_name}' instead."
    )

    def decorator(func: F) -> F:
        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # stacklevel=2 attributes the warning to the caller's line.
            warnings.warn(message, DeprecationWarning, stacklevel=2)
            logger.warning(f"DeprecationWarning: {message}")
            return func(*args, **kwargs)

        return cast(F, wrapper)

    return decorator


class PEFTErrorHandler:
    """
    Context manager that logs exceptions raised within a scope.

    Depending on ``reraise`` the exception is either propagated after
    logging or suppressed; either way ``error_occurred`` / ``last_error``
    record what happened for later inspection.

    Example:
        with PEFTErrorHandler("MyOperation") as handler:
            # Perform operations
            result = some_operation()
            handler.check_result(result)
    """

    def __init__(
        self,
        operation_name: str,
        fallback_value: Any = None,
        reraise: bool = True,
    ):
        """
        Initialize the error handler.

        Args:
            operation_name: Name of the operation for logging
            fallback_value: Value the caller may read after a suppressed
                error. NOTE(review): it is stored but never returned
                automatically — a context manager cannot substitute the
                ``with`` block's result.
            reraise: Whether to re-raise exceptions after logging
        """
        self.operation_name = operation_name
        self.fallback_value = fallback_value
        self.reraise = reraise
        # Populated by __exit__ when an exception escapes the block.
        self.error_occurred = False
        self.last_error: Optional[Exception] = None

    def __enter__(self) -> "PEFTErrorHandler":
        """Return self so callers can inspect state after the block."""
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[Exception]],
        exc_val: Optional[Exception],
        exc_tb: Optional[Any],
    ) -> bool:
        """Record and log any exception; suppress it when not re-raising."""
        if exc_val is None:
            # Clean exit — nothing to record or suppress.
            return False

        self.error_occurred = True
        self.last_error = exc_val

        # Full traceback goes to the log regardless of reraise.
        logger.error(
            f"Error in {self.operation_name}: {str(exc_val)}",
            exc_info=True,
        )

        # Truthy return suppresses the exception.
        return not self.reraise

    def check_result(self, result: Any, error_message: str = "Operation failed") -> Any:
        """
        Check if a result is valid and raise an error if not.

        Args:
            result: The result to check
            error_message: Error message if result is invalid

        Returns:
            The result if valid

        Raises:
            PEFTError: If result is None or invalid
        """
        if result is None:
            raise PEFTError(f"{self.operation_name}: {error_message}")
        return result
