"""
Interface definitions for hook-based parameter-efficient fine-tuning.

This module defines interfaces for hook-based adaptation methods used in
parameter-efficient fine-tuning, ensuring consistent behavior across different
hook implementations.
"""

from abc import abstractmethod
import re
import fnmatch

# from typing import Any, Callable, Dict, Generic, List, TypeVar, cast

import torch.nn as nn

from continuallearning.interfaces.core.component import ComponentInterface
from continuallearning.interfaces.core.task_related_interface import (
    TaskAwareInterface,
    TaskIrrelevantInterface,
)
from typing import (
    Any,
    Dict,
    List,
    Tuple,
    Union,
    TypeVar,
    Generic,
    Callable,
    cast,
    Optional,
)
import torch


class HookInterface(ComponentInterface, TaskIrrelevantInterface, nn.Module):
    """
    Interface for individual hooks applied to specific modules.

    Hooks can modify the behavior of model layers by intercepting inputs/outputs
    or replacing the forward method entirely. Concrete subclasses implement the
    attach/detach lifecycle and override whichever ``_*_hook`` callback matches
    their hook type (see :attr:`hook_type`).
    """

    def __init__(self, *args, **kwargs):
        # NOTE(review): extra args are accepted for subclass signatures but are
        # deliberately not forwarded here — nn.Module.__init__ takes no
        # arguments. Confirm the other bases need no cooperative init args.
        super().__init__()

    @property
    @abstractmethod
    def is_active(self) -> bool:
        """Check if this hook is currently active."""
        pass

    @property
    @abstractmethod
    def module(self) -> nn.Module:
        """Get the module this hook is attached to."""
        pass

    @property
    @abstractmethod
    def hook_type(self) -> Any:
        """Get the type of this hook."""
        pass

    @property
    @abstractmethod
    def is_attached(self) -> bool:
        """Check if this hook is currently attached."""
        pass

    @abstractmethod
    def attach(self) -> Any:
        """
        Attach this hook to its target module.

        Returns:
            Any: Implementation-specific hook handle
        """
        pass

    @abstractmethod
    def detach(self) -> None:
        """Remove this hook from its target module."""
        pass

    # Optional hook implementation methods: each raises until a subclass that
    # needs the corresponding hook type overrides it.

    def _forward(self, *args, **kwargs) -> Any:
        """
        Forward operation specific to hook parameters.

        Subclasses override this only when their hook type needs a dedicated
        forward computation of its own.

        Returns:
            Any: Implementation-specific forward result
        """
        raise NotImplementedError("Implement only when necessary")

    def _hook_forward(self, *args, **kwargs) -> torch.Tensor:
        """
        Replacement forward method.

        Subclasses must implement this for FORWARD_REPLACE hooks.
        The method should return the same type as the original forward method
        of the module being hooked, typically torch.Tensor for neural network layers.

        Args:
            *args, **kwargs: Arguments for the module's forward method

        Returns:
            torch.Tensor: Output tensor
        """
        raise NotImplementedError(
            "Implement this method for FORWARD_REPLACE hooks to define custom forward behavior"
        )

    def _pre_forward_hook(
        self, module: nn.Module, inputs: Tuple[torch.Tensor]
    ) -> Union[None, Tuple[torch.Tensor]]:
        """
        Hook that runs before module forward.

        Subclasses should override this for FORWARD_PRE hooks.

        Args:
            module: The module being hooked
            inputs: Input tensors to the module

        Returns:
            Union[None, Tuple[torch.Tensor]]: Modified inputs, or None to keep
            the original inputs unchanged
        """
        # Default implementation passes through inputs unchanged.
        return None

    def _post_forward_hook(
        self, module: nn.Module, inputs: Tuple[torch.Tensor], output: torch.Tensor
    ) -> torch.Tensor:
        """
        Hook that runs after module forward.

        Subclasses should override this for FORWARD_POST hooks.

        Args:
            module: The module being hooked
            inputs: Input tensors to the module
            output: Output from the module

        Returns:
            torch.Tensor: Modified output
        """
        # Default implementation passes through output unchanged.
        return output

    def _pre_backward_hook(
        self, module: nn.Module, grad_output: Tuple[torch.Tensor]
    ) -> Union[None, Tuple[torch.Tensor]]:
        """
        Hook that runs before module backward pass.

        Subclasses should override this for BACKWARD_PRE hooks.

        Args:
            module: The module being hooked
            grad_output: Incoming gradients

        Returns:
            Union[None, Tuple[torch.Tensor]]: Modified gradients, or None to
            keep the original gradients unchanged
        """
        # Default implementation passes through gradients unchanged.
        return None

    def _post_backward_hook(
        self,
        module: nn.Module,
        grad_input: Union[torch.Tensor, Tuple[torch.Tensor, ...], None],
        grad_output: Union[torch.Tensor, Tuple[torch.Tensor, ...], None],
    ) -> Union[None, torch.Tensor, Tuple[torch.Tensor, ...]]:
        """
        Hook that runs after module backward pass.

        Subclasses should override this for BACKWARD_POST hooks.

        Args:
            module: The module being hooked
            grad_input: Gradients going to module inputs
            grad_output: Gradients coming from module outputs

        Returns:
            Union[None, torch.Tensor, Tuple[torch.Tensor, ...]]: Modified input
            gradients, or None to keep the original gradients unchanged
        """
        # Default implementation passes through gradients unchanged.
        return None


class TaskSpecificHookInterface(HookInterface):
    """Interface for hooks specific to a single task."""

    # Class-level default meaning "no task selected yet".
    # NOTE(review): also declared on TaskAwareHookInterface — presumably the
    # task-routing machinery reads this attribute; confirm against callers.
    _current_task: int = -1

    @property
    @abstractmethod
    def task_id(self) -> int:
        """Get the task ID this hook is associated with."""
        pass


class TaskAwareHookInterface(HookInterface, TaskAwareInterface, nn.Module):
    """Interface for hooks that are aware of multiple tasks."""

    # NOTE(review): nn.Module looks redundant here (HookInterface already
    # inherits it), but removing it would change the declared bases/MRO —
    # left as-is; confirm before simplifying.
    # Class-level default meaning "no task selected yet".
    _current_task: int = -1


# Type of module the finder yields; bounded so results are always nn.Modules.
T = TypeVar("T", bound=nn.Module)


# TODO(review): broaden the test coverage of this class (was: FIXME).
class ModuleFinder(Generic[T]):
    """
    Helper class to find specific modules in a model.

    Provides utility methods to locate submodules matching a type, a name
    pattern, or an arbitrary predicate — useful for hook managers that need to
    attach hooks to specific layers.

    Args:
        model: The model to search
        cache_results: If True, memoize search results until
            :meth:`clear_cache` is called
    """

    def __init__(self, model: nn.Module, cache_results: bool = True):
        if not isinstance(model, nn.Module):
            raise TypeError(f"Expected nn.Module but got {type(model).__name__}")
        self.model = model
        self.cache_results = cache_results
        # String-keyed cache for type/pattern searches.
        self._module_cache: Dict[str, Dict[str, T]] = {}
        # Predicate results are keyed on the callable itself: holding a strong
        # reference avoids the stale-result bug of keying on id(predicate),
        # since CPython may reuse an id after the callable is garbage-collected.
        self._predicate_cache: Dict[
            Tuple[Callable[..., bool], bool], Dict[str, T]
        ] = {}
        self._path_cache: Optional[Dict[str, nn.Module]] = None

    def _get_all_modules(self) -> Dict[str, nn.Module]:
        """
        Get all modules with their dotted paths, using the cache if enabled.

        The root module itself is not included; paths follow the
        ``named_children`` naming convention ("block.0.linear").

        Returns:
            Dict[str, nn.Module]: Mapping of module paths to modules
        """
        if self.cache_results and self._path_cache is not None:
            return self._path_cache

        modules: Dict[str, nn.Module] = {}

        def _collect_modules(module: nn.Module, path: str = "") -> None:
            for name, child in module.named_children():
                current_path = f"{path}.{name}" if path else name
                modules[current_path] = child
                _collect_modules(child, current_path)

        try:
            _collect_modules(self.model)
        except Exception as e:
            raise RuntimeError(f"Error collecting modules: {e}") from e

        if self.cache_results:
            self._path_cache = modules

        return modules

    def clear_cache(self) -> None:
        """Clear internal caches."""
        self._module_cache.clear()
        self._predicate_cache.clear()
        self._path_cache = None

    def find_by_type(self, module_type: type[T]) -> Dict[str, T]:
        """
        Find all modules of a specific type (including subclasses).

        Args:
            module_type: Type of modules to find

        Returns:
            Dict[str, T]: Dictionary mapping module paths to modules

        Raises:
            TypeError: If module_type is not a valid type
        """
        if not isinstance(module_type, type):
            raise TypeError(f"Expected a type but got {type(module_type).__name__}")

        # Fully qualified name avoids cache collisions between same-named
        # types from different modules.
        cache_key = f"type_{module_type.__module__}.{module_type.__qualname__}"
        if self.cache_results and cache_key in self._module_cache:
            return self._module_cache[cache_key]

        results = {
            path: cast(T, module)
            for path, module in self._get_all_modules().items()
            if isinstance(module, module_type)
        }

        if self.cache_results:
            self._module_cache[cache_key] = results

        return results

    def _matches_pattern(
        self, path: str, pattern: str, pattern_type: str = "auto"
    ) -> bool:
        """
        Check if path matches the given pattern with multiple pattern type support.

        Args:
            path: Module path to check
            pattern: Pattern to match against
            pattern_type: Type of pattern ("auto", "glob", "regex", "exact")

        Returns:
            bool: True if path matches the pattern

        Raises:
            ValueError: If pattern_type is invalid or regex pattern is malformed
        """
        if not path or not pattern:
            return False

        # An explicit "exact" request means strict equality.
        if pattern_type == "exact":
            return pattern == path

        if pattern_type == "auto":
            # Auto-detect: "*" and "?" are shared by glob and regex, so any
            # OTHER regex metacharacter selects regex; bare wildcards select
            # glob; plain text falls through to substring matching below.
            regex_metacharacters = set(r".^$+?{}[]|()\\") - {"*", "?"}
            if any(char in pattern for char in regex_metacharacters):
                pattern_type = "regex"
            elif "*" in pattern or "?" in pattern:
                pattern_type = "glob"
            else:
                pattern_type = "exact"

        if pattern_type == "regex":
            try:
                return bool(re.search(pattern, path))
            except re.error as e:
                raise ValueError(f"Invalid regex pattern '{pattern}': {e}") from e

        if pattern_type == "glob":
            return fnmatch.fnmatch(path, pattern)

        if pattern_type == "exact":
            # Only reachable when "exact" was auto-detected (explicit "exact"
            # returned early above): plain auto patterns deliberately use
            # substring matching, kept for backward compatibility. Substring
            # containment already covers equality, so no extra == check.
            return pattern in path

        raise ValueError(
            f"Invalid pattern_type: {pattern_type}. Use 'auto', 'glob', 'regex', or 'exact'"
        )

    def find_by_name_pattern(
        self,
        patterns: List[str],
        pattern_type: str = "auto",
        stop_on_match: bool = True,
    ) -> Dict[str, T]:
        """
        Find modules by name patterns with enhanced pattern support.

        Args:
            patterns: List of patterns to match
            pattern_type: Type of patterns ("auto", "glob", "regex", "exact")
            stop_on_match: Whether to stop recursing into a module's children
                once the module itself matches

        Returns:
            Dict[str, T]: Dictionary mapping module paths to modules

        Raises:
            TypeError: If patterns is not a list of strings
            ValueError: If patterns is empty or pattern_type is invalid
        """
        if not isinstance(patterns, list):
            raise TypeError(
                f"Expected list of patterns but got {type(patterns).__name__}"
            )

        if not patterns:
            raise ValueError("Pattern list cannot be empty")

        if not all(isinstance(p, str) for p in patterns):
            raise TypeError("All patterns must be strings")

        # repr() makes the key collision-free, unlike hash(tuple(patterns)).
        cache_key = f"pattern_{patterns!r}_{pattern_type}_{stop_on_match}"
        if self.cache_results and cache_key in self._module_cache:
            return self._module_cache[cache_key]

        results: Dict[str, T] = {}

        if stop_on_match:
            # Recursive search with early stopping at matched subtrees.
            self._recursive_pattern_search(self.model, patterns, pattern_type, results)
        else:
            # Exhaustive search over every module path.
            for path, module in self._get_all_modules().items():
                for pattern in patterns:
                    if self._matches_pattern(path, pattern, pattern_type):
                        results[path] = cast(T, module)
                        break

        if self.cache_results:
            self._module_cache[cache_key] = results

        return results

    def _recursive_pattern_search(
        self,
        module: nn.Module,
        patterns: List[str],
        pattern_type: str,
        results: Dict[str, T],
        path: str = "",
    ) -> None:
        """
        Recursively search modules with early stopping on pattern match.

        Args:
            module: Current module to search
            patterns: List of patterns to match
            pattern_type: Type of patterns
            results: Dictionary to store results (mutated in place)
            path: Current module path
        """
        for name, child in module.named_children():
            current_path = f"{path}.{name}" if path else name

            # Record the first pattern that matches this module.
            matched = False
            for pattern in patterns:
                if self._matches_pattern(current_path, pattern, pattern_type):
                    results[current_path] = cast(T, child)
                    matched = True
                    break

            # Only descend into unmatched subtrees (early stopping).
            if not matched:
                self._recursive_pattern_search(
                    child, patterns, pattern_type, results, current_path
                )

    def find_by_predicate(
        self,
        predicate: Callable[..., bool],
        include_path_in_predicate: bool = False,
    ) -> Dict[str, T]:
        """
        Find modules that satisfy a predicate function with enhanced options.

        Args:
            predicate: Function that takes a module (and optionally path) and
                returns True for matches
            include_path_in_predicate: Whether to pass the module path as a
                second argument to the predicate

        Returns:
            Dict[str, T]: Dictionary mapping module paths to modules

        Raises:
            TypeError: If predicate is not callable
            RuntimeError: If the predicate raises on any module
        """
        if not callable(predicate):
            raise TypeError(
                f"Expected callable predicate but got {type(predicate).__name__}"
            )

        cache_key = (predicate, include_path_in_predicate)
        if self.cache_results and cache_key in self._predicate_cache:
            return self._predicate_cache[cache_key]

        results: Dict[str, T] = {}
        for path, module in self._get_all_modules().items():
            try:
                args = (module, path) if include_path_in_predicate else (module,)
                if predicate(*args):
                    results[path] = cast(T, module)
            except Exception as e:
                raise RuntimeError(
                    f"Error calling predicate on module {path}: {e}"
                ) from e

        if self.cache_results:
            self._predicate_cache[cache_key] = results

        return results

    def get_module_info(self) -> Dict[str, Dict[str, Any]]:
        """
        Get comprehensive information about all modules.

        Returns:
            Dict[str, Dict[str, Any]]: Per-path info: type name, total and
            trainable parameter counts, number of direct children, and depth
            (number of dots in the path)
        """
        info: Dict[str, Dict[str, Any]] = {}

        for path, module in self._get_all_modules().items():
            # Materialize once so total and trainable counts share one pass
            # over the parameter generator.
            params = list(module.parameters())
            info[path] = {
                "type": type(module).__name__,
                "num_parameters": sum(p.numel() for p in params),
                "num_children": len(list(module.children())),
                "depth": path.count("."),
                "trainable_parameters": sum(
                    p.numel() for p in params if p.requires_grad
                ),
            }

        return info
