"""
Utility functions and classes for parameter-efficient fine-tuning.

This module provides shared functionality for PEFT methods, including
hook factories, module detection, and configuration management.
"""

import weakref
from typing import Any, Callable, Dict, List, Optional, TypeVar

import torch.nn as nn

from continuallearning.interfaces import HookFactoryInterface, ModuleFinder
from continuallearning.models.pefts.common.config.hook_manager_config import (
    ModuleDiscoveryConfig,
)
from continuallearning.registry import HOOK_FACTORY_REGISTRY

# Use the project's custom logging system instead of the standard logging library
from continuallearning.utils.logging import get_logger

# Create a module-level logger via the custom logging system
logger = get_logger(__name__)

# Type variables for generics
T = TypeVar("T", bound=nn.Module)


class ModuleDetector:
    """
    Helper class for detecting specific types of modules in a model.

    This class provides extensible detection algorithms for various module types,
    reducing code duplication across different hook managers.

    Args:
        model: Model to search for modules
        config: Configuration for module detection
    """

    def __init__(
        self, model: nn.Module, config: Optional[ModuleDiscoveryConfig] = None
    ):
        # Hold the model weakly so the detector never keeps it alive on its own.
        self._model_ref = weakref.ref(model)
        self.config = config or ModuleDiscoveryConfig()
        self.finder = ModuleFinder(model, cache_results=self.config.cache_results)
        logger.info(f"Initializing ModuleDetector for model: {type(model).__name__}")

    @property
    def model(self) -> nn.Module:
        """
        Get the model instance.

        Returns:
            nn.Module: The model instance.

        Raises:
            RuntimeError: If the reference was never set or the model has
                already been garbage collected.
        """
        if self._model_ref is None:
            raise RuntimeError("Model reference has not been set")

        model = self._model_ref()
        if model is None:
            raise RuntimeError("Model has been garbage collected")
        return model

    def _modules_dict_to_list(
        self, modules_dict: Dict[str, nn.Module]
    ) -> List[Dict[str, Any]]:
        """Convert a {path: module} mapping to the standardized list-of-dicts format."""
        return [
            {"path": path, "module": module} for path, module in modules_dict.items()
        ]

    def find_target_modules(self) -> List[Dict[str, Any]]:
        """
        Find target modules based on discovery configuration.

        Runs every enabled discovery strategy (type, name pattern, predicate,
        transformer/linear/conv layer detection), deduplicates the combined
        results by module path (first occurrence wins), then applies the
        optional filter predicate.

        Returns:
            List[Dict[str, Any]]: Unique module descriptors, each with at
                least "path" and "module" keys ("index" for transformer
                layers).
        """
        results = []
        discovery = self.config

        logger.debug("Searching for target modules using configured strategies...")

        # Strategy 1: Find by type
        if discovery.target_types:
            logger.debug(
                f"Strategy 1: Finding by type, targets: {[t.__name__ for t in discovery.target_types]}"
            )
            for module_type in discovery.target_types:
                modules = self.finder.find_by_type(module_type)
                logger.debug(
                    f"  - Found {len(modules)} modules of type {module_type.__name__}"
                )
                results.extend(self._modules_dict_to_list(modules))

        # Strategy 2: Find by name pattern
        if discovery.target_patterns:
            logger.debug(
                f"Strategy 2: Finding by name pattern: {discovery.target_patterns}"
            )
            pattern_modules = self.finder.find_by_name_pattern(
                patterns=discovery.target_patterns,
                pattern_type=discovery.pattern_type,
                stop_on_match=discovery.stop_on_match,
            )
            logger.debug(f"  - Found {len(pattern_modules)} modules matching patterns")
            results.extend(self._modules_dict_to_list(pattern_modules))

        # Strategy 3: Find by predicate
        if discovery.target_predicate:
            logger.debug("Strategy 3: Finding by predicate function")
            modules = self.finder.find_by_predicate(
                predicate=discovery.target_predicate,
                include_path_in_predicate=discovery.include_path_in_predicate,
            )
            logger.debug(f"  - Found {len(modules)} modules matching predicate")
            results.extend(self._modules_dict_to_list(modules))

        # Strategy 4-6: Specific layer types
        if discovery.find_transformer_layers:
            logger.debug("Strategy 4: Finding transformer layers")
            transformer_layers = self.find_transformer_layers()
            logger.debug(f"  - Found {len(transformer_layers)} transformer layers")
            results.extend(transformer_layers)

        if discovery.find_linear_layers:
            logger.debug("Strategy 5: Finding linear layers")
            linear_layers = self.find_linear_layers()
            logger.debug(f"  - Found {len(linear_layers)} linear layers")
            results.extend(linear_layers)

        if discovery.find_conv_layers:
            logger.debug("Strategy 6: Finding convolutional layers")
            conv_layers = self.find_conv_layers()
            logger.debug(f"  - Found {len(conv_layers)} convolutional layers")
            results.extend(conv_layers)

        # Remove duplicates based on module path (first occurrence wins).
        # BUGFIX: count BEFORE deduplication. The previous code measured the
        # already-deduplicated list, so the "removed duplicates" log below
        # could never fire.
        pre_dedup_count = len(results)
        unique_modules: Dict[str, Dict[str, Any]] = {}
        for module_dict in results:
            unique_modules.setdefault(module_dict["path"], module_dict)
        results = list(unique_modules.values())

        # Log deduplication info if duplicates were found
        if pre_dedup_count > len(results):
            logger.debug(
                f"Removed {pre_dedup_count - len(results)} duplicate modules"
            )

        # Apply additional filters
        if discovery.filter_predicate and results:
            logger.debug("Applying additional filters...")
            initial_count = len(results)
            results = self.filter_modules(results, discovery.filter_predicate)
            logger.debug(
                f"After filtering: kept {len(results)} modules, "
                f"excluded {initial_count - len(results)}"
            )

        logger.info(f"Total unique target modules found: {len(results)}")
        return results

    def filter_modules(
        self, modules: List[Dict[str, Any]], predicate: Callable[[nn.Module], bool]
    ) -> List[Dict[str, Any]]:
        """
        Filter modules based on a predicate.

        Args:
            modules: List of module dictionaries to filter
            predicate: Function that takes a module and returns whether to include it

        Returns:
            List[Dict[str, Any]]: Filtered list of modules
        """
        return [
            module_dict for module_dict in modules if predicate(module_dict["module"])
        ]

    def find_transformer_layers(self) -> List[Dict[str, Any]]:
        """
        Find transformer layers in the model.

        Combines name-pattern matching with attribute-based duck typing,
        then returns the union sorted by module path, each entry carrying
        its positional "index".

        Returns:
            List[Dict[str, Any]]: Entries with "path", "module", and "index".
        """
        name_patterns = self.config.transformer_name_patterns
        attr_patterns = self.config.transformer_attribute_patterns
        assert name_patterns is not None and attr_patterns is not None

        logger.debug(
            f"Searching for transformer layers using name patterns: {name_patterns}"
        )

        # Find modules matching the name patterns
        modules_by_name = self.finder.find_by_name_pattern(
            patterns=name_patterns,
            pattern_type=self.config.pattern_type,
            stop_on_match=self.config.stop_on_match,
        )
        logger.debug(f"Found {len(modules_by_name)} modules by name pattern")

        # Duck-typed detection: a module exposing any configured attribute
        # (e.g. attention sub-modules) is considered a transformer layer.
        def has_transformer_attrs(module, path=None):
            return any(hasattr(module, attr) for attr in attr_patterns)

        # Find modules matching the attribute patterns
        modules_by_attr = self.finder.find_by_predicate(
            predicate=has_transformer_attrs,
            include_path_in_predicate=self.config.include_path_in_predicate,
        )
        logger.debug(f"Found {len(modules_by_attr)} modules by attribute pattern")

        # Combine (dict merge deduplicates by path) and sort for stable indices
        all_modules = {**modules_by_name, **modules_by_attr}
        sorted_paths = sorted(all_modules.keys())
        logger.info(f"Found {len(sorted_paths)} transformer layers ready for hooks")

        # Return as list with layer indices
        result = []
        for idx, path in enumerate(sorted_paths):
            result.append({"path": path, "module": all_modules[path], "index": idx})
            logger.debug(
                f"Hookable transformer layer: [{idx}] {path} "
                f"({type(all_modules[path]).__name__})"
            )

        return result

    def find_linear_layers(self) -> List[Dict[str, Any]]:
        """
        Find linear layers in the model.

        Detects nn.Linear instances plus duck-typed linear-like modules
        (has in_features/out_features/weight but is not a Conv variant).

        Returns:
            List[Dict[str, Any]]: Entries with "path" and "module".
        """
        logger.debug("Searching for linear (fully connected) layers")
        modules = self.finder.find_by_type(nn.Linear)
        logger.debug(f"Found {len(modules)} modules of type Linear")

        # Duck-typed detection of linear-like modules; Conv types also carry
        # a weight attribute, so they are explicitly excluded here.
        def has_linear_features(module, path=None):
            return (
                hasattr(module, "in_features")
                and hasattr(module, "out_features")
                and hasattr(module, "weight")
                and not isinstance(
                    module,
                    (
                        nn.Conv1d,
                        nn.Conv2d,
                        nn.Conv3d,
                        nn.ConvTranspose1d,
                        nn.ConvTranspose2d,
                        nn.ConvTranspose3d,
                    ),
                )
            )

        additional_modules = self.finder.find_by_predicate(
            predicate=has_linear_features,
            include_path_in_predicate=self.config.include_path_in_predicate,
        )
        logger.debug(f"Found {len(additional_modules)} additional linear modules")
        # Path-keyed update deduplicates overlap between the two searches
        modules.update(additional_modules)

        result = [{"path": path, "module": module} for path, module in modules.items()]
        logger.info(f"Found {len(result)} linear layers in total")
        return result

    def find_conv_layers(self) -> List[Dict[str, Any]]:
        """
        Find convolutional layers in the model.

        Detects the standard Conv/ConvTranspose types plus duck-typed
        conv-like modules (kernel_size/stride/padding/weight attributes).

        Returns:
            List[Dict[str, Any]]: Entries with "path" and "module".
        """
        conv_types = (
            nn.Conv1d,
            nn.Conv2d,
            nn.Conv3d,
            nn.ConvTranspose1d,
            nn.ConvTranspose2d,
            nn.ConvTranspose3d,
        )

        logger.debug(
            f"Searching for convolutional layers: {[t.__name__ for t in conv_types]}"
        )
        modules = {}
        for module_type in conv_types:
            type_modules = self.finder.find_by_type(module_type)
            logger.debug(
                f"Found {len(type_modules)} modules of type {module_type.__name__}"
            )
            modules.update(type_modules)

        # Duck-typed detection of conv-like modules
        def has_conv_features(module, path=None):
            return (
                hasattr(module, "kernel_size")
                and hasattr(module, "stride")
                and hasattr(module, "padding")
                and hasattr(module, "weight")
                and not isinstance(module, conv_types)  # Avoid duplicates
            )

        additional_modules = self.finder.find_by_predicate(
            predicate=has_conv_features,
            include_path_in_predicate=self.config.include_path_in_predicate,
        )
        logger.debug(f"Found {len(additional_modules)} additional conv modules")
        modules.update(additional_modules)

        result = [{"path": path, "module": module} for path, module in modules.items()]
        logger.info(f"Found {len(result)} convolutional layers in total")
        return result


# Utility predicate: does a module act as a linear (fully connected) layer?
def is_linear_module(module: nn.Module) -> bool:
    """
    Check if a module is a linear (fully connected) layer.

    A module counts as linear when it exposes a ``weight`` attribute and
    either is an ``nn.Linear`` instance or duck-types one by exposing the
    ``in_features`` and ``out_features`` attributes.

    Args:
        module: Module to check

    Returns:
        bool: True if the module is a linear layer
    """
    # No weight => definitely not a trainable linear layer.
    if not hasattr(module, "weight"):
        return False

    looks_linear = hasattr(module, "in_features") and hasattr(module, "out_features")
    return isinstance(module, nn.Linear) or looks_linear


# Utility predicate: does a module act as a convolutional layer?
def is_conv_module(module: nn.Module) -> bool:
    """
    Check if a module is a convolutional layer.

    A module counts as convolutional when it exposes a ``weight`` attribute
    and either is one of the standard Conv/ConvTranspose classes or
    duck-types one by exposing ``kernel_size``, ``stride``, and ``padding``.

    Args:
        module: Module to check

    Returns:
        bool: True if the module is a convolutional layer
    """
    # No weight => not a trainable conv layer (rules out pooling layers,
    # which also expose kernel_size/stride/padding).
    if not hasattr(module, "weight"):
        return False

    known_conv_classes = (
        nn.Conv1d,
        nn.Conv2d,
        nn.Conv3d,
        nn.ConvTranspose1d,
        nn.ConvTranspose2d,
        nn.ConvTranspose3d,
    )
    if isinstance(module, known_conv_classes):
        return True

    # Duck-typed fallback for custom conv implementations.
    return all(
        hasattr(module, attr) for attr in ("kernel_size", "stride", "padding")
    )


# Simplified factory creation helper function that directly uses the registry
def create_hook_factory(hook_name: str, **config) -> HookFactoryInterface:
    """
    Get and instantiate a hook factory from the registry.

    This is a simplified version that directly uses the standard registry system,
    eliminating unnecessary indirection.

    Args:
        hook_name: Registered name of the hook factory to create
        **config: Configuration forwarded to the factory constructor; the
            ``hook_name`` key is injected automatically

    Returns:
        HookFactoryInterface: Factory function or object that creates hooks

    Raises:
        ValueError: If hook_name is not registered, or if the factory
            constructor fails (original exception chained as __cause__)
    """
    # Do the lookup outside the try block so that the "not registered" error
    # reaches callers directly. Previously it was raised inside the try,
    # caught by the blanket handler below, logged, and re-wrapped into a
    # second, redundant "Failed to create..." ValueError.
    # NOTE(review): assumes registry.get returns None for unknown names, as
    # the original None-check implies — confirm against the registry API.
    factory_cls = HOOK_FACTORY_REGISTRY.get(hook_name)
    if factory_cls is None:
        available_factories = list(HOOK_FACTORY_REGISTRY.list_registered())
        raise ValueError(
            f"Hook factory type '{hook_name}' not registered. "
            f"Available factories: {available_factories}"
        )

    # Factories receive their own registry name alongside the user config.
    config.update({"hook_name": hook_name})
    try:
        return factory_cls(**config)
    except Exception as e:
        logger.error(f"Error creating hook factory for {hook_name}: {e}")
        raise ValueError(
            f"Failed to create hook factory for {hook_name}: {str(e)}"
        ) from e
