"""
Base hook implementation and related utilities for parameter-efficient fine-tuning.

This module defines the foundation for all hook-based PEFT methods, including
the base hook class and hook type enumeration.
"""

from abc import abstractmethod

import functools
from typing import (
    Any,
    Dict,
    List,
    Optional,
    Tuple,
    Union,
    TypeVar,
    Generic,
    Callable,
    cast,
)
import torch
import torch.nn as nn

from continuallearning.interfaces import (
    HookType,
    HookState,
    TaskAwareHookInterface,
    HookInterface,
    RouterInterface,
    CombinerInterface,
    HookFactoryInterface,
)
import weakref

# Import exception classes from the exceptions module
from continuallearning.models.pefts.common.utils.exceptions import (
    HookAttachError,
    HookDetachError,
)
from continuallearning.models.pefts.common.config.hook_manager_config import (
    RouterConfig,
    CombinerConfig,
)
from continuallearning.models.pefts.common.config.config_factory import (
    create_peft_config,
)
from continuallearning.models.pefts.common.context import (
    get_current_task_ids,
)
from continuallearning.models.pefts.common.config.config import BasePEFTConfig
from continuallearning.registry import HOOK_REGISTRY, HOOK_FACTORY_REGISTRY

# Use the project's custom logging system instead of the standard logging library
from continuallearning.utils.logging import get_logger
from continuallearning.registry import ROUTER_REGISTRY, COMBINER_REGISTRY

# Create a module-level logger via the custom logging system
logger = get_logger(__name__)

# Type variable for generics
T = TypeVar("T", bound=nn.Module)
BHook = TypeVar("BHook", bound="HookInterface")
THook = TypeVar("THook", bound="TaskAwareHookInterface")
HConfig = TypeVar("HConfig", bound="BasePEFTConfig")


class HookBehaviour:
    """
    Mixin implementing the attach/detach lifecycle shared by all hooks.

    Depending on ``_hook_type`` this either registers one of PyTorch's
    forward/backward hooks on the target module, or replaces the module's
    ``forward`` method entirely (FORWARD_REPLACE). Subclasses override the
    ``_pre_forward_hook`` / ``_post_forward_hook`` / ``_pre_backward_hook`` /
    ``_post_backward_hook`` / ``_hook_forward`` callbacks to inject their
    own behaviour.
    """

    # Weak reference to the hooked module, so the hook does not keep it alive.
    _module_ref: weakref.ref[nn.Module]
    # Interception strategy used by this hook.
    _hook_type: HookType = HookType.FORWARD_REPLACE
    # Handle returned by the register_* calls (None for FORWARD_REPLACE).
    _hook_handle: Optional[torch.utils.hooks.RemovableHandle] = None
    # Saved original forward method (only used by FORWARD_REPLACE).
    _original_forward: Optional[Callable] = None
    # Current lifecycle state of the hook.
    _state: HookState = HookState.INITIALIZED

    @property
    def _module(self) -> nn.Module:
        """Dereference the weak module reference.

        Raises:
            RuntimeError: If the referenced module has been garbage collected.
        """
        module = self._module_ref()
        if module is None:
            raise RuntimeError("Referenced module has been garbage collected")
        return module

    @property
    def is_active(self) -> bool:
        """Check if this hook is currently active."""
        return self._state == HookState.ACTIVE

    @property
    def is_attached(self) -> bool:
        """Check if this hook is currently attached.

        NOTE(review): currently identical to ``is_active`` — a hook is
        considered attached exactly when its state is ACTIVE.
        """
        return self._state == HookState.ACTIVE

    @property
    def module(self) -> nn.Module:
        """Get the module this hook is attached to."""
        return self._module

    @property
    def hook_type(self) -> HookType:
        """Get the type of this hook."""
        return self._hook_type

    def attach(self) -> Any:
        """
        Attach this hook to its target module.

        Depending on the hook type, this will register pre/post hooks
        or replace the module's forward method.

        Returns:
            Any: Handle to the attached hook, or self for FORWARD_REPLACE

        Raises:
            HookAttachError: If the hook cannot be attached
        """
        if self._state == HookState.ACTIVE:
            # Already attached; attaching twice would double-register hooks.
            logger.debug("Hook already attached, skipping")
            return self._hook_handle or self

        try:
            module_name = self._module.__class__.__name__
            module_id = id(self._module)
            # Detailed progress information belongs at DEBUG level.
            logger.debug(
                f"Attaching {self._hook_type.name} hook to {module_name} module [id={module_id}]"
            )

            # Apply the appropriate hook type
            if self._hook_type == HookType.FORWARD_PRE:
                logger.debug(f"Registering forward pre-hook to {module_name}...")
                self._hook_handle = self._module.register_forward_pre_hook(
                    self._pre_forward_hook
                )
                logger.debug(
                    f"Successfully registered forward pre-hook to {module_name}"
                )

            elif self._hook_type == HookType.FORWARD_POST:
                logger.debug(f"Registering forward post-hook to {module_name}...")
                self._hook_handle = self._module.register_forward_hook(
                    self._post_forward_hook
                )
                logger.debug(
                    f"Successfully registered forward post-hook to {module_name}"
                )

            elif self._hook_type == HookType.FORWARD_REPLACE:
                # Save the original forward method so detach() can restore it.
                if hasattr(self._module, "forward"):
                    self._original_forward = self._module.forward

                    # Replace the forward method with our hooked version
                    @functools.wraps(self._module.forward)
                    def hooked_forward(*args, **kwargs):
                        return self._hook_forward(*args, **kwargs)

                    self._module.forward = hooked_forward
                    logger.debug(
                        f"Successfully replaced {module_name}'s forward method"
                    )
                else:
                    error_msg = f"Module {module_name} has no forward method"
                    # A missing forward method is a genuine error condition.
                    logger.error(error_msg)
                    raise HookAttachError(error_msg)

            elif self._hook_type == HookType.BACKWARD_PRE:
                logger.debug(f"Registering backward pre-hook to {module_name}...")
                # BUGFIX: nn.Module has no ``register_backward_pre_hook``
                # method; the correct PyTorch API is
                # ``register_full_backward_pre_hook``.
                self._hook_handle = self._module.register_full_backward_pre_hook(
                    self._pre_backward_hook
                )
                logger.debug(
                    f"Successfully registered backward pre-hook to {module_name}"
                )

            elif self._hook_type == HookType.BACKWARD_POST:
                logger.debug(f"Registering backward post-hook to {module_name}...")
                self._hook_handle = self._module.register_full_backward_hook(
                    self._post_backward_hook
                )
                logger.debug(
                    f"Successfully registered backward post-hook to {module_name}"
                )

            else:
                raise ValueError(f"Unsupported hook type: {self._hook_type}")

            # Mark as active
            self._state = HookState.ACTIVE

            # Return handle based on hook type
            return self._hook_handle or self
        except Exception as e:
            logger.error(
                f"Error attaching hook to {self._module.__class__.__name__}: {e}"
            )
            # Ensure we're in a clean state
            self._state = HookState.INITIALIZED
            if isinstance(e, HookAttachError):
                raise
            raise HookAttachError(
                f"Cannot attach hook to {self._module.__class__.__name__}: {str(e)}"
            ) from e

    def detach(self) -> None:
        """
        Remove this hook from its target module.

        For FORWARD_REPLACE hooks, this restores the original forward method.
        For other hook types, this removes the registered hook.

        Raises:
            HookDetachError: If detaching fails (state is still marked
                DETACHED to avoid an inconsistent hook).
        """
        if self._state != HookState.ACTIVE:
            logger.warning(
                f"Attempting to detach inactive hook: {self._module.__class__.__name__}"
            )
            return

        try:
            if self._hook_type == HookType.FORWARD_REPLACE:
                # Restore the original forward method saved during attach().
                if self._original_forward is not None:
                    self._module.forward = self._original_forward
                    self._original_forward = None
                else:
                    logger.error(
                        f"Original forward method not found for module: {self._module.__class__.__name__}"
                    )

            elif self._hook_handle is not None:
                # Remove registered hook
                self._hook_handle.remove()
                self._hook_handle = None
            else:
                logger.warning(f"No hook handle found for {self._hook_type} hook")

            # Mark as detached
            self._state = HookState.DETACHED

        except Exception as e:
            logger.error(f"Error while detaching hook: {e}")
            # Even if detaching fails, mark the hook as detached to avoid inconsistent state
            self._state = HookState.DETACHED
            raise HookDetachError(f"Failed to detach hook: {str(e)}") from e

    # Hook implementation methods
    def _forward(self, *args, **kwargs) -> Any:
        """
        Forward operation specific to hook parameters.

        Returns:
            Any: Implementation-specific hook handle

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError("Implement only when necessary")

    def _hook_forward(self, *args, **kwargs) -> torch.Tensor:
        """
        Replacement forward method.

        Subclasses must implement this for FORWARD_REPLACE hooks.
        The method should return the same type as the original forward method
        of the module being hooked, typically torch.Tensor for neural network layers.

        Args:
            *args, **kwargs: Arguments for the module's forward method

        Returns:
            torch.Tensor: Output tensor
        """
        raise NotImplementedError(
            "Implement this method for FORWARD_REPLACE hooks to define custom forward behavior"
        )

    def _pre_forward_hook(
        self, module: nn.Module, inputs: Tuple[torch.Tensor]
    ) -> Union[None, Tuple[torch.Tensor]]:
        """
        Hook that runs before module forward.

        Subclasses should override this for FORWARD_PRE hooks.

        Args:
            module: The module being hooked
            inputs: Input tensors to the module

        Returns:
            Union[None, Tuple[torch.Tensor]]: Modified inputs or None
        """
        # Default implementation passes through inputs unchanged
        return None

    def _post_forward_hook(
        self, module: nn.Module, inputs: Tuple[torch.Tensor, ...], output: torch.Tensor
    ) -> torch.Tensor:
        """
        Hook that runs after module forward.

        Subclasses should override this for FORWARD_POST hooks.

        Args:
            module: The module being hooked
            inputs: Input tensors to the module
            output: Output from the module

        Returns:
            torch.Tensor: Modified output
        """
        # Default implementation passes through output unchanged
        return output

    def _pre_backward_hook(
        self, module: nn.Module, grad_output: Tuple[torch.Tensor]
    ) -> Union[None, Tuple[torch.Tensor]]:
        """
        Hook that runs before module backward pass.

        Subclasses should override this for BACKWARD_PRE hooks.

        Args:
            module: The module being hooked
            grad_output: Incoming gradients

        Returns:
            Union[None, Tuple[torch.Tensor]]: Modified gradients or None
        """
        # Default implementation passes through gradients unchanged
        return None

    def _post_backward_hook(
        self,
        module: nn.Module,
        grad_input: Union[torch.Tensor, Tuple[torch.Tensor, ...], None],
        grad_output: Union[torch.Tensor, Tuple[torch.Tensor, ...], None],
    ) -> Union[None, torch.Tensor, Tuple[torch.Tensor, ...]]:
        """
        Hook that runs after module backward pass.

        Subclasses should override this for BACKWARD_POST hooks.

        Args:
            module: The module being hooked
            grad_input: Gradients going to module inputs
            grad_output: Gradients coming from module outputs

        Returns:
            Union[None, torch.Tensor, Tuple[torch.Tensor, ...]]: Modified input gradients or None
        """
        # Default implementation passes through gradients unchanged
        return None


class BaseHook(
    Generic[HConfig],
    HookBehaviour,
    HookInterface,
):
    """
    Base class for hooks that modify module behavior.

    This class provides the foundation for implementing various hook types,
    with standardized methods for attaching, detaching, and managing hook state.

    Args:
        module: Module to hook
        hook_config: Typed configuration for the hook
    """

    _hook_type: HookType = HookType.FORWARD_REPLACE

    def __init__(
        self,
        *,
        module: nn.Module,
        hook_config: HConfig,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.hook_config = hook_config
        # Weak reference: the hook must not keep the hooked module alive,
        # and the module must not appear among this hook's own submodules.
        self._module_ref = weakref.ref(module)

    def forward(self, *args, **kwargs) -> torch.Tensor:
        """Delegate to the subclass-specific ``_forward`` implementation."""
        return self._forward(*args, **kwargs)

    def _hook_forward(self, *args, **kwargs) -> torch.Tensor:
        """
        Forward method to process input through the hook.

        This method should be overridden by subclasses to implement
        specific forward behavior for the hook.

        Args:
            *args: Positional arguments for the forward pass
            **kwargs: Keyword arguments for the forward pass

        Returns:
            torch.Tensor: Output tensor after processing

        Raises:
            NotImplementedError: Always, unless overridden by a subclass.
        """
        raise NotImplementedError(
            "Implement this method to define custom forward behavior"
        )

    def get_parameters(self) -> List[nn.Parameter]:
        """
        Get all parameters registered on this hook.

        The hooked module is held via a weak reference (not as a submodule),
        so its parameters are naturally excluded from ``named_parameters()``.

        Returns:
            List[nn.Parameter]: All parameters owned by this hook
        """
        return [param for _, param in self.named_parameters()]

    def get_trainable_parameters(self) -> List[nn.Parameter]:
        """
        Get trainable (requires_grad) parameters managed by this hook.

        This method mirrors get_parameters but filters out frozen
        parameters, for compatibility with the TaskAwareHookManagerInterface.

        Returns:
            List[nn.Parameter]: Parameters with ``requires_grad=True``
        """
        return [param for _, param in self.named_parameters() if param.requires_grad]


@HOOK_REGISTRY.register("base")
class BaseTaskAwareHook(Generic[BHook], HookBehaviour, TaskAwareHookInterface):
    """
    Task-aware decorator built on BaseHook that implements task-related interfaces.

    This class extends BaseHook to provide task-specific functionality,
    enabling hooks to manage multiple tasks with distinct configurations.
    It serves as an adapter between the basic hook mechanism and
    task-aware adaptation requirements.

    Args:
        router: Router used to compute per-task weights in multi-task mode
        combiner: Combiner used to merge per-task outputs
        module: Module to hook
        hook_config: Optional configuration for the per-task hooks
    """

    # Factory callable used by _create_hook to build a per-task hook instance.
    _hook_func: Callable[..., BHook]

    def __init__(
        self,
        *,
        router: RouterInterface,
        combiner: CombinerInterface,
        module: nn.Module,
        hook_config: Optional[BasePEFTConfig] = None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.hook_config: BasePEFTConfig | None = hook_config
        # Weak reference so the hook does not keep the module alive.
        self._module_ref = weakref.ref(module)

        self._current_task = -1
        # Per-task hooks, keyed by str(task_id) (ModuleDict requires string keys).
        self._hooks: nn.ModuleDict = nn.ModuleDict()
        self.router = router
        self.combiner = combiner

    def _create_hook(
        self,
    ) -> BHook:
        """Create hook instance for specific task."""
        return self._hook_func(
            module=self._module,
            hook_config=self.hook_config,
        )

    def _hook_forward(self, x, *args, **kwargs) -> torch.Tensor:
        """
        Replacement forward method that intercepts the original forward call.

        This method checks for task_ids in the context and passes them to
        the forward method if available.
        """
        # Get task_ids from context if available
        context_task_ids = get_current_task_ids()
        # BUGFIX: task_ids is passed positionally here. The previous call
        # ``self.forward(x, task_ids=..., *args, **kwargs)`` raised
        # "got multiple values for argument 'task_ids'" whenever extra
        # positional args were present, because the first extra arg also
        # bound to the task_ids parameter slot.
        return self.forward(x, context_task_ids, *args, **kwargs)

    def forward(
        self,
        x: torch.Tensor,
        task_ids: Optional[List[int]] = None,
        *args,
        **kwargs,
    ) -> torch.Tensor:
        """
        Forward method to process input through the hook.

        This method supports flexible task selection:
        - If task_ids is None or empty: process with all registered tasks
        - If task_ids is an int or a single-element list: process with single task
        - If task_ids is a list: process with multiple specified tasks

        Args:
            x: Input tensor
            task_ids: Optional task identifier(s). If None, uses all registered tasks
            *args: Additional positional arguments
            **kwargs: Additional keyword arguments

        Returns:
            torch.Tensor: Output tensor after processing

        Raises:
            RuntimeError: If no tasks are registered and task_ids is None/empty
            ValueError: If any requested task is not registered
        """
        # Accept a bare int, as the docstring has always promised
        # (previously ``len(task_ids)`` raised TypeError on an int).
        if isinstance(task_ids, int):
            task_ids = [task_ids]

        # Handle different task_ids scenarios
        if task_ids is None or len(task_ids) < 1:
            # Default behavior: use all registered tasks
            active_task_ids = self.registered_tasks
            if not active_task_ids:
                raise RuntimeError(
                    "No tasks registered. Please register at least one task before forward pass."
                )
        elif len(task_ids) == 1:
            # Single task mode
            if task_ids[0] not in self.registered_tasks:
                raise ValueError(
                    f"Task {task_ids} is not registered. Available tasks: {self.registered_tasks}"
                )
            return self.process_with_task(x, task_ids[0], *args, **kwargs)
        else:
            # Multiple specific tasks mode
            active_task_ids = task_ids
            # Validate all task_ids are registered
            unregistered = set(active_task_ids) - set(self.registered_tasks)
            if unregistered:
                raise ValueError(
                    f"Tasks {unregistered} are not registered. Available tasks: {self.registered_tasks}"
                )

        # Multi-task processing
        return self.process_with_tasks(x, active_task_ids, *args, **kwargs)

    def register_task(self, task_id: int, **kwargs) -> None:
        """
        Register a new task with this hook.

        Registering an already-registered task is a no-op (with a warning).

        Args:
            task_id: Identifier for the new task
            **kwargs: Reserved for subclass extensions (unused here)
        """
        task_key = str(task_id)  # ModuleDict requires string keys
        if task_key in self._hooks:
            logger.warning(f"Task {task_id} already registered, continuing")
            return

        # Create a new hook for the task
        self._hooks[task_key] = self._create_hook()

    def set_task(self, task_id: int) -> None:
        """
        Configure the model for a specific task.

        Args:
            task_id: Task identifier
        """
        self._current_task = task_id

    def set_trainable_tasks(self, task_ids: int | List[int], **kwargs) -> None:
        """
        Set which tasks should be trainable (unfrozen) while freezing all others.

        This method configures the trainable state for router, combiner, and hooks
        associated with the specified tasks. All other tasks will be frozen.

        Args:
            task_ids: Task identifier(s) to make trainable
            **kwargs: Additional configuration parameters passed to router/combiner

        Raises:
            ValueError: If any task in task_ids is not registered
        """
        # Convert single task_id to list for uniform processing
        if isinstance(task_ids, int):
            task_ids = [task_ids]

        # Validate all task IDs are registered
        for task_id in sorted(task_ids):
            if task_id not in self.registered_tasks:
                raise ValueError(f"Task {task_id} is not registered")

        # Get tasks to freeze (all registered tasks except specified ones)
        tasks_to_freeze = [tid for tid in self.registered_tasks if tid not in task_ids]

        self._freeze(tasks_to_freeze)
        self._unfreeze(task_ids)

        # FIXME: potential duplicate invocation — _freeze/_unfreeze above may
        # already have toggled router parameters for these tasks.
        if hasattr(self.router, "set_trainable_tasks"):
            self.router.set_trainable_tasks(task_ids, **kwargs)

        logger.debug(
            f"Set trainable tasks: {task_ids}, frozen tasks: {tasks_to_freeze}"
        )

    def _freeze(self, task_ids: int | List[int]) -> None:
        """
        Freeze parameters for specific tasks.

        This method sets requires_grad=False for all parameters associated
        with the specified task IDs in hooks and (if supported) the router.

        Args:
            task_ids: Task identifier(s) to freeze parameters for
        """
        # Convert single task_id to list for uniform processing
        if isinstance(task_ids, int):
            task_ids = [task_ids]

        # Freeze hook parameters for specified tasks
        for task_id in sorted(task_ids):
            task_key = str(task_id)
            if task_key in self._hooks:
                hook = self._hooks[task_key]
                # Freeze all parameters in the hook
                for param in hook.parameters():
                    param.requires_grad = False
                logger.debug(f"Frozen parameters for task {task_id} hook")
            else:
                logger.warning(f"Task {task_id} not found in hooks, skipping freeze")

        # Freeze router parameters if it supports task-specific operations
        if hasattr(self.router, "_freeze"):
            self.router._freeze(task_ids)

    def _unfreeze(self, task_ids: int | List[int]) -> None:
        """
        Unfreeze parameters for specific tasks.

        This method sets requires_grad=True for all parameters associated
        with the specified task IDs in hooks and (if supported) the router.

        Args:
            task_ids: Task identifier(s) to unfreeze parameters for
        """
        # Convert single task_id to list for uniform processing
        if isinstance(task_ids, int):
            task_ids = [task_ids]

        # Unfreeze hook parameters for specified tasks
        for task_id in sorted(task_ids):
            task_key = str(task_id)
            if task_key in self._hooks:
                hook = self._hooks[task_key]
                # Unfreeze all parameters in the hook
                for param in hook.parameters():
                    param.requires_grad = True
                logger.debug(f"Unfrozen parameters for task {task_id} hook")
            else:
                logger.warning(f"Task {task_id} not found in hooks, skipping unfreeze")

        # Unfreeze router parameters if it supports task-specific operations
        if hasattr(self.router, "_unfreeze"):
            self.router._unfreeze(task_ids)

    def process_with_task(
        self, x: torch.Tensor, task_id: int, *args, **kwargs
    ) -> torch.Tensor:
        """Process input with a specific task (no router/combiner needed)."""
        task_key = str(task_id)
        if task_key not in self._hooks:
            raise ValueError(f"Task {task_id} is not registered")

        # Single task doesn't need router/combiner
        return self._hooks[task_key].forward(x, *args, **kwargs)

    def process_with_tasks(
        self, x: torch.Tensor, task_ids: List[int], *args, **kwargs
    ) -> torch.Tensor:
        """Process input among multiple tasks and combine their outputs."""

        # Get routing weights for specified tasks.
        # NOTE(review): if callers ever pass extra positional args here, the
        # router/combiner signatures must not bind them to their task_ids
        # slot — verify against RouterInterface/CombinerInterface.
        weights = self.router.route(x, *args, task_ids=task_ids, **kwargs)

        # Compute outputs only for specified tasks
        o_hook = {}
        for task_id in sorted(task_ids):
            task_key = str(task_id)
            if task_key in self._hooks:
                o_hook[task_id] = self._hooks[task_key].forward(x, *args, **kwargs)

        # Combine outputs with weights
        o = self.combiner.combine(o_hook, weights, *args, task_ids=task_ids, **kwargs)
        return o["combiner_output"]

    @property
    def current_task(self) -> int:
        """Get the current task ID."""
        return self._current_task

    @property
    def registered_tasks(self) -> List[int]:
        """Get list of registered task IDs (decoded from ModuleDict string keys)."""
        return [int(task_key) for task_key in self._hooks.keys()]

    def get_parameters(self) -> List[nn.Parameter]:
        """
        Get all parameters managed by this hook.

        Aggregates parameters from the router, the combiner, and every
        registered per-task hook.

        Returns:
            List[nn.Parameter]: List of parameters
        """
        params = []
        target_attrs = ["router", "combiner", "_hooks"]
        for attr in target_attrs:
            results = self._collect_from_attr(attr, "get_parameters")
            params.extend(results)
        return params

    def get_trainable_parameters(self) -> List[nn.Parameter]:
        """
        Get trainable parameters managed by this hook.

        This method is a wrapper around get_parameters to maintain
        compatibility with the TaskAwareHookManagerInterface.

        Returns:
            List[nn.Parameter]: List of trainable parameters
        """
        params = []
        target_attrs = ["router", "combiner", "_hooks"]
        for attr in target_attrs:
            results = self._collect_from_attr(attr, "get_trainable_parameters")
            params.extend(results)
        return params

    def _collect_from_attr(
        self, attr: str, func_name: str, *args, **kwargs
    ) -> List[Any]:
        """
        Call ``func_name`` on the object(s) stored under ``self.<attr>`` and
        return the flattened results.

        BUGFIX: the previous implementation never resolved ``attr`` on
        ``self`` — it called ``getattr`` on the attribute-name *string*
        itself (raising AttributeError) — and, when iterating a container,
        kept only the last result instead of accumulating all of them.
        It also could not recognise an nn.ModuleDict as a mapping.

        Args:
            attr: Name of the attribute on self holding the target object
                or container of objects (e.g. "router", "_hooks")
            func_name: Name of the method to call on each target
            *args, **kwargs: Arguments to pass to the method

        Returns:
            List[Any]: Flattened list of results from all targets

        Raises:
            ValueError: If any call returns None
        """
        target = getattr(self, attr)

        # Normalize the target into an iterable of objects to query.
        if isinstance(target, (nn.ModuleDict, dict)):
            objects = list(target.values())
        elif isinstance(target, (list, tuple)):
            objects = list(target)
        else:
            objects = [target]

        results: List[Any] = []
        for obj in objects:
            result = getattr(obj, func_name)(*args, **kwargs)
            if result is None:
                raise ValueError(
                    f"Method {func_name} returned None, expected a list or single result"
                )
            if isinstance(result, list):
                results.extend(result)
            else:
                results.append(result)
        return results


@HOOK_FACTORY_REGISTRY.register("base")
class BaseHookFactory(Generic[THook, HConfig], HookFactoryInterface):
    """
    Base factory for creating task-aware hooks.

    Resolves the router and combiner classes from their registries once at
    construction time, then builds a fresh router/combiner pair for every
    module it is called on.
    """

    def __init__(
        self,
        hook_name: str,
        router_name: str,
        combiner_name: str,
        hook_config: Union[HConfig, Dict[str, Any]],
        router_config: Dict[str, Any] | RouterConfig,
        combiner_config: Dict[str, Any] | CombinerConfig,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.hook_name = hook_name
        self.router_name = router_name
        self.combiner_name = combiner_name
        # Resolve implementation classes from the registries up front.
        self.router_cls = ROUTER_REGISTRY.get(router_name)
        self.combiner_cls = COMBINER_REGISTRY.get(combiner_name)
        # Normalize config objects to plain dicts for later ** expansion.
        self.router_config = (
            router_config
            if isinstance(router_config, dict)
            else router_config.to_dict()
        )
        self.combiner_config = (
            combiner_config
            if isinstance(combiner_config, dict)
            else combiner_config.to_dict()
        )
        self.hook_config: HConfig = cast(
            HConfig, create_peft_config(hook_name, hook_config)
        )

    @abstractmethod
    def create_hook_instance(
        self,
        module: nn.Module,
        router: RouterInterface,
        combiner: CombinerInterface,
        config: HConfig,
        *args,
        **kwargs,
    ) -> THook:
        """
        Create a specific hook instance.

        This method must be implemented by subclasses to create
        the appropriate hook type.
        """
        pass

    def __call__(self, module_path: str, module: nn.Module) -> THook:
        """
        Create a hook for a specific module.

        Args:
            module_path: Dotted path of the module (used for logging only)
            module: The module to hook

        Returns:
            THook: A newly constructed task-aware hook
        """
        # Each hooked module gets its own router/combiner pair.
        new_router = self.router_cls(**self.router_config)
        new_combiner = self.combiner_cls(**self.combiner_config)

        # Delegate the actual construction to the subclass.
        new_hook = self.create_hook_instance(
            module=module,
            router=new_router,
            combiner=new_combiner,
            config=self.hook_config,
        )

        logger.debug(f"Created hook for module at path: {module_path}")
        return new_hook

    @property
    def config(self) -> BasePEFTConfig:
        """
        Get the configuration for this hook factory.

        Returns:
            BasePEFTConfig: Configuration object for this factory
        """
        return self.hook_config


# BUGFIX: removed the duplicate registrations that were previously here.
# BaseTaskAwareHook and BaseHookFactory are already registered under the
# key "base" by the @HOOK_REGISTRY.register / @HOOK_FACTORY_REGISTRY.register
# class decorators above; re-registering the same key a second time was
# redundant (and would fail if a registry ever rejects duplicates).
