"""
Low-Rank Adaptation (LoRA) implementation for continual learning.

This module implements the LoRA method from:
"LoRA: Low-Rank Adaptation of Large Language Models" (Hu et al., 2021)
using a hook-based approach for modular integration.
"""

import torch.nn as nn
from continuallearning.models.pefts.hook_managers import (
    create_hook_manager,
    UnifiedHookManager,
)
from continuallearning.models.pefts.modules._base_module import BaseHookAdapter
from continuallearning.registry import HOOK_ADAPTER_REGISTRY
from continuallearning.utils.logging import get_logger

logger = get_logger(__name__)


@HOOK_ADAPTER_REGISTRY.register("lora")
class HookBasedLoRAAdapter(BaseHookAdapter):
    """Hook-based LoRA adapter, registered in ``HOOK_ADAPTER_REGISTRY`` as "lora".

    Delegates all hook wiring to a unified hook manager built from the
    adapter's hook-manager configuration.
    """

    # Override the BaseHookAdapter flag: this adapter does not collect features.
    _collect_features: bool = False

    def _create_hook_manager(self, model: nn.Module) -> UnifiedHookManager:
        """Construct the unified hook manager attached to ``model``.

        Args:
            model: The module that the hook manager will instrument.

        Returns:
            A ``UnifiedHookManager`` configured from ``self._hook_manager_config``.
        """
        manager = create_hook_manager(
            model,
            self._hook_manager_config,
            manager_type="unified",
        )
        return manager
