# """
# Unified interface for Parameter-Efficient Fine-Tuning (PEFT) methods.

# This module provides a simplified interface for creating and managing PEFT adapters,
# abstracting away implementation details and providing a consistent user experience.
# """

# from typing import Dict, Any, Union, Optional, List, Type
# from enum import Enum

# import torch
# import torch.nn as nn

# from continuallearning.models.pefts.common.config import (
#     BasePEFTConfig,
#     LoRAConfig,
#     PrefixConfig,
#     PromptConfig,
#     AdapterConfig,
#     BypassLoRAConfig,
# )
# from continuallearning.models.pefts.common.config_factory import create_peft_config
# from continuallearning.models.pefts.common.exceptions import ConfigurationError, PEFTError

# from continuallearning.models.backbones.base import BaseBackbone
# from continuallearning.interfaces.models.adapter import AdapterInterface
# from continuallearning.registry import HOOK_ADAPTER_REGISTRY
# from continuallearning.utils.logging import get_logger

# logger = get_logger(__name__)


# class PEFTMethod(Enum):
#     """Supported PEFT method identifiers.
#
#     NOTE(review): this enum is referenced below (create_peft_adapter,
#     list_available_peft_methods, get_adapter_config_class) but was missing
#     from the file; reconstructed here with values matching the registry
#     keys used in create_peft_adapter. Confirm against the original source.
#     """
#
#     LORA = "lora"
#     BYPASS_LORA = "bypass_lora"
#     PREFIX_TUNING = "prefix_tuning"
#     PROMPT_TUNING = "prompt_tuning"
#     P_TUNING = "p_tuning"
#     ADAPTER = "adapter"
#     IA3 = "ia3"


# def create_peft_adapter(
#     method: Union[str, PEFTMethod],
#     backbone: BaseBackbone,
#     config: Optional[Union[Dict[str, Any], BasePEFTConfig]] = None,
#     **kwargs,
# ) -> AdapterInterface:
#     """
#     Create a PEFT adapter for the specified method.

#     This factory function provides a unified interface for creating different
#     types of PEFT adapters, handling configuration and initialization details.

#     Args:
#         method: PEFT method name or enum
#         backbone: Backbone model to adapt
#         config: Optional configuration dict or object
#         **kwargs: Additional parameters for the adapter

#     Returns:
#         AdapterInterface: Initialized PEFT adapter

#     Raises:
#         ValueError: If the method is not supported
#         ConfigurationError: If configuration is invalid
#     """
#     # Convert enum to string if needed
#     if isinstance(method, PEFTMethod):
#         method_name = method.value
#     else:
#         method_name = method.lower()

#     # Map method names to registry keys
#     registry_keys = {
#         "lora": "lora",
#         "bypass_lora": "bypass_lora",
#         "prefix_tuning": "prefix_tuning",
#         "prefix": "prefix_tuning",
#         "prompt_tuning": "prompt_tuning",
#         "prompt": "prompt_tuning",
#         "p_tuning": "p_tuning",
#         "adapter": "adapter",
#         "ia3": "ia3",
#     }

#     # Check if method is supported
#     if method_name not in registry_keys:
#         supported = ", ".join(registry_keys.keys())
#         raise ValueError(
#             f"Unsupported PEFT method: '{method_name}'. "
#             f"Supported methods are: {supported}"
#         )

#     # Get registry key
#     registry_key = registry_keys[method_name]

#     try:
#         # Create adapter using registry
#         adapter_cls = HOOK_ADAPTER_REGISTRY.get(registry_key)
#         if adapter_cls is None:
#             raise ValueError(f"No adapter found for method '{method_name}'")

#         # Prepare arguments
#         adapter_kwargs = dict(kwargs)
#         adapter_kwargs["backbone"] = backbone
#         if config is not None:
#             adapter_kwargs["config"] = config

#         # Create and return adapter
#         adapter = adapter_cls(**adapter_kwargs)
#         return adapter

#     except Exception as e:
#         # Provide informative error message
#         if isinstance(e, (ValueError, ConfigurationError)):
#             raise
#         else:
#             raise PEFTError(f"Failed to create {method_name} adapter: {str(e)}") from e


# def list_available_peft_methods() -> List[str]:
#     """
#     Get a list of available PEFT methods.

#     Returns:
#         List[str]: Enum *member* names of available PEFT methods (e.g.
#             "LORA", "PREFIX_TUNING"). NOTE(review): these are uppercase
#             member names, not the lowercase method strings accepted by
#             create_peft_adapter; callers wanting registry keys such as
#             "lora" need the enum values instead.
#     """
#     return list(PEFTMethod.__members__.keys())


# def get_adapter_config_class(method: Union[str, PEFTMethod]) -> Type[BasePEFTConfig]:
#     """
#     Get the configuration class for a specific PEFT method.

#     Args:
#         method: PEFT method name or enum

#     Returns:
#         Type[BasePEFTConfig]: The configuration class for the method

#     Raises:
#         ValueError: If the method is not supported
#     """
#     # Convert enum to string if needed
#     if isinstance(method, PEFTMethod):
#         method_name = method.value
#     else:
#         method_name = method.lower()

#     # Map method names to config classes
#     config_classes = {
#         "lora": LoRAConfig,
#         "bypass_lora": BypassLoRAConfig,
#         "prefix_tuning": PrefixConfig,
#         "prefix": PrefixConfig,
#         "prompt_tuning": PromptConfig,
#         "prompt": PromptConfig,
#         "adapter": AdapterConfig,
#     }

#     # Check if method is supported
#     if method_name not in config_classes:
#         supported = ", ".join(config_classes.keys())
#         raise ValueError(
#             f"No configuration class found for method '{method_name}'. "
#             f"Supported methods are: {supported}"
#         )

#     # Return config class
#     return config_classes[method_name]
