"""
Context management for task-aware hooks.

This module provides a thread-safe context manager for passing task information
through the model hierarchy without modifying existing interfaces.
"""

import threading
from contextlib import contextmanager
from typing import Dict, List, Optional, Union, Any, overload, TypeVar, Literal
from dataclasses import dataclass, field
from enum import Enum

import torch

from continuallearning.utils.logging import get_logger

logger = get_logger(__name__)

T = TypeVar("T")


class ContextMode(Enum):
    """Operating states of the global context manager."""

    IDLE = "idle"  # neither collecting nor consuming
    COLLECTING = "collecting"  # backbone features are being recorded
    CONSUMING = "consuming"  # a previously collected context is active


@dataclass
class TaskContext:
    """Task context containing collected information.

    Attributes:
        task_ids: Optional list of task identifiers for this context.
        backbone_features: Mapping of layer name -> detached feature tensor,
            kept in insertion order.
        metadata: Arbitrary extra key/value data attached to the context.
    """

    task_ids: Optional[List[int]] = None
    backbone_features: Dict[str, torch.Tensor] = field(default_factory=dict)
    metadata: Dict[str, Any] = field(default_factory=dict)

    def clear(self) -> None:
        """Clear all context data."""
        self.task_ids = None
        self.backbone_features.clear()
        self.metadata.clear()

    def add_feature(self, layer_name: str, feature: torch.Tensor) -> None:
        """Add a backbone feature to the context.

        The tensor is cloned and detached so the stored copy is independent
        of the autograd graph and of later in-place edits by the caller.
        """
        self.backbone_features[layer_name] = feature.clone().detach()

    @overload
    def get_feature(self, identifier: str) -> Optional[torch.Tensor]:
        """Get a backbone feature by layer name."""
        ...

    @overload
    def get_feature(self, identifier: int) -> Optional[torch.Tensor]:
        """Get a backbone feature by index."""
        ...

    def get_feature(self, identifier: Union[str, int]) -> Optional[torch.Tensor]:
        """
        Get a backbone feature from the context.

        Args:
            identifier: Either a layer name (str) or index (int).
                Negative indices are rejected (warned and treated as missing).

        Returns:
            The requested feature tensor or None if not found

        Raises:
            TypeError: If identifier is neither str nor int.

        Example:
            # Get by layer name
            feature = context.get_feature("layer3")

            # Get by index
            feature = context.get_feature(2)  # Gets the 3rd feature (0-indexed)
        """
        if isinstance(identifier, str):
            # Get by layer name; missing names yield None (no warning).
            return self.backbone_features.get(identifier)
        if isinstance(identifier, int):
            if identifier < 0 or identifier >= len(self.backbone_features):
                logger.warning(
                    f"Feature index {identifier} out of range. "
                    f"Available features: {len(self.backbone_features)}"
                )
                return None
            # Dicts preserve insertion order, so index i is the i-th added.
            return list(self.backbone_features.values())[identifier]
        raise TypeError(
            f"Identifier must be str or int, got {type(identifier).__name__}"
        )

    def get_features(self) -> Dict[str, torch.Tensor]:
        """
        Get all backbone features.

        Returns:
            A shallow copy of the name -> tensor mapping (the tensors
            themselves are shared with the context).

        Example:
            all_features = context.get_features()
            for layer_name, feature in all_features.items():
                print(f"{layer_name}: {feature.shape}")
        """
        return self.backbone_features.copy()

    def has_feature(self, layer_name: str) -> bool:
        """Check if a feature exists in the context."""
        return layer_name in self.backbone_features

    def delete_feature(self, layer_name: str) -> None:
        """Delete a feature from the context by layer name (warn if absent)."""
        if layer_name in self.backbone_features:
            del self.backbone_features[layer_name]
        else:
            logger.warning(f"Feature '{layer_name}' not found in context to delete.")

    def clone(self) -> "TaskContext":
        """Create an independent copy of the context.

        Tensors are cloned and detached, and the task-ID list is copied so
        mutating the clone's list does not affect the original (previously
        the list object was shared). The metadata dict is shallow-copied:
        its values are still shared with the original.
        """
        return TaskContext(
            task_ids=list(self.task_ids) if self.task_ids is not None else None,
            backbone_features={
                k: v.clone().detach() for k, v in self.backbone_features.items()
            },
            metadata=self.metadata.copy(),
        )


class GlobalContextManager:
    """
    Thread-safe global context manager using singleton pattern.

    This manager handles two primary operations:
    1. collect_context: Collect context information (features, task IDs, etc.)
    2. task_context: Use collected context for task-specific operations

    The current mode and the collecting/active contexts are thread-local,
    so concurrent threads collect and consume independently. The saved
    collected context is shared across threads and guarded by
    ``_context_lock``.
    """

    _instance: Optional["GlobalContextManager"] = None
    _lock = threading.Lock()

    def __new__(cls) -> "GlobalContextManager":
        # Double-checked locking: skip the lock entirely once the singleton
        # exists, but re-check under the lock so racing first callers
        # cannot create two instances.
        if cls._instance is None:
            with cls._lock:
                if cls._instance is None:
                    cls._instance = super().__new__(cls)
        return cls._instance

    def __init__(self):
        # __init__ runs on every GlobalContextManager() call because of the
        # singleton __new__, so guard against re-initializing shared state.
        if hasattr(self, "_initialized"):
            return

        self._local = threading.local()
        self._collected_context: Optional[TaskContext] = None
        self._context_lock = threading.Lock()
        self._initialized = True
        logger.debug("Initialized global context manager")

    @property
    def mode(self) -> ContextMode:
        """Get current context mode (thread-local; IDLE when unset)."""
        return getattr(self._local, "mode", ContextMode.IDLE)

    @property
    def current_context(self) -> Optional[TaskContext]:
        """Get the current active (consuming) context for this thread."""
        return getattr(self._local, "active_context", None)

    @property
    def is_collecting(self) -> bool:
        """Check if currently in collecting mode."""
        return self.mode == ContextMode.COLLECTING

    @property
    def is_consuming(self) -> bool:
        """Check if currently in consuming mode."""
        return self.mode == ContextMode.CONSUMING

    def _ensure_collecting_context(self) -> TaskContext:
        """Ensure a collecting context exists for the current thread."""
        if not hasattr(self._local, "collecting_context"):
            self._local.collecting_context = TaskContext()
        return self._local.collecting_context

    def add_feature(self, layer_name: str, feature: torch.Tensor) -> None:
        """Add a feature during collection phase.

        Raises:
            RuntimeError: If this thread is not inside collect_context.
        """
        if not self.is_collecting:
            raise RuntimeError(
                f"Attempted to add feature '{layer_name}' outside of collection context"
            )
        context = self._ensure_collecting_context()
        context.add_feature(layer_name, feature)
        logger.debug(f"Collected feature from layer: {layer_name}")

    def delete_feature(self, layer_name: str) -> None:
        """Delete a feature from the current (consuming) context.

        Raises:
            RuntimeError: If there is no active context on this thread.
        """
        if self.current_context is None:
            raise RuntimeError(
                f"Attempted to delete feature '{layer_name}' without an active context"
            )
        self.current_context.delete_feature(layer_name)
        logger.debug(f"Deleted feature from layer: {layer_name}")

    @overload
    def get_feature(self, identifier: str) -> Optional[torch.Tensor]:
        """Get a feature by layer name from the current active context."""
        ...

    @overload
    def get_feature(self, identifier: int) -> Optional[torch.Tensor]:
        """Get a feature by index from the current active context."""
        ...

    def get_feature(self, identifier: Union[str, int]) -> Optional[torch.Tensor]:
        """
        Get a feature from the current active context.

        Args:
            identifier: Either a layer name (str) or index (int)

        Returns:
            The requested feature tensor or None if not found

        Raises:
            RuntimeError: If there is no active context on this thread.
        """
        if self.current_context is None:
            raise RuntimeError("No active context available to retrieve features")
        return self.current_context.get_feature(identifier)

    @overload
    def get_features(self, format: Literal["dict"] = "dict") -> Dict[str, torch.Tensor]:
        """Get all features as a dictionary mapping layer names to tensors."""
        ...

    @overload
    def get_features(self, format: Literal["list"]) -> List[torch.Tensor]:
        """Get all features as a list of tensors."""
        ...

    def get_features(
        self, format: Literal["dict", "list"] = "dict"
    ) -> Union[Dict[str, torch.Tensor], List[torch.Tensor]]:
        """
        Get all features from the current active context.

        Args:
            format: Return format - "dict" for name->tensor mapping,
                   "list" for tensor list only

        Returns:
            Either a dictionary mapping layer names to tensors (format="dict")
            or a list of tensors in order of addition (format="list")

        Raises:
            RuntimeError: If no active context is available
            ValueError: If format is neither "dict" nor "list"

        Example:
            # Get as dictionary
            features_dict = manager.get_features("dict")
            for name, tensor in features_dict.items():
                print(f"{name}: {tensor.shape}")

            # Get as list
            features_list = manager.get_features("list")
            for i, tensor in enumerate(features_list):
                print(f"Feature {i}: {tensor.shape}")
        """
        if not self.current_context:
            raise RuntimeError("No active context available to retrieve features")

        if format == "dict":
            return self.current_context.get_features()
        if format == "list":
            return list(self.current_context.get_features().values())
        raise ValueError(f"Invalid format '{format}'. Must be 'dict' or 'list'.")

    def get_task_ids(self) -> Optional[List[int]]:
        """Get task IDs from the current active context (None if no context)."""
        if self.current_context:
            return self.current_context.task_ids
        return None

    @contextmanager
    def collect_context(
        self,
        task_ids: Optional[List[int]] = None,
        save_collected: bool = True,
        **metadata,
    ):
        """
        Context manager for collecting context information.

        During this phase, backbone features and other context information
        are collected for later use.

        Args:
            task_ids: Task identifiers to associate with collected context
            save_collected: Whether to save collected context for later use
            **metadata: Additional metadata to include in context

        Raises:
            RuntimeError: If this thread is already collecting or consuming.

        Example:
            with context_manager.collect_context(task_ids=[1]):
                # Features are collected during this forward pass
                output = model(input_tensor)
        """
        if self.mode != ContextMode.IDLE:
            raise RuntimeError(
                f"Cannot enter collect_context while in {self.mode} mode"
            )

        # Set up collection mode (all thread-local; no lock needed)
        self._local.mode = ContextMode.COLLECTING
        self._local.collecting_context = TaskContext(
            task_ids=task_ids, metadata=metadata
        )

        try:
            logger.debug(f"Started collecting context for tasks: {task_ids}")
            yield self._local.collecting_context

            # Only reached when the with-body exits cleanly; an exception in
            # the body skips the save and falls through to cleanup.
            if save_collected:
                with self._context_lock:
                    self._collected_context = self._local.collecting_context.clone()
                logger.debug("Saved collected context for later use")

        finally:
            # Clean up collection state
            self._local.mode = ContextMode.IDLE
            if hasattr(self._local, "collecting_context"):
                delattr(self._local, "collecting_context")
            logger.debug("Finished collecting context")

    @contextmanager
    def task_context(
        self,
        task_ids: Optional[List[int]] = None,
        use_collected: bool = True,
        clear_collected: bool = True,
        **metadata,
    ):
        """
        Context manager for using collected context information.

        During this phase, previously collected context is made available
        for task-specific operations.

        Args:
            task_ids: Override task IDs (uses collected if None)
            use_collected: Whether to use previously collected context
            clear_collected: Whether to clear collected context after use
            **metadata: Additional metadata to merge with context

        Raises:
            RuntimeError: If this thread is already collecting or consuming.

        Example:
            # First collect context
            with context_manager.collect_context(task_ids=[1]):
                features = model.extract_features(input_tensor)

            # Then use collected context
            with context_manager.task_context():
                output = adapter(features)  # Can access collected features
        """
        if self.mode != ContextMode.IDLE:
            raise RuntimeError(f"Cannot enter task_context while in {self.mode} mode")

        # Snapshot the shared collected context entirely under the lock.
        # Checking the attribute before acquiring the lock (check-then-act)
        # would race with clear_collected_context() on another thread.
        context: Optional[TaskContext] = None
        if use_collected:
            with self._context_lock:
                if self._collected_context is not None:
                    context = self._collected_context.clone()

        if context is not None:
            if task_ids is not None:
                context.task_ids = task_ids
            context.metadata.update(metadata)
        else:
            # Nothing collected (or collected use disabled): fresh context.
            context = TaskContext(task_ids=task_ids, metadata=metadata)

        # Set up consumption mode (thread-local)
        self._local.mode = ContextMode.CONSUMING
        self._local.active_context = context

        try:
            logger.debug(f"Started task context with tasks: {context.task_ids}")
            yield context

        finally:
            # Clean up consumption state
            self._local.mode = ContextMode.IDLE
            self._local.active_context = None

            # Clear collected context if requested
            if clear_collected and use_collected:
                with self._context_lock:
                    self._collected_context = None
                logger.debug("Cleared collected context")

            logger.debug("Finished task context")

    def clear_collected_context(self) -> None:
        """Manually clear the collected context."""
        with self._context_lock:
            self._collected_context = None
        logger.debug("Manually cleared collected context")

    def has_collected_context(self) -> bool:
        """Check if there is collected context available (lock-protected read)."""
        with self._context_lock:
            return self._collected_context is not None


# Global instance for easy access; created at import time so every importer
# shares the same singleton without constructing GlobalContextManager itself.
_global_context = GlobalContextManager()


def get_global_context() -> GlobalContextManager:
    """Return the module-wide GlobalContextManager singleton."""
    return _global_context


def get_current_task_ids() -> Optional[List[int]]:
    """Return the task IDs of the currently active context, if any."""
    manager = get_global_context()
    return manager.get_task_ids()


def add_backbone_feature(layer_name: str, feature: torch.Tensor) -> None:
    """Record *feature* under *layer_name* in the global collecting context.

    Raises:
        RuntimeError: If the global manager is not currently collecting.
    """
    manager = get_global_context()
    manager.add_feature(layer_name, feature)


def delete_backbone_feature(layer_name: str) -> None:
    """Remove the feature stored under *layer_name* from the active context.

    Raises:
        RuntimeError: If no context is currently active.
    """
    manager = get_global_context()
    manager.delete_feature(layer_name)


@overload
def get_backbone_feature(identifier: str) -> Optional[torch.Tensor]:
    """Get a backbone feature by layer name from global context."""
    ...


@overload
def get_backbone_feature(identifier: int) -> Optional[torch.Tensor]:
    """Get a backbone feature by index from global context."""
    ...


def get_backbone_feature(identifier: Union[str, int]) -> Optional[torch.Tensor]:
    """Fetch one feature from the active global context by name or index."""
    manager = get_global_context()
    return manager.get_feature(identifier)


@overload
def get_all_backbone_features(
    format: Literal["dict"] = "dict",
) -> Dict[str, torch.Tensor]:
    """Get all backbone features as a dictionary from global context."""
    ...


@overload
def get_all_backbone_features(format: Literal["list"]) -> List[torch.Tensor]:
    """Get all backbone features as a list from global context."""
    ...


def get_all_backbone_features(
    format: Literal["dict", "list"] = "dict",
) -> Union[Dict[str, torch.Tensor], List[torch.Tensor]]:
    """
    Get all backbone features from global context.

    Args:
        format: Return format - "dict" (name -> tensor mapping) or
            "list" (tensors in insertion order).

    Returns:
        Features in the requested format.
    """
    manager = get_global_context()
    return manager.get_features(format)


@contextmanager
def collect_context(
    task_ids: Optional[List[int]] = None, save_collected: bool = True, **metadata
):
    """Module-level shortcut for GlobalContextManager.collect_context."""
    manager = get_global_context()
    with manager.collect_context(
        task_ids=task_ids, save_collected=save_collected, **metadata
    ) as ctx:
        yield ctx


@contextmanager
def task_context(
    task_ids: Optional[List[int]] = None,
    use_collected: bool = True,
    clear_collected: bool = True,
    **metadata,
):
    """Module-level shortcut for GlobalContextManager.task_context."""
    manager = get_global_context()
    with manager.task_context(
        task_ids=task_ids,
        use_collected=use_collected,
        clear_collected=clear_collected,
        **metadata,
    ) as ctx:
        yield ctx
