import logging
import numpy as np
import torch
from typing import Tuple, Any, Optional, Callable
from torch.utils.data import DataLoader

from learners.registry import register_memory_manager
from learners.interfaces import MemoryInterface


@register_memory_manager("standard")
class MemoryManager(MemoryInterface):
    """Fixed-budget exemplar memory for rehearsal-based continual learning.

    Exemplars are selected with iCaRL-style herding: at each step the sample
    that keeps the running mean of the selected features closest to the true
    class mean is added.

    Two budgeting modes:
      * ``fixed_memory=True``  -- ``memory_size`` is split evenly across all
        classes seen so far (the per-class quota shrinks as classes arrive,
        and old classes are trimmed down to the new quota).
      * ``fixed_memory=False`` -- every class keeps exactly
        ``memory_per_class`` samples (total memory grows with class count).
    """

    def __init__(
        self,
        memory_size: int = 2000,
        fixed_memory: bool = True,
        memory_per_class: Optional[int] = None,
    ):
        self.memory_size = memory_size
        self.fixed_memory = fixed_memory
        self.memory_per_class = memory_per_class
        # Stored samples and their integer class labels, kept in lockstep.
        # Both start as empty float arrays and are replaced wholesale on the
        # first append (see the concatenation guards below).
        self.data_memory = np.array([])
        self.targets_memory = np.array([])

    @property
    def exemplar_size(self) -> int:
        """Get the current number of exemplars in memory."""
        assert len(self.data_memory) == len(self.targets_memory), "Memory size mismatch"
        return len(self.targets_memory)

    def get_memory_per_class(self, total_classes: int) -> int:
        """Return the per-class exemplar quota for the current settings.

        Args:
            total_classes: Number of classes seen so far.

        Returns:
            Samples to keep per class.

        Raises:
            ValueError: when ``memory_per_class`` is inconsistent with
                ``fixed_memory`` (missing when required, or supplied when the
                quota must be derived from ``memory_size``).
        """
        if self.fixed_memory:
            if self.memory_per_class is not None:
                raise ValueError(
                    "Memory per class cannot be specified when fixed_memory is True."
                )
            # Even split of the global budget; zero classes means zero quota.
            return self.memory_size // total_classes if total_classes > 0 else 0
        if self.memory_per_class is None:
            raise ValueError(
                "Memory per class must be specified when fixed_memory is False."
            )
        return self.memory_per_class

    def build_exemplars(
        self,
        data_manager: Any,
        network: torch.nn.Module,
        extract_features_func: Callable,
        known_classes: int,
        total_classes: int,
        device: torch.device,
    ) -> None:
        """Update the exemplar memory after learning classes
        ``[known_classes, total_classes)``.

        Args:
            data_manager: Project data source exposing ``get_dataset``.
            network: Current model (passed through to the selection helpers).
            extract_features_func: Callable mapping a DataLoader to
                per-sample features.
            known_classes: Number of classes already in memory.
            total_classes: Total classes seen after the current task.
            device: Device for feature extraction (passed through).
        """
        per_class = self.get_memory_per_class(total_classes)

        if self.fixed_memory:
            # Shared-budget mode: rebuild memory under the (shrunken) quota.
            self._construct_exemplar_unified(
                data_manager,
                network,
                extract_features_func,
                known_classes,
                total_classes,
                per_class,
                device,
            )
        else:
            # Reduce old exemplars to maintain balanced memory
            self._reduce_exemplar(
                data_manager,
                network,
                extract_features_func,
                known_classes,
                per_class,
                device,
            )
            # Add new exemplars
            self._construct_exemplar(
                data_manager,
                network,
                extract_features_func,
                known_classes,
                total_classes,
                per_class,
                device,
            )

    def get_memory(self) -> Optional[Tuple[np.ndarray, np.ndarray]]:
        """Return ``(data, targets)`` arrays, or ``None`` if memory is empty."""
        if len(self.data_memory) == 0:
            return None
        return (self.data_memory, self.targets_memory)

    # Implementations of exemplar selection algorithms
    def _reduce_exemplar(
        self,
        data_manager,
        network,
        extract_features_func,
        known_classes,
        per_class,
        device,
    ):
        """Trim every old class to its first ``per_class`` stored samples.

        ``data_manager``, ``network``, ``extract_features_func`` and
        ``device`` are unused here; they are kept so all selection helpers
        share one signature.
        """
        logging.info(f"Reducing exemplars to {per_class} per class")

        dummy_data = self.data_memory.copy()
        dummy_targets = self.targets_memory.copy()
        self.data_memory, self.targets_memory = np.array([]), np.array([])

        # For each old class, keep only the first 'per_class' samples
        # (exemplars were stored in herding order, so the first ones are
        # the highest-priority picks).
        for class_idx in range(known_classes):
            mask = np.where(dummy_targets == class_idx)[0]
            dd, dt = dummy_data[mask][:per_class], dummy_targets[mask][:per_class]

            # First append replaces the empty placeholder array so the
            # stored dtype/shape comes from the data itself.
            self.data_memory = (
                np.concatenate((self.data_memory, dd))
                if len(self.data_memory) > 0
                else dd
            )
            self.targets_memory = (
                np.concatenate((self.targets_memory, dt))
                if len(self.targets_memory) > 0
                else dt
            )

    def _construct_exemplar(
        self,
        data_manager,
        network,
        extract_features_func,
        known_classes,
        total_classes,
        per_class,
        device,
    ):
        """Select up to ``per_class`` herding exemplars for each new class.

        ``network`` and ``device`` are unused here (features come from
        ``extract_features_func``); kept for interface symmetry.
        """
        logging.info(f"Constructing exemplars ({per_class} per class)")

        for class_idx in range(known_classes, total_classes):
            # Get data for this class
            data, targets, class_dataset = data_manager.get_dataset(
                np.arange(class_idx, class_idx + 1),
                source="train",
                mode="test",
                ret_data=True,
            )

            # Extract features
            class_loader = DataLoader(
                class_dataset,
                batch_size=128,
                shuffle=False,
                num_workers=4,
            )
            # NOTE(review): assumes extract_features_func returns a torch
            # tensor of per-sample features -- confirm against caller.
            vectors, _ = extract_features_func(class_loader)
            vectors = vectors.cpu().numpy()

            # L2-normalize features; epsilon guards zero-norm rows.
            vectors = vectors / (np.linalg.norm(vectors, axis=1, keepdims=True) + 1e-8)
            class_mean = np.mean(vectors, axis=0)

            # Herding selection: greedily pick the sample that keeps the
            # mean of the selected set closest to the class mean.
            selected_exemplars = []
            exemplar_vectors = []

            # Fix: never attempt to select more samples than the class has
            # (np.argmin over an empty candidate set would raise).
            n_select = min(per_class, len(vectors))
            for k in range(1, n_select + 1):
                S = np.sum(exemplar_vectors, axis=0) if exemplar_vectors else 0
                mu_p = (vectors + S) / k
                i = np.argmin(np.sqrt(np.sum((class_mean - mu_p) ** 2, axis=1)))

                selected_exemplars.append(np.array(data[i]))
                exemplar_vectors.append(np.array(vectors[i]))

                # Remove selected sample to avoid duplicates
                vectors = np.delete(vectors, i, axis=0)
                data = np.delete(data, i, axis=0)

            if not selected_exemplars:
                # Class contributed no samples; skip so we don't concatenate
                # an empty float array against image-shaped memory.
                continue

            # Add to memory
            selected_exemplars = np.array(selected_exemplars)
            # Fix: label count must match what was actually selected, which
            # may be fewer than per_class for small classes.
            exemplar_targets = np.full(len(selected_exemplars), class_idx)

            self.data_memory = (
                np.concatenate((self.data_memory, selected_exemplars))
                if len(self.data_memory) > 0
                else selected_exemplars
            )
            self.targets_memory = (
                np.concatenate((self.targets_memory, exemplar_targets))
                if len(self.targets_memory) > 0
                else exemplar_targets
            )

    def _construct_exemplar_unified(
        self,
        data_manager,
        network,
        extract_features_func,
        known_classes,
        total_classes,
        per_class,
        device,
    ):
        """Rebuild the whole memory under the shared per-class quota.

        Old classes are first trimmed to ``per_class`` samples, then new
        classes are filled with the same herding procedure, so the total
        stays within ``memory_size``.
        """
        logging.info(f"Constructing unified exemplars ({per_class} per class)")

        # Fix: previously old classes were never trimmed, so with a shared
        # budget the total memory grew past `memory_size` as the per-class
        # quota shrank. Trim them to the new quota first.
        self._reduce_exemplar(
            data_manager,
            network,
            extract_features_func,
            known_classes,
            per_class,
            device,
        )

        # Then construct exemplars for new classes using the same approach
        self._construct_exemplar(
            data_manager,
            network,
            extract_features_func,
            known_classes,
            total_classes,
            per_class,
            device,
        )
