import torch
import numpy as np
import logging
import os
from typing import Any, Tuple, List, Optional, Callable
from torch.utils.data import DataLoader

from registry import register_prototype_manager
from learners.interfaces import PrototypeInterface


@register_prototype_manager("standard")
class PrototypeManager(PrototypeInterface):
    """
    Manages class prototypes (means and covariances) for probabilistic classification.

    This class handles computation, storage and usage of class statistics (means and
    covariances) for creating multivariate Gaussian distributions that represent classes
    in feature space.
    """

    def __init__(
        self,
        feature_dim: int,
        regularization_epsilon: float = 1e-6,
        fallback_epsilon: float = 1e-4,
    ) -> None:
        """
        Initialize prototype manager.

        Args:
            feature_dim: Dimension of feature vectors
            regularization_epsilon: Small value added to covariance diagonal for stability
            fallback_epsilon: Value used when Cholesky decomposition fails

        Raises:
            ValueError: If feature_dim is not positive
        """
        if feature_dim <= 0:
            raise ValueError(f"Feature dimension must be positive, got {feature_dim}")

        self._feature_dim: int = feature_dim
        self._gaussian_distributions: List[torch.distributions.MultivariateNormal] = []
        self._regularization_epsilon = regularization_epsilon
        self._fallback_epsilon = fallback_epsilon
        self._num_classes = 0
        self._is_fitted = False

        # BUG FIX: the original code called torch.zeros(dtype=torch.float64) with no
        # size argument, which raises TypeError, so __init__ could never complete.
        # The rest of the class (the class_means/class_covs properties and
        # _initialize_statistics_tensors) checks these attributes against None, so
        # None is the intended "not yet computed" sentinel.
        self._class_means: Optional[torch.Tensor] = None
        self._class_covs: Optional[torch.Tensor] = None

    @property
    def feature_dim(self) -> int:
        """Get feature dimension."""
        return self._feature_dim

    @property
    def num_classes(self) -> int:
        """Get number of classes."""
        return self._num_classes

    @property
    def is_fitted(self) -> bool:
        """Check if distributions have been created."""
        return self._is_fitted

    @property
    def class_means(self) -> torch.Tensor:
        """Get copy of class means tensor (read-only).

        Raises:
            RuntimeError: If statistics have not been computed yet.
        """
        if self._class_means is None:
            raise RuntimeError("Class means have not been computed yet")
        # Return a clone so callers cannot mutate internal state.
        return self._class_means.clone()

    @property
    def class_covs(self) -> torch.Tensor:
        """Get copy of class covariances tensor (read-only).

        Raises:
            RuntimeError: If statistics have not been computed yet.
        """
        if self._class_covs is None:
            raise RuntimeError("Class covariances have not been computed yet")
        # Return a clone so callers cannot mutate internal state.
        return self._class_covs.clone()

    def fit(
        self,
        data_manager: Any,
        extract_tokens_fn: Callable,
        known_classes: int = 0,
        total_classes: int = 0,
        batch_size: int = 128,
        num_workers: int = 4,
        device: Optional[torch.device] = None,
    ) -> "PrototypeManager":
        """
        Compute class statistics and create distributions in one step.

        Args:
            data_manager: Data manager containing dataset
            extract_tokens_fn: Function to extract feature vectors from dataloader
            known_classes: Number of previously known classes
            total_classes: Total number of classes including current task
            batch_size: Batch size for data loading
            num_workers: Number of worker processes for data loading
            device: Device to perform computation on (auto-detected if None)

        Returns:
            self for method chaining

        Raises:
            ValueError: If any of the fit parameters are invalid
        """
        self._validate_fit_params(known_classes, total_classes, batch_size, num_workers)

        # Auto-detect device if not specified
        if device is None:
            device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        self._compute_class_statistics(
            data_manager,
            extract_tokens_fn,
            known_classes,
            total_classes,
            device,
            batch_size,
            num_workers,
        )
        self._create_gaussian_distributions()
        self._is_fitted = True
        return self

    def _validate_fit_params(
        self,
        known_classes: int,
        total_classes: int,
        batch_size: int,
        num_workers: int,
    ) -> None:
        """Validate parameters for fit method, raising ValueError on any violation."""
        if total_classes <= known_classes:
            raise ValueError(
                f"total_classes ({total_classes}) must be greater than known_classes ({known_classes})"
            )

        if batch_size <= 0:
            raise ValueError(f"batch_size must be positive, got {batch_size}")

        if num_workers < 0:
            raise ValueError(f"num_workers must be non-negative, got {num_workers}")

    def _compute_class_statistics(
        self,
        data_manager: Any,
        extract_tokens_fn: Callable[[DataLoader], Tuple[torch.Tensor, torch.Tensor]],
        known_classes: int,
        total_classes: int,
        device: torch.device,
        batch_size: int = 128,
        num_workers: int = 4,
    ) -> None:
        """
        Compute class means and covariances for all new classes.

        Statistics for classes below ``known_classes`` are preserved; only classes
        in ``[known_classes, total_classes)`` are (re)computed.

        Args:
            data_manager: Data manager for accessing dataset
            extract_tokens_fn: Function to extract tokens from a dataloader
            known_classes: Number of previously known classes
            total_classes: Total number of classes including current task
            device: Device to perform computation on
            batch_size: Batch size for data loading
            num_workers: Number of worker processes for data loading
        """
        logging.info(
            f"Computing class statistics for classes {known_classes}-{total_classes-1}"
        )
        self._num_classes = total_classes

        # Initialize or expand tensors
        self._initialize_statistics_tensors(known_classes, total_classes)

        # Process each class
        for class_idx in range(known_classes, total_classes):
            self._compute_statistics_for_class(
                class_idx,
                data_manager,
                extract_tokens_fn,
                batch_size,
                num_workers,
                device,
            )

    def _initialize_statistics_tensors(
        self, known_classes: int, total_classes: int
    ) -> None:
        """Initialize or expand tensors for class statistics.

        On the first task the tensors are created from scratch; on later tasks
        they are grown to ``total_classes`` rows while copying over the
        statistics of the first ``known_classes`` classes.
        """
        if self._class_means is not None and self._class_covs is not None:
            # Expand existing tensors, preserving previously computed statistics.
            new_class_means = torch.zeros(
                (total_classes, self._feature_dim), dtype=torch.float64
            )
            new_class_means[:known_classes] = self._class_means
            self._class_means = new_class_means

            new_class_covs = torch.zeros(
                (total_classes, self._feature_dim, self._feature_dim),
                dtype=torch.float64,
            )
            new_class_covs[:known_classes] = self._class_covs
            self._class_covs = new_class_covs
        else:
            # Create new tensors
            self._class_means = torch.zeros(
                (total_classes, self._feature_dim), dtype=torch.float64
            )
            self._class_covs = torch.zeros(
                (total_classes, self._feature_dim, self._feature_dim),
                dtype=torch.float64,
            )

    def _compute_statistics_for_class(
        self,
        class_idx: int,
        data_manager: Any,
        extract_tokens_fn: Callable,
        batch_size: int,
        num_workers: int,
        device: torch.device,
    ) -> None:
        """Compute mean and regularized covariance for a single class.

        Falls back to zero mean / epsilon-scaled identity covariance when the
        class has no samples, statistics contain NaNs, or extraction fails.
        """
        try:
            # Get dataset for this class
            class_dataset = data_manager.get_dataset(
                np.arange(class_idx, class_idx + 1),
                source="train",
                mode="test",
                ret_data=True,
            )

            # Create dataloader
            class_loader = DataLoader(
                class_dataset,
                batch_size=batch_size,
                shuffle=False,
                num_workers=num_workers,
            )

            # Extract features; statistics are accumulated on CPU in float64
            # for numerical stability.
            vectors, _ = extract_tokens_fn(class_loader)
            vectors = vectors.to("cpu", torch.float64)

            if vectors.shape[0] == 0:
                self._handle_empty_class(class_idx)
                return

            # Compute mean
            class_mean = torch.mean(vectors, dim=0)

            # Compute sample covariance (unbiased; max(..., 1) guards the
            # single-sample case against division by zero).
            centered = vectors - class_mean
            cov_mat = torch.matmul(centered.T, centered) / max(vectors.shape[0] - 1, 1)
            cov_mat = (cov_mat + cov_mat.T) / 2  # Ensure symmetry
            class_cov = (
                cov_mat
                + torch.eye(class_mean.shape[-1], dtype=torch.float64)
                * self._regularization_epsilon
            )  # Add regularization

            if torch.isnan(class_mean).any() or torch.isnan(cov_mat).any():
                self._handle_nan_statistics(class_idx)
            else:
                self._class_means[class_idx, :] = class_mean.cpu()
                self._class_covs[class_idx, ...] = class_cov.cpu()

        except Exception as e:
            logging.error(f"Error computing statistics for class {class_idx}: {e}")
            self._handle_empty_class(class_idx)

    def _set_fallback_statistics(self, class_idx: int) -> None:
        """Write fallback statistics (zero mean, epsilon identity covariance)
        for a class whose real statistics could not be computed."""
        self._class_means[class_idx, :] = torch.zeros(
            self._feature_dim, dtype=torch.float64
        )
        self._class_covs[class_idx, ...] = (
            torch.eye(self._feature_dim, dtype=torch.float64)
            * self._regularization_epsilon
        )

    def _handle_empty_class(self, class_idx: int) -> None:
        """Handle case of empty class or computation failure."""
        logging.warning(
            f"No valid samples for class {class_idx}, using fallback values"
        )
        self._set_fallback_statistics(class_idx)

    def _handle_nan_statistics(self, class_idx: int) -> None:
        """Handle NaN values in computed statistics."""
        logging.warning(
            f"NaN values detected for class {class_idx}, using fallback values"
        )
        self._set_fallback_statistics(class_idx)

    def _create_gaussian_distributions(self) -> None:
        """Create a MultivariateNormal distribution object for each class.

        Covariances are symmetrized and reconstructed from their Cholesky
        factor to guarantee positive definiteness; a wide isotropic Gaussian
        is used as a last-resort fallback for any class that still fails.

        Raises:
            RuntimeError: If class statistics have not been computed yet.
        """
        if self._class_means is None or self._class_covs is None:
            raise RuntimeError(
                "Cannot create distributions: class statistics not computed"
            )

        self._gaussian_distributions = []

        for class_idx in range(self._class_means.shape[0]):
            try:
                class_mean = self._class_means[class_idx].clone().cpu()
                class_cov = self._class_covs[class_idx].clone().cpu()

                # Ensure covariance matrix is symmetric
                cov_matrix = (class_cov + class_cov.T) / 2

                try:
                    # Try Cholesky decomposition
                    L = torch.linalg.cholesky(cov_matrix)
                except Exception:
                    # If Cholesky fails, nudge the diagonal and retry
                    logging.warning(
                        f"Cholesky decomposition failed for class {class_idx}"
                    )
                    cov_matrix_pd = cov_matrix + self._fallback_epsilon * torch.eye(
                        class_mean.size(-1)
                    )
                    L = torch.linalg.cholesky(cov_matrix_pd)

                # Reconstruct covariance to ensure positive definiteness
                class_cov = L @ L.T

                # Create distribution
                dist = torch.distributions.MultivariateNormal(class_mean, class_cov)
                self._gaussian_distributions.append(dist)
            except Exception as e:
                logging.error(
                    f"Failed to create distribution for class {class_idx}: {e}"
                )
                # Create a simple gaussian with high variance as fallback
                fallback_cov = torch.eye(self._feature_dim, dtype=torch.float64) * (
                    1.0 + self._fallback_epsilon
                )
                fallback_mean = torch.zeros(self._feature_dim, dtype=torch.float64)
                dist = torch.distributions.MultivariateNormal(
                    fallback_mean, fallback_cov
                )
                self._gaussian_distributions.append(dist)

    def predict_log_proba(self, features: torch.Tensor) -> torch.Tensor:
        """
        Compute log probabilities for input samples.

        Args:
            features: Feature vectors of shape (batch_size, feature_dim)

        Returns:
            Log probabilities of shape (batch_size, n_classes)

        Raises:
            RuntimeError: If the manager has not been fitted
            TypeError: If features is not a torch.Tensor
            ValueError: If features has the wrong shape
        """
        self._check_is_fitted()
        self._validate_features(features)

        # Move to CPU and cast to double to match the stored distributions
        features = features.cpu().to(torch.float64)

        # Compute log probabilities for each class
        batch_size = features.shape[0]
        log_probs = torch.empty((batch_size, self._num_classes), dtype=torch.float64)

        for i, dist in enumerate(self._gaussian_distributions):
            try:
                log_probs[:, i] = dist.log_prob(features)
            except Exception as e:
                logging.error(f"Error computing log probabilities for class {i}: {e}")
                # -inf ensures a failing class can never win the argmax
                log_probs[:, i] = torch.full(
                    (batch_size,), float("-inf"), dtype=torch.float64
                )

        return log_probs

    def predict_proba(self, features: torch.Tensor) -> torch.Tensor:
        """
        Compute probabilities for input samples.

        Args:
            features: Feature vectors of shape (batch_size, feature_dim)

        Returns:
            Probabilities of shape (batch_size, n_classes)
        """
        log_probs = self.predict_log_proba(features)

        # Numerically stable conversion from log probabilities to probabilities
        # (softmax with max-subtraction to avoid overflow)
        max_log_probs = torch.max(log_probs, dim=1, keepdim=True)[0]
        exp_log_probs = torch.exp(log_probs - max_log_probs)
        probs = exp_log_probs / exp_log_probs.sum(dim=1, keepdim=True)

        return probs

    def predict(self, features: torch.Tensor) -> torch.Tensor:
        """
        Predict class labels for input samples.

        Args:
            features: Feature vectors of shape (batch_size, feature_dim)

        Returns:
            Predicted class indices of shape (batch_size,)
        """
        log_probs = self.predict_log_proba(features)
        return torch.argmax(log_probs, dim=1)

    def _check_is_fitted(self) -> None:
        """Raise RuntimeError if fit() has not been called successfully."""
        if not self._is_fitted:
            raise RuntimeError(
                "This PrototypeManager instance is not fitted yet. "
                "Call 'fit' with appropriate arguments before using this estimator."
            )

    def _validate_features(self, features: torch.Tensor) -> None:
        """Validate that features is a 2D tensor with the expected feature dim."""
        if not isinstance(features, torch.Tensor):
            raise TypeError(f"features must be torch.Tensor, got {type(features)}")

        if features.dim() != 2:
            raise ValueError(f"features must be 2D tensor, got shape {features.shape}")

        if features.shape[1] != self._feature_dim:
            raise ValueError(
                f"features dimension mismatch: expected {self._feature_dim}, got {features.shape[1]}"
            )

    def save(self, path: str) -> None:
        """
        Save model to disk.

        Args:
            path: Path to save the model

        Raises:
            Exception: Re-raises any error from torch.save after logging it
        """
        os.makedirs(
            os.path.dirname(path) if os.path.dirname(path) else ".", exist_ok=True
        )

        state_dict = {
            "feature_dim": self._feature_dim,
            "class_means": self._class_means,
            "class_covs": self._class_covs,
            "regularization_epsilon": self._regularization_epsilon,
            "fallback_epsilon": self._fallback_epsilon,
            "num_classes": self._num_classes,
            "is_fitted": self._is_fitted,
        }

        try:
            torch.save(state_dict, path)
            logging.info(f"Model saved to {path}")
        except Exception as e:
            logging.error(f"Failed to save model to {path}: {e}")
            raise

    @classmethod
    def load(cls, path: str) -> "PrototypeManager":
        """
        Load model from disk.

        Args:
            path: Path to load the model from

        Returns:
            Loaded PrototypeManager instance

        Raises:
            Exception: Re-raises any error from torch.load / restoration after logging
        """
        try:
            state_dict = torch.load(path)

            # Create instance (.get() keeps compatibility with older checkpoints
            # that lack the epsilon fields)
            instance = cls(
                feature_dim=state_dict["feature_dim"],
                regularization_epsilon=state_dict.get("regularization_epsilon", 1e-6),
                fallback_epsilon=state_dict.get("fallback_epsilon", 1e-4),
            )

            # Restore state
            instance._class_means = state_dict["class_means"]
            instance._class_covs = state_dict["class_covs"]
            instance._num_classes = state_dict.get(
                "num_classes", instance._class_means.shape[0]
            )
            instance._is_fitted = state_dict.get("is_fitted", False)

            # Recreate distributions if fitted (distributions themselves are
            # not serialized; they are rebuilt from the saved statistics)
            if instance._is_fitted:
                instance._create_gaussian_distributions()

            logging.info(f"Model loaded from {path}")
            return instance
        except Exception as e:
            logging.error(f"Failed to load model from {path}: {e}")
            raise

    def get_distribution(
        self, class_idx: int
    ) -> torch.distributions.MultivariateNormal:
        """
        Get distribution for specific class.

        Args:
            class_idx: Class index

        Returns:
            Multivariate normal distribution for that class

        Raises:
            RuntimeError: If the manager has not been fitted
            ValueError: If class_idx is out of range
        """
        self._check_is_fitted()

        if class_idx < 0 or class_idx >= self._num_classes:
            raise ValueError(
                f"class_idx out of range [0, {self._num_classes-1}]: {class_idx}"
            )

        return self._gaussian_distributions[class_idx]

    def __repr__(self) -> str:
        """Get string representation of the model."""
        status = "fitted" if self._is_fitted else "not fitted"
        return (
            f"PrototypeManager(feature_dim={self._feature_dim}, "
            f"num_classes={self._num_classes}, {status})"
        )
