"""
Evaluation Interface for Continual Learning

This interface defines methods for evaluating continual learning models,
including accuracy computation, nearest mean classifier evaluation, and metrics calculation.
"""

from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Tuple

import numpy as np
import torch
from torch.utils.data import DataLoader


class EvaluationManager(ABC):
    """Contract for evaluating continual-learning models.

    Concrete implementations supply two evaluation paths: the model's own
    CNN classification head, and a Nearest-Mean-of-Exemplars (NME)
    classifier driven by precomputed class means.
    """

    @abstractmethod
    def eval_task(self) -> Tuple[Dict[str, Any], Optional[Dict[str, Any]]]:
        """Evaluate the model on the current task.

        Returns:
            A ``(cnn_results, nme_results)`` pair; the NME entry may be
            ``None``.
        """
        ...

    @abstractmethod
    def compute_accuracy(self, model: torch.nn.Module, loader: DataLoader) -> float:
        """Compute the accuracy of ``model`` over ``loader``.

        Args:
            model: Model to evaluate.
            loader: Data loader supplying the evaluation batches.

        Returns:
            Accuracy as a percentage.
        """
        ...

    @abstractmethod
    def eval_cnn(self, loader: DataLoader) -> Tuple[np.ndarray, np.ndarray]:
        """Run evaluation with the CNN classifier.

        Args:
            loader: Data loader supplying the evaluation batches.

        Returns:
            A ``(predictions, ground_truth)`` pair of arrays.
        """
        ...

    @abstractmethod
    def eval_nme(
        self,
        loader: DataLoader,
        class_means: torch.Tensor
    ) -> Tuple[np.ndarray, np.ndarray]:
        """Run evaluation with the Nearest Mean Classifier.

        Args:
            loader: Data loader supplying the evaluation batches.
            class_means: Per-class mean representations.

        Returns:
            A ``(predictions, ground_truth)`` pair of arrays.
        """
        ...


class AccuracyCalculator(ABC):
    """Contract for computing the different flavors of accuracy metrics."""

    @abstractmethod
    def calculate_top1_accuracy(self, predictions: np.ndarray, targets: np.ndarray) -> float:
        """Return the top-1 accuracy of ``predictions`` against ``targets``."""
        ...

    @abstractmethod
    def calculate_topk_accuracy(self, predictions: np.ndarray, targets: np.ndarray, k: int = 5) -> float:
        """Return the top-``k`` accuracy of ``predictions`` against ``targets``."""
        ...

    @abstractmethod
    def calculate_grouped_accuracy(
        self,
        predictions: np.ndarray,
        targets: np.ndarray,
        known_classes: int,
        increment_size: int
    ) -> Dict[str, float]:
        """Compute accuracy broken down by task group.

        Args:
            predictions: Model predictions.
            targets: Ground-truth targets.
            known_classes: Number of classes from previous tasks.
            increment_size: Number of classes in the current task.

        Returns:
            Mapping from group label to its accuracy value.
        """
        ...


class MetricsEvaluator(ABC):
    """Contract for computing continual-learning transfer/forgetting metrics.

    Every method takes an accuracy matrix indexed as
    ``accuracy_matrix[task_i, eval_task_j]``.
    """

    @abstractmethod
    def compute_forgetting_measure(self, accuracy_matrix: np.ndarray) -> Tuple[float, np.ndarray]:
        """Compute the forgetting measure.

        Args:
            accuracy_matrix: Accuracies indexed ``[task_i, eval_task_j]``.

        Returns:
            A ``(average_forgetting, per_task_forgetting)`` pair.
        """
        ...

    @abstractmethod
    def compute_backward_transfer(self, accuracy_matrix: np.ndarray) -> Tuple[float, np.ndarray]:
        """Compute backward transfer.

        Args:
            accuracy_matrix: Accuracies indexed ``[task_i, eval_task_j]``.

        Returns:
            A ``(average_bwt, per_task_bwt)`` pair.
        """
        ...

    @abstractmethod
    def compute_forward_transfer(self, accuracy_matrix: np.ndarray) -> Tuple[float, np.ndarray]:
        """Compute forward transfer.

        Args:
            accuracy_matrix: Accuracies indexed ``[task_i, eval_task_j]``.

        Returns:
            A ``(average_fwt, per_task_fwt)`` pair.
        """
        ...

    @abstractmethod
    def compute_average_accuracy(self, accuracy_matrix: np.ndarray) -> float:
        """Return the average accuracy across all tasks."""
        ...


class CovarianceAnalyzer(ABC):
    """Interface for analyzing feature covariance matrices"""

    @abstractmethod
    def compute_robust_covariance(
        self,
        vectors: torch.Tensor,
        mean_vec: torch.Tensor,
        # Fix: use typing.List for consistency with the file's other
        # annotations (Dict/Tuple/Optional), which all use typing generics.
        regularization_methods: List[str]
    ) -> torch.Tensor:
        """
        Compute robust positive definite covariance matrix

        Args:
            vectors: Feature vectors
            mean_vec: Mean vector for the class
            regularization_methods: List of regularization methods

        Returns:
            Positive definite covariance matrix
        """
        pass

    @abstractmethod
    def validate_covariance_matrix(
        self,
        cov_matrix: torch.Tensor,
        class_idx: int
    ) -> bool:
        """
        Validate covariance matrix properties

        Args:
            cov_matrix: Covariance matrix to validate
            class_idx: Class index for logging

        Returns:
            True if valid, False otherwise
        """
        pass

    @abstractmethod
    def is_positive_definite(
        self,
        matrix: torch.Tensor,
        tolerance: float = 1e-8
    ) -> bool:
        """Check if matrix is positive definite"""
        pass


class DistanceCalculator(ABC):
    """Contract for distance computations in feature space."""

    @abstractmethod
    def euclidean_distance(self, features: torch.Tensor, centroids: torch.Tensor) -> torch.Tensor:
        """Return the Euclidean distance from ``features`` to ``centroids``."""
        ...

    @abstractmethod
    def mahalanobis_distance(
        self,
        features: torch.Tensor,
        mean: torch.Tensor,
        cov_inv: torch.Tensor
    ) -> torch.Tensor:
        """Return the Mahalanobis distance of ``features`` from ``mean``
        under the inverse covariance ``cov_inv``."""
        ...

    @abstractmethod
    def cosine_distance(self, features: torch.Tensor, centroids: torch.Tensor) -> torch.Tensor:
        """Return the cosine distance from ``features`` to ``centroids``."""
        ...


class ResultLogger(ABC):
    """Contract for recording and persisting evaluation results."""

    @abstractmethod
    def log_task_results(self, task_id: int, results: Dict[str, Any]) -> None:
        """Record the results produced for task ``task_id``."""
        ...

    @abstractmethod
    def log_final_results(self, all_results: Dict[str, Any]) -> None:
        """Record the final aggregated results."""
        ...

    @abstractmethod
    def save_results(self, filepath: str, results: Dict[str, Any]) -> None:
        """Persist ``results`` to the file at ``filepath``."""
        ...
