"""
Base classifier implementations with improved design following DRY principle.
"""

import torch
import torch.nn as nn
import torch.nn.functional as F
from abc import ABC, abstractmethod
from typing import Dict, Any, Type
import logging


class BaseClassifier(nn.Module, ABC):
    """
    Abstract base class for all classifiers following DRY principle.

    Subclasses supply the classifier-specific parameters, weight
    initialization, and logit computation via the three abstract hooks;
    this base owns the shared class-weight matrix and the optional
    input LayerNorm.
    """

    def __init__(
        self, embed_dim: int, nb_classes: int, with_norm: bool = False, **kwargs: Any
    ):
        super().__init__()
        self.embed_dim = embed_dim
        self.nb_classes = nb_classes
        self.with_norm = with_norm

        # One weight row per class.
        self.weight = nn.Parameter(torch.randn(self.nb_classes, self.embed_dim))

        # Optional input normalization over the embedding dimension.
        self.norm = nn.LayerNorm(self.embed_dim) if self.with_norm else None
        if self.norm is not None:
            self.norm.reset_parameters()

        self._init_classifier_specific_params(**kwargs)
        self._init_weights()

    @abstractmethod
    def _init_classifier_specific_params(self, **kwargs: Any) -> None:
        """Initialize classifier-specific parameters."""

    def _init_weights(self) -> None:
        """Initialize weights using truncated normal distribution."""
        nn.init.trunc_normal_(self.weight, std=0.02)
        self._init_classifier_specific_weights()

    @abstractmethod
    def _init_classifier_specific_weights(self) -> None:
        """Initialize classifier-specific weights."""

    def _apply_normalization(self, logits: torch.Tensor) -> torch.Tensor:
        """Run the optional LayerNorm; identity when normalization is disabled."""
        return logits if self.norm is None else self.norm(logits)

    @abstractmethod
    def _compute_logits(self, normalized_input: torch.Tensor) -> torch.Tensor:
        """Compute classifier-specific logits."""

    def forward(self, logits: torch.Tensor) -> Dict[str, torch.Tensor]:
        """
        Forward pass through the classifier.

        Args:
            logits: Input tensor of shape [B, D] where D is the embedding dimension

        Returns:
            Dict containing the computed logits under the "logits" key
        """
        features = self._apply_normalization(logits)
        return {"logits": self._compute_logits(features)}

    # --- Lifecycle hooks: no-ops here, overridable by subclasses. ---

    def after_medium(self):
        pass

    def before_train(self):
        pass

    def after_train(self):
        pass

    def before_task(self):
        pass

    def after_task(self):
        pass


class CosineClf(BaseClassifier):
    """
    Cosine similarity-based classifier with optional temperature scaling.
    """

    def _init_classifier_specific_params(self, **kwargs: Any) -> None:
        """Initialize the per-dimension temperature scale for the class weights.

        Accepts the correctly spelled ``fc_temperature`` key as well as the
        legacy misspelled ``fc_temperture`` key (kept for backward
        compatibility); either truthy value makes the temperature trainable.
        """
        trainable = bool(
            kwargs.get("fc_temperature", kwargs.get("fc_temperture", False))
        )

        # One scale factor per embedding dimension, applied to the
        # L2-normalized class weights.
        self.temperature = nn.Parameter(torch.ones(self.embed_dim).float())
        self.temperature.requires_grad = trainable

    def _init_classifier_specific_weights(self) -> None:
        """Initialize temperature weights if trainable."""
        if self.temperature.requires_grad:
            nn.init.ones_(self.temperature)

    def _compute_logits(self, normalized_input: torch.Tensor) -> torch.Tensor:
        """
        Compute cosine similarity with temperature scaling.

        Args:
            normalized_input: [B, D] normalized input features

        Returns:
            Cosine similarity logits scaled by temperature
        """
        # Normalize input features
        input_norm = F.normalize(normalized_input, p=2, dim=-1)

        # Normalize and scale weights
        weight_norm = F.normalize(self.weight, p=2, dim=-1) * self.temperature

        # Compute cosine similarity
        return F.linear(input_norm, weight_norm)


class CosineNormClf(BaseClassifier):
    """
    Cosine similarity-based classifier with normalization and statistics tracking.

    During training a running mean/variance of the cosine logits is
    accumulated (one update per batch); during inference the logits are
    standardized with those statistics.

    NOTE(review): the running statistics are plain attributes, not registered
    buffers, so they are not saved in ``state_dict`` and are not moved by
    ``.to(device)`` — confirm this matches checkpointing expectations.
    """

    def _init_classifier_specific_params(self, **kwargs: Any) -> None:
        """Initialize the temperature scale and running-statistics state.

        Accepts both ``fc_temperature`` and the legacy misspelled
        ``fc_temperture`` key for enabling a trainable temperature.
        """
        trainable = bool(
            kwargs.get("fc_temperature", kwargs.get("fc_temperture", False))
        )

        self.temperature = nn.Parameter(torch.ones(self.embed_dim).float())
        self.temperature.requires_grad = trainable

        # Running statistics of training-time logits. Start as floats so the
        # variance accumulator is never an integer-typed tensor.
        self._mean = 0.0
        self._var = torch.tensor(0.0)
        self._total = 0  # number of batches folded into the running stats

    def _init_classifier_specific_weights(self) -> None:
        """Initialize temperature weights if trainable."""
        if self.temperature.requires_grad:
            nn.init.ones_(self.temperature)

    def _compute_logits(self, normalized_input: torch.Tensor) -> torch.Tensor:
        """
        Compute cosine similarity with temperature scaling and normalize logits
        during inference using tracked statistics.

        Args:
            normalized_input: [B, D] normalized input features

        Returns:
            Cosine similarity logits; standardized with the running
            mean/variance when in eval mode and statistics are available.
        """
        # Normalize input features
        input_norm = F.normalize(normalized_input, p=2, dim=-1)

        # Normalize and scale weights
        weight_norm = F.normalize(self.weight, p=2, dim=-1) * self.temperature

        # Compute cosine similarity
        dists = F.linear(input_norm, weight_norm)

        if self.training:
            # Fold this batch's statistics into the running mean/variance
            # (simple average over batches seen so far). Only computed when
            # training — the values were unused in eval mode.
            mean = dists.mean().detach()
            var = dists.var().detach()
            self._total += 1
            self._mean = (self._mean * (self._total - 1) + mean) / self._total
            self._var = (self._var * (self._total - 1) + var) / self._total
        elif self._total > 0:
            # Standardize with the tracked statistics. Guarded so an
            # untrained model doesn't divide by sqrt(0 + 1e-6), which would
            # blow logits up by ~1000x.
            dists = (dists - self._mean) / (torch.sqrt(self._var + 1e-6))

        return dists


class CosineAugClf(BaseClassifier):
    """
    Augmented cosine similarity-based classifier with pairwise class weights.

    Holds one weight row per class plus one per unordered class pair; the
    extra rows are discarded by :meth:`after_train`.
    """

    def __init__(
        self, embed_dim: int, nb_classes: int, with_norm: bool = False, **kwargs: Any
    ):
        # Deliberately skip BaseClassifier.__init__ (it would size ``weight``
        # as [nb_classes, embed_dim]); call nn.Module.__init__ directly and
        # rebuild the shared state with the augmented weight shape.
        super(BaseClassifier, self).__init__()
        self.embed_dim = embed_dim
        self.nb_classes = nb_classes
        self.with_norm = with_norm

        # One row per class plus one per unordered class pair:
        # n + n(n-1)/2 rows in total.
        nb_weights: int = nb_classes * (nb_classes - 1) // 2 + nb_classes
        self.weight = nn.Parameter(torch.randn(nb_weights, self.embed_dim))

        # Optional normalization layer
        if self.with_norm:
            self.norm = nn.LayerNorm(self.embed_dim)
            self.norm.reset_parameters()
        else:
            self.norm = None

        self._init_classifier_specific_params(**kwargs)
        self._init_weights()

    def _init_classifier_specific_params(self, **kwargs: Any) -> None:
        """Initialize the temperature scale for cosine similarity.

        Accepts both ``fc_temperature`` and the legacy misspelled
        ``fc_temperture`` key for enabling a trainable temperature.
        """
        trainable = bool(
            kwargs.get("fc_temperature", kwargs.get("fc_temperture", False))
        )

        self.temperature = nn.Parameter(torch.ones(self.embed_dim).float())
        self.temperature.requires_grad = trainable

    def _init_classifier_specific_weights(self) -> None:
        """Initialize temperature weights if trainable."""
        if self.temperature.requires_grad:
            nn.init.ones_(self.temperature)

    def _compute_logits(self, normalized_input: torch.Tensor) -> torch.Tensor:
        """
        Compute cosine similarity with temperature scaling for augmented weights.

        Args:
            normalized_input: [B, D] normalized input features

        Returns:
            Cosine similarity logits using pairwise class weights
        """
        # Normalize
        input_norm = F.normalize(normalized_input, p=2, dim=-1)
        weight_norm = F.normalize(self.weight, p=2, dim=-1) * self.temperature

        # Compute cosine similarity
        return F.linear(input_norm, weight_norm)

    def after_train(self):
        """
        Discard the pairwise augmentation rows after training, keeping only
        the first ``nb_classes`` per-class weight rows.
        """
        # Record the row count BEFORE truncating: the previous log message
        # computed it after the slice, so it always printed nb_classes.
        original_rows = self.weight.data.shape[0]
        self.weight.data = self.weight.data[: self.nb_classes]
        logger = logging.getLogger(__name__)
        logger.info(
            "Weight shape reduced from %d to %d after training",
            original_rows,
            self.nb_classes,
        )


class LinearClf(BaseClassifier):
    """
    Linear classifier with optional bias term.
    """

    def _init_classifier_specific_params(self, **kwargs: Any) -> None:
        """Create the bias parameter unless ``fc_bias`` is explicitly disabled."""
        if kwargs.get("fc_bias", True):
            self.bias = nn.Parameter(torch.zeros(self.nb_classes))
        else:
            # Register a None placeholder so ``self.bias`` always exists.
            self.register_parameter("bias", None)

    def _init_classifier_specific_weights(self) -> None:
        """Zero the bias when present; nothing to do otherwise."""
        if self.bias is None:
            return
        nn.init.constant_(self.bias, 0)

    def _compute_logits(self, normalized_input: torch.Tensor) -> torch.Tensor:
        """
        Affine projection of the input features onto class scores.

        Args:
            normalized_input: [B, D] normalized input features

        Returns:
            Linear transformation results
        """
        return F.linear(normalized_input, self.weight, self.bias)


# Registry of available classifier types.
# Maps lowercase classifier names (as accepted by ``build_base_classifier``)
# to their implementing classes.
CLASSIFIER_REGISTRY: Dict[str, Type[BaseClassifier]] = {
    "cosine": CosineClf,
    "cosine_norm": CosineNormClf,
    "cosine_aug": CosineAugClf,
    "linear": LinearClf,
}


def build_base_classifier(
    name: str, in_features: int, out_features: int, **kwargs: Any
) -> BaseClassifier:
    """
    Create a classifier instance based on the specified type.

    Args:
        name: Classifier type name ('cosine', 'cosine_norm', 'cosine_aug', 'linear');
            matched case-insensitively
        in_features: Input feature dimension
        out_features: Output feature dimension
        **kwargs: Additional keyword arguments, including:
            - with_norm: Whether to use normalization layer
            - fc_temperture: Whether to use temperature scaling (for cosine classifiers)
            - fc_bias: Whether to use bias (for LinearClf)

    Returns:
        BaseClassifier: Instantiated classifier of the requested type

    Raises:
        ValueError: If ``name`` is not a registered classifier type.
    """
    key = name.lower()
    if key in CLASSIFIER_REGISTRY:
        return CLASSIFIER_REGISTRY[key](in_features, out_features, **kwargs)
    raise ValueError(
        f"Unknown classifier type: {key}. Available types: {list(CLASSIFIER_REGISTRY.keys())}"
    )
