"""
Single backbone network implementations.

This module provides implementations of models that use a single backbone.
"""

import logging
from typing import Dict, List, Optional, Tuple, Union, Any

import torch
import torch.nn as nn
import torch.nn.functional as F

from models.config import ModelConfig
from models.base import BaseModel
from models.interfaces import BackboneBase
from models.components.backbone.factory import BackboneFactory
from models.registry.base import register_model

logger = logging.getLogger(__name__)


class BaseSingle(BaseModel):
    """Single-backbone model: one feature extractor plus per-task linear heads.

    Features come from a single backbone; each incremental task gets its own
    ``nn.Linear`` head, collected in ``self.classifiers``.
    """

    def __init__(self, config: ModelConfig) -> None:
        """Set up the backbone, the first classifier head, and task bookkeeping.

        Args:
            config: Model configuration
        """
        super().__init__(config)

        # Backbone and its output dimensionality.
        self.backbone = BackboneFactory.create(config)
        self.feature_dim = self.backbone.get_output_dim()

        # Snapshot of the relevant configuration fields.
        self.fc_func = config.fc_func
        self.init_cls = config.init_cls
        self.inc_cls = config.inc_cls
        self.cam_visual = config.cam_visual  # emit attention maps during training

        # Head for the initial task.
        self.num_classes = config.init_cls
        self.classifier = nn.Linear(self.feature_dim, self.num_classes)

        # Per-task class counts, classifier heads, and the active task index.
        self.task_cls = [self.init_cls]
        self.classifiers = nn.ModuleList([self.classifier])
        self.current_task = 0

        logger.info(f"Created BaseSingle model with {self.feature_dim} feature dimension")

    def forward(self, x: torch.Tensor, task_id: Optional[int] = None) -> Dict[str, torch.Tensor]:
        """Run the backbone and the selected task head.

        Args:
            x: Input tensor of shape [B, C, H, W]
            task_id: Which task head to apply; defaults to the current task.

        Returns:
            Dict with 'features' (backbone output) and 'logits' (head output);
            'cam' is added when attention visualisation is enabled in training.
        """
        features = self.get_features(x)

        # Resolve which head to use; clamp over-large indices to the last head.
        if task_id is None:
            task = self.current_task
        else:
            task = task_id
        if task >= len(self.classifiers):
            task = len(self.classifiers) - 1
            logger.warning(f"Requested task_id {task_id} out of range, using task {task}")

        head = self.classifiers[task]

        if self.fc_func == 'cosine':
            # Cosine head: logits are cosine similarities between L2-normalised
            # features and L2-normalised weight rows (the head bias is unused).
            logits = F.linear(
                F.normalize(features, p=2, dim=1),
                F.normalize(head.weight, p=2, dim=1),
            )
        else:
            logits = head(features)

        output = {'features': features, 'logits': logits}

        # Optionally attach class activation maps for interpretability; only
        # backbones exposing get_attention_maps can supply them.
        if self.cam_visual and self.training:
            if hasattr(self.backbone, 'get_attention_maps'):
                output['cam'] = self.backbone.get_attention_maps(x)

        return output

    def get_features(self, x: torch.Tensor, task_id: Optional[int] = None) -> torch.Tensor:
        """Extract backbone features.

        Args:
            x: Input tensor of shape [B, C, H, W]
            task_id: Accepted for interface compatibility; ignored here.

        Returns:
            Feature tensor of shape [B, feature_dim]
        """
        return self.backbone(x)

    def add_task(self, num_classes: int) -> None:
        """Register a new task by appending a fresh classifier head.

        Args:
            num_classes: Number of classes in the new task
        """
        self.task_cls.append(num_classes)
        head = nn.Linear(self.feature_dim, num_classes).to(self.get_device())
        self.classifiers.append(head)
        self.current_task += 1

        logger.info(f"Added task {self.current_task} with {num_classes} classes")

    def after_task(self) -> None:
        """Hook run after a task finishes; the base model has nothing to do."""
        pass

    def get_param_groups(self) -> List[Dict[str, Any]]:
        """Build optimizer parameter groups: slow backbone, fast classifiers.

        Returns:
            List of parameter groups with per-group learning-rate multipliers
        """
        return [
            {'params': list(self.backbone.parameters()), 'lr_mult': 0.1},  # Lower learning rate for backbone
            {'params': list(self.classifiers.parameters()), 'lr_mult': 1.0},  # Higher learning rate for classifier
        ]


class SSIAT(BaseSingle):
    """Single backbone model with Self-Supervised Incremental Adapter Training.

    This model uses parameter-efficient fine-tuning with adapters that are
    trained incrementally using both supervised and self-supervised signals.
    """

    def after_task(self) -> None:
        """Operations to perform after completing a task.

        For SSIAT, this updates the adapter configuration if needed.
        """
        # Delegate to the base hook first: currently a no-op, but this keeps
        # the override correct if BaseSingle.after_task ever gains logic.
        super().after_task()

        # Check if backbone supports adapters.
        # NOTE(review): assumes BaseModel.__init__ stores the config as
        # self.config and that it has a `peft` flag — confirm against BaseModel.
        if hasattr(self.backbone, 'set_peft_config') and self.config.peft:
            # Update PEFT configuration if needed
            # This is where adapter-specific logic would go
            pass

        logger.info(f"Completed task {self.current_task}")


# Alias for backward compatibility: older callers accessed the SSIAT variant as
# an attribute of the base class (BaseSingle.SSIAT) instead of importing it.
BaseSingle.SSIAT = SSIAT


@register_model('base_single')
def base_single(config: ModelConfig) -> BaseSingle:
    """Factory registered under 'base_single'.

    Args:
        config: Model configuration

    Returns:
        A freshly constructed BaseSingle model
    """
    model = BaseSingle(config)
    return model


@register_model('ssiat')
def ssiat(config: ModelConfig) -> SSIAT:
    """Factory registered under 'ssiat'.

    Args:
        config: Model configuration

    Returns:
        A freshly constructed SSIAT model
    """
    model = SSIAT(config)
    return model
