"""
Interfaces for task routing strategies.
Interface Definition: continuallearning/interfaces/models/pefts/routing/router.py

This module defines interfaces for routing inputs across multiple
task-specific components based on various strategies such as hard routing,
soft routing, and top-k expert selection.
"""

import torch
import torch.nn as nn
from typing import Dict, Any, Optional, List
from continuallearning.interfaces import RouterInterface


class BaseRouter(RouterInterface, nn.Module):
    """Route inputs across task-specific expert components.

    One learnable routing-weight matrix of shape (num_experts, embed_dim)
    is kept per registered task, stored under the stringified task id in
    ``self.router_weights``; per-task expert-usage counters live in
    ``self._expert_counts``.

    Supported routing strategies (per the interface contract):
    - 'hard': Select single best expert
    - 'soft': Weighted combination of experts
    - 'top_k': Select top-k experts
    """

    # TaskAwareInterface
    def register_task(self, task_id: int, **kwargs) -> None:
        """Allocate and initialize routing weights for a new task.

        Creates a fresh (num_experts, embed_dim) weight parameter plus an
        expert-usage counter for the task. Registering an already-known
        task is a no-op (a message is printed).

        Args:
            task_id: Identifier for the new task.
            **kwargs: Unused; accepted for interface compatibility.
        """
        if task_id in self.registered_tasks:
            print(f"Task {task_id} is already registered.")
            return

        task_key = str(task_id)
        weights = nn.Parameter(
            torch.zeros(self.num_experts, self.embed_dim, dtype=torch.float32)
        )
        # Delegate initialization to the (name-mangled) private initializer.
        self.__init_weights(weights)
        self.router_weights[task_key] = weights
        self._expert_counts[task_key] = torch.zeros(
            self.num_experts, dtype=torch.float32
        )

    def set_task(self, task_id: int) -> None:
        """Select the task the router should route for from now on.

        Args:
            task_id: Identifier of the task to activate. No validation is
                performed here; the id is simply recorded.
        """
        self._current_task = task_id

    def set_trainable_tasks(self, task_ids: int | List[int], **kwargs) -> None:
        """Mark the given task(s) trainable and freeze all other registered tasks.

        Args:
            task_ids: Single task identifier or list of identifiers whose
                router weights should receive gradients.
            **kwargs: Unused; accepted for interface compatibility.

        Raises:
            TypeError: If ``task_ids`` is neither an int nor a list.
            ValueError: Propagated from _freeze/_unfreeze if a referenced
                task is not registered.
        """
        # Normalize to a list. Previously an input of any other type left
        # task_ids_ls unbound and surfaced later as a confusing NameError.
        if isinstance(task_ids, int):
            task_ids_ls: List[int] = [task_ids]
        elif isinstance(task_ids, list):
            task_ids_ls = task_ids
        else:
            raise TypeError(
                f"task_ids must be int or List[int], got {type(task_ids).__name__}"
            )

        other_task_ids: List[int] = [
            tid for tid in self.registered_tasks if tid not in task_ids_ls
        ]

        self._unfreeze(task_ids_ls)
        self._freeze(other_task_ids)

    def _freeze(self, task_ids: int | List[int]) -> None:
        """Disable gradient updates on the router weights of the given task(s).

        Args:
            task_ids: Task identifier, or list of identifiers, to freeze.

        Raises:
            ValueError: If any task id has no registered router weights.
        """
        if isinstance(task_ids, int):
            task_ids = [task_ids]

        for tid in task_ids:
            key = str(tid)
            if key not in self.router_weights:
                raise ValueError(f"Task {tid} is not registered in router weights")
            self.router_weights[key].requires_grad = False
    def _unfreeze(self, task_ids: int | List[int]) -> None:
        """Enable gradient updates on the router weights of the given task(s).

        Args:
            task_ids: Task identifier, or list of identifiers, to unfreeze.

        Raises:
            ValueError: If any task id has no registered router weights.
        """
        ids = [task_ids] if isinstance(task_ids, int) else task_ids

        for tid in ids:
            key = str(tid)
            if key not in self.router_weights:
                raise ValueError(f"Task {tid} is not registered in router weights")
            self.router_weights[key].requires_grad = True

    @property
    def current_task(self) -> int:
        """ID of the task the router is currently configured for."""
        return self._current_task

    @property
    def registered_tasks(self) -> List[int]:
        """All registered task IDs, recovered from the router-weight keys."""
        # router_weights maps str(task_id) -> weights; iterate keys directly.
        return [int(key) for key in self.router_weights]

    # TaskIrrelevantInterface
    def get_parameters(self) -> List[nn.Parameter]:
        """Return every parameter managed by this module.

        Returns:
            List[nn.Parameter]: All parameters, frozen ones included.
        """
        return [*self.parameters()]

    def get_trainable_parameters(self) -> List[nn.Parameter]:
        """Return only the parameters that currently receive gradients.

        Returns:
            List[nn.Parameter]: Parameters with ``requires_grad=True``.
        """
        return list(filter(lambda p: p.requires_grad, self.parameters()))
