import torch
from abc import abstractmethod
from typing import Optional
from jaxtyping import Shaped
from torch import Tensor, nn

class FieldComponent(nn.Module):
    """Base class for composable field modules.

    Subclasses implement :meth:`forward` to transform an input tensor;
    input/output dimensions may be configured after construction.

    Args:
        in_dim: Input dimension to module.
        out_dim: Output dimension to module.
    """

    def __init__(self, in_dim: Optional[int] = None, out_dim: Optional[int] = None):
        super().__init__()
        self.in_dim = in_dim
        self.out_dim = out_dim

    def build_nn_modules(self):
        """Instantiate any torch.nn members within the module; no-op by default."""

    def set_in_dim(self, in_dim: int):
        """Set the input dimension of the encoding.

        Args:
            in_dim: input dimension

        Raises:
            ValueError: if ``in_dim`` is not strictly positive.
        """
        if not in_dim > 0:
            raise ValueError("Input dimension should be greater than zero")
        self.in_dim = in_dim

    def get_out_dim(self):
        """Return the output dimension of the encoding.

        Raises:
            ValueError: if the output dimension was never set.
        """
        if self.out_dim is None:
            raise ValueError("Output dimension has not been set")
        return self.out_dim

    @abstractmethod
    def forward(self, in_tensor: Shaped[Tensor, "*bs input_dim"]):
        """Process ``in_tensor`` and return the result.

        Args:
            in_tensor: Input tensor to process
        """
        raise NotImplementedError
  
class Embedding(FieldComponent):
    """Lookup table mapping integer indices to learned embedding vectors.
    # TODO: add different types of initializations

    Args:
        in_dim: Number of embeddings
        out_dim: Dimension of the embedding vectors
    """

    def __init__(self, in_dim: int, out_dim: int):
        super().__init__()
        self.in_dim, self.out_dim = in_dim, out_dim
        self.build_nn_modules()

    def build_nn_modules(self):
        """Create the underlying ``torch.nn.Embedding`` table."""
        self.embedding = torch.nn.Embedding(self.in_dim, self.out_dim)

    def mean(self, dim=0):
        """Mean of the embedding weights taken along ``dim``."""
        return self.embedding.weight.mean(dim)

    def forward(self, in_tensor):
        """Look up the embedding rows selected by ``in_tensor``.

        Args:
            in_tensor: integer tensor of indices into the table
        """
        return self.embedding(in_tensor)


class MLP(nn.Module):
    """Simple multi-layer perceptron.

    Architecture: ``Linear(in, hidden) + ReLU`` head, ``n_layers - 2`` hidden
    ``Linear + ReLU`` blocks, and a ``Linear(hidden, out)`` tail optionally
    followed by ``out_act``.

    Args:
        in_dim: input feature dimension.
        hidden_dim: width of the hidden layers.
        out_dim: output feature dimension.
        n_layers: total number of linear layers (head + body + tail). Values
            below 2 still produce the head and tail layers.
        out_act: activation module appended after the tail, or a falsy value
            (e.g. ``None``) for a raw linear output.
            NOTE(review): the default ``nn.Sigmoid()`` instance is created once
            at import time and shared by every MLP built with the default;
            ``nn.Sigmoid`` is stateless so this is harmless, but pass a fresh
            module if independence matters.
    """

    def __init__(self, in_dim=32, hidden_dim=32, out_dim=3, n_layers=2, out_act=nn.Sigmoid()):
        super().__init__()

        self.head = nn.Sequential(nn.Linear(in_dim, hidden_dim), nn.ReLU(True))

        # Build the hidden body as an nn.Sequential in all cases. The original
        # kept a plain Python list when empty; an empty Sequential is a properly
        # registered no-op submodule (no parameters), so behavior and the
        # state_dict are unchanged while forward() no longer needs a branch.
        body_layers = []
        for _ in range(n_layers - 2):
            body_layers.append(nn.Linear(hidden_dim, hidden_dim))
            body_layers.append(nn.ReLU(True))
        self.body = nn.Sequential(*body_layers)

        if out_act:
            self.tail = nn.Sequential(nn.Linear(hidden_dim, out_dim), out_act)
        else:
            self.tail = nn.Sequential(nn.Linear(hidden_dim, out_dim))

    def forward(self, x):
        """Apply head, hidden body (identity when empty), and tail to ``x``."""
        out = self.head(x)
        out = self.body(out)  # empty Sequential acts as identity
        return self.tail(out)
    

class PosEmbedding(nn.Module):
    """Sinusoidal positional embedding (NeRF-style).

    Maps each input coordinate x to
    ``[x, sin(2^0 x), ..., sin(2^(N-1) x), cos(2^0 x), ..., cos(2^(N-1) x)]``.

    Args:
        N_freqs: number of frequency bands; values below 1 make this module
            an identity mapping.
    """

    def __init__(self, N_freqs):
        super().__init__()
        self.N_freqs = N_freqs
        # Register as buffer so it's moved with model.to(device)
        freq_bands = 2 ** torch.linspace(0, N_freqs - 1, N_freqs)
        self.register_buffer('freq_bands', freq_bands)

    def forward(self, x):
        """
        Input:  x, shape [..., D]
        Output: [..., D * (1 + 2 * N_freqs)]
        """
        if self.N_freqs < 1:
            return x
        # Shape: [..., D] -> [..., D, 1] -> [..., D, N_freqs]
        x_expanded = x.unsqueeze(-1) * self.freq_bands
        # Flatten only the last two dims so batch dims of any rank survive.
        # BUG FIX: the old reshape(x_expanded.shape[0], -1) assumed a 2-D
        # input and broke (or silently flattened batch dims) otherwise;
        # flatten(-2) is identical for 2-D inputs and correct for all ranks.
        sin = torch.sin(x_expanded).flatten(-2)
        cos = torch.cos(x_expanded).flatten(-2)
        # Concatenate original x, sin, and cos along the last dimension
        return torch.cat([x, sin, cos], dim=-1)   # [..., D * (1 + 2 * N_freqs)]
    
class RegionFeature(nn.Module):
    """Multi-scale learned region features.

    Builds ``n_region_scales`` embedding tables: scale ``i`` holds
    ``max_n_regions // 2**i`` regions of dimension ``min_region_dim * 2**i``.
    The summed per-region feature size is exposed as ``region_feat_dim``.

    Args:
        n_region_scales: number of scales / embedding tables.
        max_n_regions: region count at the finest scale (halved per scale).
        min_n_regions: coarsest region count (stored; unused in visible code).
        max_region_dim: largest embedding dim (stored; unused in visible code).
        min_region_dim: embedding dim at the finest scale (doubled per scale).
    """

    def __init__(self, n_region_scales, max_n_regions, min_n_regions, max_region_dim, min_region_dim):
        super().__init__()
        self.n_region_scales = n_region_scales
        self.max_n_regions = max_n_regions
        self.min_n_regions = min_n_regions
        self.max_region_dim = max_region_dim
        self.min_region_dim = min_region_dim

        # BUG FIX: use integer division — `/` yields floats, which
        # nn.Embedding rejects for its size arguments.
        n_region_seqs = [self.max_n_regions // (2**i) for i in range(n_region_scales)]
        region_dim_seqs = [self.min_region_dim * (2**i) for i in range(n_region_scales)]
        self.region_feat_dim = sum(region_dim_seqs)

        for i in range(n_region_scales):
            # BUG FIX: register_buffer() only accepts tensors and raises
            # TypeError for modules; add_module() registers the Embedding as a
            # trainable submodule so its weights are tracked and moved to device.
            self.add_module(f"_region_feature_{i}", Embedding(n_region_seqs[i], region_dim_seqs[i]))

    def forward(self, labels):
        # TODO(review): not implemented in the visible source.
        pass