import torch.nn as nn
from torch.nn import LayerNorm
from torch import Tensor
from mamba_ssm import Mamba


class BidirectionalMambaBlock(nn.Module):
    """One bidirectional Mamba layer.

    Two independent Mamba SSMs process the sequence — one in the given
    order, one time-reversed — each inside a pre-norm residual branch;
    the two directional outputs are then averaged.
    """

    def __init__(self, n_hidden: int, mamba_state: int, mamba_conv_kernel: int,
                 dropout_mamba: float):
        """
        Args:
            n_hidden: Feature/model dimension (Mamba ``d_model``).
            mamba_state: SSM state dimension (Mamba ``d_state``).
            mamba_conv_kernel: Local convolution width (Mamba ``d_conv``).
            dropout_mamba: Dropout probability applied after each Mamba.
        """
        super().__init__()
        # Index 0 handles the forward direction, index 1 the reversed one.
        self.mamba = nn.ModuleList([
            Mamba(d_model=n_hidden, d_state=mamba_state, d_conv=mamba_conv_kernel),
            Mamba(d_model=n_hidden, d_state=mamba_state, d_conv=mamba_conv_kernel)
        ])
        # Single LayerNorm shared by both directional branches.
        self.norm_mamba = LayerNorm(n_hidden)
        self.dropout_mamba = nn.Dropout(dropout_mamba)

    def forward(self, x: Tensor) -> Tensor:
        """
        Forward pass for the Bidirectional Mamba module.

        Args:
            x (torch.Tensor): Input tensor of shape (batch_size, seq_len, feature_dim).

        Returns:
            torch.Tensor: Tensor of the same shape — the mean of the forward
            and (re-flipped) backward residual branches.
        """
        # Flip along the sequence axis once and reuse it for the backward branch.
        x_rev = x.flip(dims=[1])
        x_fw = x + self._mamba(x, self.mamba[0], self.norm_mamba, self.dropout_mamba)
        x_bw = x_rev + self._mamba(x_rev, self.mamba[1], self.norm_mamba, self.dropout_mamba)
        # Undo the reversal on the backward branch so both align, then average.
        return (x_fw + x_bw.flip(dims=[1])) / 2

    def _mamba(self, x: Tensor, mamba: Mamba, norm: nn.Module, dropout: nn.Module) -> Tensor:
        """Pre-norm -> Mamba -> dropout sub-branch (residual added by caller)."""
        x = norm(x)
        # Call the module (not .forward()) so registered hooks still run.
        x = mamba(x)
        return dropout(x)

class BidirectionalMamba(nn.Module):
    """A stack of ``n_layer`` BidirectionalMambaBlock layers applied in order."""

    def __init__(self, n_layer: int, n_hidden: int, mamba_state: int,
                 mamba_conv_kernel: int, dropout_mamba: float):
        """
        Args:
            n_layer: Number of stacked bidirectional blocks.
            n_hidden: Feature/model dimension passed to each block.
            mamba_state: SSM state dimension passed to each block.
            mamba_conv_kernel: Convolution kernel width passed to each block.
            dropout_mamba: Dropout probability passed to each block.
        """
        super().__init__()
        # Build the stack directly from a generator instead of a manual append loop.
        self.mamba = nn.Sequential(*(
            BidirectionalMambaBlock(n_hidden, mamba_state, mamba_conv_kernel, dropout_mamba)
            for _ in range(n_layer)
        ))

    def forward(self, x: Tensor) -> Tensor:
        """Apply all blocks in sequence to ``x`` (batch, seq_len, feature_dim)."""
        return self.mamba(x)