import torch.nn as nn
import torch
from model.modules.classifier import BaseCls
from model.modules.cnn import CNN
from model.modules.mamba import BidirectionalMamba

class CMamba(nn.Module):
    """Sound-event model: CNN feature extractor -> bidirectional Mamba
    sequence model -> classification head producing strong (frame-level)
    and weak (clip-level) predictions.
    """

    def __init__(
        self,
        cnn: CNN,
        mamba: BidirectionalMamba,
        classifier: BaseCls,
    ):
        super().__init__()
        self.cnn = cnn
        self.mamba = mamba
        self.classifier = classifier

    def forward(self, x):
        # Only the first element of the input container is used
        # (assumes x is an indexable batch container — TODO confirm caller contract).
        feats = x[0]
        # (batch, n_freq, n_frames) -> (batch, 1, n_frames, n_freq)
        feats = feats.transpose(1, 2).unsqueeze(1)
        feats = self.cnn(feats)
        batch, channels, n_frames, n_freq = feats.size()
        # Merge channel and frequency axes into one feature vector per frame:
        # (batch, n_frames, channels * n_freq)
        feats = feats.permute(0, 2, 1, 3).reshape(batch, n_frames, -1)
        feats = self.mamba(feats)
        strong, weak = self.classifier(feats)
        return strong, weak

if __name__ == "__main__":
    from model.modules.cnn import *
    from model.modules.rnn import *
    from model.modules.classifier import *

    # Smoke test: build the model and push one random batch through it.
    # Fall back to CPU so this also runs on machines without a GPU
    # (previously hard-coded .cuda(), which crashed on CPU-only hosts).
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

    model = CMamba(
        cnn=CNN(
            n_in_channel=1,
            channels=[16, 32, 64, 128, 128, 128, 128],
            kernel_size=[3, 3, 3, 3, 3, 3, 3],
            stride=[1, 1, 1, 1, 1, 1, 1],
            padding=[1, 1, 1, 1, 1, 1, 1],
            pooling=[(2, 2), (2, 2), (1, 2), (1, 2), (1, 2), (1, 2), (1, 2)],
            dropout=0.5,
            normalization="batch"
        ),
        mamba=BidirectionalMamba(
            n_layer=2,
            n_hidden=128,
            mamba_state=16,
            mamba_conv_kernel=4,
            dropout_mamba=0
        ),
        classifier=LinearAttentionCls(
            n_in=128,
            n_classes=10,
            dropout=0.5
        )
    ).to(device)

    # (batch, n_freq, n_frames)
    x = torch.randn(2, 128, 626).to(device)
    # CMamba.forward indexes its argument with x[0], so the tensor must be
    # wrapped in a container (passing the bare tensor would strip the batch
    # dimension and fail inside forward).
    strong, weak = model([x])
    # Expected: strong is frame-level (batch, n_classes, n_frames') and weak
    # is clip-level (batch, n_classes) — exact frame count depends on the
    # CNN time-pooling; verify against the classifier implementation.
    print(strong.shape, weak.shape)