from torch import nn as nn

class RNN(nn.Module):
    """Abstract base class for recurrent modules.

    Subclasses (e.g. ``BidirectionalGRU``) must override :meth:`forward`.
    """

    def __init__(self):
        """Initialize the base module; takes no parameters of its own."""
        super(RNN, self).__init__()

    def forward(self, x):
        """
        Forward pass through the RNN.

        Args:
            x: input tensor of shape (batch_size, seq_length, n_in)

        Returns:
            output tensor of shape (batch_size, seq_length, n_hidden)
            (2 * n_hidden for bidirectional subclasses)

        Raises:
            NotImplementedError: always — subclasses must override this.
        """
        raise NotImplementedError("This method should be overridden by subclasses.")


class BidirectionalGRU(RNN):
    def __init__(self, n_in, n_hidden, dropout=0, num_layers=1):

        """
            Initialization of BidirectionalGRU instance
        Args:
            n_in: int, number of input
            n_hidden: int, number of hidden layers
            dropout: flat, dropout
            num_layers: int, number of layers
        """
        super(BidirectionalGRU, self).__init__()
        self.rnn = nn.GRU(
            n_in,
            n_hidden,
            bidirectional=True,
            dropout=dropout,
            batch_first=True,
            num_layers=num_layers,
        )

    def forward(self, input_feat):
        recurrent, _ = self.rnn(input_feat)
        return recurrent