import torch.nn as nn

class Model(nn.Module):
    """Bidirectional, batch-first LSTM encoder.

    Hyper-parameters are read from a config object that exposes
    ``hidden_size`` (input feature size), ``rnn_hidden`` (hidden size per
    direction), ``num_layers`` and ``dropout``.
    """

    def __init__(self, model_config=None):
        """Build the LSTM sub-module.

        Args:
            model_config: object providing the attributes listed in the
                class docstring. When omitted, falls back to the
                module-level ``config`` (the original behavior).
        """
        # Bug fix: nn.Module.__init__ must run before any sub-module is
        # assigned; otherwise registering ``self.lstm`` raises
        # AttributeError because _parameters/_modules don't exist yet.
        super().__init__()
        if model_config is None:
            # Preserve the original reliance on a module-level ``config``.
            model_config = config
        # NOTE(review): torch warns when dropout > 0 and num_layers == 1,
        # since dropout is only applied between stacked LSTM layers.
        self.lstm = nn.LSTM(input_size=model_config.hidden_size,
                            hidden_size=model_config.rnn_hidden,
                            num_layers=model_config.num_layers,
                            bidirectional=True,
                            batch_first=True,
                            dropout=model_config.dropout)

    def forward(self, x):
        """Stub — not yet implemented; currently returns ``None``.

        TODO: run ``x`` (presumably shaped (batch, seq, hidden_size) given
        batch_first=True — confirm with callers) through ``self.lstm`` and
        return the desired output.
        """
        pass