import oneflow as torch
import oneflow.nn as nn


class LSTMWithLayerNorm(nn.Module):
    """Single-layer LSTM followed by layer normalization and dropout.

    Args:
        input_size: number of features per input timestep.
        hidden_size: number of features in the LSTM hidden state.
        bidirectional: if True, run a bidirectional LSTM; the output
            feature size then doubles to ``2 * hidden_size``.
        dropout: dropout probability applied after the layer norm.
    """

    def __init__(self, input_size, hidden_size, bidirectional=False, dropout=0.1):
        super(LSTMWithLayerNorm, self).__init__()

        self.lstm = nn.LSTM(
            input_size=input_size,
            hidden_size=hidden_size,
            num_layers=1,
            batch_first=True,
            bidirectional=bidirectional,
        )

        # BUG FIX: a bidirectional LSTM concatenates the forward and backward
        # states, so its output feature dimension is 2 * hidden_size. The
        # original LayerNorm(hidden_size) raised a shape mismatch whenever
        # bidirectional=True.
        norm_size = hidden_size * 2 if bidirectional else hidden_size
        self.layer_norm = nn.LayerNorm(norm_size)
        self.dropout = nn.Dropout(dropout)
        self.bidirectional = bidirectional

    def forward(self, inputs, inputs_length, hidden=None):
        """Run the LSTM over a padded batch.

        Args:
            inputs: padded input tensor of shape (batch, seq_len, input_size).
            inputs_length: per-sample valid lengths; used only in the
                bidirectional case to pack the batch so the backward pass
                starts from each sequence's true end instead of padding.
                NOTE(review): pack_padded_sequence defaults to
                enforce_sorted=True, so callers must pass length-sorted
                batches — confirm against call sites.
            hidden: optional initial (h_0, c_0) state, defaults to zeros.

        Returns:
            Tuple ``(outputs, hidden)`` where ``outputs`` has shape
            (batch, seq_len, num_directions * hidden_size) after layer norm
            and dropout, and ``hidden`` is the final (h_n, c_n) state.
        """
        if self.bidirectional:
            # Packing matters for the backward direction: without it the
            # reverse pass would begin on padding positions.
            inputs = nn.utils.rnn.pack_padded_sequence(
                inputs, inputs_length, batch_first=True
            )

        # Compact the weights into one contiguous chunk (no-op on CPU,
        # avoids a cuDNN warning / extra copies on GPU).
        self.lstm.flatten_parameters()
        outputs, hidden = self.lstm(inputs, hidden)

        if self.bidirectional:
            # Restore a padded (batch, seq_len, features) tensor.
            outputs, _ = nn.utils.rnn.pad_packed_sequence(outputs, batch_first=True)

        outputs = self.layer_norm(outputs)
        outputs = self.dropout(outputs)

        return outputs, hidden


# class Conv2DLSTM(nn.Module):
#     def __init__(self, ):
#         super(Conv2DLSTM, self).__init__()

#     def forward()