import oneflow as torch
import oneflow.nn as nn


# Registry mapping a config string ('rnn' | 'gru' | 'lstm') to the
# corresponding recurrent layer class.
RecurrentLayer = dict(
    rnn=nn.RNN,
    gru=nn.GRU,
    lstm=nn.LSTM,
)


class CommonRNNLayer(nn.Module):
    """One recurrent layer (RNN/GRU/LSTM) + linear projection, with
    optional layer norm, optional residual connection, and dropout.

    Args:
        input_size: feature size of the input, last dim of x.
        hidden_size: hidden state size of the recurrent core.
        project_size: output feature size after the linear projection.
        layer_norm: if True, apply LayerNorm to the projected output.
        dropout: dropout probability applied to the final output.
        bidirectional: if True, run the recurrent core in both directions.
        residual: add the input to the output; silently disabled unless
            input_size == project_size (shapes must match to add).
        rnn_type: one of 'rnn', 'gru', 'lstm' (keys of RecurrentLayer).
    """

    def __init__(self, input_size, hidden_size, project_size, layer_norm=True, dropout=0.1,
                 bidirectional=False, residual=False, rnn_type='lstm'):
        super(CommonRNNLayer, self).__init__()

        self.rnn_type = rnn_type
        self.layer_norm = layer_norm
        self.bidirectional = bidirectional
        # Residual connections require matching input/output widths.
        self.residual = residual if input_size == project_size else False

        self.layer = RecurrentLayer[rnn_type](
            input_size=input_size,
            hidden_size=hidden_size,
            bias=True,
            batch_first=True,
            dropout=0.0,  # dropout is applied once, after the projection
            bidirectional=bidirectional)

        # A bidirectional core concatenates both directions' outputs.
        self.project_layer = nn.Linear(hidden_size * 2 if bidirectional else hidden_size, project_size)

        if self.layer_norm:
            self.norm = nn.LayerNorm(project_size)

        self.dropout = nn.Dropout(dropout)

    def forward(self, x, x_lens=None, hidden=None):
        """Run the layer.

        Args:
            x: input tensor of shape [batch, time, input_size].
            x_lens: per-sample valid lengths; used to pack the sequence
                when running bidirectionally so padded steps do not
                contaminate the backward direction.
            hidden: initial hidden state — a tensor for RNN/GRU, or an
                (h, c) tuple for LSTM; presumably shaped
                [num_directions, batch, hidden_size] — TODO confirm.

        Returns:
            (output [batch, time, project_size], final hidden state).
        """
        residual = x

        # Pack only when bidirectional AND lengths were supplied;
        # previously a bidirectional call without x_lens crashed here.
        packed = self.bidirectional and x_lens is not None
        if packed:
            x = nn.utils.rnn.pack_padded_sequence(x, x_lens, batch_first=True)

        # Keep the RNN weights contiguous for the fused kernel.
        self.layer.flatten_parameters()

        x, hidden = self.layer(x, hidden)

        if packed:
            x, _ = nn.utils.rnn.pad_packed_sequence(x, batch_first=True)

        x = self.project_layer(x)

        if self.layer_norm:
            x = self.norm(x)

        if self.residual:
            # pad_packed_sequence may shorten the time axis to the max
            # length in the batch; only add when time dims still agree.
            if residual.size(1) == x.size(1):
                x = x + residual

        x = self.dropout(x)

        return x, hidden

    def inference(self, x, hidden):
        """Inference entry point: forward pass with an initial hidden state.

        BUG FIX: the previous call `self.forward(x, hidden)` bound
        `hidden` to the x_lens positional parameter and ran with
        hidden=None, silently discarding the provided state.
        """
        return self.forward(x, hidden=hidden)