from torch.nn import Module, Linear, RNN, LSTM, GRU


class RNNs(Module):
    """Recurrent sequence model: one recurrent layer plus a linear read-out.

    Runs a whole fixed-length sequence through a vanilla RNN, GRU, or LSTM,
    flattens the hidden states of all time steps into a single feature vector
    per batch element, and maps it to ``output_size`` values with one fully
    connected layer.

    The class-level annotations below mirror the constructor arguments, which
    are stored on the instance.
    """

    model_name: str
    input_size: int
    hidden_size: int
    batch_first: bool
    time_step: int
    output_size: int

    # Supported recurrent layer classes, keyed by the user-facing model name.
    _LAYERS = {'RNN': RNN, 'GRU': GRU, 'LSTM': LSTM}

    def __init__(self, model_name: str, input_size: int, hidden_size: int, batch_first: bool, time_step: int,
                 output_size: int):
        """Build the recurrent layer and the fully connected output layer.

        Args:
            model_name: One of ``'RNN'``, ``'GRU'``, ``'LSTM'``.
            input_size: Number of features per time step.
            hidden_size: Hidden state width of the recurrent layer.
            batch_first: Passed through to the recurrent layer.
            time_step: Expected sequence length; fixes the input width of the
                fully connected layer (``time_step * hidden_size``).
            output_size: Number of output features.

        Raises:
            NameError: If ``model_name`` is not a supported model.
        """
        super().__init__()
        try:
            layer_cls = self._LAYERS[model_name]
        except KeyError:
            # NameError is kept for backward compatibility with existing
            # callers, although ValueError would be the idiomatic choice here.
            raise NameError('Wrong model name.') from None
        self.recurrent_layer = layer_cls(input_size=input_size, hidden_size=hidden_size, batch_first=batch_first)

        # Store constructor arguments so the class-level annotations above
        # correspond to real instance attributes.
        self.model_name = model_name
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.batch_first = batch_first
        self.time_step = time_step
        self.output_size = output_size

        # The read-out consumes all time steps at once, so inputs must have
        # exactly ``time_step`` steps along the sequence axis.
        self.fully_connected = Linear(in_features=time_step * hidden_size, out_features=output_size)

    def forward(self, x):
        """Encode the sequence and project the flattened hidden states.

        NOTE(review): the flattening below treats dim 0 as the batch axis, so
        it assumes ``batch_first=True`` inputs of shape
        ``(batch, time_step, input_size)`` — confirm callers never pass
        ``batch_first=False``.
        """
        # ``None`` initial state -> the recurrent layer starts from zeros.
        out, _ = self.recurrent_layer(x, None)
        # Flatten every time step's hidden state into one feature vector per
        # batch element; ``-1`` infers time_step * hidden_size robustly.
        out = out.reshape(out.shape[0], -1)
        return self.fully_connected(out)


if __name__ == '__main__':
    # Smoke test: one forward pass through a vanilla RNN.
    import torch

    model = RNNs('RNN', 10, 20, True, 3, 1)
    # Batch of 5 sequences, each with 3 time steps of 10 features.
    # (Renamed from `input`, which shadowed the builtin; the unused `h0`
    # tensor was removed — forward() always starts from a zero state.)
    batch = torch.randn(5, 3, 10)
    print(model(batch))