from torch import nn
import torch


class RNN(nn.Module):
    """Token-level RNN language-model head.

    Wraps ``nn.RNN`` followed by a linear projection back to the
    vocabulary, producing per-timestep logits suitable for a
    cross-entropy loss.
    """

    def __init__(self, vocab_size, hidden_size, num_layers=1, bidirectional=False, device=None):
        """
        Args:
            vocab_size: vocabulary size (input feature dim and output logit dim).
            hidden_size: number of features in the RNN hidden state.
            num_layers: number of stacked RNN layers.
            bidirectional: if True, run the RNN in both directions.
            device: target device; defaults to CUDA when available, else CPU.
        """
        super().__init__()
        self.device = device if device is not None else torch.device(
            "cuda" if torch.cuda.is_available() else "cpu"
        )
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        # Number of directions determines both the fc input width and the
        # expected leading dimension of the hidden state in forward().
        self.num_directions = 2 if bidirectional else 1
        self.bi_hidden_size = self.num_directions * hidden_size

        self.rnn = nn.RNN(vocab_size, hidden_size, num_layers,
                          bidirectional=bidirectional, device=self.device)
        # BUG FIX: the third positional parameter of nn.Linear is `bias`,
        # not `device` — the original passed a torch.device as `bias`
        # (accidentally truthy) and never placed the layer on the device.
        self.fc = nn.Linear(self.bi_hidden_size, vocab_size, device=self.device)

    def forward(self, x, h):
        """Run the RNN over ``x`` from initial hidden state ``h``.

        Args:
            x: input of shape (seq_len, batch, vocab_size)
               (batch_first is False for this nn.RNN).
            h: initial hidden state of shape
               (num_layers * num_directions, batch, hidden_size).

        Returns:
            Tuple ``(logits, h_n)``: logits of shape
            (seq_len * batch, vocab_size) and the final hidden state.

        Raises:
            ValueError: if ``h`` has the wrong leading dimension.
        """
        x = x.to(self.device)
        # BUG FIX: h was never moved to the model's device (would crash on CUDA).
        h = h.to(self.device)
        # BUG FIX: the original asserted shape[0] == 2 * num_layers, which is
        # only correct when bidirectional — it always failed for the default
        # unidirectional case. Also raise instead of assert, since asserts
        # are stripped under `python -O`.
        expected = self.num_directions * self.num_layers
        if h.shape[0] != expected:
            raise ValueError(
                f"hidden state has leading dim {h.shape[0]}, expected {expected} "
                f"(num_layers * num_directions)"
            )
        result, h = self.rnn(x, h)
        # Flatten (seq_len, batch, D*hidden) -> (seq_len*batch, D*hidden)
        # so fc yields one vocabulary-logit row per token position.
        result = result.reshape(-1, self.bi_hidden_size)
        return self.fc(result), h
