import torch
from torch import nn

class R_LSTM(nn.Module):
    """LSTM classifier over embedded token sequences.

    Token indices are embedded, reshaped to fixed-length sequences, run
    through a single-layer LSTM, and the hidden state at the final
    timestep is projected to class logits.

    Args:
        word_length: vocabulary size for the embedding table.
        embed_dim: embedding dimension (default 256, matching original).
        hidden_size: LSTM hidden-state size (default 64).
        seq_len: fixed sequence length inputs are reshaped to (default 20).
        num_classes: number of output classes (default 3).
    """

    def __init__(self, word_length, embed_dim=256, hidden_size=64,
                 seq_len=20, num_classes=3):
        super().__init__()
        # Kept as attributes so forward() no longer hard-codes 20/256.
        self.seq_len = seq_len
        self.embed_dim = embed_dim

        self.embedding = nn.Embedding(word_length, embed_dim)
        self.rnn = nn.LSTM(
            input_size=embed_dim,
            hidden_size=hidden_size,
            num_layers=1,
            batch_first=True,  # tensors are (batch, seq, feature)
        )
        self.out = nn.Linear(hidden_size, num_classes)

    def forward(self, x):
        """Return class logits of shape ``(batch, num_classes)``.

        Args:
            x: LongTensor of token indices. Its total element count must
               be divisible by ``seq_len`` — the embedded tensor is
               reshaped to ``(-1, seq_len, embed_dim)`` exactly as the
               original code did with hard-coded (20, 256).
        """
        # Renamed from `input` to avoid shadowing the builtin.
        embedded = self.embedding(x).view(-1, self.seq_len, self.embed_dim)
        # Passing None uses zero-initialized hidden and cell states.
        r_out, (h_n, c_n) = self.rnn(embedded, None)

        # Classify from the output at the last timestep only.
        return self.out(r_out[:, -1, :])