import torch
from torch import nn
import torch.nn.functional as F


class TextRNNClf(nn.Module):
    """GRU-based binary text classifier.

    Pipeline: token ids -> embedding -> GRU -> linear head on the final
    hidden state -> log-softmax over 2 classes.

    Args:
        vocab_size: number of rows in the embedding table.
        padding_idx: token id whose embedding is frozen at zeros (padding).
        embed_size: embedding dimensionality (default 300).
        hidden_size: GRU hidden state size (default 128).
        num_layers: number of stacked GRU layers (default 1).
    """

    def __init__(self,
                 vocab_size,
                 padding_idx,
                 embed_size=300,
                 hidden_size=128,
                 num_layers=1):
        super().__init__()
        self.embedding = nn.Embedding(num_embeddings=vocab_size,
                                      embedding_dim=embed_size,
                                      padding_idx=padding_idx)

        # batch_first=True so the GRU consumes (batch, seq, embed) —
        # the original left the default (seq, batch, embed) while forward()
        # indexed the output as if batch-first, mixing up the two axes.
        # num_layers is now actually forwarded (it was previously ignored).
        self.rnn = nn.GRU(input_size=embed_size,
                          hidden_size=hidden_size,
                          num_layers=num_layers,
                          batch_first=True)

        self.fc = nn.Linear(in_features=hidden_size,
                            out_features=2)

    def forward(self, x):
        """Classify a batch of token-id sequences.

        Args:
            x: LongTensor of shape (batch, seq_len) with token ids.

        Returns:
            FloatTensor of shape (batch, 2) with class log-probabilities.
        """
        x = self.embedding(x)          # (batch, seq, embed)
        _, h = self.rnn(x)             # h: (num_layers, batch, hidden)
        # h[-1] is the top layer's final hidden state — correct regardless
        # of batch_first, unlike slicing the padded output sequence.
        out = self.fc(h[-1])           # (batch, 2)
        return F.log_softmax(out, dim=-1)

