# coding=utf-8
import torch
import torch.nn as nn
from torchinfo import summary


class RnnNet(nn.Module):
    def __init__(self):
        super(RnnNet, self).__init__()

        # 词编码层，30522是词的数量，每个词被编码为100维的向量
        self.embed = nn.Embedding(num_embeddings=30522, embedding_dim=100)

        # rnn 神经单元
        self.cell = nn.GRUCell(input_size=100, hidden_size=512)

        # 线性输出
        self.fc = nn.Linear(in_features=512, out_features=2)

    def forward(self, x):
        # [10,15] -> [10,15,100]
        x = self.embed(x)
        # 初试记忆为空
        h = None
        # 从前往后读句中的每个词
        for i in range(x.shape[1]):
            h = self.cell(x[:, i], h)

        # 根据最后一个词的记忆，分类整句话
        return self.fc(h)


def getModel():
    """Factory: return a freshly initialized RnnNet classifier."""
    model = RnnNet()
    return model


if __name__ == '__main__':
    model = getModel()

    # Print a layer-by-layer summary (the `summary` import was previously
    # unused). Input is a batch of 10 token-id sequences of length 15,
    # matching the shape noted in RnnNet.forward; ids must be integer
    # dtype for nn.Embedding, hence dtypes=[torch.long].
    summary(model, input_size=(10, 15), dtypes=[torch.long])
