import torch.nn as nn

"""
词向量
"""


class EmbeddingLayer(nn.Module):
    """Thin wrapper around ``nn.Embedding`` mapping token ids to dense vectors.

    Args:
        vocab_size: Number of distinct token ids (rows of the lookup table).
        embedding_size: Dimensionality of each embedding vector.
    """

    def __init__(self, vocab_size: int = 4096, embedding_size: int = 512):
        super().__init__()
        # One learnable row per vocabulary entry; forward is a pure table lookup.
        self.embedding = nn.Embedding(vocab_size, embedding_size)

    def forward(self, x):
        """Look up embeddings for integer token ids.

        Args:
            x: Integer tensor of token ids (any shape).

        Returns:
            Float tensor with an extra trailing dimension of ``embedding_size``.
        """
        return self.embedding(x)


if __name__ == '__main__':
    import torch

    # Quick smoke test: embed a random batch of 4 sequences of 8 token ids.
    demo_ids = torch.randint(0, 10, (4, 8))
    layer = EmbeddingLayer(10, 16)
    print(layer(demo_ids))
