from torch import nn
import torch


class MyEmbedding(nn.Module):
    """A minimal re-implementation of ``nn.Embedding``.

    Holds a ``(vocab_size, embedding_dims)`` lookup table and maps integer
    indices to their corresponding embedding rows.
    """

    def __init__(self, vocab_size, embedding_dims):
        """Create a randomly initialized embedding table.

        Args:
            vocab_size: number of rows (distinct tokens) in the table.
            embedding_dims: dimensionality of each embedding vector.
        """
        super().__init__()

        # Randomly initialize the lookup table. Registered as nn.Parameter
        # (the original used a plain tensor) so that it is trainable,
        # appears in state_dict(), and moves with .to(device)/.cuda().
        self.embeddings = nn.Parameter(torch.randn(vocab_size, embedding_dims))

    def forward(self, indices):
        """Look up embeddings for ``indices``.

        Args:
            indices: integer tensor of any shape; values must be in
                ``[0, vocab_size)``.

        Returns:
            Tensor of shape ``indices.shape + (embedding_dims,)``.
        """
        # .long() lets callers pass any integer dtype (e.g. int32).
        return self.embeddings[indices.long()]


if __name__ == '__main__':
    # Quick smoke test: embed two batches of four token ids each and
    # confirm the output picks up a trailing embedding dimension.
    layer = MyEmbedding(10, 3)
    token_ids = torch.tensor([[1, 2, 4, 5], [4, 3, 2, 9]], dtype=torch.long)
    vectors = layer(token_ids)
    print(vectors.shape)
