import torch
import torch.nn as nn
import math


class Embeddings(nn.Module):
    """Token embedding layer that scales outputs by sqrt(feature_dim).

    Looks up learned embeddings for integer token ids and multiplies them by
    sqrt(feature_dim), as done in the Transformer architecture ("Attention Is
    All You Need") so embedding magnitudes match the positional encodings.
    """

    def __init__(self, feature_dim: int, vocab_size: int):
        """Initialize the embedding table.

        Args:
            feature_dim: dimensionality of each embedding vector.
            vocab_size: number of distinct token ids in the vocabulary.
        """
        super().__init__()
        self.lut = nn.Embedding(vocab_size, feature_dim)
        self.feature_dim = feature_dim

    def forward(self, x):
        """Embed token ids and apply the sqrt(feature_dim) scaling.

        Args:
            x: LongTensor of token ids, shape (..., seq_len); values must be
               in [0, vocab_size).

        Returns:
            FloatTensor of shape (..., seq_len, feature_dim).
        """
        return self.lut(x) * math.sqrt(self.feature_dim)


if __name__ == '__main__':
    # Demo 1: raw nn.Embedding — maps each of 8 token ids to a 3-dim vector.
    embedding = nn.Embedding(8, 3)
    # NOTE: renamed from `input` to avoid shadowing the builtin of that name.
    token_ids = torch.tensor([[1, 2, 3, 4, 5, 6, 7]])

    output = embedding(token_ids)
    print(token_ids.size())
    print(token_ids)
    print(output.size())
    print(output)

    # Demo 2: the Embeddings module with Transformer-style sqrt scaling.
    feature_dim = 512  # embedding dimensionality
    vocab_size = 1000  # vocabulary size
    emb = Embeddings(feature_dim, vocab_size)
    output = emb(token_ids)
    print(output.size())
