from torch import nn
import torch


class SkipGramModel(nn.Module):
    """Skip-gram (word2vec) model: embed each token, then project the
    embedding back onto the vocabulary to score candidate context words.

    The two learned components are:
      * an embedding table mapping token ids -> dense vectors, and
      * a linear layer mapping dense vectors -> per-token vocab logits.
    """

    def __init__(self, vocab_size, embedding_dims):
        """
        Args:
            vocab_size: number of distinct tokens in the vocabulary.
            embedding_dims: dimensionality of each word vector
                (typically small here, e.g. 3 or 4).
        """
        super().__init__()
        self.vocab_size = vocab_size
        self.embedding_dims = embedding_dims

        # Lookup table of word vectors, one row per vocabulary entry.
        self.embedding = nn.Embedding(vocab_size, embedding_dims)
        # Projection from embedding space back to vocabulary logits.
        self.fc = nn.Linear(embedding_dims, vocab_size)

    def forward(self, x):
        """Score context-word candidates for every input token.

        Args:
            x: LongTensor of token ids, shape (batch_size, seq_len).

        Returns:
            Logits of shape (batch_size * seq_len, vocab_size). The flat
            layout is kept (rather than reshaping back to 3-D) because it
            feeds directly into cross-entropy loss.
        """
        embedded = self.embedding(x)            # (batch, seq, emb)
        flat = embedded.view(-1, self.embedding_dims)  # (batch*seq, emb)
        logits = self.fc(flat)                  # (batch*seq, vocab)
        return logits


if __name__ == '__main__':
    # Quick smoke test: a batch of 3 sequences, 4 random token ids each,
    # drawn from a toy 10-word vocabulary with 3-dimensional embeddings.
    token_ids = torch.randint(0, 10, (3, 4))
    skip_gram = SkipGramModel(10, 3)
    logits = skip_gram(token_ids)
    # Expect (batch * seq_len, vocab_size) == (12, 10).
    print(logits.shape)
