import torch
from config.config import cfg
from model.Embedding import TokenEmbedding, PositionalEncoding
from utils.log_helper import log_init

if __name__ == "__main__":
    # Smoke test: run a small batch of token ids through TokenEmbedding and
    # PositionalEncoding and log the output shapes at each step.
    logger = log_init("test_embedding", cfg.log_dir)

    # Build a test batch of token ids.
    # shape: [batch_size, src_len] = [2, 5]
    x = torch.tensor([[1, 3, 5, 7, 9], [2, 4, 6, 8, 10]], dtype=torch.long)

    # Convert to [src_len, batch_size] = [5, 2].
    # NOTE: must be a transpose, not reshape(5, 2) — reshape reinterprets the
    # flat row-major buffer and would interleave tokens from the two
    # sequences; transpose keeps each column a coherent sequence.
    x = x.transpose(0, 1)

    # Step 1: Token Embedding — map token ids to emb_size-dim vectors.
    token_embedding = TokenEmbedding(vocab_size=11, emb_size=512)
    x = token_embedding(tokens=x)
    logger.info(f"token embedding output shape = {x.shape}")

    # Step 2: Positional Encoding — add position information on top of the
    # token embeddings (expected to preserve the [5, 2, 512] shape).
    pos_embedding = PositionalEncoding(d_model=512)
    x = pos_embedding(x=x)
    logger.info(f"pe output shape = {x.shape}")