import torch
from torch import nn

from model.MyTransformer import MyTransformerEncoderLayer, MyTransformerEncoder, MyTransformer
from utils.log_helper import log_init

if __name__ == '__main__':
    logger = log_init("test_transformer")

    # Toy dimensions for a quick smoke test of the custom transformer stack.
    src_seq_len, tgt_seq_len = 5, 6
    d_model, n_head = 32, 4
    batch = 2
    n_layers = 2

    # Random encoder input; shape: [src_len, batch_size, embed_dim].
    src = torch.rand(src_seq_len, batch, d_model)

    # Padding mask over source positions; shape: [batch_size, src_len].
    # NOTE(review): torch.nn.Transformer treats True as a *padded/ignored*
    # position -- confirm whether MyTransformer follows that convention or
    # the inverse (True == valid), since the pattern here is ambiguous.
    src_key_padding_mask = torch.tensor([[True, True, True, False, False],
                                         [True, True, True, True, False]])

    # Random decoder input; shape: [tgt_len, batch_size, embed_dim].
    tgt = torch.rand((tgt_seq_len, batch, d_model))

    # Padding mask over target positions; shape: [batch_size, tgt_len].
    tgt_key_padding_mask = torch.tensor([[True, True, True, False, False, False],
                                         [True, True, True, True, False, False]])

    # ============ MyMultiheadAttention smoke test (disabled) ============
    # my_mh = MyMultiheadAttention(embed_dim=d_model, num_heads=n_head)
    # r = my_mh(src, src, src, key_padding_mask=src_key_padding_mask)

    # ============ MyTransformerEncoder smoke test (disabled) ============
    # encoder_layer = MyTransformerEncoderLayer(d_model=d_model, nhead=n_head)
    # encoder = MyTransformerEncoder(encoder_layer=encoder_layer,
    #                                num_layers=n_layers,
    #                                norm=nn.LayerNorm(d_model))
    # memory = encoder(src, mask=None,
    #                  src_key_padding_mask=src_key_padding_mask)
    # logger.info(f"memory shape = {memory.shape}")

    # ============ End-to-end MyTransformer smoke test ============
    model = MyTransformer(d_model=d_model,
                          nhead=n_head,
                          num_encoder_layers=6,
                          num_decoder_layers=6,
                          dim_feedforward=500)
    # Causal mask: each target position may attend only to itself and
    # earlier positions.
    tgt_mask = model.generate_square_subsequent_mask(tgt_seq_len)
    out = model(src=src, tgt=tgt,
                tgt_mask=tgt_mask,
                src_key_padding_mask=src_key_padding_mask,
                tgt_key_padding_mask=tgt_key_padding_mask,
                memory_key_padding_mask=src_key_padding_mask)
    logger.debug(f"Transformer output shape = {out.shape}")
