import torch.nn as nn

"""
残差网络与归一化
"""


class AddAndNorm(nn.Module):
    """Residual connection followed by layer normalization.

    Computes ``LayerNorm(x + sublayer(x))`` — the post-norm residual
    wrapper applied around each Transformer sublayer.
    """

    def __init__(self, d_model=512):
        """d_model: feature dimension normalized by LayerNorm."""
        super().__init__()
        self.norm = nn.LayerNorm(d_model)

    def forward(self, x, sublayer):
        """Apply ``sublayer`` to ``x``, add the residual, then normalize.

        x: input tensor whose last dimension is ``d_model``.
        sublayer: callable mapping ``x`` to a tensor of the same shape.
        """
        residual_sum = x + sublayer(x)
        return self.norm(residual_sum)


if __name__ == '__main__':
    import torch
    from embedding_layer import EmbeddingLayer
    from multi_head_attention_layer import MultiHeadAttention

    # Smoke test: embed random token ids, then run multi-head attention
    # wrapped in the residual Add & Norm.
    token_ids = torch.randint(0, 10, (4, 8))
    embedded = EmbeddingLayer(10, 512)(token_ids)
    attention = MultiHeadAttention()
    add_and_norm = AddAndNorm()
    print(add_and_norm(embedded, attention))
