# -*- encoding: utf-8 -*-
'''
@File    :   transformer.py
@Time    :   2021/11/30 16:03
@Author  :   ZhangChaoYang
@Desc    :   Transformer -- the dominant architecture in NLP
'''

import tensorflow as tf
from tensorflow.keras import Model, Sequential
from tensorflow.keras.layers import LayerNormalization, Dense, ReLU, Dropout
from tensorflow_addons.layers import MultiHeadAttention  # NOTE(review): tensorflow_addons is end-of-life (May 2024); core TF has shipped tf.keras.layers.MultiHeadAttention since 2.4 (different call signature) -- consider migrating


class Transformer(Model):
    """A single Transformer encoder block (post-LN variant).

    Multi-head self-attention followed by a position-wise feed-forward
    network, each wrapped in a residual connection and layer normalization.
    The output has the same shape as the input ``(batch, seq_len,
    embedding_dim)`` — the block re-encodes each token in place.
    """

    def __init__(self, embedding_dim, hidden_dim, head_size=128):
        """
        Args:
            embedding_dim: Dimensionality of the input/output embeddings.
            hidden_dim: Width of the hidden layer in the feed-forward sub-block.
            head_size: Per-head projection size for the attention layer.
        """
        super(Transformer, self).__init__()
        # Two independent LayerNorm layers: each residual connection needs its
        # own learned scale/offset. (The original code shared one instance,
        # which tied the parameters of both normalizations together.)
        self.attn_norm_layer = LayerNormalization(epsilon=1e-3)
        self.fc_norm_layer = LayerNormalization(epsilon=1e-3)
        self.attention_layer = MultiHeadAttention(head_size=head_size, num_heads=4, dropout=0.1)
        # Position-wise feed-forward network: expand to hidden_dim, then
        # project back to embedding_dim so the residual addition is valid.
        self.fc_layer = Sequential([
            Dense(units=hidden_dim, use_bias=False),
            Dropout(0.1),
            ReLU(),
            Dense(units=embedding_dim, use_bias=False),
            Dropout(0.1),
        ])

    def call(self, inputs, training=None, mask=None):
        """Run one encoder block; output shape equals input shape."""
        # Self-attention: query, key and value are all the input sequence.
        # Forward `training` explicitly so the dropout inside the attention
        # layer is active only during training (the original relied on
        # implicit Keras propagation).
        attn_out = self.attention_layer(inputs=[inputs, inputs, inputs], training=training)
        # Residual connection + layer norm around the attention sub-block.
        fc_input = self.attn_norm_layer(attn_out + inputs)
        fc_out = self.fc_layer(fc_input, training=training)
        # Residual connection + layer norm around the feed-forward sub-block.
        out = self.fc_norm_layer(fc_out + fc_input)
        return out


if __name__ == '__main__':
    # Smoke test: push one random batch through the block and confirm that
    # the output shape matches the input shape.
    n_batch, n_tokens, n_features = 1, 10, 32
    sample = tf.random.normal(shape=(n_batch, n_tokens, n_features))
    block = Transformer(embedding_dim=n_features, hidden_dim=n_features * 2, head_size=16)
    encoded = block(sample)
    print(encoded.shape)  # expected to equal the input shape

# python .\models\transformer.py