import tensorflow as tf
from keras.layers import Layer

class Attention(Layer):
    """Additive attention pooling over the time axis.

    Takes a 3D sequence tensor of shape ``(batch, timesteps, features)``,
    scores each timestep with a learned vector, and returns the
    score-weighted sum over timesteps — a 2D tensor of shape
    ``(batch, features)``.
    """

    def __init__(self, **kwargs):
        super(Attention, self).__init__(**kwargs)

    def build(self, input_shape):
        # Fail fast on rank mismatch so the error points here instead of
        # surfacing as an opaque matmul shape error inside call().
        if len(input_shape) != 3:
            raise ValueError(f"Expected 3D input, but got input shape: {input_shape}")

        # Single scoring vector of shape (features, 1). Explicitly named so
        # the weight serializes predictably on model save/load.
        self.W = self.add_weight(
            name="attention_weight",
            shape=(input_shape[2], 1),
            initializer='random_normal',
            trainable=True,
        )
        super(Attention, self).build(input_shape)

    def call(self, inputs):
        # e: (batch, timesteps, 1) — unnormalized score per timestep.
        e = tf.keras.backend.tanh(tf.keras.backend.dot(inputs, self.W))  # e = tanh(W * input)
        # Softmax over the time axis (axis=1) so weights sum to 1 per sample.
        a = tf.keras.backend.softmax(e, axis=1)
        # Broadcast weights across features, then reduce out the time axis;
        # the result has shape (batch, features).
        weighted_input = inputs * a
        return tf.keras.backend.sum(weighted_input, axis=1)
