import tensorflow as tf
from tensorflow import keras

from ZhengqiLoader import ZhengqiLoader


def generate_CBAM():
    """Build a CNN regressor whose (5, 5, 1) input first passes through a
    CBAM (Convolutional Block Attention Module) block.

    The attention block follows Woo et al. (2018): channel attention from
    global max/avg pooled descriptors fed through a bottleneck MLP, then
    spatial attention from a conv over channel-pooled maps, with a residual
    connection around the whole block. A small Conv/Dense head maps the
    refined features to 6 linear outputs.

    Returns:
        tuple: ``(model, summary)`` where ``model`` is the ``tf.keras.Model``
        and ``summary`` is the result of ``model.summary()`` (which prints
        the architecture and returns ``None``).
    """
    input_xs = tf.keras.Input([5, 5, 1])
    reduction_ratio = 0.5
    hidden_num = input_xs.get_shape().as_list()[3]  # number of input channels
    # Bottleneck width for the channel-attention MLP. Guard against 0 units:
    # hidden_num is 1 here, so int(1 * 0.5) would otherwise be 0 and the
    # Dense bottleneck would output nothing, making the attention bias-only.
    bottleneck_units = max(1, int(hidden_num * reduction_ratio))

    # ---- channel attention -------------------------------------------------
    # Global max / average pooling over the spatial dims (H, W).
    maxpool_channel = tf.reduce_max(tf.reduce_max(input_xs, axis=1, keepdims=True), axis=2, keepdims=True)
    avgpool_channel = tf.reduce_mean(tf.reduce_mean(input_xs, axis=1, keepdims=True), axis=2, keepdims=True)
    maxpool_channel = tf.keras.layers.Flatten()(maxpool_channel)
    avgpool_channel = tf.keras.layers.Flatten()(avgpool_channel)
    # Two-layer MLP per branch. The second layer is linear in BOTH branches
    # (the sigmoid below supplies the non-linearity), matching the CBAM paper;
    # previously the avg branch had a stray relu here, making the two
    # branches asymmetric.
    mlp_1_max = tf.keras.layers.Dense(units=bottleneck_units, activation=tf.nn.relu)(maxpool_channel)
    mlp_2_max = tf.keras.layers.Dense(units=hidden_num)(mlp_1_max)
    mlp_2_max = tf.reshape(mlp_2_max, [-1, 1, 1, hidden_num])
    mlp_1_avg = tf.keras.layers.Dense(units=bottleneck_units, activation=tf.nn.relu)(avgpool_channel)
    mlp_2_avg = tf.keras.layers.Dense(units=hidden_num)(mlp_1_avg)
    mlp_2_avg = tf.reshape(mlp_2_avg, [-1, 1, 1, hidden_num])
    channel_attention = tf.nn.sigmoid(mlp_2_max + mlp_2_avg)
    channel_refined_feature = input_xs * channel_attention

    # ---- spatial attention -------------------------------------------------
    # Pool across the channel axis, concat, and squash through a conv+sigmoid.
    maxpool_spatial = tf.reduce_max(channel_refined_feature, axis=3, keepdims=True)
    avgpool_spatial = tf.reduce_mean(channel_refined_feature, axis=3, keepdims=True)
    max_avg_pool_spatial = tf.concat([maxpool_spatial, avgpool_spatial], axis=3)
    conv_layer = tf.keras.layers.Conv2D(filters=1, kernel_size=(3, 3), padding="same", activation=None)(
        max_avg_pool_spatial)
    spatial_attention = tf.nn.sigmoid(conv_layer)
    refined_feature = channel_refined_feature * spatial_attention
    # Residual connection around the attention block.
    output_layer = refined_feature + input_xs

    # NOTE(review): casting to float64 makes every subsequent layer run in
    # double precision — presumably a dtype workaround; confirm it is needed.
    output_layer = tf.cast(output_layer, dtype=tf.float64)

    # ---- convolutional head ------------------------------------------------
    cov = tf.keras.layers.Conv2D(70, 3, padding='SAME', activation=tf.nn.relu)(output_layer)
    cov = tf.keras.layers.BatchNormalization()(cov)
    cov = tf.keras.layers.Conv2D(150, 3, padding='SAME', activation=tf.nn.relu)(cov)
    cov = tf.keras.layers.MaxPool2D(strides=[2, 2])(cov)
    cov = tf.keras.layers.Conv2D(375, 3, padding='SAME', activation=tf.nn.relu)(cov)
    flat = tf.keras.layers.Flatten()(cov)
    dense = tf.keras.layers.Dense(40, activation=tf.nn.relu)(flat)
    dense = tf.keras.layers.Dense(20, activation=tf.nn.relu)(dense)
    logits = tf.keras.layers.Dense(6)(dense)
    generate = tf.keras.Model(inputs=input_xs, outputs=logits)

    return generate, generate.summary()
if __name__ == '__main__':
    # Training data location and mini-batch size used by the loader.
    data_path = './zhengqi_train.txt'
    batch_size = 10

    zhengqi_loader = ZhengqiLoader(data_path)
    (x_train_data, train_dataset, y_train,
     x_test_data, y_test) = zhengqi_loader.preprocess(batch_size)
    print(x_train_data.shape)

    model, summary = generate_CBAM()