"""
# -*- coding: utf-8 -*-
# @Time    : 2023/10/8 8:37
# @Author  : 王摇摆
# @FileName: model_cnn_with_attention.py
# @Software: PyCharm
# @Blog    : https://blog.csdn.net/weixin_44943389?type=blog
"""
from tensorflow.keras import layers, models


def create_cnn_model_with_channel_attention(input_shape):
    """Build a 1-D CNN binary classifier with a channel-attention block.

    The attention is squeeze-and-excitation style: each of the 64 feature
    channels is squeezed to a scalar, a small bottleneck MLP produces a
    per-channel gate in (0, 1), and the feature map is rescaled by those
    gates before classification.

    Args:
        input_shape: Shape of one sample, ``(steps, channels)``, fed to the
            first ``Conv1D`` layer.

    Returns:
        An uncompiled ``tf.keras`` Model with a single sigmoid output unit
        (binary classification).
    """
    inputs = layers.Input(shape=input_shape)

    # Convolutional feature extractor (unchanged from the original stack).
    x = layers.Conv1D(32, 3, activation='relu')(inputs)
    x = layers.MaxPooling1D(2)(x)
    x = layers.Dropout(0.2)(x)

    x = layers.Conv1D(64, 3, activation='relu')(x)
    x = layers.MaxPooling1D(2)(x)
    x = layers.Dropout(0.2)(x)

    # Channel attention (squeeze-and-excitation).
    # NOTE(review): the original code pooled and went straight into Dense
    # layers — that is a plain classifier head, not attention; the feature
    # map was never reweighted. This block adds the actual gating.
    squeeze = layers.GlobalAveragePooling1D()(x)             # (batch, 64): one scalar per channel
    excite = layers.Dense(16, activation='relu')(squeeze)    # bottleneck, reduction ratio 4
    excite = layers.Dense(64, activation='sigmoid')(excite)  # per-channel gates in (0, 1)
    excite = layers.Reshape((1, 64))(excite)                 # broadcast gates over the time axis
    x = layers.Multiply()([x, excite])                       # rescale the feature map

    # Classifier head (mirrors the original Dense stack).
    x = layers.GlobalAveragePooling1D()(x)
    x = layers.Dense(64, activation='relu')(x)
    x = layers.Dense(128, activation='relu')(x)
    outputs = layers.Dense(1, activation='sigmoid')(x)

    return models.Model(inputs, outputs, name='cnn_with_channel_attention')
