import types

import tensorflow as tf
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution1D, MaxPooling1D

# 参数默认值
# Default hyperparameters, exposed as attribute-style config (args.batch_size, ...).
args = types.SimpleNamespace(
    patience=20,
    batch_size=64,
    dropout=0.6,
    nb_filter1=200,
    nb_filter2=96,
    filter_len1=19,
    filter_len2=11,
    hidden=200,
    num_transformer_layers=11,
    dropout_rate=0.4,
    channels=96,
    num_heads=4,
)


# Merge类定义
class MyModel(tf.keras.Model):
    """Two-block 1D-CNN binary classifier.

    Architecture: Conv1D -> ReLU -> Dropout, Conv1D -> ReLU -> Dropout,
    Flatten, Dense(hidden) -> ReLU, Dropout, Dense(1) -> sigmoid.

    Expects inputs of shape (batch, 1000, 4) — presumably one-hot-encoded
    DNA sequences of length 1000; confirm against the data pipeline.
    Hyperparameters are read from the module-level ``args`` namespace.
    """

    def __init__(self):
        super().__init__()
        # NOTE(review): the original code built a `whole_attention_kwargs`
        # dict here that was never read (its own comment said it was unused);
        # that dead code has been removed.

        # Convolution block 1. int()/float() casts kept in case args values
        # arrive as strings from a CLI parser.
        self.a1 = Convolution1D(int(args.nb_filter1), int(args.filter_len1), padding='same', input_shape=(1000, 4))
        self.a2 = Activation('relu')
        self.a3 = Dropout(float(args.dropout))

        # Convolution block 2.
        self.a4 = Convolution1D(int(args.nb_filter2), int(args.filter_len2), padding='same')
        self.a5 = Activation('relu')
        self.a6 = Dropout(float(args.dropout))

        # Classifier head. (Attribute numbering skips a7 to preserve the
        # original names, which may be referenced by saved checkpoints.)
        self.a8 = Flatten()
        self.a9 = Dense(int(args.hidden), activation='relu')
        self.a10 = Dropout(float(args.dropout))
        self.a11 = Dense(1, activation='linear')
        self.a12 = Activation('sigmoid')  # squashes the logit to a probability

    def call(self, inputs):
        """Run the forward pass.

        Args:
            inputs: tensor of shape (batch, 1000, 4).

        Returns:
            Tensor of shape (batch, 1) with sigmoid-activated probabilities.
        """
        x = self.a1(inputs)
        x = self.a2(x)
        x = self.a3(x)
        x = self.a4(x)
        x = self.a5(x)
        x = self.a6(x)
        x = self.a8(x)
        x = self.a9(x)
        x = self.a10(x)
        x = self.a11(x)
        x = self.a12(x)
        return x
