import tensorflow as tf
from typing import Tuple


def _inception_module(
    input_tensor,
    stride=1,
    activation="linear",
    use_bottleneck=True,
    kernel_size=40,
    bottleneck_size=32,
    nb_filters=32,
):
    # Bottleneck layer reduces channel dimensionality before the parallel convolutions.
    if use_bottleneck and int(input_tensor.shape[-1]) > 1:
        input_inception = tf.keras.layers.Conv1D(
            filters=bottleneck_size,
            kernel_size=1,
            padding="same",
            activation=activation,
            use_bias=False,
        )(input_tensor)
    else:
        input_inception = input_tensor

    # Three parallel convolutions with halving kernel sizes,
    # e.g. [40, 20, 10] for the default kernel_size=40.
    kernel_size_s = [kernel_size // (2**i) for i in range(3)]

    conv_list = []
    for i in range(len(kernel_size_s)):
        conv_list.append(
            tf.keras.layers.Conv1D(
                filters=nb_filters,
                kernel_size=kernel_size_s[i],
                strides=stride,
                padding="same",
                activation=activation,
                use_bias=False,
            )(input_inception)
        )

    # Max-pooling branch followed by a 1x1 convolution.
    max_pool_1 = tf.keras.layers.MaxPool1D(pool_size=3, strides=stride, padding="same")(
        input_tensor
    )
    conv_6 = tf.keras.layers.Conv1D(
        filters=nb_filters,
        kernel_size=1,
        padding="same",
        activation=activation,
        use_bias=False,
    )(max_pool_1)
    conv_list.append(conv_6)

    # Concatenate all branches along the channel axis, then normalize and activate.
    x = tf.keras.layers.Concatenate(axis=2)(conv_list)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.Activation(activation="relu")(x)
    return x


def _shortcut_layer(input_tensor, out_tensor):
    # Residual connection: project the input to the output's channel count, then add.
    shortcut_y = tf.keras.layers.Conv1D(
        filters=int(out_tensor.shape[-1]), kernel_size=1, padding="same", use_bias=False
    )(input_tensor)
    shortcut_y = tf.keras.layers.BatchNormalization()(shortcut_y)

    x = tf.keras.layers.Add()([shortcut_y, out_tensor])
    x = tf.keras.layers.Activation("relu")(x)
    return x


def build_age_model(
    input_shape: Tuple[int, int],
    nb_classes: int,
    depth: int = 6,
    use_residual: bool = True,
) -> tf.keras.models.Model:
    """
    InceptionTime architecture proposed by H. Ismail Fawaz et al. (2019),
    "InceptionTime: Finding AlexNet for Time Series Classification",
    configured here for age regression (linear output, MAE loss).
    """
    input_layer = tf.keras.layers.Input(input_shape)

    x = input_layer
    input_res = input_layer

    for d in range(depth):
        x = _inception_module(x)

        # Add a residual shortcut every third inception module.
        if use_residual and d % 3 == 2:
            x = _shortcut_layer(input_res, x)
            input_res = x

    gap_layer = tf.keras.layers.GlobalAveragePooling1D()(x)
    output_layer = tf.keras.layers.Dense(units=nb_classes, activation="linear")(
        gap_layer
    )

    model = tf.keras.models.Model(inputs=input_layer, outputs=output_layer)
    model.compile(
        loss=tf.keras.losses.MeanAbsoluteError(),
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
        metrics=[tf.keras.metrics.MeanSquaredError()],
    )
    return model


def build_gender_model(
    input_shape: Tuple[int, int],
    nb_classes: int,
    depth: int = 6,
    use_residual: bool = True,
) -> tf.keras.models.Model:
    """
    InceptionTime architecture proposed by H. Ismail Fawaz et al. (2019),
    "InceptionTime: Finding AlexNet for Time Series Classification",
    configured here for gender classification (sigmoid output, binary cross-entropy).
    """
    input_layer = tf.keras.layers.Input(input_shape)

    x = input_layer
    input_res = input_layer

    for d in range(depth):
        x = _inception_module(x)

        # Add a residual shortcut every third inception module.
        if use_residual and d % 3 == 2:
            x = _shortcut_layer(input_res, x)
            input_res = x

    gap_layer = tf.keras.layers.GlobalAveragePooling1D()(x)
    output_layer = tf.keras.layers.Dense(units=nb_classes, activation="sigmoid")(
        gap_layer
    )

    model = tf.keras.models.Model(inputs=input_layer, outputs=output_layer)
    model.compile(
        loss=tf.keras.losses.BinaryCrossentropy(),
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
        metrics=[tf.keras.metrics.AUC(curve="ROC", name="AUROC")],
    )
    return model
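

# Minimal usage sketch (not part of the module above): the input shape and the
# number of output units are assumed values chosen for illustration only, e.g.
# 1000 time steps with 12 channels and a single output unit per task. Adjust
# them to the actual dataset before training.
if __name__ == "__main__":
    example_input_shape = (1000, 12)  # (time steps, channels) -- assumed values

    # Build one regression model and one classification model and inspect them.
    age_model = build_age_model(example_input_shape, nb_classes=1)
    gender_model = build_gender_model(example_input_shape, nb_classes=1)

    age_model.summary()
    gender_model.summary()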