"""Keras CNN architectures for image classification (e.g. facial expression).

Defines five models that all map an image tensor of shape ``input_shape``
(assumed channels-last ``(height, width, channels)`` — confirm against the
training pipeline) to a ``num_classes``-way softmax named ``'predictions'``:

* ``simple_CNN`` / ``simpler_CNN`` — plain ``Sequential`` conv stacks.
* ``tiny_XCEPTION`` / ``mini_XCEPTION`` / ``big_XCEPTION`` — functional-API
  models with depthwise-separable convolutions and residual shortcuts,
  in the spirit of the Xception architecture.
"""

from keras import layers
from keras.layers import (Activation, AveragePooling2D, BatchNormalization,
                          Conv2D, Dropout, Flatten, GlobalAveragePooling2D,
                          Input, MaxPooling2D, SeparableConv2D)
from keras.models import Model, Sequential
from keras.regularizers import l2


def simple_CNN(input_shape, num_classes):
    """Build a plain sequential CNN.

    Five conv stages (16 -> 32 -> 64 -> 128 -> 256 filters), each stage being
    Conv-BN-Conv-BN-ReLU followed by average pooling and dropout, ending in a
    1-channel-per-class conv + global average pooling + softmax (no dense
    layers).

    Args:
        input_shape: input image shape, e.g. ``(48, 48, 1)``.
        num_classes: number of output classes.

    Returns:
        An uncompiled ``keras.models.Sequential`` model.
    """
    model = Sequential()
    model.add(Conv2D(filters=16, kernel_size=(7, 7), padding='same',
                     name='image_array', input_shape=input_shape))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=16, kernel_size=(7, 7), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(AveragePooling2D(pool_size=(2, 2), padding='same'))
    model.add(Dropout(.5))

    model.add(Conv2D(filters=32, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=32, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(AveragePooling2D(pool_size=(2, 2), padding='same'))
    model.add(Dropout(.5))

    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(AveragePooling2D(pool_size=(2, 2), padding='same'))
    model.add(Dropout(.5))

    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=128, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(AveragePooling2D(pool_size=(2, 2), padding='same'))
    model.add(Dropout(.5))

    model.add(Conv2D(filters=256, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    # Classifier head: one feature map per class, then global average pooling
    # collapses each map to a single logit.
    model.add(Conv2D(filters=num_classes, kernel_size=(3, 3), padding='same'))
    model.add(GlobalAveragePooling2D())
    model.add(Activation('softmax', name='predictions'))
    return model


def simpler_CNN(input_shape, num_classes):
    """Build a smaller sequential CNN that downsamples with strided convs.

    Instead of pooling layers, every second conv uses ``strides=(2, 2)``.
    The head flattens the final ``num_classes``-channel feature map and
    applies softmax.

    Args:
        input_shape: input image shape.
        num_classes: number of output classes.

    Returns:
        An uncompiled ``keras.models.Sequential`` model.
    """
    model = Sequential()
    model.add(Conv2D(filters=16, kernel_size=(5, 5), padding='same',
                     name='image_array', input_shape=input_shape))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=16, kernel_size=(5, 5),
                     strides=(2, 2), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dropout(.25))

    model.add(Conv2D(filters=32, kernel_size=(5, 5), padding='same'))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=32, kernel_size=(5, 5),
                     strides=(2, 2), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dropout(.25))

    model.add(Conv2D(filters=64, kernel_size=(3, 3), padding='same'))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=64, kernel_size=(3, 3),
                     strides=(2, 2), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dropout(.25))

    model.add(Conv2D(filters=64, kernel_size=(1, 1), padding='same'))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=128, kernel_size=(3, 3),
                     strides=(2, 2), padding='same'))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Dropout(.25))

    model.add(Conv2D(filters=256, kernel_size=(1, 1), padding='same'))
    model.add(BatchNormalization())
    # NOTE(review): this strided conv has no BatchNormalization after it,
    # unlike every other conv in this function — looks unintentional but is
    # preserved as-is to keep trained-weight compatibility.
    model.add(Conv2D(filters=128, kernel_size=(3, 3),
                     strides=(2, 2), padding='same'))
    model.add(Conv2D(filters=256, kernel_size=(1, 1), padding='same'))
    model.add(BatchNormalization())
    model.add(Conv2D(filters=num_classes, kernel_size=(3, 3),
                     strides=(2, 2), padding='same'))
    model.add(Flatten())
    # model.add(GlobalAveragePooling2D())
    model.add(Activation('softmax', name='predictions'))
    return model


def tiny_XCEPTION(input_shape, num_classes, l2_regularization=0.01):
    """Build a very small Xception-style model (4 residual modules, 5-64 filters).

    Each module is two SeparableConv2D+BN layers and a strided max-pool,
    summed with a strided 1x1-conv shortcut. L2 weight decay is applied to
    the main-path conv kernels (not to the shortcut convs or the head).

    Args:
        input_shape: input image shape.
        num_classes: number of output classes.
        l2_regularization: L2 factor for the regularized conv kernels.

    Returns:
        An uncompiled ``keras.models.Model``.
    """
    regularization = l2(l2_regularization)

    # base
    img_input = Input(input_shape)
    x = Conv2D(5, (3, 3), strides=(1, 1), kernel_regularizer=regularization,
               use_bias=False)(img_input)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(5, (3, 3), strides=(1, 1), kernel_regularizer=regularization,
               use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)

    # module 1
    residual = Conv2D(8, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(8, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(8, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # module 2
    residual = Conv2D(16, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(16, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(16, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # module 3
    residual = Conv2D(32, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(32, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(32, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # module 4
    residual = Conv2D(64, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(64, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(64, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # classifier head: per-class feature maps -> global average -> softmax
    x = Conv2D(num_classes, (3, 3),
               # kernel_regularizer=regularization,
               padding='same')(x)
    x = GlobalAveragePooling2D()(x)
    output = Activation('softmax', name='predictions')(x)

    model = Model(img_input, output)
    return model


def mini_XCEPTION(input_shape, num_classes, l2_regularization=0.01):
    """Build a small Xception-style model (4 residual modules, 8-128 filters).

    Same topology as ``tiny_XCEPTION`` with doubled filter counts.

    Args:
        input_shape: input image shape.
        num_classes: number of output classes.
        l2_regularization: L2 factor for the regularized conv kernels.

    Returns:
        An uncompiled ``keras.models.Model``.
    """
    regularization = l2(l2_regularization)

    # base
    img_input = Input(input_shape)
    x = Conv2D(8, (3, 3), strides=(1, 1), kernel_regularizer=regularization,
               use_bias=False)(img_input)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = Conv2D(8, (3, 3), strides=(1, 1), kernel_regularizer=regularization,
               use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)

    # module 1
    residual = Conv2D(16, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(16, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(16, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # module 2
    residual = Conv2D(32, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(32, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(32, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # module 3
    residual = Conv2D(64, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(64, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(64, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # module 4
    residual = Conv2D(128, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(128, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(128, (3, 3), padding='same',
                        kernel_regularizer=regularization,
                        use_bias=False)(x)
    x = BatchNormalization()(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # classifier head
    x = Conv2D(num_classes, (3, 3),
               # kernel_regularizer=regularization,
               padding='same')(x)
    x = GlobalAveragePooling2D()(x)
    output = Activation('softmax', name='predictions')(x)

    model = Model(img_input, output)
    return model


def big_XCEPTION(input_shape, num_classes):
    """Build a larger Xception-style model with named entry-flow blocks.

    Uses full Conv2D layers in the stem (32, 64 filters) and two separable
    residual modules (128, 256 filters). Layer names follow the Xception
    block naming (``block1_conv1_bn`` ... ``block3_sepconv2_bn``); unlike
    the tiny/mini variants, no L2 regularization is applied.

    Args:
        input_shape: input image shape.
        num_classes: number of output classes.

    Returns:
        An uncompiled ``keras.models.Model``.
    """
    img_input = Input(input_shape)
    x = Conv2D(32, (3, 3), strides=(2, 2), use_bias=False)(img_input)
    x = BatchNormalization(name='block1_conv1_bn')(x)
    x = Activation('relu', name='block1_conv1_act')(x)
    x = Conv2D(64, (3, 3), use_bias=False)(x)
    x = BatchNormalization(name='block1_conv2_bn')(x)
    x = Activation('relu', name='block1_conv2_act')(x)

    # block 2
    residual = Conv2D(128, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = SeparableConv2D(128, (3, 3), padding='same', use_bias=False)(x)
    x = BatchNormalization(name='block2_sepconv1_bn')(x)
    x = Activation('relu', name='block2_sepconv2_act')(x)
    x = SeparableConv2D(128, (3, 3), padding='same', use_bias=False)(x)
    x = BatchNormalization(name='block2_sepconv2_bn')(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # block 3
    residual = Conv2D(256, (1, 1), strides=(2, 2),
                      padding='same', use_bias=False)(x)
    residual = BatchNormalization()(residual)
    x = Activation('relu', name='block3_sepconv1_act')(x)
    x = SeparableConv2D(256, (3, 3), padding='same', use_bias=False)(x)
    x = BatchNormalization(name='block3_sepconv1_bn')(x)
    x = Activation('relu', name='block3_sepconv2_act')(x)
    x = SeparableConv2D(256, (3, 3), padding='same', use_bias=False)(x)
    x = BatchNormalization(name='block3_sepconv2_bn')(x)
    x = MaxPooling2D((3, 3), strides=(2, 2), padding='same')(x)
    x = layers.add([x, residual])

    # classifier head
    x = Conv2D(num_classes, (3, 3),
               # kernel_regularizer=regularization,
               padding='same')(x)
    x = GlobalAveragePooling2D()(x)
    output = Activation('softmax', name='predictions')(x)

    model = Model(img_input, output)
    return model


if __name__ == "__main__":
    input_shape = (64, 64, 1)
    num_classes = 7
    # model = tiny_XCEPTION(input_shape, num_classes)
    # model.summary()
    # model = mini_XCEPTION(input_shape, num_classes)
    # model.summary()
    # model = big_XCEPTION(input_shape, num_classes)
    # model.summary()
    model = simple_CNN((48, 48, 1), num_classes)
    model.summary()