# -*- coding:utf-8 -*-
"""MesoNet classifiers (Meso1, Meso4, MesoInception4) for deepfake detection.

Each classifier wraps a small compiled Keras CNN that maps a 256x256 RGB
image to a single sigmoid score (probability of the positive class).
"""

from tensorflow.keras.models import Model as KerasModel
from tensorflow.keras.layers import (
    Input,
    Dense,
    Flatten,
    Conv2D,
    MaxPooling2D,
    BatchNormalization,
    Dropout,
    Reshape,
    Concatenate,
    LeakyReLU,
)
from tensorflow.keras.optimizers import Adam

# Side length (in pixels) of the square RGB input images.
IMGWIDTH = 256


class Classifier:
    """Thin wrapper around a compiled Keras model.

    Subclasses are expected to assign a compiled ``tensorflow.keras`` model
    to ``self.model`` in their ``__init__``.
    """

    def __init__(self):
        # Placeholder value; subclasses replace it with a compiled Keras model.
        # FIX: the original signature was ``def __init__():`` (missing ``self``),
        # which made direct instantiation raise TypeError.
        self.model = 0

    def predict(self, x):
        """Return model predictions for batch ``x``; ``[]`` for an empty batch.

        Assumes ``x`` is a numpy-like array exposing ``.size``.
        """
        if x.size == 0:
            return []
        return self.model.predict(x)

    def fit(self, x, y):
        """Run a single gradient update on batch ``(x, y)``.

        Returns whatever ``train_on_batch`` returns (loss and metrics).
        """
        return self.model.train_on_batch(x, y)

    def get_accuracy(self, x, y):
        """Evaluate the model on batch ``(x, y)`` without updating weights."""
        return self.model.test_on_batch(x, y)

    def load(self, path):
        """Load model weights from the checkpoint file at ``path``."""
        self.model.load_weights(path)


class Meso1(Classifier):
    """Feature extraction + Classification.

    Single-convolution-block variant of MesoNet.
    """

    def __init__(self, learning_rate=0.001, dl_rate=1):
        """Build and compile the model.

        Args:
            learning_rate: Adam learning rate.
            dl_rate: dilation rate of the first convolution.
        """
        self.model = self.init_model(dl_rate)
        # FIX: use the modern ``learning_rate`` keyword (the deprecated ``lr``
        # alias has been removed from recent Keras); this also matches the
        # other classifiers in this module.
        optimizer = Adam(learning_rate=learning_rate)
        self.model.compile(
            optimizer=optimizer,
            loss='mean_squared_error',
            metrics=['accuracy'],
        )

    def init_model(self, dl_rate):
        """Return the (uncompiled) Keras model graph for Meso1."""
        x = Input(shape=(IMGWIDTH, IMGWIDTH, 3))

        x1 = Conv2D(16, (3, 3), dilation_rate=dl_rate, strides=1,
                    padding='same', activation='relu')(x)
        x1 = Conv2D(4, (1, 1), padding='same', activation='relu')(x1)
        x1 = BatchNormalization()(x1)
        x1 = MaxPooling2D(pool_size=(8, 8), padding='same')(x1)

        y = Flatten()(x1)
        y = Dropout(0.5)(y)
        y = Dense(1, activation='sigmoid')(y)

        return KerasModel(inputs=x, outputs=y)


class Meso4(Classifier):
    """Four-convolution-block MesoNet classifier."""

    def __init__(self, learning_rate=0.001):
        """Build and compile the Meso4 model with an Adam optimizer."""
        self.model = self.init_model()
        optimizer = Adam(learning_rate=learning_rate)
        self.model.compile(
            optimizer=optimizer,
            loss='mean_squared_error',
            metrics=['accuracy'],
        )

    def init_model(self):
        """Return the (uncompiled) Keras model graph for Meso4."""
        x = Input(shape=(IMGWIDTH, IMGWIDTH, 3))

        x1 = Conv2D(8, (3, 3), padding='same', activation='relu')(x)
        x1 = BatchNormalization()(x1)
        x1 = MaxPooling2D(pool_size=(2, 2), padding='same')(x1)

        x2 = Conv2D(8, (5, 5), padding='same', activation='relu')(x1)
        x2 = BatchNormalization()(x2)
        x2 = MaxPooling2D(pool_size=(2, 2), padding='same')(x2)

        x3 = Conv2D(16, (5, 5), padding='same', activation='relu')(x2)
        x3 = BatchNormalization()(x3)
        x3 = MaxPooling2D(pool_size=(2, 2), padding='same')(x3)

        x4 = Conv2D(16, (5, 5), padding='same', activation='relu')(x3)
        x4 = BatchNormalization()(x4)
        x4 = MaxPooling2D(pool_size=(4, 4), padding='same')(x4)

        y = Flatten()(x4)
        y = Dropout(0.5)(y)
        y = Dense(16)(y)
        y = LeakyReLU(negative_slope=0.1)(y)
        y = Dropout(0.5)(y)
        y = Dense(1, activation='sigmoid')(y)

        return KerasModel(inputs=x, outputs=y)


class MesoInception4(Classifier):
    """MesoNet variant whose first two blocks are Inception-style modules."""

    def __init__(self, learning_rate=0.001):
        """Build and compile the MesoInception4 model with an Adam optimizer."""
        self.model = self.init_model()
        optimizer = Adam(learning_rate=learning_rate)
        self.model.compile(
            optimizer=optimizer,
            loss='mean_squared_error',
            metrics=['accuracy'],
        )

    def InceptionLayer(self, a, b, c, d):
        """Return a callable Inception-style block.

        Four parallel branches (1x1 conv; 1x1->3x3; 1x1->dilated 3x3 rate 2;
        1x1->dilated 3x3 rate 3) are concatenated along the channel axis.

        Args:
            a, b, c, d: filter counts for the four branches, respectively.
        """
        def func(x):
            x1 = Conv2D(a, (1, 1), padding='same', activation='relu')(x)

            x2 = Conv2D(b, (1, 1), padding='same', activation='relu')(x)
            x2 = Conv2D(b, (3, 3), padding='same', activation='relu')(x2)

            x3 = Conv2D(c, (1, 1), padding='same', activation='relu')(x)
            x3 = Conv2D(c, (3, 3), dilation_rate=2, strides=1,
                        padding='same', activation='relu')(x3)

            x4 = Conv2D(d, (1, 1), padding='same', activation='relu')(x)
            x4 = Conv2D(d, (3, 3), dilation_rate=3, strides=1,
                        padding='same', activation='relu')(x4)

            y = Concatenate(axis=-1)([x1, x2, x3, x4])
            return y
        return func

    def init_model(self):
        """Return the (uncompiled) Keras model graph for MesoInception4."""
        x = Input(shape=(IMGWIDTH, IMGWIDTH, 3))

        x1 = self.InceptionLayer(1, 4, 4, 2)(x)
        x1 = BatchNormalization()(x1)
        x1 = MaxPooling2D(pool_size=(2, 2), padding='same')(x1)

        x2 = self.InceptionLayer(2, 4, 4, 2)(x1)
        x2 = BatchNormalization()(x2)
        x2 = MaxPooling2D(pool_size=(2, 2), padding='same')(x2)

        x3 = Conv2D(16, (5, 5), padding='same', activation='relu')(x2)
        x3 = BatchNormalization()(x3)
        x3 = MaxPooling2D(pool_size=(2, 2), padding='same')(x3)

        x4 = Conv2D(16, (5, 5), padding='same', activation='relu')(x3)
        x4 = BatchNormalization()(x4)
        x4 = MaxPooling2D(pool_size=(4, 4), padding='same')(x4)

        y = Flatten()(x4)
        y = Dropout(0.5)(y)
        y = Dense(16)(y)
        y = LeakyReLU(negative_slope=0.1)(y)
        y = Dropout(0.5)(y)
        y = Dense(1, activation='sigmoid')(y)

        return KerasModel(inputs=x, outputs=y)