# -*- coding:utf-8 -*-
from tensorflow.keras.models import Model as KerasModel
from tensorflow.keras.layers import Input, Dense, Flatten, Conv2D, MaxPooling2D, BatchNormalization, Dropout, Reshape, Concatenate, LeakyReLU
from tensorflow.keras.optimizers import Adam
IMGWIDTH = 256
class Classifier:
    """Thin wrapper around a compiled Keras model."""
    def __init__(self):
        self.model = None

    def predict(self, x):
        if x.size == 0:
            return []
        return self.model.predict(x)

    def fit(self, x, y):
        return self.model.train_on_batch(x, y)

    def get_accuracy(self, x, y):
        return self.model.test_on_batch(x, y)

    def load(self, path):
        self.model.load_weights(path)
class Meso1(Classifier):
    """
    Feature extraction + Classification
    """
    def __init__(self, learning_rate = 0.001, dl_rate = 1):
        self.model = self.init_model(dl_rate)
        optimizer = Adam(learning_rate = learning_rate)
        self.model.compile(optimizer = optimizer, loss = 'mean_squared_error', metrics = ['accuracy'])

    def init_model(self, dl_rate):
        x = Input(shape = (IMGWIDTH, IMGWIDTH, 3))

        x1 = Conv2D(16, (3, 3), dilation_rate = dl_rate, strides = 1, padding='same', activation = 'relu')(x)
        x1 = Conv2D(4, (1, 1), padding='same', activation = 'relu')(x1)
        x1 = BatchNormalization()(x1)
        x1 = MaxPooling2D(pool_size=(8, 8), padding='same')(x1)

        y = Flatten()(x1)
        y = Dropout(0.5)(y)
        y = Dense(1, activation = 'sigmoid')(y)

        return KerasModel(inputs = x, outputs = y)
class Meso4(Classifier):
    def __init__(self, learning_rate = 0.001):
        self.model = self.init_model()
        optimizer = Adam(learning_rate = learning_rate)
        self.model.compile(optimizer = optimizer, loss = 'mean_squared_error', metrics = ['accuracy'])

    def init_model(self):
        x = Input(shape = (IMGWIDTH, IMGWIDTH, 3))

        x1 = Conv2D(8, (3, 3), padding='same', activation = 'relu')(x)
        x1 = BatchNormalization()(x1)
        x1 = MaxPooling2D(pool_size=(2, 2), padding='same')(x1)

        x2 = Conv2D(8, (5, 5), padding='same', activation = 'relu')(x1)
        x2 = BatchNormalization()(x2)
        x2 = MaxPooling2D(pool_size=(2, 2), padding='same')(x2)

        x3 = Conv2D(16, (5, 5), padding='same', activation = 'relu')(x2)
        x3 = BatchNormalization()(x3)
        x3 = MaxPooling2D(pool_size=(2, 2), padding='same')(x3)

        x4 = Conv2D(16, (5, 5), padding='same', activation = 'relu')(x3)
        x4 = BatchNormalization()(x4)
        x4 = MaxPooling2D(pool_size=(4, 4), padding='same')(x4)

        y = Flatten()(x4)
        y = Dropout(0.5)(y)
        y = Dense(16)(y)
        y = LeakyReLU(negative_slope=0.1)(y)
        y = Dropout(0.5)(y)
        y = Dense(1, activation = 'sigmoid')(y)

        return KerasModel(inputs = x, outputs = y)
class MesoInception4(Classifier):
    def __init__(self, learning_rate = 0.001):
        self.model = self.init_model()
        optimizer = Adam(learning_rate = learning_rate)
        self.model.compile(optimizer = optimizer, loss = 'mean_squared_error', metrics = ['accuracy'])

    def InceptionLayer(self, a, b, c, d):
        def func(x):
            x1 = Conv2D(a, (1, 1), padding='same', activation='relu')(x)

            x2 = Conv2D(b, (1, 1), padding='same', activation='relu')(x)
            x2 = Conv2D(b, (3, 3), padding='same', activation='relu')(x2)

            x3 = Conv2D(c, (1, 1), padding='same', activation='relu')(x)
            x3 = Conv2D(c, (3, 3), dilation_rate = 2, strides = 1, padding='same', activation='relu')(x3)

            x4 = Conv2D(d, (1, 1), padding='same', activation='relu')(x)
            x4 = Conv2D(d, (3, 3), dilation_rate = 3, strides = 1, padding='same', activation='relu')(x4)

            y = Concatenate(axis = -1)([x1, x2, x3, x4])

            return y
        return func

    def init_model(self):
        x = Input(shape = (IMGWIDTH, IMGWIDTH, 3))

        x1 = self.InceptionLayer(1, 4, 4, 2)(x)
        x1 = BatchNormalization()(x1)
        x1 = MaxPooling2D(pool_size=(2, 2), padding='same')(x1)

        x2 = self.InceptionLayer(2, 4, 4, 2)(x1)
        x2 = BatchNormalization()(x2)
        x2 = MaxPooling2D(pool_size=(2, 2), padding='same')(x2)

        x3 = Conv2D(16, (5, 5), padding='same', activation = 'relu')(x2)
        x3 = BatchNormalization()(x3)
        x3 = MaxPooling2D(pool_size=(2, 2), padding='same')(x3)

        x4 = Conv2D(16, (5, 5), padding='same', activation = 'relu')(x3)
        x4 = BatchNormalization()(x4)
        x4 = MaxPooling2D(pool_size=(4, 4), padding='same')(x4)

        y = Flatten()(x4)
        y = Dropout(0.5)(y)
        y = Dense(16)(y)
        y = LeakyReLU(negative_slope=0.1)(y)
        y = Dropout(0.5)(y)
        y = Dense(1, activation = 'sigmoid')(y)

        return KerasModel(inputs = x, outputs = y)
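
# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original module). It builds the
# MesoInception4 classifier and scores a random batch, just to show the
# expected input/output shapes. The weight path below is a placeholder, not a
# file that ships with this code; point it at your own pretrained weights.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    import numpy as np

    classifier = MesoInception4(learning_rate = 0.001)
    # classifier.load('path/to/pretrained_weights.h5')  # placeholder path

    batch = np.random.rand(4, IMGWIDTH, IMGWIDTH, 3).astype('float32')
    scores = classifier.predict(batch)  # shape (4, 1), sigmoid scores in [0, 1]
    print(scores)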