|
import keras |
|
from keras.layers import Dense, BatchNormalization |
|
from keras import regularizers |
|
from keras.optimizers import Adam |
|
from keras.callbacks import ModelCheckpoint, EarlyStopping |
|
|
|
import pandas as pd |
|
import numpy as np |
|
|
|
|
|
# --- Training hyperparameters -------------------------------------------
activation = 'relu'                  # hidden-layer activation
final_activation = 'sigmoid'         # output activation for binary classification
loss = 'binary_crossentropy'         # matches the sigmoid output
batchsize = 200                      # samples per gradient update
epochs = 100                         # upper bound; early stopping may cut this short
lr = 5e-05                           # Adam learning rate
|
|
|
|
|
# Fully-connected funnel: 300 -> 102 -> 12 -> 6 hidden units, each followed
# by batch normalization, ending in a single sigmoid unit for binary output.
# Every hidden layer carries an L1(0.001) kernel regularizer.
model = keras.Sequential()
for layer_index, units in enumerate((300, 102, 12, 6)):
    # Only the first layer needs the input dimensionality.
    extra_kwargs = {'input_dim': x_train.shape[1]} if layer_index == 0 else {}
    model.add(Dense(units=units,
                    activation=activation,
                    kernel_regularizer=regularizers.L1(0.001),
                    **extra_kwargs))
    model.add(BatchNormalization())
model.add(Dense(units=1, activation=final_activation))
|
|
|
# Compile with Adam at the configured learning rate; track accuracy and AUC
# alongside the binary cross-entropy loss.
optimizer = Adam(learning_rate=lr)
model.compile(optimizer=optimizer, loss=loss, metrics=['accuracy', 'AUC'])

# Print the layer-by-layer architecture and parameter counts.
model.summary()
|
|
|
|
|
|
|
# Persist only the best model seen so far, judged by minimum validation loss.
checkpoint_kwargs = dict(save_best_only=True, monitor='val_loss', mode='min')
saveModel = ModelCheckpoint('best_model.hdf5', **checkpoint_kwargs)
|
|
|
|
|
|
|
|
|
# Train with early stopping (patience 10 on val_loss) plus best-model
# checkpointing. restore_best_weights=True ensures that when patience
# triggers, the in-memory model is rolled back to the best epoch's weights
# instead of keeping the last (degraded) epoch — without it, the trained
# `model` object and the checkpointed 'best_model.hdf5' would disagree.
history = model.fit(
    x_train,
    y_train,
    batch_size=batchsize,
    callbacks=[EarlyStopping(verbose=True,
                             patience=10,
                             monitor='val_loss',
                             restore_best_weights=True),
               saveModel],
    epochs=epochs,
    validation_data=(x_val, y_val))