|
|
|
import time

import numpy as np
import scipy.io as sio
import tensorflow as tf
from sklearn.model_selection import train_test_split
from tensorflow_addons.optimizers import AdamW
|
|
|
def residual_block(x, filters, dilation_rate, kernel_size=3):
    """Pre-activation residual block built from dilated separable convs.

    Applies two identical units of BN -> ReLU -> SeparableConv2D
    (depth_multiplier=2, dilated) -> 1x1 Conv2D, then adds the skip
    connection (projected with a 1x1 conv when the channel count
    changes) and finishes with a ReLU.

    Args:
        x: input feature map tensor.
        filters: channel width of the block's output.
        dilation_rate: dilation for the separable convolutions.
        kernel_size: spatial kernel size of the separable convs.

    Returns:
        Output tensor with ``filters`` channels.
    """
    skip = x
    out = x

    # Two identical pre-activation conv units.
    for _ in range(2):
        out = tf.keras.layers.BatchNormalization()(out)
        out = tf.keras.layers.ReLU()(out)
        out = tf.keras.layers.SeparableConv2D(
            filters, kernel_size, depth_multiplier=2,
            dilation_rate=dilation_rate, padding='same')(out)
        out = tf.keras.layers.Conv2D(filters, 1, padding='same')(out)

    # Project the skip path only when the channel depth differs.
    if skip.shape[-1] != filters:
        skip = tf.keras.layers.Conv2D(filters, 1, padding='same')(skip)

    out = tf.keras.layers.Add()([out, skip])
    return tf.keras.layers.ReLU()(out)
|
|
|
def build_resnet(input_shape, num_outputs=None):
    """Build the dilated residual CNN.

    Args:
        input_shape: shape of one input sample, e.g. ``(312, 14, 6)``.
        num_outputs: channel count of the final conv layer. Defaults to
            the module-level ``Morder`` global, which the original code
            read implicitly — kept as the fallback so existing callers
            are unaffected.

    Returns:
        An uncompiled ``tf.keras.Model`` whose outputs pass through a
        sigmoid, i.e. values in (0, 1) per output channel.
    """
    if num_outputs is None:
        # Backward-compatible fallback to the implicit global dependency.
        num_outputs = Morder

    inputs = tf.keras.layers.Input(shape=input_shape)

    # Stem: plain 3x3 conv + BN + ReLU, stride 1 keeps spatial size.
    x = tf.keras.layers.Conv2D(64, 3, strides=1, padding='same')(inputs)
    x = tf.keras.layers.BatchNormalization()(x)
    x = tf.keras.layers.ReLU()(x)

    # Three dilated residual blocks; middle block is the widest.
    x = residual_block(x, filters=128, dilation_rate=(2, 3))
    x = residual_block(x, filters=448, dilation_rate=(3, 6))
    x = residual_block(x, filters=128, dilation_rate=(2, 3))

    # Per-position multi-label head (matches binary_crossentropy loss).
    outputs = tf.keras.layers.Conv2D(num_outputs, 3, strides=1,
                                     padding='same')(x)
    outputs = tf.keras.activations.sigmoid(outputs)

    model = tf.keras.Model(inputs, outputs)
    return model
|
|
|
def load_data(m):
    """Load and stack the SNR1..SNRm input/label .mat file pairs.

    Each file stores an array under ``input_save`` / ``label_save`` with
    the sample axis last; it is moved to the front before stacking.

    Args:
        m: number of SNR file pairs to read (files ``SNR{n}_input.mat``
           and ``SNR{n}_label.mat`` for n = 1..m in the working dir).

    Returns:
        Tuple ``(inputs, labels)`` of arrays concatenated along the
        sample axis.
    """
    def _read(n, stem, key):
        # Move the trailing sample axis to the front: (..., N) -> (N, ...).
        arr = sio.loadmat(f"SNR{n}_{stem}.mat")[key]
        return np.transpose(arr, (3, 0, 1, 2))

    snrs = range(1, m + 1)
    inputs = np.concatenate([_read(n, "input", "input_save") for n in snrs])
    labels = np.concatenate([_read(n, "label", "label_save") for n in snrs])
    return inputs, labels
|
|
|
start = time.time()

# Input sample shape; presumably (subcarriers, OFDM symbols, channels)
# for the receiver grid -- TODO confirm against the .mat generator.
input_shape = (312, 14, 6)
Morder = 4        # output channels of the final conv layer
SNR_number = 10   # number of SNR{n}_*.mat file pairs to load


resnet_model = build_resnet(input_shape)

resnet_model.summary()


# AdamW: Adam with decoupled weight decay.
optimizer = AdamW(learning_rate=0.01, weight_decay=1e-4)

log_dir = "./log"
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir)

# BUG FIX: `callbacks` is not an argument of Model.compile() (it raised /
# was silently wrong); callbacks belong to Model.fit() below.
resnet_model.compile(optimizer=optimizer, loss='binary_crossentropy')

X_data, y_data = load_data(SNR_number)
print(X_data.shape, y_data.shape)
X_train, X_val, y_train, y_val = train_test_split(
    X_data, y_data, test_size=0.3, random_state=42)

resnet_model.fit(X_train, y_train, epochs=10, batch_size=20,
                 validation_data=(X_val, y_val),
                 callbacks=[tensorboard_callback])

resnet_model.save("deeprx.h5")
endt = time.time()
print(endt - start)