import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from keras_cv_attention_models import efficientnet

# Mixed precision: float16 compute with float32 variables. The global policy is
# set before any layers are built so it applies to the whole model.
policy = tf.keras.mixed_precision.Policy("mixed_float16")
tf.keras.mixed_precision.set_global_policy(policy)

# Clip dimensions: each sample is `image_frames` RGB frames of
# image_size x image_size pixels. The constants must be defined before
# input_shape uses them, and a fixed spatial size is needed so the Flatten()
# in the per-frame encoder has a defined dimension.
image_frames = 60
image_size = 384
input_shape = (image_frames, image_size, image_size, 3)

# EfficientNetV2-S backbone (ImageNet-21k pretrained, fine-tuned on ImageNet-1k),
# with num_classes=0 so it returns feature maps instead of class logits.
backbone_path = 'efficientnetv2-s-21k-ft1k.h5'
backbone = efficientnet.EfficientNetV2S(
    pretrained=backbone_path, dropout=1e-6, num_classes=0, include_preprocessing=True
)
backbone.summary()
backbone.trainable = False  # freeze the backbone; only the new head is trained

# Per-frame encoder: frozen backbone -> flatten -> 32-d embedding.
inputs = keras.Input(shape=input_shape)
backbone_inputs = keras.Input(shape=(image_size, image_size, 3))
y = backbone(backbone_inputs)
y = layers.Flatten()(y)
y = layers.Dense(32, activation="relu")(y)
y = layers.Dropout(0.1)(y)
frame_encoder = keras.Model(backbone_inputs, y)

# Apply the frame encoder to every frame in the clip.
x = layers.TimeDistributed(frame_encoder)(inputs)
x = layers.Dropout(0.1)(x)
# Temporal modelling over the sequence of frame embeddings with stacked LSTMs.
x = layers.LSTM(128, return_sequences=True)(x)
x = layers.Dropout(0.1)(x)
x = layers.LSTM(128, return_sequences=False)(x)
x = layers.Dropout(0.1)(x)
# MLP regression head tapering down to the 9 output values.
x = layers.Dense(128, activation="relu")(x)
x = layers.Dropout(0.1)(x)
x = layers.Dense(64, activation="relu")(x)
x = layers.Dropout(0.1)(x)
x = layers.Dense(48, activation="relu")(x)
x = layers.Dropout(0.1)(x)
x = layers.Dense(32, activation="relu")(x)
x = layers.Dropout(0.1)(x)
# Keep the output layer in float32, as recommended when training under a
# mixed_float16 policy.
outputs = layers.Dense(9, activation="relu", dtype="float32")(x)
model = keras.Model(inputs, outputs)
model.compile(
    optimizer=keras.optimizers.Adam(1e-3),
    loss="mean_squared_error",
    metrics=["mean_squared_error", "mean_absolute_error"]
)
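
# Minimal smoke-test sketch (not part of the original script): fit one random
# dummy clip to confirm the graph builds end to end. The dummy tensors below
# are assumptions for illustration; real training would use a tf.data pipeline
# of (clip, target) batches.
if __name__ == "__main__":
    dummy_clips = tf.random.uniform((1, image_frames, image_size, image_size, 3))
    dummy_targets = tf.random.uniform((1, 9))
    model.fit(dummy_clips, dummy_targets, batch_size=1, epochs=1, verbose=1)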