# %%
import tensorflow as tf
from tensorflow import keras
import numpy as np
from matplotlib import pyplot as plt
# Enable on-demand GPU memory growth so TensorFlow does not grab all GPU
# memory at startup. Guard the device list: on a CPU-only machine
# list_physical_devices('GPU') returns [], and indexing [0] would raise
# IndexError before training ever starts.
physical_devices = tf.config.list_physical_devices('GPU')
if physical_devices:
  tf.config.experimental.set_memory_growth(physical_devices[0], True)
# %%
# CIFAR-10 via Keras: images are uint8 arrays of shape (N, 32, 32, 3),
# labels are integer class ids of shape (N, 1).
(train_x, train_y), (test_x, test_y) = keras.datasets.cifar10.load_data()


# %%
def make_blk(in_x,
             filters,
             kernel_size,
             activation='relu',
             scale=None,
             use_elu=False):
  """Conv block: SeparableConv2D -> BatchNorm -> activation -> optional rescale.

  Args:
    in_x: input Keras tensor.
    filters: number of output channels for the separable convolution.
    kernel_size: kernel size passed to SeparableConv2D (no padding, so the
      spatial dims shrink by kernel_size - 1).
    activation: 'relu', 'elu', or 'leakyrelu' (now matched
      case-insensitively); any other value applies no activation layer.
    scale: 'down' applies 2x2 MaxPool2D, 'up' applies 2x UpSampling2D,
      anything else leaves the spatial size unchanged.
    use_elu: DEAD PARAMETER — never read by the original implementation;
      kept only so existing call sites stay valid. Use activation='elu'.

  Returns:
    The block's output tensor.
  """
  del use_elu  # explicitly discard the unused compatibility parameter

  x = keras.layers.SeparableConv2D(filters, kernel_size)(in_x)
  x = keras.layers.BatchNormalization()(x)

  # Dispatch table replaces the if/elif chain; unknown names fall through
  # with no activation, exactly as before. Lowercasing is a backward-
  # compatible generalization (all previously-working inputs unchanged).
  activation_layers = {
      'relu': keras.layers.ReLU,
      'elu': keras.layers.ELU,
      'leakyrelu': keras.layers.LeakyReLU,
  }
  layer_cls = activation_layers.get(str(activation).lower())
  if layer_cls is not None:
    x = layer_cls()(x)

  if scale == 'down':
    x = keras.layers.MaxPool2D()(x)
  elif scale == 'up':
    x = keras.layers.UpSampling2D()(x)
  return x


# %%
# Build the classifier: three downsampling ELU conv blocks followed by a
# small dense head over the flattened features.
inputs = keras.layers.Input((32, 32, 3))
h = inputs
for blk_filters, blk_kernel in ((50, 7), (100, 5), (200, 3)):
  h = make_blk(h, blk_filters, blk_kernel, scale='down', activation='elu')
h = keras.layers.Flatten()(h)
h = keras.layers.Dropout(0.1)(h)
h = keras.layers.Dense(200)(h)
h = keras.layers.ELU()(h)
outputs = keras.layers.Dense(10, activation='softmax')(h)

model = keras.models.Model(inputs=[inputs], outputs=[outputs])
# Integer labels straight from load_data(), hence the sparse CCE loss.
model.compile('rmsprop',
              loss=keras.losses.sparse_categorical_crossentropy,
              metrics=['accuracy'])
model.summary()

# %%
# NOTE(review): train_x/test_x are fed as raw uint8 pixels (0-255) straight
# from load_data() — no scaling to [0, 1] is visible anywhere in this file;
# confirm that is intended, as unnormalized inputs typically slow training.
# Integer labels match the sparse_categorical_crossentropy loss above.
model.fit(train_x,train_y,epochs=10,validation_data=(test_x,test_y))
