import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras import losses, optimizers, metrics, layers, activations, utils

# Training hyperparameters.
N_CLS = 10        # number of output classes (MNIST digits 0-9)
BATCH_SIZE = 128  # mini-batch size for both fit() and evaluate()
N_EPOCH = 2       # passes over the training set (kept small for a quick demo)

# Load the MNIST digit dataset: 60k training / 10k test grayscale 28x28 images
# with integer class labels.
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
print(x_train.shape, y_train.shape)  # 60000x28x28
print(x_test.shape, y_test.shape)

# Append a trailing channel axis (Conv2D expects HxWxC input) and rescale the
# uint8 pixel values into [0, 1] as float32.
x_train = x_train[..., np.newaxis].astype(np.float32) / 255.
x_test = x_test[..., np.newaxis].astype(np.float32) / 255.
# One-hot encode the integer labels for categorical cross-entropy.
y_train = utils.to_categorical(y_train, N_CLS)
y_test = utils.to_categorical(y_test, N_CLS)
print(x_train.shape, y_train.shape)  # 60000x28x28x1
print(x_test.shape, y_test.shape)

# LeNet-style CNN for 28x28x1 MNIST inputs: two conv/BN/ReLU/pool stages
# followed by three dense layers ending in a softmax over N_CLS classes.
model = keras.Sequential()
model.add(layers.Conv2D(6, (5, 5)))                           # -> 24x24x6
model.add(layers.BatchNormalization())
model.add(layers.ReLU())
model.add(layers.MaxPool2D(strides=(2, 2), padding='same'))   # -> 12x12x6
model.add(layers.Conv2D(16, (3, 3)))                          # -> 10x10x16
model.add(layers.BatchNormalization())
model.add(layers.ReLU())
model.add(layers.MaxPool2D(strides=(2, 2), padding='same'))   # -> 5x5x16
model.add(layers.Flatten())
model.add(layers.Dense(120, activation=activations.relu))
model.add(layers.Dense(84, activation=activations.relu))
model.add(layers.Dense(N_CLS, activation=activations.softmax))

# Configure training: categorical cross-entropy over the one-hot labels,
# Adam with its default learning rate, and per-epoch accuracy reporting.
# NOTE: `metrics` must be a list/tuple of metrics — passing a bare callable
# (as the original did) raises a ValueError in recent Keras versions.
model.compile(
    loss=losses.categorical_crossentropy,
    optimizer=optimizers.Adam(),
    metrics=[metrics.categorical_accuracy],
)

# Train for N_EPOCH passes over the full training set.
model.fit(x_train, y_train, batch_size=BATCH_SIZE, epochs=N_EPOCH)

# Score on the held-out test set; evaluate() returns the loss followed by
# the values of the compiled metrics.
r = model.evaluate(x_test, y_test, batch_size=BATCH_SIZE)
print(r)
