# Silence Python warnings and TensorFlow log noise BEFORE importing
# TensorFlow, so the import itself stays quiet.
import warnings
warnings.filterwarnings('ignore')
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'  # hide TF C++ INFO/WARNING messages
import tensorflow as tf
tf.compat.v1.logging.set_verbosity(40)  # 40 == logging.ERROR: show errors only

from tensorflow.keras import utils, optimizers, losses, metrics
from tensorflow.keras.layers import Dense, Activation, Conv2D, MaxPooling2D, Flatten

# ---- hyper-parameters --------------------------------------------------
batch_size = 128
nb_output = 10          # ten digit classes
epochs = 3

kernel_size = (5, 5)    # convolution window
pool_size = (2, 2)      # max-pooling window

# ---- MNIST data --------------------------------------------------------
# Load the raw 28x28 grayscale digits, add a trailing channel axis and
# scale pixel values from [0, 255] into [0, 1] as float32.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape((-1, 28, 28, 1)).astype('float32') / 255.0
x_test = x_test.reshape((-1, 28, 28, 1)).astype('float32') / 255.0

# One-hot encode the integer labels to match a softmax/categorical loss.
y_train = utils.to_categorical(y_train, num_classes=nb_output)
y_test = utils.to_categorical(y_test, num_classes=nb_output)

# ---- input pipelines ---------------------------------------------------
# Training pipeline: shuffle the full training set (buffer covers all
# 60000 samples) each epoch, then batch.
db_train = tf.data.Dataset.from_tensor_slices((x_train, y_train))
db_train = db_train.shuffle(60000).batch(batch_size)

# Evaluation pipeline: no shuffle — metric aggregation in evaluate() is
# order-independent, so shuffling the test set was pure wasted work.
db_test = tf.data.Dataset.from_tensor_slices((x_test, y_test))
db_test = db_test.batch(batch_size)

# ---- LeNet-5-style model (functional API) ------------------------------
inputs = tf.keras.Input((28, 28, 1))

# Feature extractor: two conv/pool stages.
# Wire in the kernel_size / pool_size constants defined above — they were
# previously dead config (the layers hard-coded the same values, so
# editing the constants had no effect).
x = Conv2D(6, kernel_size, activation='relu')(inputs)
x = MaxPooling2D(pool_size)(x)
# NOTE(review): classic LeNet-5 uses a 5x5 window here as well; this model
# uses 3x3 — kept as-is to preserve the trained architecture.
x = Conv2D(16, (3, 3), activation='relu')(x)
x = MaxPooling2D(pool_size)(x)

# Classifier head: flatten, two hidden dense layers, softmax over classes.
x = Flatten()(x)
x = Dense(120, activation='relu')(x)
x = Dense(84, activation='relu')(x)
outputs = Dense(nb_output, activation='softmax')(x)

model = tf.keras.Model(inputs, outputs)

model.summary()

# Compile with categorical cross-entropy (labels are one-hot) and Adam.
model.compile(loss=losses.CategoricalCrossentropy(),
              optimizer=optimizers.Adam(0.001),
              metrics=['accuracy'])

# BUG FIX: the `epochs` constant was never passed to fit(), so training
# silently ran for Keras' default of a single epoch.
model.fit(db_train, epochs=epochs)

# evaluate() returns [loss, accuracy] in the order compiled above.
score = model.evaluate(db_test)
print('loss:', score[0])
print('acc:', score[1])
