import numpy as np
import tensorflow as tf
import tensorflow.keras as keras
from tensorflow.keras import losses, optimizers, metrics, layers, activations, utils
import os
import sys
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

N_CLS = 3      # number of classes (filenames encode labels 0..2)
N_EPOCH = 100

# Fix both RNG seeds so the split and the weight initialization are reproducible.
np.random.seed(777)
tf.random.set_seed(777)

# Directory of labeled images; each filename starts with its class digit,
# so the label is recovered from the first character.
data_dir = '../../../../../large_data/DL1/_many_files/data3'
x = []
y = []
# Sort the listing: os.listdir() order is filesystem-dependent, and an
# unsorted order would silently defeat the fixed seeds above.
# (Renamed from `dir`, which shadowed the builtin.)
for fname in sorted(os.listdir(data_dir)):
    if fname == '.nomedia':
        continue
    img = plt.imread(os.path.join(data_dir, fname))
    x.append(img)
    y.append(int(fname[0]))  # leading character of the filename is the class label
x = np.uint8(x)  # stack the image list into one uint8 array
y = np.uint8(y)

# 70/30 train/test split; random_state matches the global seed for reproducibility.
x_train, x_test, y_train, y_test = train_test_split(
    x, y, train_size=0.7, random_state=777)
print(x_train.shape, y_train.shape)
print(x_test.shape, y_test.shape)


def _to_unit_float(a):
    # Scale uint8 pixel values [0, 255] into float32 [0, 1].
    return a.astype(np.float32) / 255.


x_train = _to_unit_float(x_train)
x_test = _to_unit_float(x_test)
# One-hot encode the integer labels for the softmax / categorical-crossentropy head.
y_train = utils.to_categorical(y_train, N_CLS)
y_test = utils.to_categorical(y_test, N_CLS)
print(x_train.shape, y_train.shape)
print(x_test.shape, y_test.shape)

# LeNet-style CNN: two Conv/BatchNorm/ReLU/MaxPool stages, then a small MLP head.
# NOTE(review): the per-layer shape annotations below assume 28x28 single-channel
# input — TODO confirm against the actual data3 image dimensions.
model = keras.Sequential()
model.add(layers.Conv2D(6, (5, 5)))                              # -> 24x24x6 (assumed)
model.add(layers.BatchNormalization())
model.add(layers.ReLU())
model.add(layers.MaxPool2D(strides=(2, 2), padding='same'))      # -> 12x12x6
model.add(layers.Conv2D(16, (3, 3)))                             # -> 10x10x16
model.add(layers.BatchNormalization())
model.add(layers.ReLU())
model.add(layers.MaxPool2D(strides=(2, 2), padding='same'))      # -> 5x5x16
model.add(layers.Flatten())
model.add(layers.Dense(120, activation=activations.relu))
model.add(layers.Dense(84, activation=activations.relu))
model.add(layers.Dense(N_CLS, activation=activations.softmax))   # class probabilities

# Categorical cross-entropy matches the one-hot labels and the softmax output.
model.compile(
    loss=losses.categorical_crossentropy,
    optimizer=optimizers.Adam(),
    # compile() documents `metrics` as a *list* of metrics; passing a bare
    # callable relies on undocumented tolerance and breaks on newer Keras.
    metrics=[metrics.categorical_accuracy],
)

# Full-batch training: one gradient step per epoch over the entire training
# set (batch_size equals the dataset size — presumably small enough to fit
# in memory; verify for larger datasets).
model.fit(x_train, y_train, batch_size=len(x_train), epochs=N_EPOCH)

# Evaluate on the held-out split, likewise in a single batch.
result = model.evaluate(x_test, y_test, batch_size=len(x_test))
print(result)
