import tensorflow as tf
import glob
import numpy as np
from tensorflow.keras import Input
from tensorflow.keras import Sequential
from tensorflow.keras.layers import Dense, Conv2D, BatchNormalization, MaxPool2D, GlobalMaxPool2D
import tensorflow.keras
from tensorflow.keras.applications.densenet import DenseNet121
from tensorflow.keras.applications.resnet import ResNet50


def normalize(image):
    """Zero-center *image* and scale it to unit variance.

    NOTE(review): currently unused -- the tf.data pipeline calls
    tf.image.per_image_standardization in `load_img` instead.
    """
    centered = image - np.mean(image)
    # np.std computes sqrt(mean((x - mean)**2)), the same population
    # standard deviation the original spelled out by hand.
    return centered / np.std(image)


# Collect every JPEG laid out as CharSample_Gen_Deal\<class-dir>\<file>.jpg.
# NOTE(review): the hard-coded backslash separators make this Windows-only;
# split("\\")[1] below pulls out the class-directory name as the label.
image_path = glob.glob('CharSample_Gen_Deal\\*\\*.jpg')
all_labels_name = [image_name.split("\\")[1] for image_name in image_path]
# label_names = np.unique(all_labels_name)
# Directory names are assumed to be integer class ids -- TODO confirm
# against the actual dataset layout.
all_labels = [int(label) for label in all_labels_name]

# Shuffle paths and labels with the same fixed-seed permutation so the
# pairing is preserved and the train/test split is reproducible.
np.random.seed(2021)
random_index = np.random.permutation(len(image_path))
imgs_path = np.array(image_path)[random_index]
all_labels = np.array(all_labels)[random_index]

# 80/20 train/test split on the shuffled order.
i = int(len(imgs_path) * 0.8)
train_path = imgs_path[:i]
train_labels = all_labels[:i]

test_path = imgs_path[i:]
test_labels = all_labels[i:]

# Wrap (path, label) pairs as tf.data pipelines; decoding happens later
# in a map() step.
train_ds = tf.data.Dataset.from_tensor_slices((train_path, train_labels))
test_ds = tf.data.Dataset.from_tensor_slices((test_path, test_labels))


def load_img(path, label):
    """Load one grayscale JPEG and return it standardized with its label.

    Args:
        path: scalar string tensor -- path to a .jpg file on disk.
        label: passed through unchanged.

    Returns:
        (image, label) where image is a float32 (70, 70, 1) tensor with
        per-image zero mean and unit variance.
    """
    image = tf.io.read_file(path)
    image = tf.image.decode_jpeg(image, channels = 1)
    image = tf.image.resize(image, (70, 70))
    image = tf.cast(image, tf.float32)
    # BUG FIX: the original called per_image_standardization but discarded
    # its return value (tf ops are not in-place), so the model received
    # raw pixel values. Assign the result back.
    image = tf.image.per_image_standardization(image)
    # label = tf.one_hot(label, depth = 200)
    return image, label


# Decode and preprocess each (path, label) pair via load_img.
train_ds = train_ds.map(load_img)
test_ds = test_ds.map(load_img)

BATCH_SIZE = 64
# repeat() makes the training stream infinite; each epoch is bounded by
# steps_per_epoch below.  NOTE(review): a shuffle buffer of 300 gives
# only approximate shuffling if the dataset is much larger than that.
train_ds = train_ds.repeat().shuffle(300).batch(BATCH_SIZE)
test_ds = test_ds.batch(BATCH_SIZE)

train_count = len(train_path)
test_count = len(test_path)

# Whole batches per epoch / per validation pass (final partial batch is
# dropped by the floor division).
steps_per_epoch = train_count // BATCH_SIZE
validation_steps = test_count // BATCH_SIZE

# Plain CNN classifier for 70x70 grayscale character images, ending in a
# 200-way softmax.  (An earlier experiment used a pretrained DenseNet121
# backbone here instead of the hand-built stack.)
model = Sequential([
    Conv2D(64, (3, 3), input_shape=(70, 70, 1), activation='relu'),
    BatchNormalization(),
    Conv2D(64, (3, 3), activation='relu'),
    BatchNormalization(),
    MaxPool2D(),
    Conv2D(128, (3, 3), activation='relu'),
    BatchNormalization(),
    Conv2D(128, (3, 3), activation='relu'),
    BatchNormalization(),
    Conv2D(256, (3, 3), activation='relu'),
    BatchNormalization(),
    Conv2D(512, (3, 3), activation='relu'),
    BatchNormalization(),
    Conv2D(512, (3, 3), activation='relu'),
    BatchNormalization(),
    Conv2D(1024, (3, 3), activation='relu'),
    GlobalMaxPool2D(),
    Dense(1000),
    BatchNormalization(),
    Dense(200, activation='softmax'),
])

# BUG FIX: the original froze every layer except the final Dense.  That
# is a leftover from the (commented-out) pretrained-DenseNet121
# transfer-learning experiment; with a randomly initialized CNN it would
# train only the last classifier layer on top of random, frozen features,
# so the network could never learn.  Keep the whole model trainable.
for layer in model.layers:
    layer.trainable = True

# The final layer already applies softmax, so the loss must consume
# probabilities.  BUG FIX: the original passed from_logits=True, which
# applies a second, implicit softmax inside the loss and badly flattens
# the gradients.
model.compile(optimizer = tf.keras.optimizers.Adam(0.0001),
              loss = tf.losses.SparseCategoricalCrossentropy(from_logits = False),
              metrics = ['acc'])

# train_ds repeats forever, so steps_per_epoch / validation_steps bound
# each epoch and each validation pass explicitly.
history = model.fit(train_ds, epochs = 100, steps_per_epoch = steps_per_epoch,
                    validation_data = test_ds, validation_steps = validation_steps)
