import time
import tensorflow as tf
from matplotlib import pyplot as plt
from tensorflow.keras.preprocessing import image_dataset_from_directory


PATH = '../data/flower_photos/'

BATCH_SIZE = 32
IMG_SIZE = (32, 32)

# Load images from class subdirectories; labels are inferred from the
# directory names (5 flower classes expected by the model's output layer).
flower_dataset = image_dataset_from_directory(PATH,
                                              shuffle=True,
                                              batch_size=BATCH_SIZE,
                                              image_size=IMG_SIZE)

# Split ~20% of the batches off, then halve that chunk into test/validation,
# giving roughly 80% train / 10% test / 10% validation.
# NOTE(review): take/skip on a shuffled dataset only yields disjoint splits
# if the shuffle order is fixed across iterations — confirm that
# image_dataset_from_directory's shuffle is a one-time file-order shuffle.
# Dataset.cardinality() supersedes the deprecated
# tf.data.experimental.cardinality().
total_batches = flower_dataset.cardinality()
temp_dataset = flower_dataset.take(total_batches // 5)
training_dataset = flower_dataset.skip(total_batches // 5)

held_out_batches = temp_dataset.cardinality()
test_dataset = temp_dataset.take(held_out_batches // 2)
validation_dataset = temp_dataset.skip(held_out_batches // 2)


def model():
    """Build and compile a small CNN classifier for 32x32 RGB images.

    Returns:
        A compiled ``tf.keras.Sequential`` model with a 5-way softmax
        output (one unit per flower class), using RMSprop and sparse
        categorical cross-entropy.
    """
    net = tf.keras.Sequential()
    # Three conv/pool/dropout stages with increasing dropout rates.
    net.add(tf.keras.layers.Conv2D(32, 3, input_shape=(32, 32, 3), padding='same', activation='relu'))
    net.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=1))
    net.add(tf.keras.layers.Dropout(rate=0.1))
    net.add(tf.keras.layers.Conv2D(32, 3, padding='same', activation='relu'))
    net.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=1))
    net.add(tf.keras.layers.Dropout(rate=0.2))
    net.add(tf.keras.layers.Conv2D(32, 3, padding='same', activation='relu'))
    net.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2), strides=1))
    net.add(tf.keras.layers.Dropout(rate=0.3))
    # Dense classification head.
    net.add(tf.keras.layers.Flatten())
    net.add(tf.keras.layers.Dense(128, activation='relu', kernel_initializer='glorot_uniform'))
    net.add(tf.keras.layers.Dropout(rate=0.1))
    net.add(tf.keras.layers.Dense(128, activation='relu', kernel_initializer='glorot_uniform'))
    net.add(tf.keras.layers.Dropout(rate=0.1))
    net.add(tf.keras.layers.Dense(32, activation='relu', kernel_initializer='glorot_uniform'))
    net.add(tf.keras.layers.Dense(5, activation='softmax', kernel_initializer='glorot_uniform'))
    # Bug fix: the output layer applies softmax, so the loss must be told it
    # receives probabilities, not logits. from_logits=True here made the loss
    # re-interpret probabilities as raw logits and silently hurt training.
    net.compile(optimizer='rmsprop',
                loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
                metrics=['accuracy'])
    return net


# Rebinds the name `model` from the builder function to the built instance.
model = model()
# summary() prints itself and returns None, so don't wrap it in print().
model.summary()
start_time = time.time()
# Bug fix: validate on the held-out validation split — the original passed
# the training set as validation_data, so val_* metrics just mirrored
# training performance and validation_dataset went unused.
history = model.fit(training_dataset, epochs=20, validation_data=validation_dataset)

# Per-epoch curves recorded by model.fit().
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']

loss = history.history['loss']
val_loss = history.history['val_loss']

# One figure, two stacked panels: accuracy on top, loss underneath.
fig, (ax_acc, ax_loss) = plt.subplots(2, 1, figsize=(8, 8))

ax_acc.plot(acc, label='Training Accuracy')
ax_acc.plot(val_acc, label='Validation Accuracy')
ax_acc.legend(loc='lower right')
ax_acc.set_ylabel('Accuracy')
# Keep the autoscaled lower bound but pin the top of the axis at 1.
ax_acc.set_ylim(min(ax_acc.get_ylim()), 1)
ax_acc.set_title('Training and Validation Accuracy')

ax_loss.plot(loss, label='Training Loss')
ax_loss.plot(val_loss, label='Validation Loss')
ax_loss.legend(loc='upper right')
ax_loss.set_ylabel('Cross Entropy')
ax_loss.set_ylim(0, 1.0)
ax_loss.set_title('Training and Validation Loss')
ax_loss.set_xlabel('epoch')
plt.show()

end_time = time.time()
# Bug fix: elapsed time is end minus start; the original computed
# start - end, which always printed a negative duration.
elapsed = end_time - start_time
print("训练时间花费了：", elapsed)  # "Training took (seconds):"
model.save('../data/eight_week_model.h5')
# evaluate() returns [loss, accuracy]; index 1 is the accuracy metric.
score = model.evaluate(test_dataset)
print("The evaluate is :{0:.3f}".format(score[1]))
