import os
import tensorflow as tf
import pandas as pd
import matplotlib.pyplot as plt
import time
import numpy as np
from tensorflow.keras.models import save_model

from nets.conv_net import ConvModel
from utils.data_generator import train_val_generator
from utils.image_plot import plot_images

# Select GPU: enumerate CUDA devices in PCI bus order and expose only
# device 0 to TensorFlow. NOTE(review): these must be set before TF/CUDA
# initializes, which is why they live at module import time.
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"] = "0"


def main():
    """Train ConvModel on the natural-scenes dataset and save the result.

    Loads the train/validation splits through the project's generator
    wrapper, previews a sample batch from each split, trains with SGD and
    categorical cross-entropy, plots the training history, and saves the
    trained model under ./models/ in a timestamped directory.
    """
    # Shared data-loading settings, kept in one place so the train and
    # validation generators cannot drift apart.
    data_dir = './dataset/natural-scenes/seg_train'
    target_size = (160, 160)
    batch_size = 64

    # Training split.
    train_gen = train_val_generator(
        data_dir=data_dir,
        target_size=target_size,
        batch_size=batch_size,
        class_mode='categorical',
        subset='training'
    )

    # Validation split (same directory; the wrapper performs the split).
    val_gen = train_val_generator(
        data_dir=data_dir,
        target_size=target_size,
        batch_size=batch_size,
        class_mode='categorical',
        subset='validation'
    )

    # Class names in label-index order, derived from the folder structure.
    class_names = list(train_gen.class_indices.keys())

    # Preview one batch from each split. next(gen) is the idiomatic form;
    # the legacy gen.next() method does the same thing.
    train_batch, train_label_batch = next(train_gen)
    plot_images(train_batch, train_label_batch, class_names)

    val_batch, val_label_batch = next(val_gen)
    plot_images(val_batch, val_label_batch, class_names)

    # Fresh, untrained model.
    model = ConvModel()

    # categorical_crossentropy matches class_mode='categorical' above
    # (one-hot labels); sparse_categorical_crossentropy would instead
    # require integer labels.
    model.compile(
        loss='categorical_crossentropy',
        optimizer=tf.keras.optimizers.SGD(learning_rate=0.01),
        metrics=['accuracy']
    )

    # Derive the step counts from the generators themselves — Keras
    # defines len(generator) as ceil(samples / batch_size) — instead of
    # hard-coding 119/30, so a changed dataset or batch size stays
    # correct automatically. Returns a History whose .history dict maps
    # each loss/metric name to its per-epoch values.
    history = model.fit(
        x=train_gen,
        steps_per_epoch=len(train_gen),
        epochs=40,
        validation_data=val_gen,
        validation_steps=len(val_gen),
        shuffle=True
    )

    # Plot the loss/accuracy curves for both splits across epochs.
    pd.DataFrame(history.history).plot(figsize=(8, 5))
    plt.grid(True)
    plt.xlabel('epoch')
    plt.show()

    # Reset metrics before saving so that the loaded model has the same
    # state, since metric states are not preserved by saving.
    model.reset_metrics()

    # Timestamped save path, e.g. models/model-2024-01-01-12-00-00.
    model_name = "model-" + time.strftime('%Y-%m-%d-%H-%M-%S')
    model_path = os.path.join('models', model_name)
    # exist_ok avoids the check-then-create race of os.path.exists().
    os.makedirs(model_path, exist_ok=True)

    model.save(model_path, save_format='tf')


# Run the training pipeline only when executed as a script, not on import.
if __name__ == '__main__':
    main()
