import tensorflow as tf
from tensorflow.keras import layers, models
from matplotlib import pyplot as plt
import pandas as pd
from utils.evalute_model_plot_metric import plot_metric
from utils.create_log import get_log


def load_image(file_path, size=(32, 32)):
    """
    Load a jpeg image and derive its binary label from the file path.

    :param file_path: scalar string tensor holding the path to a jpeg file;
        paths containing 'automobile' get label 1, all others label 0.
    :param size: (height, width) the decoded image is resized to.
    :return: (img, label) — img is a float tensor scaled to [0, 1],
        label is a scalar int8 tensor.
    """
    # regex_full_match returns a boolean tensor; casting it to int8 yields
    # 1/0 directly. The original Python `if` on that tensor only worked
    # because autograph happened to convert it inside Dataset.map — tf.cast
    # behaves identically in eager and graph mode.
    label = tf.cast(tf.strings.regex_full_match(file_path, '.*automobile.*'), tf.int8)
    img = tf.io.read_file(file_path)
    img = tf.image.decode_jpeg(img)  # decode jpeg bytes to a uint8 HWC tensor
    img = tf.image.resize(img, size) / 255.0  # resize and scale to [0, 1]
    return img, label


class CifarModel(object):
    """End-to-end cifar2 binary classifier: input pipelines, CNN definition,
    training with TensorBoard logging, evaluation and batch prediction."""

    def __init__(self):
        # Build the input pipelines once; they are reused for training,
        # evaluation and prediction.
        self.df_train, self.df_test = self.load_data()
        # Create the TensorBoard log directory for this run.
        log_path = '../data/logs/cifar_autograph'
        self.logdir = get_log(log_path=log_path)

    @staticmethod
    def load_data(batch_size=100):
        """
        Build the train/test tf.data pipelines from the on-disk jpeg files.

        :param batch_size: number of samples per batch.
        :return: (df_train, df_test) tf.data.Dataset objects yielding
            (image, label) batches.
        """
        train_file = '../data/cifar2/train/*/*.jpg'
        test_file = '../data/cifar2/test/*/*.jpg'

        # Hoisted: the same tuning constant is used four times below.
        autotune = tf.data.experimental.AUTOTUNE

        # Only the training pipeline is shuffled; the test pipeline keeps a
        # stable order so evaluation is repeatable.
        df_train = tf.data.Dataset.list_files(train_file).\
            map(load_image, num_parallel_calls=autotune).\
            shuffle(buffer_size=1000).batch(batch_size=batch_size).\
            prefetch(autotune)

        df_test = tf.data.Dataset.list_files(test_file).\
            map(load_image, num_parallel_calls=autotune).batch(batch_size=batch_size).\
            prefetch(autotune)

        return df_train, df_test

    def display(self):
        """
        Plot a 3x3 grid of sample training images with their labels.

        :return: None
        """
        plt.figure(figsize=(8, 8))
        for i, (img, label) in enumerate(self.df_train.unbatch().take(9)):
            ax = plt.subplot(3, 3, i + 1)
            ax.imshow(img.numpy())
            ax.set_title('label = {}'.format(label))
            ax.set_xticks([])
            ax.set_yticks([])
        plt.show()

    @staticmethod
    def tf_model(input_shape):
        """
        Build the CNN with the Keras functional API.

        :param input_shape: shape of a single input image, e.g. (32, 32, 3).
        :return: an uncompiled tf.keras Model with one sigmoid output unit.
        """
        # Drop state left over from any previously built model.
        tf.keras.backend.clear_session()

        inputs = layers.Input(shape=input_shape)
        # Fix: the original Conv2D layers had no activation, so the conv
        # stack applied no learned non-linearity before the dense head.
        x = layers.Conv2D(32, kernel_size=(3, 3), activation='relu')(inputs)
        x = layers.MaxPool2D()(x)
        x = layers.Conv2D(64, kernel_size=(5, 5), activation='relu')(x)
        x = layers.MaxPool2D()(x)
        x = layers.Dropout(rate=0.1)(x)
        x = layers.Flatten()(x)
        x = layers.Dense(32, activation='relu')(x)
        outputs = layers.Dense(1, activation='sigmoid')(x)

        return models.Model(inputs=inputs, outputs=outputs)

    def train_model(self, model, learning_rate=0.001):
        """
        Compile the model and fit it on the training pipeline.

        :param model: uncompiled tf.keras Model.
        :param learning_rate: Adam learning rate.
        :return: the Keras History object produced by fit().
        """
        tensorboard_callback = tf.keras.callbacks.TensorBoard(self.logdir, histogram_freq=1)
        model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=learning_rate),
                      loss=tf.keras.losses.binary_crossentropy,
                      metrics=['accuracy'])
        # Validate against the held-out test pipeline after every epoch.
        return model.fit(self.df_train, epochs=10, validation_data=self.df_test,
                         callbacks=[tensorboard_callback], workers=4)

    def evalute_model(self, model, history):
        """
        Summarize the training history and evaluate on the test set.

        NOTE(review): the method name keeps the original spelling
        ("evalute") so existing callers are not broken.

        :param model: trained tf.keras Model.
        :param history: History object returned by train_model().
        :return: None
        """
        # Inspect the run in TensorBoard:
        #   tensorboard --logdir=<self.logdir> --host=127.0.0.1
        df_history = pd.DataFrame(history.history)
        df_history.index = range(1, len(df_history) + 1)
        df_history.index.name = 'epoch'
        print(df_history)

        plot_metric(history=history, metric='loss')
        plot_metric(history=history, metric='accuracy')

        val_loss, val_accuracy = model.evaluate(self.df_test, workers=4)
        print(val_loss, val_accuracy)

    def main(self):
        """Build, train and evaluate the model, then run one batch predict."""
        # Build the model
        model = self.tf_model(input_shape=(32, 32, 3))
        # Train the model
        history = self.train_model(model=model)
        # Evaluate the model
        self.evalute_model(model=model, history=history)

        # predict
        # pre = model.predict(self.df_test)
        # batch predict on the first 20 images of one test batch
        for x, y in self.df_test.take(1):
            print(model.predict_on_batch(x[: 20]))

        # model save and load
        # model_path = '../data/models/tf_model_cifar2'
        # model.save(model_path, save_format='tf')
        # model_load = tf.keras.models.load_model(model_path)
        # model_load = tf.keras.models.load_model(model_path)


def run():
    """Script entry point: construct the model wrapper and run the full
    build/train/evaluate/predict pipeline."""
    CifarModel().main()


# Execute only when run as a script, not when imported as a module.
if __name__ == '__main__':
    run()
