import pathlib

import tensorflow as tf
import numpy as np
from tensorflow import keras


def standard(x, y):
    """Rescale image pixel values from [0, 255] to [0, 1]; labels pass through unchanged."""
    scaled = x / 255
    return scaled, y


if __name__ == '__main__':
    data_dir = pathlib.Path("../datasets/text_images/test")
    batch_size = 64
    img_height = 60
    img_width = 150

    # Load the whole test directory as one dataset.
    # NOTE: the original passed validation_split=0, which is a no-op
    # (falsy, so Keras carves out no subset); it is omitted here. A real
    # split would also require the `subset=` argument.
    test = tf.keras.utils.image_dataset_from_directory(
        data_dir,
        seed=1,
        image_size=(img_height, img_width),
        batch_size=batch_size)

    print(test.class_names)
    # Normalize pixels from [0, 255] to [0, 1].
    test = test.map(standard)

    # Build the model: ResNet-RS-50 backbone (no classifier head),
    # global pooling, two 256-unit ReLU layers, 50-way softmax output.
    # Input shape reuses img_height/img_width instead of hard-coded 60/150.
    inputs = keras.Input(shape=(img_height, img_width, 3))
    res_net = keras.applications.resnet_rs.ResNetRS50(include_top=False)
    res_net.trainable = True
    x = res_net(inputs)
    x = keras.layers.GlobalAveragePooling2D()(x)
    x = keras.layers.Dense(units=256, activation=keras.activations.relu, use_bias=True)(x)
    x = keras.layers.Dense(units=256, activation=keras.activations.relu, use_bias=True)(x)
    outputs = keras.layers.Dense(units=50, activation=keras.activations.softmax, use_bias=True)(x)

    model = keras.Model(inputs, outputs)
    model.compile(optimizer=tf.keras.optimizers.Adagrad(learning_rate=0.02),
                  loss=keras.losses.sparse_categorical_crossentropy,
                  metrics=["accuracy"])
    model.load_weights("./checkpoints/checkpoint3/")

    # Evaluate the entire dataset in one call. The original looped over
    # batches and called evaluate() per batch, which resets metric state
    # on every call, so aggregate loss/accuracy over the whole test set
    # was never computed.
    print(model.evaluate(test))
