import os
import matplotlib.pyplot as plt

import numpy
import tensorflow as tf
import pathlib
from tensorflow import keras
import numpy as np
import yaml

plt.rcParams['font.sans-serif'] = ['SimHei']  # use the SimHei font so Chinese text in labels renders correctly
plt.rcParams['axes.unicode_minus'] = False  # keep the minus sign rendering correctly when a CJK font is active

# Load the character/label mapping from the recognition config file.
# NOTE(review): yaml.load with the full Loader can construct arbitrary Python
# objects from YAML tags; acceptable for this local config, but prefer
# yaml.safe_load if the file could ever come from an untrusted source.
with open("text_recognition.yaml", mode="r", encoding="utf8") as f:
    labels = yaml.load(f, yaml.Loader)["labels"]
print(labels)
# Parallel key/value lists so an index found in one can look up the other.
labels_k = list(labels.keys())
labels_v = list(labels.values())


def standard(x, y):
    """Scale pixel values from [0, 255] into [0, 1]; the label passes through unchanged.

    Intended for use with tf.data Dataset.map over (image, label) pairs.
    """
    scaled = x / 255
    return scaled, y


def process_path(file_path):
    """Load one PNG image and derive its label from its parent directory name.

    Args:
        file_path: scalar string tensor, a path of the form
            ".../<label>/<filename>.png".

    Returns:
        (img, label): the decoded image scaled into [0, 1], and the label as
        a string tensor (the second-to-last path component).
    """
    # The directory that directly contains the file is the class label.
    label = tf.strings.split(file_path, os.sep)[-2]
    img = tf.io.decode_png(tf.io.read_file(file_path)) / 255
    return img, label


if __name__ == '__main__':

    # --- Load data ---
    data_dir = "../datasets/text_images/train"

    data_dir = pathlib.Path(data_dir)
    batch_size = 128
    img_height = 60
    img_width = 150
    # 80/20 train/validation split; the shared seed keeps the two subsets disjoint.
    train_ds = tf.keras.utils.image_dataset_from_directory(
        data_dir,
        validation_split=0.2,
        subset="training",
        seed=123,
        image_size=(img_height, img_width),
        batch_size=batch_size)
    val_ds = tf.keras.utils.image_dataset_from_directory(
        data_dir,
        validation_split=0.2,
        subset="validation",
        seed=123,
        image_size=(img_height, img_width),
        batch_size=batch_size)

    # Persist the label order: image_dataset_from_directory encodes labels as
    # integers in class_names order, so inference code must reuse this exact list.
    with open("class_names.txt", mode="w", encoding="utf-8") as f:
        f.write("\n".join(train_ds.class_names))

    AUTOTUNE = tf.data.AUTOTUNE

    # Normalize BEFORE cache/prefetch so the scaled images are what gets cached.
    # (Previously .map(standard) ran after .prefetch(), so normalization was
    # redone on every epoch and never benefited from the cache.)
    train_ds = train_ds.map(standard).cache().prefetch(buffer_size=AUTOTUNE)
    val_ds = val_ds.map(standard).cache().prefetch(buffer_size=AUTOTUNE)

    # --- Build model: ResNet-RS50 backbone + 2 dense layers + 50-way softmax ---
    inputs = keras.Input(shape=(img_height, img_width, 3))
    resNet = keras.applications.resnet_rs.ResNetRS50(include_top=False)
    resNet.trainable = True  # fine-tune the whole backbone, not just the head
    x = resNet(inputs)
    x = keras.layers.GlobalAveragePooling2D()(x)
    x = keras.layers.Dense(units=256, activation=keras.activations.relu, use_bias=True)(x)
    x = keras.layers.Dense(units=256, activation=keras.activations.relu, use_bias=True)(x)
    outputs = keras.layers.Dense(units=50, activation=keras.activations.softmax, use_bias=True)(x)

    model = keras.Model(inputs, outputs)
    # Integer labels from image_dataset_from_directory -> sparse categorical loss.
    model.compile(optimizer=tf.keras.optimizers.Adagrad(learning_rate=0.02),
                  loss=keras.losses.sparse_categorical_crossentropy,
                  metrics=["accuracy"])

    # Restore previously trained weights; expects ./checkpoints/checkpoint2/ to exist.
    model.load_weights("./checkpoints/checkpoint2/")

    # Training / saving / evaluation steps, kept for reference:
    # model.fit(train_ds, validation_data=val_ds, epochs=5)
    # model.save_weights("./checkpoints/checkpoint3/")
    # model.evaluate(val_ds)

    print("aaa")
