import numpy as np

from sklearn.metrics import classification_report
from sklearn.preprocessing import LabelBinarizer
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.datasets import cifar10
from multiprocessing import Process, Queue
from tensorflow.keras.callbacks import ModelCheckpoint

# from pic_it import pic_it
from mini_vggnet import Mini_Vggnet
from train_pac.callback.train_monitor import Train_Monitor, ani_loss_show


def learn_rate_decay(epoch: int, *, init_alpha: float = 0.01,
                     factor: float = 0.5, drop_every: int = 5) -> float:
    """Step-decay learning-rate schedule for keras LearningRateScheduler.

    The rate starts at ``init_alpha`` and is multiplied by ``factor``
    once every ``drop_every`` epochs.

    Args:
        epoch: current epoch index (0-based, as passed by Keras).
        init_alpha: initial learning rate.
        factor: multiplicative decay applied at each drop.
        drop_every: number of epochs between successive drops.

    Returns:
        The learning rate for ``epoch`` as a plain Python ``float``.
    """
    # Extras are keyword-only so the callable still accepts a single
    # positional argument, matching the original (epoch)-only signature
    # that LearningRateScheduler falls back to.
    alpha = init_alpha * (factor ** np.floor((1 + epoch) / drop_every))
    return float(alpha)


def main(que: Queue = None):
    """Train Mini_Vggnet on CIFAR-10 and print a classification report.

    Args:
        que: optional multiprocessing Queue; when given, Train_Monitor
            streams per-epoch metrics through it to a plotting process
            (see use_plot).
    """
    # Load the CIFAR-10 dataset (downloaded on first use).
    print("[info]:开始加载cifar10数据集...")
    (train_x, train_y), (test_x, test_y) = cifar10.load_data()

    # Scale pixel values into [0, 1].
    train_x = train_x.astype("float") / 255.0
    test_x = test_x.astype("float") / 255.0

    # One-hot encode the integer class labels.
    lb = LabelBinarizer()
    train_y = lb.fit_transform(train_y)
    test_y = lb.transform(test_y)

    # Human-readable names in CIFAR-10 label order, for the report below.
    classes = ["airplane", "automobile", "bird", "cat", "deer",
               "dog", "frog", "horse", "ship", "truck"]

    # Build and compile the model.
    print("[info]:开始编译模型...")
    # Optional callbacks kept for reference (previously instantiated but
    # never passed to fit):
    #   ModelCheckpoint('weights.best.hdf5', monitor="val_loss", mode="min",
    #                   save_best_only=True, verbose=1)
    #   LearningRateScheduler(learn_rate_decay)
    callback = [Train_Monitor(que=que, e_num=40)]
    opt = SGD(learning_rate=0.01, momentum=0.9, nesterov=True)  # decay=0.01 / 40,
    model = Mini_Vggnet.build(width=32, height=32, depth=3, classes=10)
    model.compile(loss="categorical_crossentropy", optimizer=opt,
                  metrics=["acc"])

    # Train the model.
    print("[info]:开始训练模型...")
    # `record` is only needed by the commented-out pic_it() call below.
    record = model.fit(train_x, train_y, validation_data=(test_x, test_y),
                       batch_size=64, epochs=40, callbacks=callback, verbose=1)

    # Evaluate on the test set.
    print("[info]:评估网络中...")
    predictions = model.predict(test_x, batch_size=64)
    print(classification_report(test_y.argmax(1),
                                predictions.argmax(1),
                                target_names=classes))
    # Plotting (disabled):
    # pic_it(epochs=40, record=record, other=True, file_path="cifar10数据集使用vggnet和阶跃函数（0.5）下的损失和精准度.jpg")


def use_plot():
    """Run training and the live loss animation in two cooperating processes."""
    queue = Queue()
    workers = [
        Process(target=main, args=(queue,)),
        Process(target=ani_loss_show, args=("监控.gif", queue, 40)),
    ]
    for worker in workers:
        worker.start()
    for worker in workers:
        worker.join()


if __name__ == '__main__':
    # main()  # alternative: train alone, without the live-plot process
    use_plot()
