import numpy as np
import seaborn as sns
from function_class import function
from tensorflow import keras
from matplotlib import pyplot as plt
from function_class import UNet_model
from sklearn.metrics import confusion_matrix

# This file experiments with model training performance at different input lengths.
path = r'D:\PycharmProjects\festival\dataset\classifydataset'

# Input lengths under comparison: 512, 1024, and 4096 samples.
LENGTHS = (512, 1024, 4096)
# One matplotlib line style per length (green/red/blue dotted lines, as before).
LINE_STYLES = ('g.-', 'r.-', 'b.-')

# Per-length validation curves collected during training, keyed by data length.
val_losses = {}
val_accs = {}

for data_len in LENGTHS:
    x_train, x_valid, y_train, y_valid = function.ReadAndShuffle(path, data_len)

    # Fixed learning rate (despite the original name "lr_schedule", no schedule
    # is applied — this is a plain constant fed to Adam).
    learning_rate = 0.00001
    early_stopping = keras.callbacks.EarlyStopping(
        monitor='val_accuracy',     # metric watched for improvement (could be 'val_loss')
        patience=10,                # epochs tolerated without improvement before stopping
        restore_best_weights=True   # roll back to the best-scoring weights on stop
    )
    # Fresh optimizer per run — Adam keeps internal state that must not leak
    # across experiments.
    adam = keras.optimizers.Adam(learning_rate=learning_rate, beta_1=0.9, beta_2=0.999)

    # Fresh model per length so the three runs do not share weights.
    model = UNet_model.RebuildUnetChange02(kse=3, kern_reg=None, kern_int_e='he_normal')
    model.compile(loss='categorical_crossentropy',
                  optimizer=adam,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, batch_size=50, epochs=200,
                        validation_data=(x_valid, y_valid),
                        callbacks=[early_stopping])

    # Keep only the validation curves; runs may have different lengths because
    # of early stopping, so the x-axis is derived per curve at plot time.
    val_losses[data_len] = history.history['val_loss']
    val_accs[data_len] = history.history['val_accuracy']

# Plot validation loss and validation accuracy side by side.
plt.figure(figsize=(12, 5))

# Validation-loss curves.
plt.subplot(1, 2, 1)
for data_len, style in zip(LENGTHS, LINE_STYLES):
    losses = val_losses[data_len]
    plt.plot(range(1, len(losses) + 1), losses, style, label=f'{data_len}_len')
plt.title('Validation Loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()

# Validation-accuracy curves.
plt.subplot(1, 2, 2)
for data_len, style in zip(LENGTHS, LINE_STYLES):
    accs = val_accs[data_len]
    plt.plot(range(1, len(accs) + 1), accs, style, label=f'{data_len}_len')
plt.title('Validation Accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()

# Render the figure.
plt.tight_layout()
plt.show()
print("测试断点设置处")
print('check point')


