import numpy as np
import time
from function_class import function
from tensorflow import keras
from matplotlib import pyplot as plt
from function_class import UNet_model

# --- Experiment configuration ---
path = r'F:\ultralytics-main\data_set\20250417'  # root folder of the .npy datasets
min_peak = 0.4                        # minimum peak height passed to ReadAndShuffleNpy
data_len = [512, 1024, 2048, 4096]    # bin lengths to compare for their effect on training
distance = [64, 128, 256]             # peak half-window widths to compare
bs = 100                              # batch size for every fit
lr_schedule = 0.00001                 # Adam learning rate
# NOTE(review): the EarlyStopping callback and Adam optimizer previously built
# here were dead code — the training loop rebuilds both before first use, so
# the module-level copies were never exercised and have been removed.

# Collected keras History objects, keyed by "<data_len>_<distance>_<variant>".
histories = {}
for DL in data_len:
    for D in distance:
        # Load one shuffled dataset per (data_len, distance) combination.
        # ReadAndShuffleNpy returns four parallel data variants per split
        # (indexed 0..3); variants 0/'rp' and 1/'np' have sample length 2*D,
        # variants 2/'ro' and 3/'no' have sample length DL — TODO confirm
        # against function.ReadAndShuffleNpy, inferred from the reshapes.
        x_train, x_valid, y_train, y_valid = function.ReadAndShuffleNpy(
            path, per_data=20000, data_len=DL, Min_Peak=min_peak, distance=D)

        # (name suffix, variant index, sample length) — listed in the same
        # order the original script trained them: rp, np, ro, no.
        variants = [('rp', 0, 2 * D), ('np', 1, 2 * D),
                    ('ro', 2, DL), ('no', 3, DL)]
        for suffix, idx, length in variants:
            # 6000 training / 1500 validation samples, 3-class one-hot labels.
            x_tr = np.array(x_train[idx]).reshape([6000, length, 1])
            x_va = np.array(x_valid[idx]).reshape([1500, length, 1])
            y_tr = np.array(y_train[idx]).reshape([6000, 3])
            y_va = np.array(y_valid[idx]).reshape([1500, 3])

            # Build a fresh callback and optimizer for every model: a Keras
            # Optimizer creates slot variables tied to one model's weights,
            # so the original code's sharing of a single Adam instance across
            # four compiled models was a bug.
            early_stopping = keras.callbacks.EarlyStopping(
                monitor='val_accuracy',    # watch validation accuracy (could be 'val_loss')
                patience=10,               # epochs without improvement before stopping
                restore_best_weights=True  # restore the best epoch's weights on stop
            )
            adam = keras.optimizers.Adam(learning_rate=lr_schedule,
                                         beta_1=0.9, beta_2=0.999)

            model = UNet_model.RebuildUnetChange02(kse=3, kern_reg=None,
                                                   kern_int_e='he_normal')
            model.compile(loss='categorical_crossentropy',
                          optimizer=adam,
                          metrics=['accuracy'])
            histories[f"{DL}_{D}_{suffix}"] = model.fit(
                x_tr, y_tr, batch_size=bs, epochs=200,
                validation_data=(x_va, y_va), callbacks=[early_stopping])




