import numpy as np
from boto import sns
from matplotlib import pyplot as plt
from sklearn.metrics import confusion_matrix
from sklearn.preprocessing import StandardScaler
from tensorflow.keras.models import load_model

# NOTE: The commented-out block below is an earlier per-sample prediction draft
# (it also assumes shape (288, 11, 1125), unlike the active code's (288, 22, 1125));
# it is superseded by the implementation that follows the second import block.
# model_path = 'shibie.h5'
# model = load_model(model_path)
#
# # 加载你要识别的新数据
# new_data_file = r'E:\EEG\EEG-TransNet-main\data\dataset\bci_iv_2a\A01T_data.npy'  # 示例路径
# new_data = np.load(new_data_file)
#
# # 确保 new_data 的形状是 (288, 11, 1125) 或类似形状
# print(f"Original shape of new_data: {new_data.shape}")
#
# for i in range(288):
#     # 提取第一个样本的数据
#     first_sample = new_data[i:i+1]  # 形状为 (1, 11, 1125)
#     print(f"Shape of first_sample: {first_sample.shape}")
#
#     # # 数据标准化
#     # scaler = StandardScaler()
#     #
#     # # 展平数据以便进行标准化处理
#     # first_sample_flat = first_sample.reshape((first_sample.shape[0], -1))
#     # first_sample_scaled = scaler.fit_transform(first_sample_flat)
#     #
#     # # 将标准化后的数据重新reshape回原来的形状
#     # first_sample = first_sample_scaled.reshape(first_sample.shape[0], first_sample.shape[1], first_sample.shape[2])
#
#     # 使用模型进行预测
#     predictions = model.predict(first_sample)
#     # predicted_class = np.argmax(predictions, axis=1)
#
#     print("Predicted class for the first sample:", predictions+1)
# 加载训练好的模型
import numpy as np
from sklearn.preprocessing import StandardScaler
from tensorflow.keras.models import load_model

# Load the trained model from disk.
model_path = 'shibie.h5'
model = load_model(model_path)
print(f"Model loaded from {model_path}")

# Load the new recording to classify.
new_data_file = r'E:\EEG\EEG-TransNet-main\data\dataset\bci_iv_2a\A01T_data.npy'  # example path
new_data = np.load(new_data_file)

# Expected shape is (288, 22, 1125) or similar — presumably (trials, channels, time steps); TODO confirm.
print(f"Original shape of new_data: {new_data.shape}")

# Rebuild the StandardScaler from statistics saved at training time so inference
# normalizes with the SAME parameters as training (transform, never fit_transform).
scaler = StandardScaler()
scaler.mean_ = np.load('scaler_mean.npy')  # replace with the actual path
scaler.var_ = np.load('scaler_var.npy')    # replace with the actual path
scaler.scale_ = np.load('scaler_scale.npy')  # replace with the actual path

def preprocess_data(data, scaler):
    """
    Standardize input data with a pre-fitted scaler.

    Samples are flattened to 2-D so the scaler sees one row per sample,
    transformed using the scaler's stored statistics (``transform``, not
    ``fit_transform``), then reshaped back to the original shape.

    :param data: input array of shape (batch_size, channels, time_steps);
                 any rank >= 2 is accepted — the first axis is the batch axis
    :param scaler: fitted scaler exposing ``transform`` (e.g. StandardScaler)
    :return: standardized array with the same shape as ``data``
    """
    data_flat = data.reshape((data.shape[0], -1))  # one row per sample
    data_scaled = scaler.transform(data_flat)
    # reshape(data.shape) restores the exact original shape regardless of rank
    # (the original hard-coded three axes, which failed for non-3-D input).
    return data_scaled.reshape(data.shape)

# Standardize the new samples, run inference, and report one prediction per sample.
new_data_scaled = preprocess_data(new_data, scaler)
predictions = model.predict(new_data_scaled)
# argmax over the class axis gives 0-based class indices; +1 below converts to
# the dataset's 1-based class labels.
predicted_classes = np.argmax(predictions, axis=1)

for i, (sample, label) in enumerate(zip(new_data, predicted_classes)):
    print(f"Shape of sample {i}: {sample.shape}")
    print(f"Predicted class for sample {i}: {label + 1}")

