import numpy as np
import torch
from torch.utils.data import Dataset
import scipy.io as sio
from torch.utils.data import DataLoader
from model import EMGCNN
import torch.optim as optim
import torch.nn as nn
import torch.nn.functional as F


class NinaProDatasetFolder(Dataset):
    """Sliding-window dataset over NinaPro-style ``.mat`` recordings.

    Each ``.mat`` file must contain an ``emg`` array of shape
    (samples, channels) and a ``stimulus`` vector with one integer gesture
    label per sample.  Windows of ``window_size`` samples are cut every
    ``step_size`` samples; a window's label is the majority stimulus value
    inside it.

    NOTE(review): stimulus values restart at 1 in every exercise file
    (E1/E2/E3), so the same label from different files may denote different
    gestures — confirm against the labeling scheme expected downstream.
    """

    def __init__(self, mat_files, window_size=200, step_size=20, ignore_rest=True):
        """Load every file once and precompute window indices.

        Args:
            mat_files: iterable of paths to ``.mat`` files.
            window_size: number of samples per window.
            step_size: stride (in samples) between consecutive window starts.
            ignore_rest: skip windows whose majority label is 0 (rest).
        """
        super().__init__()
        self.emg_data = []
        self.stim_data = []
        self.indices = []  # (file_idx, start, end, label) per window

        # Load all files up-front; windows are only indexed, never copied.
        for file_idx, file in enumerate(mat_files):
            data = sio.loadmat(file)
            emg = data['emg']
            stim = data['stimulus'].squeeze().astype(int)

            self.emg_data.append(emg)
            self.stim_data.append(stim)

            N = len(stim)
            # "+ 1" keeps the final complete window [N - window_size, N);
            # the original range() stopped one window early (off-by-one),
            # e.g. N == window_size produced zero windows.
            for start in range(0, N - window_size + 1, step_size):
                end = start + window_size
                # Majority vote over the window's per-sample labels.
                label = np.bincount(stim[start:end]).argmax()
                if ignore_rest and label == 0:
                    continue
                self.indices.append((file_idx, start, end, label))

        print(f"Loaded {len(self.indices)} windows from {len(mat_files)} files.")

    def __len__(self):
        """Number of precomputed windows across all files."""
        return len(self.indices)

    def __getitem__(self, idx):
        """Return ``(x, y)``: x is (channels, window_size) float32, y a long label."""
        file_idx, start, end, label = self.indices[idx]
        x = self.emg_data[file_idx][start:end, :]
        x = torch.tensor(x, dtype=torch.float32).T  # (channels, window_size)
        y = torch.tensor(label, dtype=torch.long)
        return x, y

# Paths to the NinaPro .mat recordings used for training (subjects 1-2, exercises 1-3).
mat_files = [
    r"D:\project\muscle_net\dino\S1_A1_E1.mat",
    r"D:\project\muscle_net\dino\S1_A1_E2.mat",
    r"D:\project\muscle_net\dino\S1_A1_E3.mat",
    r"D:\project\muscle_net\dino\S2_A1_E1.mat",
    r"D:\project\muscle_net\dino\S2_A1_E2.mat",
    r"D:\project\muscle_net\dino\S2_A1_E3.mat",
]

# Quick smoke test of the dataset/loader (kept for reference):
# dataset = NinaProDatasetFolder(mat_files, window_size=200, step_size=20)
# loader = DataLoader(dataset, batch_size=32, shuffle=True, num_workers=4, pin_memory=True)
# for X, y in loader:
#     print("X shape:", X.shape)  # (batch, channels, window_size)
#     print("y shape:", y.shape)  # (batch,)
#     print(X,y)
#     break


# Hyperparameters
num_classes = 51   # NinaPro DB1 has 52 movements (adjust to the actual max stim value)
# NOTE(review): CrossEntropyLoss requires targets in [0, num_classes). The dataset
# yields raw stimulus labels without remapping (>= 1 when ignore_rest=True), so
# class index 0 is unused and a label equal to num_classes would be out of range —
# confirm the maximum stimulus value across these files stays below num_classes.
batch_size = 2048
epochs = 20
lr = 0.001


# Model — use the GPU when one is available.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = EMGCNN(num_classes=num_classes).to(device)

# Loss function & optimizer
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=lr)

if __name__ == "__main__":
    # 1. Build the dataset once; all windows are indexed up-front.
    dataset = NinaProDatasetFolder(
        mat_files,
        window_size=200,
        step_size=20,
        ignore_rest=True
    )
    loader = DataLoader(dataset,
                        # Use the configured batch_size (was a hard-coded 2048,
                        # which silently ignored the config variable above).
                        batch_size=batch_size,
                        shuffle=True,
                        num_workers=4,
                        pin_memory=True)

    for epoch in range(epochs):
        model.train()
        running_loss = 0.0  # sample-weighted loss accumulator
        correct = 0
        total = 0

        for X, y in loader:
            # X: (batch, channels, window_size); y: (batch,)
            X, y = X.to(device), y.to(device)

            optimizer.zero_grad()
            outputs = model(X)
            loss = criterion(outputs, y)
            loss.backward()
            optimizer.step()

            # Weight by batch size so the epoch loss is a true per-sample mean.
            running_loss += loss.item() * X.size(0)
            _, predicted = outputs.max(1)
            total += y.size(0)
            correct += predicted.eq(y).sum().item()

        if total == 0:
            # Empty dataset/loader: avoid ZeroDivisionError and surface the problem.
            print(f"Epoch [{epoch+1}/{epochs}] no samples seen — check mat_files and window settings.")
            continue

        epoch_loss = running_loss / total
        epoch_acc = correct / total * 100
        print(f"Epoch [{epoch+1}/{epochs}] Loss: {epoch_loss:.4f}, Acc: {epoch_acc:.2f}%")