# Custom Dataset pairing preprocessed audio clips with their text
# counterparts and binary labels.
class AudioTextDataset(Dataset):
    """Wrap three parallel indexable containers for DataLoader batching.

    All three containers are assumed to have the same length — TODO confirm
    against `load_preprocessed_data`.
    """

    def __init__(self, audio_data, text_data, labels):
        self.audio_data = audio_data
        self.text_data = text_data
        self.labels = labels

    def __len__(self):
        # Length is taken from the labels container.
        return len(self.labels)

    def __getitem__(self, idx):
        # One sample is the (audio, text, label) triple at position `idx`.
        return self.audio_data[idx], self.text_data[idx], self.labels[idx]

# Load the preprocessed data — presumably three parallel containers;
# verify against `load_preprocessed_data`'s actual return.
audio_data, text_data, labels = load_preprocessed_data()
dataset = AudioTextDataset(audio_data, text_data, labels)
# NOTE(review): `DataLoader` is imported further down in this file (after
# this use) — confirm an earlier import exists outside this chunk.
dataloader = DataLoader(dataset=dataset, batch_size=32, shuffle=True)

# Loss and optimizer. BCELoss assumes the model already emits
# probabilities in [0, 1] for binary classification.
criterion = nn.BCELoss()
optimizer = optim.Adam(model.parameters(), lr=0.001)

# Train the multimodal model; the epoch summary reports the mean loss
# per batch (not per sample).
num_epochs = 20
for epoch in range(num_epochs):
    model.train()
    running_loss = 0.0
    for audio, text, label in dataloader:
        optimizer.zero_grad()
        outputs = model(audio, text)
        # squeeze(-1) removes only the trailing singleton dim. A bare
        # squeeze() would also collapse the batch dim when the final
        # batch has a single sample, breaking BCELoss's shape check.
        loss = criterion(outputs.squeeze(-1), label.float())
        loss.backward()
        optimizer.step()
        running_loss += loss.item()

    print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {running_loss/len(dataloader):.4f}')


from copy import deepcopy

import torch
import torch.nn.functional as F
from torch.optim.lr_scheduler import ReduceLROnPlateau
from torch.utils.data import DataLoader

# Manual early-stopping / checkpointing state: remember the weights of
# the lowest-loss epoch and stop after `patience` epochs without
# improvement.
best_model_wts = deepcopy(model.state_dict())
best_loss = float('inf')
patience = 5  # epochs without improvement tolerated before stopping
epoch_no_improve = 0

# Cut the LR by 10x after 2 stagnant epochs.
# NOTE(review): the `verbose` argument is deprecated/removed in newer
# PyTorch releases — drop it if targeting torch >= 2.2.
scheduler = ReduceLROnPlateau(optimizer, mode='min', factor=0.1, patience=2, verbose=True)

# Second training variant: LR scheduling on plateaus plus early stopping.
# NOTE(review): unlike the first loop, a sigmoid is applied before
# BCELoss here — confirm the model emits raw logits in this setup,
# otherwise probabilities get squashed through sigmoid twice.
for epoch in range(num_epochs):
    model.train()
    running_loss = 0.0
    for audio, text, label in dataloader:
        optimizer.zero_grad()
        outputs = model(audio, text)
        # torch.sigmoid replaces the long-deprecated F.sigmoid, and
        # squeeze(-1) aligns (batch, 1) outputs with (batch,) targets —
        # BCELoss requires matching input/target shapes.
        loss = criterion(torch.sigmoid(outputs).squeeze(-1), label.float())
        loss.backward()
        optimizer.step()
        running_loss += loss.item()

    epoch_loss = running_loss / len(dataloader)
    # The plateau scheduler keys off the epoch-average training loss.
    scheduler.step(epoch_loss)

    print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {epoch_loss:.4f}')

    # Checkpoint the best weights; count stagnant epochs otherwise.
    if epoch_loss < best_loss:
        best_loss = epoch_loss
        best_model_wts = deepcopy(model.state_dict())
        epoch_no_improve = 0
    else:
        epoch_no_improve += 1

    if epoch_no_improve >= patience:
        print('Early stopping!')
        break

# Restore the best-performing weights before evaluation/inference.
model.load_state_dict(best_model_wts)


