import os

import torch


def save_model_handle(model, model_path):
    """Persist only the model's learnable parameters (its state dict) to *model_path*.

    Args:
        model: a ``torch.nn.Module`` whose parameters should be saved.
        model_path: destination file path passed straight to ``torch.save``.
    """
    state = model.state_dict()
    torch.save(state, model_path)

def save_checkpoint_handle(model, optimizer, scheduler, epoch, batch_idx, current_loss, checkpoint_path):
    """Write a full training checkpoint (model/optimizer/scheduler state plus progress) to disk.

    Args:
        model: module whose ``state_dict`` is checkpointed.
        optimizer: optimizer whose ``state_dict`` is checkpointed.
        scheduler: LR scheduler whose ``state_dict`` is checkpointed.
        epoch: epoch counter stored under the ``'epoch'`` key.
        batch_idx: batch counter stored under the ``'batch_idx'`` key.
        current_loss: loss value stored under the ``'loss'`` key.
        checkpoint_path: destination file path passed to ``torch.save``.
    """
    checkpoint = {
        'model_state_dict': model.state_dict(),
        'optimizer_state_dict': optimizer.state_dict(),
        'scheduler_state_dict': scheduler.state_dict(),
        'epoch': epoch,
        'batch_idx': batch_idx,
        'loss': current_loss,
        # Optional: add further entries here (e.g. a gradient-clipping threshold).
    }
    torch.save(checkpoint, checkpoint_path)

def load_checkpoint_handle(model, optimizer, scheduler, checkpoint_path, device, weights_only=True):
    """Restore a training checkpoint written by ``save_checkpoint_handle``.

    Loads the file at *checkpoint_path* (mapped onto *device*) and restores the
    model, optimizer and scheduler state dicts in place.

    Args:
        model: module to restore; mutated via ``load_state_dict``.
        optimizer: optimizer to restore; mutated via ``load_state_dict``.
        scheduler: LR scheduler to restore; mutated via ``load_state_dict``.
        checkpoint_path: path of the checkpoint file.
        device: passed to ``torch.load`` as ``map_location``.
        weights_only: forwarded to ``torch.load`` (safe unpickling mode).

    Returns:
        ``(checkpoint_data, epoch, batch_idx, loss)`` on success, or
        ``(None, None, None, None)`` when the checkpoint file does not exist.

    Raises:
        Whatever ``torch.load`` / ``load_state_dict`` raise on a corrupt or
        incompatible checkpoint (``torch.load`` never returns ``None``).
    """
    # Guard clause: a missing file is the expected "no checkpoint yet" case.
    if not os.path.exists(checkpoint_path):
        return None, None, None, None
    checkpoint_data = torch.load(checkpoint_path, map_location=device, weights_only=weights_only)
    model.load_state_dict(checkpoint_data['model_state_dict'])
    optimizer.load_state_dict(checkpoint_data['optimizer_state_dict'])
    scheduler.load_state_dict(checkpoint_data['scheduler_state_dict'])
    return (checkpoint_data,
            checkpoint_data['epoch'],
            checkpoint_data['batch_idx'],
            checkpoint_data['loss'])

def handle_save_int(data):
    """Coerce a possibly-missing counter to a savable value: ``None`` becomes ``0``.

    Any non-``None`` value is returned unchanged.
    """
    if data is None:
        return 0
    return data
