import os
import torch
import logging

logger = logging.getLogger(__name__)

def save_checkpoint(model, optimizer, epoch, step, output_dir, filename="checkpoint.pth"):
    """Save a training checkpoint to ``<output_dir>/checkpoints/<filename>``.

    Args:
        model: Module whose ``state_dict()`` is serialized.
        optimizer: Optimizer whose ``state_dict()`` is serialized.
        epoch: Current epoch number, stored under ``'epoch'``.
        step: Global step counter, stored under ``'global_step'``.
        output_dir: Root output directory; a ``checkpoints`` subdirectory
            is created inside it if missing.
        filename: Checkpoint file name (default ``"checkpoint.pth"``).
    """
    checkpoint_dir = os.path.join(output_dir, "checkpoints")
    # Bug fix: torch.save does not create parent directories, so saving into
    # a fresh output_dir used to fail with FileNotFoundError/RuntimeError.
    os.makedirs(checkpoint_dir, exist_ok=True)
    checkpoint_path = os.path.join(checkpoint_dir, filename)
    torch.save({
        'epoch': epoch,
        'model_state_dict': model.state_dict(),
        'optimizer_state_dict': optimizer.state_dict(),
        'global_step': step,
    }, checkpoint_path)
    logging.getLogger(__name__).info(f"Model saved to {checkpoint_path}")

def load_checkpoint(model, optimizer, path, map_location=None):
    """Load a checkpoint saved by ``save_checkpoint`` and restore state.

    Args:
        model: Module to restore via ``load_state_dict``.
        optimizer: Optimizer to restore via ``load_state_dict``.
        path: Path to the checkpoint file.
        map_location: Optional device remapping forwarded to ``torch.load``
            (e.g. ``"cpu"``). Defaults to ``None``, preserving the previous
            behavior; pass ``"cpu"`` to load GPU-saved checkpoints on a
            CPU-only machine.

    Returns:
        tuple: ``(epoch, global_step)`` as stored in the checkpoint.
    """
    checkpoint = torch.load(path, map_location=map_location)
    model.load_state_dict(checkpoint['model_state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
    epoch = checkpoint['epoch']
    global_step = checkpoint['global_step']
    logging.getLogger(__name__).info(
        f"Loaded checkpoint from {path}, current epoch: {epoch}, steps: {global_step}"
    )
    return epoch, global_step