import torch


def save_checkpoint(epoch, model, optimizer, path):
    """Serialize the current training state to *path*.

    The saved object is a dict with keys ``'epoch'``,
    ``'model_state_dict'`` and ``'optimizer_state_dict'``, suitable for
    resuming training via ``load_checkpoint``.

    Args:
        epoch: Current epoch counter to record.
        model: Module whose ``state_dict()`` is saved.
        optimizer: Optimizer whose ``state_dict()`` is saved.
        path: Destination file path passed to ``torch.save``.
    """
    torch.save(
        {
            'epoch': epoch,
            'model_state_dict': model.state_dict(),
            'optimizer_state_dict': optimizer.state_dict(),
        },
        path,
    )


def save_whole_model(model, path):
    """Pickle the entire model object (architecture + weights) to *path*.

    Unlike ``save_checkpoint``, this stores the full object rather than a
    state dict, so loading does not require reconstructing the model class
    first — see ``load_whole_model``.
    """
    torch.save(model, path)


def load_whole_model(path):
    """Load a full model object previously saved with ``save_whole_model``.

    Args:
        path: File path of the pickled model.

    Returns:
        The deserialized model object.

    NOTE(security): this unpickles arbitrary Python objects — only load
    files from trusted sources.
    """
    # weights_only=False is required to unpickle a full nn.Module: newer
    # torch releases default to weights_only=True, which rejects anything
    # but plain tensors/containers and would break this function.
    return torch.load(path, weights_only=False)


def load_checkpoint(model, optimizer, model_dir):
    """Restore training state from ``model_dir / 'current_model.pt'``.

    Loads the checkpoint dict written by ``save_checkpoint`` and applies
    the saved state dicts to *model* and *optimizer* in place.

    Args:
        model: Module to receive the saved ``model_state_dict``.
        optimizer: Optimizer to receive the saved ``optimizer_state_dict``.
        model_dir: ``pathlib.Path``-like directory containing the file.

    Returns:
        ``(epoch, checkpoint)`` where ``epoch`` is the saved epoch number
        and ``checkpoint`` is the full loaded dict, or ``(1, None)`` when
        no checkpoint file exists.
    """
    path = model_dir / 'current_model.pt'
    try:
        # Keep the try minimal: only torch.load can raise FileNotFoundError.
        # map_location='cpu' lets a checkpoint saved on GPU resume on a
        # CPU-only machine; load_state_dict then copies tensors onto the
        # model's current device.
        checkpoint = torch.load(path, map_location='cpu')
    except FileNotFoundError:
        print("No checkpoint found. Training from scratch...")
        return 1, None
    model.load_state_dict(checkpoint['model_state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer_state_dict'])
    print(f"Found checkpoint at epoch {checkpoint['epoch']}. Resuming training...")
    return checkpoint['epoch'], checkpoint
