import torch
import os
import datetime
def save_checkpoint(config, epoch, model, max_accuracy, optimizer, lr_scheduler, class_names, logger):
    """Serialize training state to a timestamped ``.pth`` file under ``config.output``.

    The saved dict contains the model and optimizer state dicts, the best
    accuracy so far, the epoch number, the config object itself, and the
    class-name list (under the key ``'class'``).

    Args:
        config: object with an ``output`` attribute naming the target directory;
            the whole object is also embedded in the checkpoint.
        epoch: current epoch number (appears in the filename and the state).
        model: module whose ``state_dict()`` is saved.
        max_accuracy: best accuracy observed so far.
        optimizer: optimizer whose ``state_dict()`` is saved.
        lr_scheduler: accepted for interface compatibility; NOTE(review) its
            state is currently NOT saved — confirm whether it should be.
        class_names: class-label names stored under the ``'class'`` key.
        logger: logger used for progress messages.
    """
    save_state = {
        'model': model.state_dict(),
        'optimizer': optimizer.state_dict(),
        'max_accuracy': max_accuracy,
        'epoch': epoch,
        'config': config,
        'class': class_names,
    }
    # exist_ok avoids the check-then-create race of os.path.exists + makedirs.
    os.makedirs(config.output, exist_ok=True)
    save_time = datetime.datetime.now().strftime('%y-%m-%d-%H-%M-%S')
    save_path = os.path.join(config.output, f'ckpt_epoch_{epoch}-{save_time}.pth')
    logger.info(f"{save_path} saving......")
    torch.save(save_state, save_path)
    logger.info(f"accuracy:{max_accuracy}")
    logger.info(f"{save_path} saved !!!")

if __name__ == "__main__":
    # Ad-hoc smoke test: load a previously saved checkpoint and inspect it.
    model_path = "/mnt/myproject/classfication/simple_classifier/saved/ckpt_epoch_1-21-08-29-21-41-32.pth"
    checkpoint = torch.load(model_path, map_location='cpu')
    # The original line evaluated checkpoint['model'] and discarded the
    # result (a no-op); print a summary so running the script shows something.
    print(sorted(checkpoint['model'].keys()))

