# print cuda memory info

import torch

class Snapshot:
    """Inspect CUDA memory usage and the parameter footprint of a model."""

    def __init__(self, model):
        """Keep a reference to *model* and to its ``parameters`` method.

        Note: we store the bound method (not the result of calling it) so
        the parameters can be iterated more than once; the original code
        stored a one-shot generator, which made ``self.parameters()`` raise
        ``TypeError``.
        """
        self.model = model
        self.parameters = model.parameters

    @classmethod
    def print_total(cls):
        """Print and return CUDA memory usage.

        Returns:
            tuple[int, int]: ``(allocated_bytes, reserved_bytes)``; both are
            0 when CUDA is unavailable.
        """
        # Bug fix: the classmethod previously took no parameters, so the
        # implicitly-passed cls caused a TypeError on every call.
        allocated_memory, reserved_memory = cls.gpu_memory_snapshot()
        print(f"cuda memory allocated: {allocated_memory / (1024 ** 2):.2f} MB")
        print(f"cuda memory reserved: {reserved_memory / (1024 ** 2):.2f} MB")
        return allocated_memory, reserved_memory

    def _calc_parameters_bytes(self):
        """Return the total number of bytes occupied by the model's parameters."""
        # nelement() * element_size() = count * bytes-per-element per tensor.
        return sum(p.nelement() * p.element_size() for p in self.parameters())

    @classmethod
    def gpu_memory_snapshot(cls):
        """Return ``(allocated_bytes, reserved_bytes)`` for the current CUDA
        device, or ``(0, 0)`` when CUDA is not available."""
        if torch.cuda.is_available():
            return torch.cuda.memory_allocated(), torch.cuda.memory_reserved()
        return 0, 0