import torch
import torch.nn as nn

def print_memory_usage(tensor, layer_name):
    """Print the memory footprint of *tensor* in MB, labeled with *layer_name*.

    Accepts a single tensor, or a tuple/list of outputs (as some forward hooks
    receive from multi-output modules) — each tensor element is reported with
    an index suffix. ``None`` and other non-tensor values are silently skipped,
    so this is safe to call directly on any hook ``output``.
    """
    if isinstance(tensor, (tuple, list)):
        # Multi-output modules hand the hook a tuple; report each entry.
        for i, item in enumerate(tensor):
            print_memory_usage(item, f'{layer_name}[{i}]')
    elif torch.is_tensor(tensor):
        # element_size() = bytes per element, nelement() = element count.
        print(f'{layer_name}: {tensor.element_size() * tensor.nelement() / 1024**2:.2f} MB')

class MemoryUsageHook:
    """Forward hook that reports the memory used by a module's output.

    On construction the hook is registered on *module*; call :meth:`remove`
    to detach it again.
    """

    def __init__(self, module, layer_name):
        self.layer_name = layer_name
        # Keep the handle returned by PyTorch so remove() can detach the hook.
        self.hook = module.register_forward_hook(self.hook_fn)

    def hook_fn(self, module, input, output):
        """Invoked by PyTorch after each forward pass of the hooked module."""
        print_memory_usage(output, self.layer_name)

    def remove(self):
        """Detach this hook from its module."""
        self.hook.remove()

# Example model
def hookmodel(model):
    """Register a ``MemoryUsageHook`` on every non-container layer of *model*.

    Container modules (``nn.Sequential``, ``nn.ModuleList``) and the root
    module itself are skipped, since their children are visited individually
    by ``named_modules()``.

    Returns:
        list[MemoryUsageHook]: the registered hook objects, so callers can
        detach them later via ``.remove()``. (Previously the handles were
        built but discarded, making the hooks impossible to remove.)
    """
    hooks = []
    for name, layer in model.named_modules():
        # named_modules() yields the root model first; exclude it by
        # identity ('is not' — nn.Module equality is identity anyway).
        if not isinstance(layer, (nn.Sequential, nn.ModuleList)) and layer is not model:
            hooks.append(MemoryUsageHook(layer, name))
    return hooks

