radames committed on
Commit
2ed0257
1 Parent(s): f4724fc

add memory usage

Browse files
Files changed (2) hide show
  1. requirements.txt +2 -1
  2. visualizer_drag_gradio.py +21 -0
requirements.txt CHANGED
@@ -4,4 +4,5 @@ Ninja
4
  gradio
5
  huggingface_hub
6
  hf_transfer
7
- Pillow==9.5.0
 
 
4
  gradio
5
  huggingface_hub
6
  hf_transfer
7
+ Pillow==9.5.0
8
+ psutil
visualizer_drag_gradio.py CHANGED
@@ -7,6 +7,8 @@ from functools import partial
7
  from pathlib import Path
8
  import time
9
 
 
 
10
  import gradio as gr
11
  import numpy as np
12
  import torch
@@ -160,6 +162,24 @@ def preprocess_mask_info(global_state, image):
160
  return global_state
161
 
162
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
163
  # filter large models running on SPACES
164
  allowed_checkpoints = [] # all checkpoints
165
  if IS_SPACE:
@@ -557,6 +577,7 @@ with gr.Blocks() as app:
557
  step_idx = 0
558
  last_time = time.time()
559
  while True:
 
560
  # add a TIMEOUT break
561
  print(f'Running time: {time.time() - last_time}')
562
  if IS_SPACE and time.time() - last_time > TIMEOUT:
 
7
  from pathlib import Path
8
  import time
9
 
10
+ import psutil
11
+
12
  import gradio as gr
13
  import numpy as np
14
  import torch
 
162
  return global_state
163
 
164
 
165
def print_memory_usage():
    """Log current memory pressure to stdout.

    Reports the host's virtual-memory usage via psutil, then — when CUDA is
    present — the currently allocated, peak-allocated, and remaining GPU
    memory as seen by torch. Purely a diagnostic side effect; returns None.
    """
    # Host RAM utilisation as a percentage of total.
    print(f"System memory usage: {psutil.virtual_memory().percent}%")

    # Guard clause: nothing more to report without a CUDA device.
    if not torch.cuda.is_available():
        print("No GPU available")
        return

    cuda_dev = torch.device("cuda")
    # Current and peak allocations, in (decimal) gigabytes.
    print(f"GPU memory usage: {torch.cuda.memory_allocated() / 1e9} GB")
    print(
        f"Max GPU memory usage: {torch.cuda.max_memory_allocated() / 1e9} GB")
    # "Available" here means total device memory minus the peak allocation
    # observed so far — a conservative headroom estimate.
    props = torch.cuda.get_device_properties(cuda_dev)
    available_memory = props.total_memory - \
        torch.cuda.max_memory_allocated()
    print(f"Available GPU memory: {available_memory / 1e9} GB")
181
+
182
+
183
  # filter large models running on SPACES
184
  allowed_checkpoints = [] # all checkpoints
185
  if IS_SPACE:
 
577
  step_idx = 0
578
  last_time = time.time()
579
  while True:
580
+ print_memory_usage()
581
  # add a TIMEOUT break
582
  print(f'Running time: {time.time() - last_time}')
583
  if IS_SPACE and time.time() - last_time > TIMEOUT: