vumichien committed
Commit eac8477 (1 parent: 03c33bc)

Update app.py


show system info

Files changed (1): app.py (+24, -4)
app.py CHANGED

@@ -13,6 +13,11 @@ from mediapipe.framework.formats import landmark_pb2
 from mediapipe.tasks import python
 from mediapipe.tasks.python import vision
 import cv2
+import psutil
+from gpuinfo import GPUInfo
+import time
+import gc
+import torch
 
 from diffusers import (
     FlaxControlNetModel,
@@ -210,7 +215,22 @@ def infer(prompt, negative_prompt, image, model_type="Standard"):
     images = images.reshape((images.shape[0] * images.shape[1],) + images.shape[-3:])
 
     results = [i for i in images]
-    return [overlap_image, annotated_image] + results
+
+    # running info
+    time_end = time.time()
+    time_diff = time_end - time_start
+    gc.collect()
+    torch.cuda.empty_cache()
+    memory = psutil.virtual_memory()
+    gpu_utilization, gpu_memory = GPUInfo.gpu_usage()
+    gpu_utilization = gpu_utilization[0] if len(gpu_utilization) > 0 else 0
+    gpu_memory = gpu_memory[0] if len(gpu_memory) > 0 else 0
+    system_info = f"""
+    *Memory: {memory.total / (1024 * 1024 * 1024):.2f}GB, used: {memory.percent}%, available: {memory.available / (1024 * 1024 * 1024):.2f}GB.*
+    *Processing time: {time_diff:.5} seconds.*
+    *GPU Utilization: {gpu_utilization}%, GPU Memory: {gpu_memory}MiB.*
+    """
+    return [overlap_image, annotated_image] + results, system_info
 
 
 with gr.Blocks(theme='gradio/soft') as demo:
@@ -301,7 +321,7 @@ with gr.Blocks(theme='gradio/soft') as demo:
             submit_btn = gr.Button(value = "Submit")
             # inputs = [prompt_input, negative_prompt, input_image]
             # submit_btn.click(fn=infer, inputs=inputs, outputs=[output_image])
-
+            system_info = gr.Markdown(f"*Memory: {memory.total / (1024 * 1024 * 1024):.2f}GB, used: {memory.percent}%, available: {memory.available / (1024 * 1024 * 1024):.2f}GB*")
         with gr.Column():
             output_image = gr.Gallery(label='Output Image', show_label=False, elem_id="gallery").style(grid=2, height='auto')
 
@@ -334,12 +354,12 @@ with gr.Blocks(theme='gradio/soft') as demo:
             ],
         ],
         inputs=[prompt_input, negative_prompt, input_image, model_type],
-        outputs=[output_image],
+        outputs=[output_image, system_info],
         fn=infer,
        cache_examples=True,
     )
 
     inputs = [prompt_input, negative_prompt, input_image, model_type]
-    submit_btn.click(fn=infer, inputs=inputs, outputs=[output_image])
+    submit_btn.click(fn=infer, inputs=inputs, outputs=[output_image, system_info])
 
 demo.launch()
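
For reference, the system-info string that infer() now returns can be reproduced outside the app. The sketch below is a minimal, standalone version of the commit's reporting code, assuming the same psutil, gpuinfo, and torch packages that app.py imports; the helper name collect_system_info and the CUDA-availability guard are illustrative additions, and time_start stands in for a timestamp taken near the start of infer() (that assignment sits outside the hunks shown above).

# Minimal sketch of the system-info reporting added in this commit.
# Assumes psutil, gpuinfo, and torch are installed; collect_system_info
# is an illustrative helper name, not part of app.py itself.
import gc
import time

import psutil
import torch
from gpuinfo import GPUInfo


def collect_system_info(time_start: float) -> str:
    """Build the Markdown-style string returned alongside the images."""
    time_diff = time.time() - time_start

    # Free Python objects and cached CUDA memory before sampling usage.
    gc.collect()
    if torch.cuda.is_available():
        torch.cuda.empty_cache()

    memory = psutil.virtual_memory()
    gpu_utilization, gpu_memory = GPUInfo.gpu_usage()
    gpu_utilization = gpu_utilization[0] if len(gpu_utilization) > 0 else 0
    gpu_memory = gpu_memory[0] if len(gpu_memory) > 0 else 0

    return (
        f"*Memory: {memory.total / (1024 ** 3):.2f}GB, used: {memory.percent}%, "
        f"available: {memory.available / (1024 ** 3):.2f}GB.*\n"
        f"*Processing time: {time_diff:.5f} seconds.*\n"
        f"*GPU Utilization: {gpu_utilization}%, GPU Memory: {gpu_memory}MiB.*"
    )


if __name__ == "__main__":
    start = time.time()
    print(collect_system_info(start))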