sayakpaul HF staff committed on
Commit
1ea8dd9
1 Parent(s): 828df18

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -7
app.py CHANGED
@@ -3,9 +3,24 @@ import gradio as gr
3
  import json
4
 
5
 
6
- def bytes_to_giga_bytes(bytes):
7
- return f"{(bytes / 1024 / 1024 / 1024):.3f}"
8
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
 
10
  def load_model_index(pipeline_id, token=None, revision=None):
11
  index_path = hf_hub_download(repo_id=pipeline_id, filename="model_index.json", revision=revision, token=token)
@@ -73,12 +88,12 @@ def get_component_wise_memory(pipeline_id, token=None, variant=None, revision=No
73
  print(selected_file.rfilename)
74
  component_wise_memory[component] = bytes_to_giga_bytes(selected_file.size)
75
 
76
- return component_wise_memory
77
 
78
 
79
  gr.Interface(
80
  title="Compute component-wise memory of a 🧨 Diffusers pipeline.",
81
- description="Sizes will be reported in GB. It can handle pipelines where the text encoder checkpoints might be shared (PixArt, for example).",
82
  fn=get_component_wise_memory,
83
  inputs=[
84
  gr.components.Textbox(lines=1, label="pipeline_id", info="Example: runwayml/stable-diffusion-v1-5"),
@@ -98,11 +113,11 @@ gr.Interface(
98
  info="Extension to use.",
99
  ),
100
  ],
101
- outputs="text",
102
  examples=[
103
  ["runwayml/stable-diffusion-v1-5", None, "fp32", None, ".safetensors"],
104
  ["stabilityai/stable-diffusion-xl-base-1.0", None, "fp16", None, ".safetensors"],
105
- ["PixArt-alpha/PixArt-XL-2-1024-MS", None, "fp32", None, ".safetensors"],
106
  ],
107
  theme=gr.themes.Soft(),
108
  allow_flagging=False,
 
3
  import json
4
 
5
 
6
def format_size(num: int) -> str:
    """Render a byte count as a short human-readable string using SI steps of 1000.

    Adapted from https://stackoverflow.com/a/1094933
    """
    value = float(num)
    units = ("", "K", "M", "G", "T", "P", "E", "Z")
    index = 0
    # Scale down by 1000 until the magnitude fits, or the unit table is exhausted.
    while index < len(units) and abs(value) >= 1000.0:
        value /= 1000.0
        index += 1
    if index < len(units):
        return f"{value:3.1f}{units[index]}"
    # Anything beyond zettabytes falls through to yottabytes.
    return f"{value:.1f}Y"
17
+
18
def format_output(memory_mapping):
    """Render a ``{component: size}`` mapping as a Markdown bullet list.

    Each entry becomes a line ``* <component>: <human-readable size>``.
    Returns an empty string for an empty or ``None`` mapping.
    """
    if not memory_mapping:
        return ""
    lines = []
    # Bug fix: iterate .items() — iterating the dict directly yields only the
    # keys, so the 2-way unpack raised ValueError for any real component name.
    for component, memory in memory_mapping.items():
        lines.append(f"* {component}: {format_size(memory)}\n")
    return "".join(lines)
24
 
25
  def load_model_index(pipeline_id, token=None, revision=None):
26
  index_path = hf_hub_download(repo_id=pipeline_id, filename="model_index.json", revision=revision, token=token)
 
88
  print(selected_file.rfilename)
89
  component_wise_memory[component] = bytes_to_giga_bytes(selected_file.size)
90
 
91
+ return format_output(component_wise_memory)
92
 
93
 
94
  gr.Interface(
95
  title="Compute component-wise memory of a 🧨 Diffusers pipeline.",
96
+ description="Sizes will be reported in GB.",
97
  fn=get_component_wise_memory,
98
  inputs=[
99
  gr.components.Textbox(lines=1, label="pipeline_id", info="Example: runwayml/stable-diffusion-v1-5"),
 
113
  info="Extension to use.",
114
  ),
115
  ],
116
+ outputs="markdown",
117
  examples=[
118
  ["runwayml/stable-diffusion-v1-5", None, "fp32", None, ".safetensors"],
119
  ["stabilityai/stable-diffusion-xl-base-1.0", None, "fp16", None, ".safetensors"],
120
+ [""],
121
  ],
122
  theme=gr.themes.Soft(),
123
  allow_flagging=False,