VictorKai1996NUS committed
Commit • efc27db
1 Parent(s): 374c3a9
Update app.py
app.py CHANGED

@@ -99,6 +99,31 @@ def generate(engine, prompt, num_inference_steps=50, guidance_scale=6.0):
         logger.error(f"An error occurred: {str(e)}")
         return None
 
+
+def get_server_status():
+    cpu_percent = psutil.cpu_percent()
+    memory = psutil.virtual_memory()
+    disk = psutil.disk_usage('/')
+    gpus = GPUtil.getGPUs()
+    gpu_info = []
+    for gpu in gpus:
+        gpu_info.append({
+            'id': gpu.id,
+            'name': gpu.name,
+            'load': f"{gpu.load*100:.1f}%",
+            'memory_used': f"{gpu.memoryUsed}MB",
+            'memory_total': f"{gpu.memoryTotal}MB"
+        })
+
+    return {
+        'cpu': f"{cpu_percent}%",
+        'memory': f"{memory.percent}%",
+        'disk': f"{disk.percent}%",
+        'gpu': gpu_info
+    }
+
+
+
 css = """
 body {
     font-family: Arial, sans-serif;
@@ -132,19 +157,43 @@ body {
     margin: 0 auto;
 }
 
+.server-status {
+    position: fixed;
+    bottom: 10px;
+    left: 10px;
+    background-color: rgba(240, 240, 240, 0.9);
+    padding: 5px;
+    border-radius: 5px;
+    box-shadow: 0 1px 3px rgba(0,0,0,0.1);
+    font-size: 0.8em;
+    max-width: 340px;
+}
+.server-status h4 {
+    margin: 0 0 5px 0;
+    font-size: 0.9em;
+}
+.server-status-details {
+    display: none;
+}
+.server-status-summary {
+    cursor: pointer;
+}
 @media (max-width: 768px) {
     .row {
         flex-direction: column;
     }
-
     .column {
         width: 100%;
     }
-
     .video-output {
         width: 100%;
         height: auto;
     }
+    .server-status {
+        position: static;
+        margin-top: 20px;
+        max-width: 100%;
+    }
 }
 """
 
@@ -200,6 +249,18 @@ with gr.Blocks(css=css) as demo:
                 download_video_button_vs = gr.File(label="📥 Download Video", visible=False)
                 elapsed_time_vs = gr.Textbox(label="Elapsed Time", value="0s", visible=False)
 
+
+    with gr.Column(elem_classes="server-status"):
+        gr.Markdown("#### Server Status")
+        with gr.Row():
+            cpu_status = gr.Textbox(label="CPU", scale=1, container=False)
+            memory_status = gr.Textbox(label="Memory", scale=1, container=False)
+            disk_status = gr.Textbox(label="Disk", scale=1, container=False)
+        with gr.Accordion("GPU Details", open=False):
+            gpu_status = gr.JSON(container=False)
+        refresh_button = gr.Button("Refresh", scale=1, size="sm")
+
+
     def generate_vanilla(prompt, num_inference_steps, guidance_scale, progress=gr.Progress(track_tqdm=True)):
         engine = load_model()
         t = time()
@@ -225,6 +286,16 @@ with gr.Blocks(css=css) as demo:
     def enhance_prompt_func(prompt):
         return convert_prompt(prompt, retry_times=1)
 
+    def update_server_status():
+        status = get_server_status()
+        return (
+            f"{status['cpu']}",
+            f"{status['memory']}",
+            f"{status['disk']}",
+            status['gpu']
+        )
+
+
     generate_button.click(
         generate_vanilla,
         inputs=[prompt, num_inference_steps, guidance_scale],
@@ -239,6 +310,10 @@ with gr.Blocks(css=css) as demo:
 
     enhance_button.click(enhance_prompt_func, inputs=[prompt], outputs=[prompt])
 
+
+    refresh_button.click(update_server_status, outputs=[cpu_status, memory_status, disk_status, gpu_status])
+    demo.load(update_server_status, outputs=[cpu_status, memory_status, disk_status, gpu_status], every=60)
+
 if __name__ == "__main__":
     demo.queue(max_size=10, default_concurrency_limit=1)
     demo.launch()
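For reviewers who want to try the new monitoring code outside the Space, the sketch below restates the added helper as a standalone script. It assumes the psutil and gputil packages are installed (pip install psutil gputil); the diff does not show any import changes, so whether app.py already imports psutil and GPUtil is not visible here and is treated as an assumption. The __main__ block is only an illustrative smoke test, not part of the commit.

import psutil   # CPU / memory / disk metrics
import GPUtil   # NVIDIA GPU metrics; getGPUs() returns an empty list on GPU-less machines

def get_server_status():
    """Collect the same metrics the new Gradio status panel displays."""
    cpu_percent = psutil.cpu_percent()
    memory = psutil.virtual_memory()
    disk = psutil.disk_usage('/')
    gpu_info = [
        {
            'id': gpu.id,
            'name': gpu.name,
            'load': f"{gpu.load * 100:.1f}%",
            'memory_used': f"{gpu.memoryUsed}MB",
            'memory_total': f"{gpu.memoryTotal}MB",
        }
        for gpu in GPUtil.getGPUs()
    ]
    return {
        'cpu': f"{cpu_percent}%",
        'memory': f"{memory.percent}%",
        'disk': f"{disk.percent}%",
        'gpu': gpu_info,
    }

if __name__ == "__main__":
    # Quick smoke test: print the dict that feeds the CPU/Memory/Disk textboxes
    # and the GPU accordion in the Space.
    print(get_server_status())

In the app itself this dict is unpacked by update_server_status() and pushed to the panel both when the Refresh button is clicked and via demo.load(..., every=60), i.e. roughly once a minute while the page is open.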