import spaces
import datetime
import os
import subprocess
import torch
import gradio as gr
CUSTOM_CSS = """
#output_box textarea {
    font-family: IBM Plex Mono, ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas, "Liberation Mono", "Courier New", monospace;
}
"""
zero = torch.Tensor([0]).cuda()
print(zero.device) # <-- 'cpu' 🤔
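# @spaces.GPU attaches a GPU for the duration of the call; inside the
# decorated function the same tensor reports 'cuda:0'.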
@spaces.GPU
def run_gpu() -> str:
    print(zero.device) # <-- 'cuda:0'
    output: str = ""
    try:
        output = subprocess.check_output(["nvidia-smi"], text=True)
    except FileNotFoundError:
        output = "nvidia-smi failed"
    comment = (
        datetime.datetime.now().replace(microsecond=0).isoformat().replace("T", " ")
    )
    return f"# {comment}\n\n{output}"
def run(check: bool) -> str:
    if check:
        return run_gpu()
    else:
        comment = (
            datetime.datetime.now().replace(microsecond=0).isoformat().replace("T", " ")
        )
        return f"# {comment}"
output = gr.Textbox(
    label="Command Output", max_lines=32, elem_id="output_box", value=run(False)
)
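# The Textbox is created outside the Blocks context and attached to the
# layout below via .render().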
with gr.Blocks(css=CUSTOM_CSS) as demo:
    # gr.Markdown("#### `zero-gpu`: how to run on serverless GPU for free on Spaces 🔥")
    output.render()
    check = gr.Checkbox(label="Run")
    # every=1 re-runs `run` once per second, keeping the nvidia-smi output fresh.
    check.change(run, inputs=[check], outputs=output, every=1)
demo.queue().launch(show_api=True, share=True)