Commit d0fbcd0
Author: hysts (HF staff)
Parent: 8b663a6

Update for ZeroGPU

Files changed (3):
  1. app_image_to_3d.py +8 -1
  2. app_text_to_3d.py +8 -2
  3. requirements.txt +2 -2
app_image_to_3d.py CHANGED
@@ -5,6 +5,8 @@ import shlex
 import subprocess

 import gradio as gr
+import PIL.Image
+import spaces

 from model import Model
 from settings import CACHE_EXAMPLES, MAX_SEED
@@ -20,9 +22,14 @@ def create_demo(model: Model) -> gr.Blocks:
     )
     examples = ["corgi.png"]

+    @spaces.GPU
     def process_example_fn(image_path: str) -> str:
         return model.run_image(image_path)

+    @spaces.GPU
+    def run(image: PIL.Image.Image, seed: int, guidance_scale: float, num_inference_steps: int) -> str:
+        return model.run_image(image, seed, guidance_scale, num_inference_steps)
+
     with gr.Blocks() as demo:
         with gr.Box():
             image = gr.Image(label="Input image", show_label=False, type="pil")
@@ -74,7 +81,7 @@ def create_demo(model: Model) -> gr.Blocks:
             queue=False,
             api_name=False,
         ).then(
-            fn=model.run_image,
+            fn=run,
             inputs=inputs,
             outputs=result,
             api_name="image-to-3d",
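
For context on the pattern above: on a ZeroGPU Space, the spaces package is imported alongside Gradio and each function that needs a GPU is wrapped with the @spaces.GPU decorator, so a device is attached only while that call runs. This is presumably why the commit adds a module-level run wrapper instead of passing model.run_image directly: the decorator has to sit on the callable that Gradio actually invokes. Below is a minimal, self-contained sketch of the same wiring; generate is a hypothetical stand-in for model.run_image (the real Model class lives in model.py, which this commit does not touch).

# Minimal ZeroGPU wiring sketch (illustration only, not part of this commit).
# `generate` is a hypothetical stand-in for model.run_image from model.py.
import gradio as gr
import spaces


@spaces.GPU  # a GPU is attached only while this function runs
def generate(seed: int) -> str:
    # Real code would move the model to "cuda" and run inference here.
    return f"ran on GPU with seed={seed}"


with gr.Blocks() as demo:
    seed = gr.Slider(label="Seed", minimum=0, maximum=100, step=1, value=0)
    result = gr.Textbox(label="Result")
    button = gr.Button("Run")
    button.click(fn=generate, inputs=seed, outputs=result)

demo.queue().launch()
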
app_text_to_3d.py CHANGED
@@ -1,6 +1,7 @@
 #!/usr/bin/env python

 import gradio as gr
+import spaces

 from model import Model
 from settings import CACHE_EXAMPLES, MAX_SEED
@@ -20,9 +21,14 @@ def create_demo(model: Model) -> gr.Blocks:
         "A bowl of vegetables",
     ]

+    @spaces.GPU
     def process_example_fn(prompt: str) -> str:
         return model.run_text(prompt)

+    @spaces.GPU
+    def run(prompt: str, seed: int, guidance_scale: float, num_inference_steps: int) -> str:
+        return model.run_text(prompt, seed, guidance_scale, num_inference_steps)
+
     with gr.Blocks() as demo:
         with gr.Box():
             with gr.Row(elem_id="prompt-container"):
@@ -80,7 +86,7 @@ def create_demo(model: Model) -> gr.Blocks:
             queue=False,
             api_name=False,
         ).then(
-            fn=model.run_text,
+            fn=run,
             inputs=inputs,
             outputs=result,
             api_name=False,
@@ -92,7 +98,7 @@ def create_demo(model: Model) -> gr.Blocks:
             queue=False,
             api_name=False,
         ).then(
-            fn=model.run_text,
+            fn=run,
             inputs=inputs,
             outputs=result,
             api_name="text-to-3d",
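
One optional knob not used here: spaces.GPU also accepts a duration argument (in seconds) for calls that may outlive the default ZeroGPU allocation window, which can matter for text-to-3D sampling with a high num_inference_steps. A sketch of that variant, with a stub body standing in for model.run_text; the value 120 is only an illustrative assumption:

import spaces


# Optional variant (not used in this commit): ask ZeroGPU for a longer
# allocation when a single generation can exceed the default window.
# `duration` is in seconds; 120 is an illustrative value.
@spaces.GPU(duration=120)
def run(prompt: str, num_inference_steps: int) -> str:
    # A real implementation would call model.run_text(...) here, as in the
    # diff above; this stub only illustrates the decorator signature.
    return f"{prompt} ({num_inference_steps} steps)"
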
requirements.txt CHANGED
@@ -1,6 +1,6 @@
 diffusers==0.21.2
 gradio==3.44.4
-torch==2.0.1
-torchvision==0.15.2
+torch==2.0.0
+torchvision==0.15.1
 transformers==4.33.2
 trimesh==3.23.5
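
The torch/torchvision pins move down to 2.0.0/0.15.1, presumably to match the torch build ZeroGPU supported at the time. A quick way to confirm the Space's runtime actually resolved the pinned builds (a convenience check, not part of the repo):

# Convenience check (not part of the repo): confirm the runtime resolved the
# pinned builds from requirements.txt.
import torch
import torchvision

print(torch.__version__)        # expected to start with "2.0.0"
print(torchvision.__version__)  # expected to start with "0.15.1"
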