PeterL1n committed
Commit
75859e2
1 Parent(s): 1db955a

Update app.py

Files changed (1)
app.py +14 -3
app.py CHANGED
@@ -28,7 +28,7 @@ pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config, times

 # Inference function.
 @spaces.GPU(enable_queue=True)
-def generate_image(prompt, option, progress=gr.Progress()):
+def generate(prompt, option, progress=gr.Progress()):
     global step_loaded
     print(prompt, option)
     ckpt, step = opts[option]
@@ -69,14 +69,25 @@ with gr.Blocks(css="style.css") as demo:
     img = gr.Image(label="SDXL-Lighting Generated Image")

     prompt.submit(
-        fn=generate_image,
+        fn=generate,
         inputs=[prompt, option],
         outputs=img,
     )
     submit.click(
-        fn=generate_image,
+        fn=generate,
         inputs=[prompt, option],
         outputs=img,
     )
+
+    gr.Examples(
+        fn=generate,
+        examples=[
+            ["A girl smiling", "4 Steps"],
+            ["An astronaut riding a horse", "4 Steps"]
+        ],
+        inputs=[prompt, option],
+        outputs=img,
+        cache_examples=True,
+    )

 demo.queue().launch()
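
For context: this commit renames the inference handler from generate_image to generate and registers two example prompts that are precomputed and cached via gr.Examples. The minimal sketch below shows how the new block wires into a gr.Blocks app; it is not the actual app.py. The SDXL-Lightning pipeline is replaced by a stub, and the prompt, option, and submit components as well as the opts mapping are assumptions, since only img and the event wiring appear in this diff.

# Minimal sketch, assuming stand-ins for everything not shown in the diff.
import gradio as gr
import numpy as np

opts = {"4 Steps": ("sdxl_lightning_4step_unet.safetensors", 4)}  # assumed mapping

def generate(prompt, option, progress=gr.Progress()):
    # The real app decorates this with @spaces.GPU(enable_queue=True),
    # loads the checkpoint named by ckpt, and runs SDXL-Lightning inference.
    ckpt, step = opts[option]
    return np.zeros((512, 512, 3), dtype=np.uint8)  # placeholder image

with gr.Blocks(css="style.css") as demo:
    prompt = gr.Textbox(label="Prompt")                            # assumed
    option = gr.Radio(list(opts), value="4 Steps", label="Steps")  # assumed
    submit = gr.Button("Generate")                                 # assumed
    img = gr.Image(label="SDXL-Lighting Generated Image")

    prompt.submit(fn=generate, inputs=[prompt, option], outputs=img)
    submit.click(fn=generate, inputs=[prompt, option], outputs=img)

    # Added in this commit: example prompts whose outputs are precomputed
    # and cached, then served without re-running the handler.
    gr.Examples(
        fn=generate,
        examples=[
            ["A girl smiling", "4 Steps"],
            ["An astronaut riding a horse", "4 Steps"],
        ],
        inputs=[prompt, option],
        outputs=img,
        cache_examples=True,
    )

demo.queue().launch()

With cache_examples=True, Gradio runs each example through generate once when the app is built and stores the outputs, so clicking an example returns the cached image instead of invoking the GPU handler again.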