Wauplin (HF staff) committed
Commit a571610
1 Parent(s): 76c7f04

Update app.py

Files changed (1):
  1. app.py +29 -11
app.py CHANGED
@@ -10,7 +10,7 @@ from diffusers import WuerstchenDecoderPipeline, WuerstchenPriorPipeline
 from diffusers.pipelines.wuerstchen import DEFAULT_STAGE_C_TIMESTEPS
 from previewer.modules import Previewer
 
-from gallery_history import fetch_gallery_history, show_gallery_history
+import user_history
 
 os.environ['TOKENIZERS_PARALLELISM'] = 'false'
 
@@ -79,6 +79,7 @@ def generate(
     # decoder_timesteps: List[float] = None,
     decoder_guidance_scale: float = 0.0,
     num_images_per_prompt: int = 2,
+    profile: gr.OAuthProfile | None = None,
 ) -> PIL.Image.Image:
     generator = torch.Generator().manual_seed(seed)
 
@@ -111,6 +112,25 @@ def generate(
         generator=generator,
         output_type="pil",
     ).images
+
+    # Save images
+    for image in decoder_output:
+        user_history.save_image(
+            profile=profile,
+            image=image,
+            label=prompt,
+            metadata={
+                "negative_prompt": negative_prompt,
+                "seed": seed,
+                "width": width,
+                "height": height,
+                "prior_guidance_scale": prior_guidance_scale,
+                "decoder_num_inference_steps": decoder_num_inference_steps,
+                "decoder_guidance_scale": decoder_guidance_scale,
+                "num_images_per_prompt": num_images_per_prompt,
+            },
+        )
+
     yield decoder_output
 
 
@@ -119,7 +139,7 @@ examples = [
     "An astronaut riding a green horse",
 ]
 
-with gr.Blocks(css="style.css") as demo:
+with gr.Blocks() as demo:
     gr.Markdown(DESCRIPTION)
     gr.DuplicateButton(
         value="Duplicate Space for private use",
@@ -213,8 +233,6 @@ with gr.Blocks(css="style.css") as demo:
         cache_examples=CACHE_EXAMPLES,
     )
 
-    history = show_gallery_history()
-
     inputs = [
         prompt,
         negative_prompt,
@@ -240,8 +258,6 @@ with gr.Blocks(css="style.css") as demo:
         inputs=inputs,
         outputs=result,
         api_name="run",
-    ).then(
-        fn=fetch_gallery_history, inputs=[prompt, result], outputs=history, queue=False
     )
     negative_prompt.submit(
         fn=randomize_seed_fn,
@@ -254,8 +270,6 @@ with gr.Blocks(css="style.css") as demo:
         inputs=inputs,
         outputs=result,
         api_name=False,
-    ).then(
-        fn=fetch_gallery_history, inputs=[prompt, result], outputs=history, queue=False
     )
     run_button.click(
         fn=randomize_seed_fn,
@@ -268,9 +282,13 @@ with gr.Blocks(css="style.css") as demo:
         inputs=inputs,
         outputs=result,
         api_name=False,
-    ).then(
-        fn=fetch_gallery_history, inputs=[prompt, result], outputs=history, queue=False
     )
 
+with gr.Blocks(css="style.css") as demo_with_history:
+    with gr.Tab("App"):
+        demo.render()
+    with gr.Tab("Past generations"):
+        user_history.render()
+
 if __name__ == "__main__":
-    demo.queue(max_size=20).launch()
+    demo_with_history.queue(max_size=20).launch()
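Note: the commit swaps the old gallery_history callback chain (the `.then(fn=fetch_gallery_history, ...)` appended to every event) for the Space-local user_history helper. The handler now receives an optional gr.OAuthProfile (filled in automatically by Gradio for logged-in users on an OAuth-enabled Space), persists each output through user_history.save_image(...), and the saved history is shown in a second tab via user_history.render(). Below is a minimal, self-contained sketch of that pattern; it assumes a user_history module exposing the same save_image/render API used in the diff, and a dummy handler stands in for the real Würstchen pipelines.

import gradio as gr
import PIL.Image

import user_history  # Space-local helper module, assumed to match the API used in the diff


def generate(prompt: str, profile: gr.OAuthProfile | None = None) -> PIL.Image.Image:
    # Placeholder output; the real app yields images from the Würstchen decoder pipeline.
    image = PIL.Image.new("RGB", (256, 256), color="white")

    # Persist the result to the user's history. On an OAuth-enabled Space, Gradio
    # injects the logged-in user's profile into the gr.OAuthProfile parameter
    # automatically; otherwise `profile` stays None.
    user_history.save_image(
        profile=profile,
        image=image,
        label=prompt,
        metadata={"prompt": prompt},
    )
    return image


# The inner app no longer sets css="style.css"; the CSS moves to the outer wrapper.
with gr.Blocks() as demo:
    prompt = gr.Text(label="Prompt")
    result = gr.Image(label="Result")
    prompt.submit(fn=generate, inputs=prompt, outputs=result)

# Wrap the app and the history browser in two tabs, then launch the wrapper.
with gr.Blocks(css="style.css") as demo_with_history:
    with gr.Tab("App"):
        demo.render()
    with gr.Tab("Past generations"):
        user_history.render()

if __name__ == "__main__":
    demo_with_history.queue(max_size=20).launch()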