multimodalart (HF staff) committed
Commit 9ac0d67
1 Parent(s): 5d5545c

Update app.py

Files changed (1)
  app.py +22 -22
app.py CHANGED
@@ -10,7 +10,7 @@ from diffusers import StableCascadeDecoderPipeline, StableCascadePriorPipeline
 from diffusers.pipelines.wuerstchen import DEFAULT_STAGE_C_TIMESTEPS
 import spaces
 from previewer.modules import Previewer
-#import user_history
+import user_history
 
 os.environ['TOKENIZERS_PARALLELISM'] = 'false'
 
@@ -118,23 +118,23 @@ def generate(
         output_type="pil",
     ).images
 
-    # Save images
-    #for image in decoder_output:
-    #    user_history.save_image(
-    #        profile=profile,
-    #        image=image,
-    #        label=prompt,
-    #        metadata={
-    #            "negative_prompt": negative_prompt,
-    #            "seed": seed,
-    #            "width": width,
-    #            "height": height,
-    #            "prior_guidance_scale": prior_guidance_scale,
-    #            "decoder_num_inference_steps": decoder_num_inference_steps,
-    #            "decoder_guidance_scale": decoder_guidance_scale,
-    #            "num_images_per_prompt": num_images_per_prompt,
-    #        },
-    #    )
+    # Save images
+    for image in decoder_output:
+        user_history.save_image(
+            profile=profile,
+            image=image,
+            label=prompt,
+            metadata={
+                "negative_prompt": negative_prompt,
+                "seed": seed,
+                "width": width,
+                "height": height,
+                "prior_guidance_scale": prior_guidance_scale,
+                "decoder_num_inference_steps": decoder_num_inference_steps,
+                "decoder_guidance_scale": decoder_guidance_scale,
+                "num_images_per_prompt": num_images_per_prompt,
+            },
+        )
 
     yield decoder_output[0]
 
@@ -267,10 +267,10 @@ with gr.Blocks() as demo:
     )
 
 with gr.Blocks(css="style.css") as demo_with_history:
-    #with gr.Tab("App"):
-    demo.render()
-    #with gr.Tab("Past generations"):
-    #    user_history.render()
+    with gr.Tab("App"):
+        demo.render()
+    with gr.Tab("Past generations"):
+        user_history.render()
 
 if __name__ == "__main__":
     demo_with_history.queue(max_size=20).launch()
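
For context, a minimal sketch of the app structure this commit re-enables, assuming the Space ships a local user_history helper module exposing the render() and save_image() calls seen in the diff; the placeholder UI inside demo is illustrative only and stands in for the full Stable Cascade interface defined earlier in app.py.

import gradio as gr
import user_history  # local helper module of this Space, re-enabled by this commit

# Main generation UI (placeholder for the full Stable Cascade interface)
with gr.Blocks() as demo:
    gr.Markdown("Stable Cascade demo UI goes here")

# Wrap the app and the per-user gallery in two tabs
with gr.Blocks(css="style.css") as demo_with_history:
    with gr.Tab("App"):
        demo.render()  # render the main UI inside the first tab
    with gr.Tab("Past generations"):
        user_history.render()  # gallery of images saved via user_history.save_image()

if __name__ == "__main__":
    demo_with_history.queue(max_size=20).launch()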