ChenWu98 committed
Commit cecb3d6 • 1 Parent(s): 08770df

Update app.py

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -251,7 +251,8 @@ def inference(source_prompt, target_prompt, source_guidance_scale=1, guidance_sc
         )
         ptp_utils.register_attention_control(pipe, controller)
     elif cross_attention_control == "None":
-        pass
+        controller = EmptyControl()
+        ptp_utils.register_attention_control(pipe, controller)
     else:
         raise ValueError("Unknown cross_attention_control: {}".format(cross_attention_control))
 
@@ -287,7 +288,7 @@ with gr.Blocks(css=css) as demo:
 <p>
 Demo for CycleDiffusion with Stable Diffusion. <br>
 CycleDiffusion (<a href="https://arxiv.org/abs/2210.05559">📄 Paper link</a> | <a href="https://huggingface.co/docs/diffusers/main/en/api/pipelines/cycle_diffusion">🧨 Pipeline doc</a>) is an image-to-image translation method that supports stochastic samplers for diffusion models. <br>
-It also supports Cross Attention Control (<a href="https://arxiv.org/abs/2208.01626">📄 Paper link</a>), which is a technique to transfer the attention map from the source prompt to the target prompt. <br>
+We also support the combination of CycleDiffusion and Cross Attention Control (CAC | <a href="https://arxiv.org/abs/2208.01626">📄 Paper link</a>). CAC is a technique to transfer the attention map from the source prompt to the target prompt. <br>
 </p>
 <p>
 <b>How to use:</b> <br>
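For reference, a minimal sketch of the pass-through controller the new branch registers, assuming ptp_utils here follows the original Prompt-to-Prompt convention in which register_attention_control patches the UNet attention layers to route every attention map through the registered controller. Under that assumption, registering an EmptyControl (instead of doing nothing with pass) re-patches the attention layers to a no-op, so a controller left over from an earlier CAC run cannot stay active. The class below is illustrative only, not the app's actual definition; the __call__ signature is assumed from the Prompt-to-Prompt codebase.

class EmptyControl:
    # Illustrative pass-through controller (assumed interface): every attention
    # map is returned unchanged, so no cross-attention editing takes place.
    def __call__(self, attn, is_cross: bool, place_in_unet: str):
        return attn

    def between_steps(self):
        # Nothing to accumulate or reset between diffusion steps.
        pass

    def reset(self):
        # Nothing to clear before a new generation.
        pass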