multimodalart HF staff committed on
Commit
71d6339
1 Parent(s): 4a1f895

Update app.py

Browse files
Files changed (1)
  1. app.py +7 -2
app.py CHANGED
@@ -22,7 +22,7 @@ device="cuda"
22
  pipe = pipe.to(device)
23
 
24
  @spaces.GPU
25
- def run(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_layers=["mid"], randomize_seed=True, seed=42, progress=gr.Progress(track_tqdm=True)):
26
  prompt = prompt.strip()
27
  negative_prompt = negative_prompt.strip() if negative_prompt and negative_prompt.strip() else None
28
  print(f"Initial seed for prompt `{prompt}`", seed)
@@ -31,6 +31,10 @@ def run(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_lay
31
 
32
  if not prompt and not negative_prompt:
33
  guidance_scale = 0.0
 
 
 
 
34
  print(f"Seed before sending to generator for prompt: `{prompt}`", seed)
35
  generator = torch.Generator(device="cuda").manual_seed(seed)
36
  image_pag = pipe(prompt, negative_prompt=negative_prompt, guidance_scale=guidance_scale, pag_scale=pag_scale, pag_applied_layers=pag_layers, generator=generator, num_inference_steps=25).images[0]
@@ -56,6 +60,7 @@ with gr.Blocks(css=css, theme=theme) as demo:
56
  prompt = gr.Textbox(show_label=False, scale=4, placeholder="Your prompt", info="Leave blank to test unconditional generation")
57
  button = gr.Button("Generate", min_width=120)
58
  output = ImageSlider(label="Left: PAG, Right: No PAG", interactive=False)
 
59
  with gr.Accordion("Advanced Settings", open=False):
60
  guidance_scale = gr.Number(label="CFG Guidance Scale", info="The guidance scale for CFG, ignored if no prompt is entered (unconditional generation)", value=7.0)
61
  negative_prompt = gr.Textbox(label="Negative prompt", info="Is only applied for the CFG part, leave blank for unconditional generation")
@@ -70,7 +75,7 @@ with gr.Blocks(css=css, theme=theme) as demo:
70
  prompt.submit
71
  ],
72
  fn=run,
73
- inputs=[prompt, negative_prompt, guidance_scale, pag_scale, pag_layers, randomize_seed, seed],
74
  outputs=[output, seed],
75
  )
76
  if __name__ == "__main__":
 
22
  pipe = pipe.to(device)
23
 
24
  @spaces.GPU
25
+ def run(prompt, negative_prompt=None, guidance_scale=7.0, pag_scale=3.0, pag_layers=["mid"], randomize_seed=True, seed=42, lora=None, progress=gr.Progress(track_tqdm=True)):
26
  prompt = prompt.strip()
27
  negative_prompt = negative_prompt.strip() if negative_prompt and negative_prompt.strip() else None
28
  print(f"Initial seed for prompt `{prompt}`", seed)
 
31
 
32
  if not prompt and not negative_prompt:
33
  guidance_scale = 0.0
34
+ pipe.unload_lora_weights()
35
+ if lora is not None:
36
+ pipe.load_lora_weights(lora, adapter_name="custom")
37
+ pipe.fuse_lora(lora_scale=0.9)
38
  print(f"Seed before sending to generator for prompt: `{prompt}`", seed)
39
  generator = torch.Generator(device="cuda").manual_seed(seed)
40
  image_pag = pipe(prompt, negative_prompt=negative_prompt, guidance_scale=guidance_scale, pag_scale=pag_scale, pag_applied_layers=pag_layers, generator=generator, num_inference_steps=25).images[0]
 
60
  prompt = gr.Textbox(show_label=False, scale=4, placeholder="Your prompt", info="Leave blank to test unconditional generation")
61
  button = gr.Button("Generate", min_width=120)
62
  output = ImageSlider(label="Left: PAG, Right: No PAG", interactive=False)
63
+ lora = gr.Textbox(label="Custom LoRA path")
64
  with gr.Accordion("Advanced Settings", open=False):
65
  guidance_scale = gr.Number(label="CFG Guidance Scale", info="The guidance scale for CFG, ignored if no prompt is entered (unconditional generation)", value=7.0)
66
  negative_prompt = gr.Textbox(label="Negative prompt", info="Is only applied for the CFG part, leave blank for unconditional generation")
 
75
  prompt.submit
76
  ],
77
  fn=run,
78
+ inputs=[prompt, negative_prompt, guidance_scale, pag_scale, pag_layers, randomize_seed, seed, lora],
79
  outputs=[output, seed],
80
  )
81
  if __name__ == "__main__":