lllyasviel committed
Commit cc3a6ed · 1 Parent(s): 3151139
Files changed (2)
  1. draft.py +2 -1
  2. modules/default_pipeline.py +1 -1
draft.py CHANGED
@@ -20,7 +20,8 @@ with block:
     with gr.Column():
         prompt = gr.Textbox(label="Prompt", value='a handsome man in forest')
         run_button = gr.Button(label="Run")
-        result_gallery = gr.Gallery(label='Output', show_label=False, elem_id="gallery", height='auto')
+        result_gallery = gr.Gallery(label='Output', show_label=False, elem_id="gallery",
+                                    object_fit='contain', height=768)
     run_button.click(fn=generate_clicked, inputs=[prompt], outputs=[result_gallery])


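For reference, here is a minimal standalone sketch (not code from this commit) of the UI pattern draft.py moves to: a fixed 768 px gallery that letterboxes results via object_fit='contain' instead of auto-sizing with height='auto'. The generate_stub handler and the demo wiring are illustrative placeholders for the repository's generate_clicked/process plumbing.

import gradio as gr


def generate_stub(prompt):
    # Placeholder for generate_clicked, which would run the SDXL pipeline
    # and return a list of images for the gallery.
    return []


with gr.Blocks() as demo:
    with gr.Column():
        prompt = gr.Textbox(label="Prompt", value='a handsome man in forest')
        run_button = gr.Button("Run")
        result_gallery = gr.Gallery(label='Output', show_label=False, elem_id="gallery",
                                    object_fit='contain', height=768)
    run_button.click(fn=generate_stub, inputs=[prompt], outputs=[result_gallery])

if __name__ == '__main__':
    demo.launch()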
modules/default_pipeline.py CHANGED
@@ -18,7 +18,7 @@ xl_refiner = core.load_model(xl_refiner_filename)


 @torch.no_grad()
-def process(positive_prompt, negative_prompt, width=1024, height=1024, batch_size=1):
+def process(positive_prompt, negative_prompt, width=1280, height=960, batch_size=1):
     positive_conditions = core.encode_prompt_condition(clip=xl_base.clip, prompt=positive_prompt)
     negative_conditions = core.encode_prompt_condition(clip=xl_base.clip, prompt=negative_prompt)

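And a hypothetical caller sketch (not part of the commit) to illustrate the effect of the new defaults in modules/default_pipeline.py: calls that omit width and height now render landscape 1280x960 images instead of 1024x1024 squares, while explicit arguments still override the defaults.

from modules import default_pipeline

# Uses the new defaults from this commit: width=1280, height=960.
# The return value is assumed here to be the pipeline's generated images.
images = default_pipeline.process(
    positive_prompt='a handsome man in forest',
    negative_prompt='')

# Explicit width/height still override the defaults, e.g. to reproduce
# the previous square 1024x1024 output.
square_images = default_pipeline.process(
    positive_prompt='a handsome man in forest',
    negative_prompt='',
    width=1024, height=1024)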