lllyasviel committed
Commit edb2ece · 1 Parent(s): a22acc2
Files changed (2):
  1. modules/core.py +2 -2
  2. modules/default_pipeline.py +1 -1
modules/core.py CHANGED
@@ -83,7 +83,7 @@ def close_all_preview():
 
 
 @torch.no_grad()
-def ksampler(model, positive, negative, latent, seed=None, steps=30, cfg=9.0, sampler_name='euler_ancestral',
+def ksampler(model, positive, negative, latent, seed=None, steps=30, cfg=7.0, sampler_name='euler_ancestral',
              scheduler='normal', denoise=1.0, disable_noise=False, start_step=None, last_step=None,
              force_full_denoise=False):
     seed = seed if isinstance(seed, int) else random.randint(1, 2 ** 64)
@@ -147,7 +147,7 @@ def ksampler(model, positive, negative, latent, seed=None, steps=30, cfg=9.0, sa
 
 @torch.no_grad()
 def ksampler_with_refiner(model, positive, negative, refiner, refiner_positive, refiner_negative, latent,
-                          seed=None, steps=30, refiner_switch_step=20, cfg=9.0, sampler_name='euler_ancestral',
+                          seed=None, steps=30, refiner_switch_step=20, cfg=7.0, sampler_name='euler_ancestral',
                           scheduler='normal', denoise=1.0, disable_noise=False, start_step=None, last_step=None,
                           force_full_denoise=False):
     seed = seed if isinstance(seed, int) else random.randint(1, 2 ** 64)
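Net effect in modules/core.py: the default CFG scale of both samplers drops from 9.0 to 7.0; callers that pass cfg explicitly are unaffected. As a reminder of what this knob scales, below is a minimal, self-contained sketch of standard classifier-free guidance; the apply_cfg helper and the toy tensors are illustrative only, not code from this repository.

import torch

def apply_cfg(cond_pred: torch.Tensor, uncond_pred: torch.Tensor, cfg: float) -> torch.Tensor:
    # Standard classifier-free guidance: extrapolate from the unconditional
    # prediction toward the conditional one by a factor of `cfg`. Higher cfg
    # means stronger prompt adherence; lower cfg gives softer results.
    return uncond_pred + cfg * (cond_pred - uncond_pred)

cond = torch.tensor([1.0])
uncond = torch.tensor([0.2])
print(apply_cfg(cond, uncond, 9.0))  # old default: tensor([7.4000])
print(apply_cfg(cond, uncond, 7.0))  # new default: tensor([5.8000])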
modules/default_pipeline.py CHANGED
@@ -14,7 +14,7 @@ del xl_base.vae
 
 
 @torch.no_grad()
-def process(positive_prompt, negative_prompt, width=1280, height=960, batch_size=1):
+def process(positive_prompt, negative_prompt, width=1024, height=1024, batch_size=1):
     positive_conditions = core.encode_prompt_condition(clip=xl_base.clip, prompt=positive_prompt)
     negative_conditions = core.encode_prompt_condition(clip=xl_base.clip, prompt=negative_prompt)
 
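The default output size moves from 1280x960 to 1024x1024, SDXL's native square training resolution; both defaults sit around one megapixel. A purely illustrative check of the pixel counts and the corresponding latent sizes, assuming the usual 8x spatial downsampling of the SD-family VAE:

for width, height in [(1280, 960), (1024, 1024)]:
    pixels = width * height
    latent_w, latent_h = width // 8, height // 8  # 8x VAE downsampling
    print(f"{width}x{height}: {pixels} px, latent {latent_w}x{latent_h}")

# prints:
# 1280x960: 1228800 px, latent 160x120
# 1024x1024: 1048576 px, latent 128x128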