JoPmt committed on
Commit
0b1b15b
1 Parent(s): f822fb6

Update app.py

Files changed (1)
  1. app.py +2 -1
app.py CHANGED
@@ -6,10 +6,11 @@ from transformers import pipeline
 from PIL import Image
 
 accelerator = Accelerator()
-pipe = accelerator.prepare(KandinskyV22CombinedPipeline.from_pretrained("kandinsky-community/kandinsky-2-2-decoder", torch_dtype=torch.float32))
+pipe = accelerator.prepare(KandinskyV22CombinedPipeline.from_pretrained("kandinsky-community/kandinsky-2-2-decoder", torch_dtype=torch.float32, use_safetensors=True, safety_checker=False))
 pipe = pipe.to("cpu")
 apol=[]
 def plex(prompt,negative_prompt,stips,uno):
+    apol=[]
     generator = torch.Generator(device="cpu").manual_seed(random.randint(1, 4876364))
     image = pipe(prompt=[prompt]*2, negative_prompt=[negative_prompt]*2,num_inference_steps=stips, prior_guidance_scale=uno, height=512, width=512, generator=generator)
     for i, igs in enumerate(image["images"]):
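For context, a minimal sketch of how the touched part of app.py might read after this commit. Only the hunk above is visible in the diff; the imports, the body of the for loop, and the return value are assumptions here, not the exact file contents.

import random

import torch
from accelerate import Accelerator
from diffusers import KandinskyV22CombinedPipeline
from PIL import Image

accelerator = Accelerator()
# This commit adds use_safetensors=True and safety_checker=False to from_pretrained.
pipe = accelerator.prepare(
    KandinskyV22CombinedPipeline.from_pretrained(
        "kandinsky-community/kandinsky-2-2-decoder",
        torch_dtype=torch.float32,
        use_safetensors=True,
        safety_checker=False,
    )
)
pipe = pipe.to("cpu")
apol = []

def plex(prompt, negative_prompt, stips, uno):
    # Also new in this commit: reset the result list on every call so images
    # from earlier runs are not carried over into the next output.
    apol = []
    generator = torch.Generator(device="cpu").manual_seed(random.randint(1, 4876364))
    image = pipe(
        prompt=[prompt] * 2,
        negative_prompt=[negative_prompt] * 2,
        num_inference_steps=stips,
        prior_guidance_scale=uno,
        height=512,
        width=512,
        generator=generator,
    )
    # Assumed loop body (not shown in the diff): collect each generated PIL
    # image and return the list.
    for i, igs in enumerate(image["images"]):
        apol.append(igs)
    return apol

The rest of app.py, presumably the Gradio interface that calls plex, is outside this diff and unchanged by the commit.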