JoPmt committed on
Commit c0533c2
1 Parent(s): 524c7f6

Update app.py

Files changed (1): app.py (+8 -7)
app.py CHANGED
@@ -5,27 +5,28 @@ import PIL.Image
 from diffusers.utils import load_image
 import gradio as gr
 from PIL import Image
-import cv2
 import os, random, gc
-import numpy as np
 from accelerate import Accelerator
 accelerator = Accelerator(cpu=True)
 pipe = accelerator.prepare(AmusedPipeline.from_pretrained("amused/amused-512", variant=None, torch_dtype=torch.float32, use_safetensors=True))
 pipe.vqvae.to(torch.float32)
 pipe.to("cpu")
 apol=[]
-def plex(prompt, guod, fifth, twice):
+def plex(prompt, guod, fifth, twice, nut):
     gc.collect()
     apol=[]
-    nm = random.randint(1, 4836928)
-    while nm % 32 != 0:
-        nm = random.randint(1, 4836928)
+    if nut == 0:
+        nm = random.randint(1, 2147483616)
+        while nm % 32 != 0:
+            nm = random.randint(1, 2147483616)
+    else:
+        nm=nut
     generator = torch.Generator(device="cpu").manual_seed(nm)
     image = pipe(prompt=prompt,guidance_scale=guod,num_inference_steps=twice,num_images_per_prompt=fifth,generator=generator)
     for a, imze in enumerate(image["images"]):
         apol.append(imze)
     return apol
 
-iface = gr.Interface(fn=plex, inputs=[gr.Textbox(label="prompt",),gr.Slider(label="guidance scale",minimum=1,step=1,maximum=10,value=4),gr.Slider(label="num images", minimum=1, step=1, maximum=4, value=1), gr.Slider(label="num inference steps", minimum=1, step=1, maximum=20, value=12)], outputs=gr.Gallery(label="out", columns=2),description="Running on cpu, very slow! by JoPmt.")
+iface = gr.Interface(fn=plex, inputs=[gr.Textbox(label="prompt",),gr.Slider(label="guidance scale",minimum=1,step=1,maximum=10,value=4),gr.Slider(label="num images", minimum=1, step=1, maximum=4, value=1), gr.Slider(label="num inference steps", minimum=1, step=1, maximum=20, value=12), gr.Slider(label="manual seed (leave 0 for random)",minimum=0,step=32,maximum=2147483616,value=0)], outputs=gr.Gallery(label="out", columns=2),description="Running on cpu, very slow! by JoPmt.")
 iface.queue(max_size=1,api_open=False)
 iface.launch(max_threads=1)
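
The functional change is the seed handling in plex: the new nut argument lets the caller pin the seed, while 0 keeps the previous behaviour of drawing a random seed that is a multiple of 32, with the upper bound raised from 4836928 to 2147483616. A minimal standalone sketch of that logic under those assumptions; the helper name pick_seed and the MAX_SEED constant are illustrative, not part of the commit:

import random

MAX_SEED = 2147483616  # upper bound used in the commit; equals 2**31 - 32, itself divisible by 32

def pick_seed(nut: int) -> int:
    # Non-zero: honour the user-supplied seed unchanged.
    if nut != 0:
        return nut
    # Zero: redraw until the random seed lands on a multiple of 32,
    # mirroring the while-loop inside plex.
    nm = random.randint(1, MAX_SEED)
    while nm % 32 != 0:
        nm = random.randint(1, MAX_SEED)
    return nm

The matching UI change is the extra gr.Slider with minimum=0, step=32, and value=0, so the control only produces 0 (random) or seeds already on the same 32-step grid that the retry loop enforces.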