JoPmt committed on
Commit ce42a12
1 Parent(s): 2dd0dd9

Create app.py

Files changed (1)
  1. app.py +33 -0
app.py ADDED
@@ -0,0 +1,33 @@
+ from diffusers import AutoPipelineForText2Image, DiffusionPipeline, UniPCMultistepScheduler, EulerAncestralDiscreteScheduler
+ import torch
+ import gradio as gr
+ from PIL import Image
+ import os, random
+ import PIL.Image
+ from transformers import pipeline
+ from diffusers.utils import load_image
+ from accelerate import Accelerator
+
+ accelerator = Accelerator()
+ apol=[]
+ pipe = accelerator.prepare(DiffusionPipeline.from_single_file("https://huggingface.co/lllyasviel/fav_models/fav/DreamShaper_8_pruned.safetensors",torch_dtype=torch.float32, variant=None, use_safetensors=True, safety_checker=None))
+ ##pipe.scheduler = accelerator.prepare(EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config))
+ pipe.unet.to(memory_format=torch.channels_last)
+ pipe = accelerator.prepare(pipe.to("cpu"))
+ def plex(prompt,neg_prompt,stips,scaly,nut):
+     apol=[]
+     if nut == 0:
+         nm = random.randint(1, 2147483616)
+         while nm % 32 != 0:
+             nm = random.randint(1, 2147483616)
+     else:
+         nm=nut
+     generator = torch.Generator(device="cpu").manual_seed(nm)
+     image = pipe(prompt=prompt, negative_prompt=neg_prompt, generator=generator, num_inference_steps=stips, guidance_scale=scaly)
+     for a, imze in enumerate(image["images"]):
+         apol.append(imze)
+     return apol
+
+ iface = gr.Interface(fn=plex,inputs=[gr.Textbox(label="Prompt"), gr.Textbox(label="negative_prompt", value="low quality, bad quality"), gr.Slider(label="num inference steps",minimum=1,step=1,maximum=20,value=15), gr.Slider(label="guidance_scale",minimum=1,step=1,maximum=10,value=7),gr.Slider(label="manual seed (leave 0 for random)",minimum=0,step=32,maximum=2147483616,value=0)],outputs=gr.Gallery(label="Generated Output Image", columns=1), title="Txt2Img_DrmDrp_v1_SD",description="Running on cpu, very slow!")
+ iface.queue(max_size=1,api_open=False)
+ iface.launch(max_threads=1)
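
For reference, the same checkpoint can be exercised outside the Gradio interface. The snippet below is a minimal sketch, not part of the commit, assuming the diffusers and torch versions this Space was built against; the prompt text, seed value, and output filename are illustrative only.

import torch
from diffusers import DiffusionPipeline

# Load the single-file DreamShaper 8 checkpoint on CPU in float32, mirroring app.py.
pipe = DiffusionPipeline.from_single_file(
    "https://huggingface.co/lllyasviel/fav_models/fav/DreamShaper_8_pruned.safetensors",
    torch_dtype=torch.float32,
    use_safetensors=True,
    safety_checker=None,
).to("cpu")

# Fixed seed so the run is reproducible (app.py draws a random seed when the slider is 0).
generator = torch.Generator(device="cpu").manual_seed(1234)  # illustrative seed

result = pipe(
    prompt="a watercolor landscape",            # illustrative prompt
    negative_prompt="low quality, bad quality",
    num_inference_steps=15,
    guidance_scale=7,
    generator=generator,
)
result.images[0].save("output.png")             # save the first generated image

Loading in float32 on CPU matches the configuration in app.py, which is why the interface description warns that generation is very slow.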