from diffusers import DiffusionPipeline, EulerAncestralDiscreteScheduler
import torch
import gradio as gr
import random
from accelerate import Accelerator
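# Accelerate is used here for device placement; this Space runs on CPU only.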
accelerator = Accelerator()
apol=[]
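# Load DreamShaper 8 (a Stable Diffusion 1.5 checkpoint) from a single safetensors file on the Hub.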
pipe = accelerator.prepare(
    DiffusionPipeline.from_single_file(
        "https://huggingface.co/lllyasviel/fav_models/fav/DreamShaper_8_pruned.safetensors",
        torch_dtype=torch.float32,
        variant=None,
        use_safetensors=True,
        safety_checker=None,
    )
)
# Optional: swap in the Euler Ancestral scheduler.
# pipe.scheduler = accelerator.prepare(EulerAncestralDiscreteScheduler.from_config(pipe.scheduler.config))
pipe.unet.to(memory_format=torch.channels_last)  # channels-last layout can help CPU throughput
pipe = accelerator.prepare(pipe.to("cpu"))
def plex(prompt, neg_prompt, stips, scaly, nut):
    apol = []
    # A seed of 0 means "random": draw seeds until one is a multiple of 32,
    # matching the seed slider's step of 32.
    if nut == 0:
        nm = random.randint(1, 2147483616)
        while nm % 32 != 0:
            nm = random.randint(1, 2147483616)
    else:
        nm = nut
    generator = torch.Generator(device="cpu").manual_seed(nm)
    image = pipe(
        prompt=prompt,
        negative_prompt=neg_prompt,
        generator=generator,
        num_inference_steps=stips,
        guidance_scale=scaly,
    )
    # Collect the generated images for the Gallery.
    for imze in image["images"]:
        apol.append(imze)
    return apol
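# Gradio UI: prompt and negative-prompt text boxes plus sliders for steps, guidance scale, and manual seed.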
iface = gr.Interface(
    fn=plex,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.Textbox(label="negative_prompt", value="low quality, bad quality"),
        gr.Slider(label="num inference steps", minimum=1, step=1, maximum=20, value=15),
        gr.Slider(label="guidance_scale", minimum=1, step=1, maximum=10, value=7),
        gr.Slider(label="manual seed (leave 0 for random)", minimum=0, step=32, maximum=2147483616, value=0),
    ],
    outputs=gr.Gallery(label="Generated Output Image", columns=1),
    title="Txt2Img_DrmDrp_v1_SD",
    description="Running on cpu, very slow!",
)
iface.queue(max_size=1, api_open=False)  # keep the request queue small for CPU-only inference
iface.launch(max_threads=1)