# secret/image_generator.py
from diffusers import StableDiffusionXLPipeline, AutoencoderKL, UNet2DConditionModel, LCMScheduler, DPMSolverMultistepScheduler
import torch

# Cache the most recently loaded pipeline so repeated calls with the same base
# model id do not reload weights from disk.
loaded_pipe = None
loaded_pipe_id = None

def load_model(pipe_id, unet_model_id):
    """Load (or reuse) a cached SDXL pipeline built with the given UNet."""
    global loaded_pipe, loaded_pipe_id
    if loaded_pipe_id != pipe_id:
        # LCM-distilled UNet weights in fp16.
        unet = UNet2DConditionModel.from_pretrained(
            unet_model_id,
            torch_dtype=torch.float16,
            variant="fp16",
        )
        # fp16-safe SDXL VAE to avoid black-image/NaN artifacts in half precision.
        vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=torch.float16)
        loaded_pipe = StableDiffusionXLPipeline.from_pretrained(
            pipe_id, unet=unet, vae=vae, torch_dtype=torch.float16, variant="fp16",
        ).to("cuda")
        loaded_pipe_id = pipe_id
    return loaded_pipe

def set_scheduler(pipe, scheduler_type):
    """Swap the pipeline's scheduler for the requested sampler."""
    if scheduler_type == "LCM":
        pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
    elif scheduler_type == "DPM++ 2M Karras":
        pipe.scheduler = DPMSolverMultistepScheduler.from_config(
            pipe.scheduler.config, use_karras_sigmas=True
        )
    return pipe

def generate_image(prompt, num_inference_steps, seed, guidance_scale, negative_prompt,
                   pipe_id="KBlueLeaf/kohaku-xl-beta7.1",
                   unet_model_id="latent-consistency/lcm-sdxl",
                   scheduler_type="LCM"):
    """Generate a single image for the given prompt and sampling settings."""
    pipe = load_model(pipe_id, unet_model_id)
    pipe = set_scheduler(pipe, scheduler_type)
    # Seed the RNG so a given seed reproduces the same image.
    generator = torch.manual_seed(seed)
    image = pipe(prompt=prompt, negative_prompt=negative_prompt,
                 num_inference_steps=num_inference_steps, generator=generator,
                 guidance_scale=guidance_scale).images[0]
    return image
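

# Example usage: a minimal sketch of how generate_image might be called.
# The prompt, step count, guidance scale, and output filename below are
# illustrative assumptions, not values taken from this repository. With the
# LCM UNet/scheduler, a small step count and low guidance scale are typical.
if __name__ == "__main__":
    image = generate_image(
        prompt="a watercolor painting of a cat on a windowsill",  # hypothetical prompt
        num_inference_steps=8,
        seed=42,
        guidance_scale=1.5,
        negative_prompt="low quality, blurry",
    )
    image.save("output.png")  # hypothetical output path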