Update app.py
app.py CHANGED

@@ -10,8 +10,8 @@ import PIL
 base = "stabilityai/stable-diffusion-xl-base-1.0"
 repo = "tianweiy/DMD2"
 checkpoints = {
-    "1-Step" : ["
-    "4-Step" : ["
+    "1-Step" : ["dmd2_sdxl_1step_unet_fp16.bin", 1],
+    "4-Step" : ["dmd2_sdxl_4step_unet_fp16.bin", 4],
 }
 loaded = None

@@ -22,8 +22,8 @@ CSS = """
 """

 # Ensure model and scheduler are initialized in GPU-enabled function
-unet = UNet2DConditionModel.from_config(base, subfolder="unet").to("cuda", torch.float16)
 if torch.cuda.is_available():
+    unet = UNet2DConditionModel.from_config(base, subfolder="unet").to("cuda", torch.float16)
     pipe = DiffusionPipeline.from_pretrained(base, unet=unet, torch_dtype=torch.float16, variant="fp16").to("cuda")


@@ -38,7 +38,7 @@ def generate_image(prompt, ckpt):

     if loaded != num_inference_steps:
         unet.load_state_dict(torch.load(hf_hub_download(repo, checkpoint), map_location="cuda"))
-        pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config
+        pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
         loaded = num_inference_steps

     results = pipe(prompt, num_inference_steps=num_inference_steps, guidance_scale=0)
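Read together, the three hunks repair a broken app.py: the truncated checkpoint entries are completed, the UNet construction moves inside the torch.cuda.is_available() guard so the module can be imported on CPU-only hardware, and the LCMScheduler line gains its missing closing parenthesis. Below is a minimal sketch of how the patched pieces fit together; the parts of generate_image that the diff does not show (the checkpoint lookup, the global loaded flag, the default ckpt value, and the return value) are assumptions following the usual diffusers demo pattern, not the Space's actual code:

import torch
from diffusers import DiffusionPipeline, UNet2DConditionModel, LCMScheduler
from huggingface_hub import hf_hub_download

base = "stabilityai/stable-diffusion-xl-base-1.0"
repo = "tianweiy/DMD2"
checkpoints = {
    "1-Step": ["dmd2_sdxl_1step_unet_fp16.bin", 1],
    "4-Step": ["dmd2_sdxl_4step_unet_fp16.bin", 4],
}
loaded = None

# Build the SDXL pipeline around a UNet created from the base config;
# the distilled DMD2 weights are swapped in lazily at generation time.
if torch.cuda.is_available():
    unet = UNet2DConditionModel.from_config(base, subfolder="unet").to("cuda", torch.float16)
    pipe = DiffusionPipeline.from_pretrained(base, unet=unet, torch_dtype=torch.float16, variant="fp16").to("cuda")

def generate_image(prompt, ckpt="4-Step"):  # signature from the diff; default value assumed
    global loaded
    checkpoint, num_inference_steps = checkpoints[ckpt]  # assumed lookup of (filename, steps)
    # Only reload weights and reset the scheduler when the step count changes.
    if loaded != num_inference_steps:
        unet.load_state_dict(torch.load(hf_hub_download(repo, checkpoint), map_location="cuda"))
        pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
        loaded = num_inference_steps
    results = pipe(prompt, num_inference_steps=num_inference_steps, guidance_scale=0)
    return results.images[0]  # assumed return of the first generated image

Two details worth noting: caching on num_inference_steps works here only because each checkpoint uses a distinct step count, and guidance_scale=0 disables classifier-free guidance, since the distilled DMD2 UNet is meant to run without it.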