Commit 8a2a8c7
Parent(s): 47f7759
Update app.py

app.py CHANGED
@@ -8,24 +8,34 @@ import PIL
 import base64
 import io
 import torch
-
+
+# SSD-1B
+#from diffusers import LCMScheduler, AutoPipelineForText2Image
+
+# SDXL
+from diffusers import UNet2DConditionModel, DiffusionPipeline, LCMScheduler
 
 MAX_SEED = np.iinfo(np.int32).max
 MAX_IMAGE_SIZE = int(os.getenv('MAX_IMAGE_SIZE', '1024'))
 SECRET_TOKEN = os.getenv('SECRET_TOKEN', 'default_secret')
 
-
-ADAPTER_ID = "latent-consistency/lcm-lora-ssd-1b"
-
-device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
+#device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
 if torch.cuda.is_available():
-
-    pipe
-    pipe.
+
+    #pipe = AutoPipelineForText2Image.from_pretrained("segmind/SSD-1B", torch_dtype=torch.float16, variant="fp16")
+    #pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
+    #pipe.to("cuda")
 
     # load and fuse
-    pipe.load_lora_weights(
-    pipe.fuse_lora()
+    #pipe.load_lora_weights("latent-consistency/lcm-lora-ssd-1b")
+    #pipe.fuse_lora()
+
+    unet = UNet2DConditionModel.from_pretrained("latent-consistency/lcm-sdxl", torch_dtype=torch.float16, variant="fp16")
+    pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", unet=unet, torch_dtype=torch.float16, variant="fp16")
+
+    pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config)
+    pipe.to('cuda')
+
 else:
     pipe = None
 
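For context, the LCM-distilled SDXL UNet wired in above is meant to be sampled in very few denoising steps. A minimal, illustrative call against the pipe object built in this commit might look like the sketch below; the prompt, step count, and guidance value are assumptions for illustration and are not taken from app.py:

    # Illustrative only: assumes the CUDA branch above ran and pipe is the LCM-SDXL pipeline.
    image = pipe(
        prompt="a photo of an astronaut riding a horse",  # placeholder prompt
        num_inference_steps=4,   # LCM-distilled UNets target roughly 2-8 steps
        guidance_scale=8.0,      # illustrative; LCM applies guidance differently from standard CFG
    ).images[0]
    image.save("out.png")

With LCMScheduler swapped in, the SDXL base weights can produce an image in a handful of steps rather than the usual 25-50, which appears to be the motivation for replacing the fused SSD-1B LCM-LoRA setup with the full LCM-SDXL UNet.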