Update app.py
app.py
CHANGED
@@ -8,7 +8,6 @@ from diffusers import DiffusionPipeline
 import torch
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
-model_repo_id = "stable-diffusion-v1-5/stable-diffusion-v1-5"  # Replace to the model you would like to use
 if torch.cuda.is_available():
     torch_dtype = torch.float16
 else:
@@ -45,10 +44,11 @@ def infer(
     generator = torch.Generator().manual_seed(seed)
 
     pipe = None
-    if (
+    if (model_id=="stable-diffusion-v1-5/stable-diffusion-v1-5 with lora"):
         pipe=DiffusionPipeline.from_pretrained("stable-diffusion-v1-5/stable-diffusion-v1-5", torch_dtype=torch_dtype,cross_attention_kwargs={"scale": 0.5})
         pipe.unet = PeftModel.from_pretrained(pipe.unet,"um235/cartoon_cat_stickers")
     else:
+        print("stable-diffusion-v1-5/stable-diffusion-v1-5 with lora")
         pipe=DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch_dtype)
     pipe = pipe.to(device)
 
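For context, here is a minimal sketch of the selection logic this commit adds. It assumes the app's model dropdown passes a label such as "stable-diffusion-v1-5/stable-diffusion-v1-5 with lora" (the string compared in the diff) and reuses the adapter repo um235/cartoon_cat_stickers from the diff; the helper name load_pipeline and the example prompt are illustrative, not part of the original app.py.

# Sketch of the commit's conditional LoRA loading (assumptions noted above).
import torch
from diffusers import DiffusionPipeline
from peft import PeftModel

device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

BASE_REPO = "stable-diffusion-v1-5/stable-diffusion-v1-5"
LORA_REPO = "um235/cartoon_cat_stickers"  # adapter repo used in the diff

def load_pipeline(model_id: str) -> DiffusionPipeline:
    if model_id == f"{BASE_REPO} with lora":
        # Load the base SD 1.5 pipeline, then wrap its UNet with the PEFT LoRA adapter.
        pipe = DiffusionPipeline.from_pretrained(BASE_REPO, torch_dtype=torch_dtype)
        pipe.unet = PeftModel.from_pretrained(pipe.unet, LORA_REPO)
    else:
        # Any other dropdown value is treated as a plain model repo id.
        pipe = DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch_dtype)
    return pipe.to(device)

# Hypothetical usage:
# pipe = load_pipeline(f"{BASE_REPO} with lora")
# image = pipe("cartoon cat sticker", generator=torch.Generator().manual_seed(42)).images[0]

One note on the diff itself: cross_attention_kwargs is not a from_pretrained argument in diffusers; it is normally supplied per generation call (for example pipe(prompt, cross_attention_kwargs={"scale": 0.5})), so passing it at load time as the diff does is unlikely to have the intended effect.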