Update app.py
app.py
CHANGED
@@ -7,7 +7,7 @@ from diffusers import DiffusionPipeline
 import torch
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
-model_repo_id = "
+model_repo_id = "stable-diffusion-v1-5/stable-diffusion-v1-5" # Replace to the model you would like to use
 
 if torch.cuda.is_available():
     torch_dtype = torch.float16
@@ -15,6 +15,7 @@ else:
     torch_dtype = torch.float32
 
 pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
+pipe.unet.load_attn_procs("juliensimon/stable-diffusion-v1-5-pokemon-lora")
 pipe = pipe.to(device)
 
 MAX_SEED = np.iinfo(np.int32).max
@@ -40,6 +41,7 @@ def infer(
     generator = torch.Generator().manual_seed(seed)
 
     pipe = DiffusionPipeline.from_pretrained(model_id, torch_dtype=torch_dtype)
+    if (model_repo_id=="stable-diffusion-v1-5/stable-diffusion-v1-5"): pipe.unet.load_attn_procs("juliensimon/stable-diffusion-v1-5-pokemon-lora")
     pipe = pipe.to(device)
 
     image = pipe(
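For context, a minimal standalone sketch of the pipeline setup these changes produce, assuming diffusers, torch, and numpy are installed and that pipe.unet.load_attn_procs is available in the installed diffusers version; the prompt, seed value, and output filename are illustrative and not part of the commit.

import numpy as np
import torch
from diffusers import DiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# Base Stable Diffusion 1.5 checkpoint selected by the commit.
model_repo_id = "stable-diffusion-v1-5/stable-diffusion-v1-5"

# Load the base pipeline, then attach the Pokemon LoRA attention processors
# to its UNet, as the commit does at startup and again inside infer().
pipe = DiffusionPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
pipe.unet.load_attn_procs("juliensimon/stable-diffusion-v1-5-pokemon-lora")
pipe = pipe.to(device)

MAX_SEED = np.iinfo(np.int32).max
seed = 42  # illustrative; app.py takes the seed from the UI
generator = torch.Generator().manual_seed(seed)

image = pipe(
    "a cute yellow pokemon with a lightning-shaped tail",  # illustrative prompt
    generator=generator,
).images[0]
image.save("output.png")

The conditional added in infer() re-applies the LoRA only when the base checkpoint is the stable-diffusion-v1-5 repo the adapter was trained against, so switching model_repo_id to another base model skips the LoRA load.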