radames committed
Commit b7758dc
1 Parent(s): 3ceb728

no need for xformers on Spaces Zero

Files changed (1)
app.py +4 -1
app.py CHANGED
@@ -6,8 +6,10 @@ import numpy as np
 import torch
 from diffusers import LCMScheduler, PixArtAlphaPipeline, Transformer2DModel
 from peft import PeftModel
+import os
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
+IS_SPACE = os.environ.get("SPACE_ID", None) is not None
 
 transformer = Transformer2DModel.from_pretrained(
     "PixArt-alpha/PixArt-XL-2-1024-MS",
@@ -24,7 +26,8 @@ if torch.cuda.is_available():
         transformer=transformer,
         torch_dtype=torch.float16,
     )
-    pipe.enable_xformers_memory_efficient_attention()
+    if not IS_SPACE:
+        pipe.enable_xformers_memory_efficient_attention()
     pipe = pipe.to(device)
 else:
     pipe = PixArtAlphaPipeline.from_pretrained(
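Read outside of diff context, the change amounts to the following minimal Python sketch; the maybe_enable_xformers helper is hypothetical and only mirrors the conditional shown in the hunks above, it is not part of app.py:

import os

# As in the diff above, treat the presence of the SPACE_ID environment
# variable as the signal that the app is running on a Hugging Face Space.
IS_SPACE = os.environ.get("SPACE_ID", None) is not None

def maybe_enable_xformers(pipe):
    # Hypothetical helper mirroring the commit: enable xformers
    # memory-efficient attention only when not running on a Space.
    if not IS_SPACE:
        pipe.enable_xformers_memory_efficient_attention()
    return pipe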