multimodalart HF staff committed on
Commit
29b4150
1 Parent(s): 89ce1c2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -1
app.py CHANGED
@@ -15,7 +15,7 @@ if not torch.cuda.is_available():
15
  DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
16
 
17
  MAX_SEED = np.iinfo(np.int32).max
18
- CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "0") == "1"
19
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "1536"))
20
  USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
21
  ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD", "0") == "1"
 
15
  DESCRIPTION += "\n<p>Running on CPU 🥶 This demo may not work on CPU.</p>"
16
 
17
  MAX_SEED = np.iinfo(np.int32).max
18
+ CACHE_EXAMPLES = torch.cuda.is_available() and os.getenv("CACHE_EXAMPLES", "1") == "1"
19
  MAX_IMAGE_SIZE = int(os.getenv("MAX_IMAGE_SIZE", "1536"))
20
  USE_TORCH_COMPILE = os.getenv("USE_TORCH_COMPILE", "0") == "1"
21
  ENABLE_CPU_OFFLOAD = os.getenv("ENABLE_CPU_OFFLOAD", "0") == "1"