karthikeya1212 committed on
Commit
e423c4b
·
verified ·
1 Parent(s): 836d682

Update core/image_generator.py

Browse files
Files changed (1) hide show
  1. core/image_generator.py +17 -6
core/image_generator.py CHANGED
@@ -247,6 +247,7 @@
247
 
248
  # print(f"[ImageGen] Generated {len(images)} image(s) successfully.")
249
  # return images
 
250
  import os
251
  from pathlib import Path
252
  import gc
@@ -341,12 +342,22 @@ def unload_pipelines():
341
 
342
 
343
def safe_load_pipeline(pipeline_cls, model_path: Path):
    """Load a diffusion pipeline from a local checkpoint file.

    Args:
        pipeline_cls: Pipeline class exposing ``from_single_file``.
        model_path: Filesystem path to the single-file checkpoint.

    Returns:
        The pipeline instance produced by ``pipeline_cls.from_single_file``.
    """
    # Half precision only when a CUDA device is present; CPU stays at fp32.
    dtype = torch.float16 if torch.cuda.is_available() else torch.float32
    return pipeline_cls.from_single_file(
        str(model_path),
        torch_dtype=dtype,
        local_files_only=True,
    )
 
 
 
 
 
 
 
 
 
 
350
 
351
 
352
  def load_pipeline():
 
247
 
248
  # print(f"[ImageGen] Generated {len(images)} image(s) successfully.")
249
  # return images
250
+
251
  import os
252
  from pathlib import Path
253
  import gc
 
342
 
343
 
344
def safe_load_pipeline(pipeline_cls, model_path: Path):
    """Load a diffusion pipeline from disk, falling back to the network.

    Tries a strictly offline load first; if that raises for any reason,
    retries the same load with network access enabled. Both attempts
    route cached files through ``HF_CACHE_DIR`` (module-level constant;
    presumably /tmp/hf_cache — confirm against the file header).

    Args:
        pipeline_cls: Pipeline class exposing ``from_single_file``.
        model_path: Filesystem path to the single-file checkpoint.

    Returns:
        The pipeline instance produced by ``pipeline_cls.from_single_file``.
    """
    shared_kwargs = {
        # Half precision on CUDA, full precision on CPU.
        "torch_dtype": torch.float16 if torch.cuda.is_available() else torch.float32,
        # Ensure every cache artifact lands under HF_CACHE_DIR.
        "cache_dir": str(HF_CACHE_DIR),
    }
    try:
        # Offline attempt first: no network round-trips when files exist.
        return pipeline_cls.from_single_file(
            str(model_path),
            local_files_only=True,
            **shared_kwargs,
        )
    except Exception as e:
        # Best-effort fallback: anything that breaks the local load
        # triggers one retry with downloads permitted.
        print(f"[WARN] Local-only load failed ({e}). Retrying with network access...")
        return pipeline_cls.from_single_file(
            str(model_path),
            local_files_only=False,
            **shared_kwargs,
        )
361
 
362
 
363
  def load_pipeline():