Hanzo03 committed on
Commit
08d3193
·
verified ·
1 Parent(s): 37ee371

Update utils/models.py

Browse files
Files changed (1) hide show
  1. utils/models.py +12 -0
utils/models.py CHANGED
@@ -21,6 +21,18 @@ except Exception:
21
  pass
22
  collection = chroma_client.create_collection(name=config.collection_name)
23
 
 
 
 
 
 
 
 
 
 
 
 
 
24
  # 3. Load VLM
25
  logger.info(f"Loading VLM ({config.vlm_model_id})...")
26
  vlm_model = AutoModelForCausalLM.from_pretrained(
 
21
  pass
22
  collection = chroma_client.create_collection(name=config.collection_name)
23
 
24
+ # --- THE MONKEY PATCH ---
25
+ # Intercept the breaking change in Transformers v4.45+ for custom models
26
+ _orig_getattr = torch.nn.Module.__getattr__
27
+
28
+ def _patched_getattr(self, name):
29
+ if name == "all_tied_weights_keys":
30
+ return {}
31
+ return _orig_getattr(self, name)
32
+
33
+ torch.nn.Module.__getattr__ = _patched_getattr
34
+ # ------------------------
35
+
36
  # 3. Load VLM
37
  logger.info(f"Loading VLM ({config.vlm_model_id})...")
38
  vlm_model = AutoModelForCausalLM.from_pretrained(