KeyError: 'llava_llama'

#2
by omerfarukaydin - opened

transformers.__version__ >>> 4.36.2
Python 3.10.15


KeyError Traceback (most recent call last)
Cell In[4], line 3
1 # Load model directly
2 from transformers import AutoModelForCausalLM
----> 3 model = AutoModelForCausalLM.from_pretrained("Efficient-Large-Model/VILA-2.7b")

File ~/anaconda3/envs/vila_env/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:526, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
523 if kwargs.get("quantization_config", None) is not None:
524 _ = kwargs.pop("quantization_config")
--> 526 config, kwargs = AutoConfig.from_pretrained(
527 pretrained_model_name_or_path,
528 return_unused_kwargs=True,
529 trust_remote_code=trust_remote_code,
530 code_revision=code_revision,
531 _commit_hash=commit_hash,
532 **hub_kwargs,
533 **kwargs,
534 )
536 # if torch_dtype=auto was passed here, ensure to pass it on
537 if kwargs_orig.get("torch_dtype", None) == "auto":

File ~/anaconda3/envs/vila_env/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py:1098, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
1096 return config_class.from_pretrained(pretrained_model_name_or_path, **kwargs)
1097 elif "model_type" in config_dict:
...
--> 795 raise KeyError(key)
796 value = self._mapping[key]
797 module_name = model_type_to_module_name(key)

KeyError: 'llava_llama'

Sign up or log in to comment