KeyError: 'stablelm' not found. Did I forget to pull any changes?

#11
by Arsive - opened

KeyError Traceback (most recent call last)
Cell In[8], line 3
1 from transformers import AutoModelForCausalLM, AutoModel
2 tokenizer = AutoTokenizer.from_pretrained("stabilityai/stablelm-3b-4e1t")
----> 3 model = AutoModel.from_pretrained("stabilityai/stablelm-3b-4e1t")
4 model.cuda()

File /opt/conda/envs/evalFAQ/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py:527, in _BaseAutoModelClass.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
524 if kwargs.get("quantization_config", None) is not None:
525 _ = kwargs.pop("quantization_config")
--> 527 config, kwargs = AutoConfig.from_pretrained(
528 pretrained_model_name_or_path,
529 return_unused_kwargs=True,
530 trust_remote_code=trust_remote_code,
531 code_revision=code_revision,
532 _commit_hash=commit_hash,
533 **hub_kwargs,
534 **kwargs,
535 )
537 # if torch_dtype=auto was passed here, ensure to pass it on
538 if kwargs_orig.get("torch_dtype", None) == "auto":

File /opt/conda/envs/evalFAQ/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py:1039, in AutoConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
1037 return config_class.from_pretrained(pretrained_model_name_or_path, **kwargs)
1038 elif "model_type" in config_dict:
-> 1039 config_class = CONFIG_MAPPING[config_dict["model_type"]]
1040 return config_class.from_dict(config_dict, **unused_kwargs)
1041 else:
1042 # Fallback: use pattern matching on the string.
1043 # We go from longer names to shorter names to catch roberta before bert (for instance)

File /opt/conda/envs/evalFAQ/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py:734, in _LazyConfigMapping.getitem(self, key)
732 return self._extra_content[key]
733 if key not in self._mapping:
--> 734 raise KeyError(key)
735 value = self._mapping[key]
736 module_name = model_type_to_module_name(key)

KeyError: 'stablelm'

Arsive changed discussion status to closed

Thanks — after I upgraded to the latest transformers==4.38.2, the issue was resolved.

Sign up or log in to comment