I'm getting an error when loading a local Falcon-7b checkpoint with the following code:
from transformers import AutoModelForCausalLM, AutoTokenizer
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

model_path = "D:\Program\Falcon-7b"
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(model_path, trust_remote_code=True, from_tf=True, revision="main")

template = "Question: {question}"
prompt = PromptTemplate(template=template, input_variables=["question"])
llm_chain = LLMChain(prompt=prompt, llm=model)

question = "What is the meaning of life?"
output = llm_chain.run(question)
print(output)
d:\Program\zcode\test4.py:8 in <module>

     5
     6 model_path = "D:\Program\Falcon-7b"
     7 tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
  ❱  8 model = AutoModelForCausalLM.from_pretrained(model_path, trust_remote_code=True, from_tf
     9
    10 template = "Question: {question}"
    11 prompt = PromptTemplate(template=template, input_variables=["question"])

D:\Program\venv\Lib\site-packages\transformers\models\auto\auto_factory.py:466 in from_pretrained

    463                 pretrained_model_name_or_path, module_file + ".py", class_name, **hub_kw
    464             )
    465             model_class.register_for_auto_class(cls.__name__)
  ❱ 466             return model_class.from_pretrained(
    467                 pretrained_model_name_or_path, *model_args, config=config, **hub_kwargs,
    468             )
    469         elif type(config) in cls._model_mapping.keys():

D:\Program\venv\Lib\site-packages\transformers\modeling_utils.py:2753 in from_pretrained

   2750                 del device_map_without_lm_head
   2751
   2752         if from_tf:
  ❱ 2753             if resolved_archive_file.endswith(".index"):
   2754                 # Load from a TensorFlow 1.X checkpoint - provided by original authors
   2755                 model = cls.load_tf_weights(model, config, resolved_archive_file[:-6])
   2756             else:
AttributeError: 'list' object has no attribute 'endswith'
Are you attempting to load a TensorFlow version of the model? The error leads me to believe you are not. Try removing from_tf=True.
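Something like the sketch below should work, assuming your local folder holds the PyTorch/safetensors weights and a langchain version that still exposes LLMChain and HuggingFacePipeline. Note that LLMChain expects a LangChain LLM wrapper, not a raw transformers model, so the model is wrapped in a text-generation pipeline first:

# Minimal sketch of the suggested fix: no from_tf=True, and the model
# wrapped in a pipeline so LangChain can use it.
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain
from langchain.llms import HuggingFacePipeline

model_path = r"D:\Program\Falcon-7b"
tokenizer = AutoTokenizer.from_pretrained(model_path, trust_remote_code=True)
# Let transformers pick up the PyTorch/safetensors weights directly.
model = AutoModelForCausalLM.from_pretrained(model_path, trust_remote_code=True)

pipe = pipeline("text-generation", model=model, tokenizer=tokenizer, max_new_tokens=64)
llm = HuggingFacePipeline(pipeline=pipe)

prompt = PromptTemplate(template="Question: {question}", input_variables=["question"])
llm_chain = LLMChain(prompt=prompt, llm=llm)
print(llm_chain.run("What is the meaning of life?"))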
After setting from_tf=True, I encountered a new error, as follows:
Traceback (most recent call last):
  File "/home/others/model_test.py", line 35, in <module>
    func_inference_with_huggingface(model_dir)
  File "/home/others/model_test.py", line 11, in func_inference_with_huggingface
    model = AutoModelForCausalLM.from_pretrained(model_dir, from_tf=True)
  File "/home/anaconda3/envs/env_py39/lib/python3.9/site-packages/transformers/models/auto/auto_factory.py", line 564, in from_pretrained
    return model_class.from_pretrained(
  File "/home/anaconda3/envs/env_py39/lib/python3.9/site-packages/transformers/modeling_utils.py", line 3796, in from_pretrained
    if resolved_archive_file.endswith(".index"):
AttributeError: 'list' object has no attribute 'endswith'
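For what it's worth, the trace suggests why this happens: with from_tf=True, from_pretrained expects resolved_archive_file to be a single path to a TensorFlow checkpoint (ending in ".index") or an .h5 file, but when the folder contains sharded PyTorch/safetensors weights it resolves to a list of shard paths, which has no .endswith. A quick directory listing confirms which format you actually have (a sketch; the path is just an illustration):

# Check which weight formats are present in the local model folder.
import os

model_dir = "/path/to/Falcon-7b"  # hypothetical path, replace with yours
weight_files = [f for f in os.listdir(model_dir)
                if f.endswith((".bin", ".safetensors", ".h5", ".index", ".ckpt"))]
print(weight_files)
# If you only see pytorch_model-*.bin or *.safetensors shards, there is no
# TensorFlow checkpoint to load, so drop from_tf=True.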