How to use tiiuae/falcon-7b-instruct with the LangChain library
What is wrong with this code?
from transformers import AutoTokenizer, AutoModelForCausalLM
import transformers
import torch
from langchain import PromptTemplate, LLMChain
from langchain.llms import HuggingFacePipeline

model = "tiiuae/falcon-7b-instruct"

tokenizer = AutoTokenizer.from_pretrained(model)
pipeline = transformers.pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
)

# template for an instruction with no input
prompt = PromptTemplate(
    input_variables=["instruction"],
    template="{instruction}"
)

llm = HuggingFacePipeline(pipeline=pipeline)
llm_chain = LLMChain(llm=llm, prompt=prompt)

print(llm_chain.predict(
    instruction="Explain to me the difference between nuclear fission and fusion."
).lstrip())
Running this produces the following error:

ValueError                                Traceback (most recent call last)
in <cell line: 10>()
      8 
      9 tokenizer = AutoTokenizer.from_pretrained(model)
---> 10 pipeline = transformers.pipeline(
     11     "text-generation",
     12     model=model,

1 frames
/usr/local/lib/python3.10/dist-packages/transformers/pipelines/base.py in infer_framework_load_model(model, config, model_classes, task, framework, **model_kwargs)
    280         for class_name, trace in all_traceback.items():
    281             error += f"while loading with {class_name}, an error is thrown:\n{trace}\n"
--> 282         raise ValueError(
    283             f"Could not load model {model} with any of the following classes: {class_tuple}. See the original errors:\n\n{error}\n"
    284         )
ValueError: Could not load model tiiuae/falcon-7b-instruct with any of the following classes: (<class 'transformers.models.auto.modeling_auto.AutoModelForCausalLM'>, <class 'transformers.models.auto.modeling_tf_auto.TFAutoModelForCausalLM'>). See the original errors:
while loading with AutoModelForCausalLM, an error is thrown:
Traceback (most recent call last):
  File "/usr/local/lib/python3.10/dist-packages/transformers/pipelines/base.py", line 269, in infer_framework_load_model
    model = model_class.from_pretrained(model, **kwargs)
  File "/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py", line 558, in from_pretrained
    return model_class.from_pretrained(
  File "/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py", line 3165, in from_pretrained
    ) = cls._load_pretrained_model(
  File "/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py", line 3286, in _load_pretrained_model
    raise ValueError(
ValueError: The current `device_map` had weights offloaded to the disk. Please provide an `offload_folder` for them. Alternatively, make sure you have `safetensors` installed if the model you are using offers the weights in this format.
while loading with TFAutoModelForCausalLM, an error is thrown:
Traceback (most recent call last):
  File "/usr/local/lib/python3.10/dist-packages/transformers/pipelines/base.py", line 269, in infer_framework_load_model
    model = model_class.from_pretrained(model, **kwargs)
  File "/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py", line 566, in from_pretrained
    raise ValueError(
ValueError: Unrecognized configuration class <class 'transformers_modules.tiiuae.falcon-7b-instruct.eb410fb6ffa9028e97adb801f0d6ec46d02f8b07.configuration_RW.RWConfig'> for this kind of AutoModel: TFAutoModelForCausalLM.
Model type should be one of BertConfig, CamembertConfig, CTRLConfig, GPT2Config, GPT2Config, GPTJConfig, OpenAIGPTConfig, OPTConfig, RemBertConfig, RobertaConfig, RobertaPreLayerNormConfig, RoFormerConfig, TransfoXLConfig, XGLMConfig, XLMConfig, XLMRobertaConfig, XLNetConfig.
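The relevant error is the first one: with device_map="auto", accelerate has to offload part of the Falcon weights to disk because they do not fit in GPU/CPU memory, and the pipeline was never told where to put them. Below is a minimal, untested sketch of one way to follow the error message's suggestion. The directory name "offload" and the use of model_kwargs (which the pipeline forwards to from_pretrained) are my own illustrative choices, not anything prescribed by the traceback.

import torch
import transformers
from transformers import AutoTokenizer

model_id = "tiiuae/falcon-7b-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_id)

# model_kwargs is forwarded to from_pretrained(), so offload_folder tells
# accelerate where to spill the weights that do not fit in memory.
# "offload" is just an illustrative local path.
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    tokenizer=tokenizer,
    torch_dtype=torch.bfloat16,
    trust_remote_code=True,
    device_map="auto",
    model_kwargs={"offload_folder": "offload"},
)

Alternatively, as the message itself notes, installing safetensors (pip install safetensors) may remove the need for disk offload if the model repository ships its weights in that format. The second error about TFAutoModelForCausalLM is only the TensorFlow fallback attempt and can be ignored once the PyTorch load succeeds.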