# Hugging Face Spaces status (from the scraped page): Sleeping
"""Ask a Hugging Face Hub model a question through a LangChain LLMChain.

Builds a chain-of-thought style prompt around a hosted model
(google/flan-t5-xxl by default) and prints the model's answer to a
single hard-coded question. Requires a valid HUGGINGFACEHUB_API_TOKEN
in the environment (read by HuggingFaceHub at construction time).
"""
from langchain.llms import HuggingFaceHub
from langchain.prompts import PromptTemplate
from langchain.chains import LLMChain

question = "Who won the FIFA World Cup in the year 1994? "

# "Let's think step by step" nudges the model toward step-by-step reasoning.
template = """Question: {question}
Answer: Let's think step by step."""

prompt = PromptTemplate(template=template, input_variables=["question"])

# Default model. See
# https://huggingface.co/models?pipeline_tag=text-generation&sort=downloads
# for alternatives, e.g.:
#   repo_id = "meta-llama/Llama-2-7b-chat-hf"  # https://huggingface.co/meta-llama
#   repo_id = "tiiuae/falcon-7b"
repo_id = "google/flan-t5-xxl"

# temperature=0.5 trades determinism for mild variety; max_length caps the
# generated token count on the hosted inference endpoint.
llm = HuggingFaceHub(
    repo_id=repo_id,
    model_kwargs={"temperature": 0.5, "max_length": 64},
)

llm_chain = LLMChain(prompt=prompt, llm=llm)
print(llm_chain.run(question))