from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain.llms import HuggingFaceHub
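# Note: the HuggingFaceHub wrapper calls the Hugging Face Inference API and needs an
# API token, typically supplied via the HUGGINGFACEHUB_API_TOKEN environment variable
# (or the huggingfacehub_api_token argument).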
# HuggingFace Repository ID
repo_id = 'mistralai/Mistral-7B-v0.1'
# μ§ˆμ˜λ‚΄μš©
question = "Who is Son Heung Min?"
# Template
template = """Question: {question}
Answer: """
# Create the prompt template
prompt = PromptTemplate(template=template, input_variables=["question"])
# Create the HuggingFaceHub LLM object
llm = HuggingFaceHub(
    repo_id=repo_id,
    model_kwargs={
        "temperature": 0.2,  # low temperature for more deterministic output
        "max_length": 128,   # cap on the generated sequence length
    },
)
# Create the LLM chain
llm_chain = LLMChain(prompt=prompt, llm=llm)
# Run the chain
print(llm_chain.run(question=question))
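
# Note: in newer LangChain releases (0.1+), LLMChain.run() is deprecated in favor of
# invoke(), which takes a dict of input variables and returns a dict with the output
# under the "text" key. A minimal sketch, assuming such a version is installed:
# print(llm_chain.invoke({"question": question})["text"])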