# Configure the HuggingFace API token from the environment.
# SECURITY FIX: a real token was previously hardcoded here in plain text.
# Any token committed to source control is leaked and must be revoked/rotated
# on huggingface.co; supply it via `export HUGGINGFACEHUB_API_TOKEN=...` instead.
import os

if not os.environ.get('HUGGINGFACEHUB_API_TOKEN'):
    print('Warning: HUGGINGFACEHUB_API_TOKEN is not set; HuggingFaceHub calls will fail.')

# 导入必要库
from langchain import PromptTemplate, HuggingFaceHub, LLMChain

# 初始化HF LLM
llm = HuggingFaceHub(
  repo_id = "google/flan-t5-small",
)


# A minimal prompt template: the model sees the question followed by "Answer: ".
template = """Question: {question}
            Answer: """

# Build the PromptTemplate.
# BUG FIX: the keyword was misspelled `input_varibles`, so `input_variables`
# was never passed and PromptTemplate construction fails validation.
prompt = PromptTemplate(template=template, input_variables=['question'])

# Assemble the prompt and the LLM into a chain
# (LLMChain itself is covered in detail later).
llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
)

# A sample question to send through the chain.
question = "Rose is which type of flower?"

# Execute the chain and print the generated answer.
print(llm_chain.run(question))









