from config import HF_ENDPOINT
from llama_index.core import PromptTemplate

# Retrieval-augmented QA prompt: the retrieved context is shown first,
# fenced by dashed rules, followed by the user's question.  The two
# placeholders ({context_str}, {query_str}) are bound later via
# qa_template.format(...) / qa_template.format_messages(...).
template = (
    "We have provided context information below. \n"
    "---------------------\n"
    "{context_str}\n"
    "---------------------\n"
    "Given this information, please answer the question: {query_str}\n"
)
qa_template = PromptTemplate(template)

# Both placeholders declared in the template must be supplied:
# PromptTemplate.format ultimately calls str.format(**kwargs), so omitting
# context_str (as the original code did) raises KeyError at runtime.
demo_context = "Sample context for demonstration."

# you can create a text prompt (for the completion API)
prompt = qa_template.format(
    context_str=demo_context,
    query_str="你好呀",
)
print(prompt)

# or easily convert to message prompts (for the chat API)
messages = qa_template.format_messages(
    context_str=demo_context,
    query_str="你好呀",
)
print(messages)