from langchain_community.llms import Ollama
from langchain_core.prompts import ChatPromptTemplate, SystemMessagePromptTemplate, \
    HumanMessagePromptTemplate

# Shared LLM client: llama3 served by a local Ollama daemon on its default
# port (11434). NOTE(review): `Ollama` from langchain_community is deprecated
# in newer LangChain releases in favor of `langchain_ollama` — confirm before
# upgrading.
llm = Ollama(model="llama3", base_url="http://localhost:11434")

def chat_with_ollama(story: str) -> str:
    """Send a user query to the local Ollama model as a customer-service chat.

    Builds a two-message chat prompt (fixed system persona + the user's
    query) and invokes the module-level ``llm`` client.

    Args:
        story: The user's question; substituted into the ``{query}`` slot.

    Returns:
        The model's response text from ``llm.invoke``.
    """
    # Fixed typo: was `propmt_template`. The system template pins the
    # assistant persona; {product}/{name}/{query} are filled in below.
    prompt_template = ChatPromptTemplate.from_messages([
        SystemMessagePromptTemplate.from_template("你是{product}产品的客服助手，你的名字叫{name}，你用比较严肃，简练的简体中文进行回答。"),
        HumanMessagePromptTemplate.from_template("{query}")
    ])
    # format_messages returns a list of BaseMessage objects, the shape
    # llm.invoke expects for a chat-style prompt (the commented-out
    # format()/format_prompt() experiments were removed as dead code).
    prompt = prompt_template.format_messages(product="Ollama", name="ollama_小助手", query=story)
    return llm.invoke(prompt)
if __name__ == '__main__':
    # Quick manual smoke test: ask the assistant its name and show the reply.
    answer = chat_with_ollama("你叫什么名字？")
    print(answer)