# without chain
# from langchain.prompts import PromptTemplate
# template = "{flower}的花语是什么？"
# propt = PromptTemplate.from_template(template)

# from langchain_ollama import ChatOllama
# llm = ChatOllama(
#     model="deepseek-r1:8b",
#     # model="gemma3:1b",
#     base_url="http://localhost:11434",  # Ollama 服务地址
#     temperature=0.7,  # 创造性程度
#     num_predict=512,  # 最大生成长度
#     options={'stop': ['<think>', '</think>']} # 遇到这些标记就停止
# )

# output = llm.invoke(propt.format(flower="玫瑰"))
# print(output.content) 

# with chain
from langchain_ollama import ChatOllama
from langchain.prompts import PromptTemplate

# Prompt asking for the "flower language" (symbolic meaning) of a flower.
template = "{flower}的花语是什么？"
prompt = PromptTemplate.from_template(template)

llm = ChatOllama(
    model="deepseek-r1:8b",
    base_url="http://localhost:11434",  # Ollama server address
    temperature=0.7,  # creativity level
    num_predict=512,  # maximum number of tokens to generate
    # Stop generation at either reasoning tag so the model's
    # <think>...</think> block is suppressed. `stop` is a declared
    # ChatOllama parameter; the previous `options={'stop': ...}` kwarg is
    # not a ChatOllama field, and its list repeated '</think>' twice,
    # leaving '<think>' unfiltered.
    stop=['<think>', '</think>'],
)

# LangChain LCEL pipe syntax: the formatted prompt feeds into the LLM.
chain = prompt | llm
# Equivalent to the legacy LLMChain:
# from langchain.chains import LLMChain
# chain = LLMChain(llm=llm, prompt=prompt)

output = chain.invoke({"flower": "玫瑰"})
# The response was previously computed but never shown; print it, matching
# the commented-out "without chain" version above.
print(output.content)