from langchain_community.llms import Ollama
from langchain_core.output_parsers import CommaSeparatedListOutputParser
from langchain_core.prompts import PromptTemplate

# Connect to a locally running Ollama server and use the llama3 model.
llm = Ollama(model="llama3", base_url="http://localhost:11434")


# Output parser that expects (and splits) a comma-separated list,
# and supplies matching formatting instructions for the prompt.
list_parser = CommaSeparatedListOutputParser()
format_hint = list_parser.get_format_instructions()

# Prompt template: asks for five of {subject} and appends the parser's
# format instructions via a pre-filled partial variable.
prompt = PromptTemplate(
    template="Give five {subject},{format_instructions}",
    input_variables=["subject"],
    partial_variables={"format_instructions": format_hint},
)
print(prompt)

# LCEL pipeline: prompt -> model -> parsed list of strings.
chain = prompt | llm | list_parser

# Single-shot invocation.
result = chain.invoke({"subject": "ice cream flavors"})
print(result)

print("========流式==============")
# Streaming: emit parsed chunks as the model produces output.
for chunk in chain.stream({"subject": "ice cream flavors"}):
    print(chunk)




