# langchain_ollama调用大模型LLM

# llm.invoke(str) returns the chat completion result

from langchain_ollama import OllamaLLM

# Read the user's question; fall back to a default prompt when input is empty
# (an empty string is falsy, so `or` supplies the default).
question = input("请输入问题：") or "20字介绍北京"

llm = OllamaLLM(
    model="qwen2.5:3b",
    temperature=0.01,  # near-deterministic sampling
    top_p=0.7,
    num_predict=200,   # Ollama's output-token limit; `max_tokens` is not a recognized OllamaLLM field
)

# Bug fix: pass the user's question to the model instead of a hard-coded prompt,
# so the printed answer actually matches the question shown.
answer = llm.invoke(question)

print("问:", question, "?\n答:", answer)