# from langchain_ollama import OllamaLLM
#
# # llm = OllamaLLM(model="qwen:7b-chat-q4_0")
# llm = OllamaLLM(model="qwen:7b-chat-q4_0",base_url="http://localhost:64342")
# r = llm.invoke("马云是谁")
# print(r)

from langchain_ollama import ChatOllama

def main() -> None:
    """Smoke-test a locally served Qwen model: send one prompt, print the reply.

    Connects to an Ollama-compatible endpoint at http://localhost:64342 and
    asks a single hard-coded question ("马云是谁" / "Who is Jack Ma").
    """
    llm = ChatOllama(
        model="qwen:7b-chat-q4_0",
        base_url="http://localhost:64342",
        # NOTE(review): `api_key` is not a documented ChatOllama parameter and
        # Ollama itself requires no API key. Original note said "required but
        # ignored" — presumably the proxy listening on port 64342 demands one;
        # confirm, otherwise drop this argument.
        api_key="123456",
    )

    reply = llm.invoke("马云是谁")
    # invoke() returns a chat message object; .content carries the text.
    print(reply.content)


# Guard the entry point so importing this module does not fire a network call.
if __name__ == "__main__":
    main()