# -*- coding: utf-8 -*-
from langchain_core.messages import ChatMessage, HumanMessage
from langchain_ollama import ChatOllama

# Query a locally served qwen3 model with thinking/reasoning mode enabled.
llm = ChatOllama(
    model="qwen3:0.6b",  # model name as registered in Ollama
    base_url="http://localhost:11434",  # locally deployed Ollama server
    temperature=0.0,  # deterministic output
    # Enable the model's thinking mode. NOTE(review): the previous approach —
    # sending ChatMessage(role="control", content="thinking") — is an IBM
    # Granite-specific mechanism; Ollama's chat API only accepts the
    # system/user/assistant/tool roles, so for qwen3 thinking is toggled via
    # the `reasoning` flag (Ollama's `think` option) instead.
    reasoning=True,
    # other params...
)

messages = [
    HumanMessage("What is 3^3?"),
]

# Blocking round-trip to the local Ollama server; `response.content` holds
# the final answer (reasoning traces, if returned, live in metadata).
response = llm.invoke(messages)
print(response.content)