from litellm import completion

# Query DeepSeek's hosted chat model. litellm routes requests by the
# "<provider>/<model>" prefix and reads DEEPSEEK_API_KEY from the
# environment for authentication.
response = completion(
    # Provider prefix is required: bare "deepseek-chat" cannot be routed.
    model="deepseek/deepseek-chat",
    messages=[{"role": "user", "content": "解释量子力学"}]
)
print(response.choices[0].message.content)

# Query a locally served DeepSeek-R1 model via Ollama (default port 11434).
# No API key is needed for a local Ollama endpoint.
response = completion(
    model="ollama/deepseek-r1:1.5b",
    api_base="http://localhost:11434",
    # NOTE(review): the original prompt contained a copy-paste citation
    # artifact (':ml-citation{ref="3" data="citationList"}') spliced into
    # the equation; restored to a plain quadratic — confirm the intended
    # exponent with the author.
    messages=[{"role": "user", "content": "求解方程 x² + 3x = 14"}]
)
# Print the answer, consistent with the first call above.
print(response.choices[0].message.content)
