from openai import OpenAI
import os

# 配置 OpenAI 客户端连接本地 Ollama
# Point the OpenAI SDK at the local Ollama server's OpenAI-compatible endpoint.
client = OpenAI(
    api_key="ollama",  # Ollama ignores the key, but the SDK requires a non-empty one
    base_url="http://localhost:11434/v1",
)

# 对话循环
# Interactive chat loop. The conversation history is accumulated and resent
# each turn so the model remembers earlier exchanges (sending only the latest
# user message would make every reply context-free).
history = []
while True:
    try:
        user_input = input("\nYou: ")
    except (EOFError, KeyboardInterrupt):
        # Ctrl-D / Ctrl-C: exit cleanly instead of dumping a traceback.
        break
    if user_input.lower() in ["exit", "quit"]:
        break

    history.append({"role": "user", "content": user_input})

    response = client.chat.completions.create(
        model="deepseek-r1:1.5b",
        messages=history,
        temperature=0.7,
        stream=True  # stream tokens as they are generated
    )

    # Print the streamed reply and collect it so it can join the history.
    print("\nDeepSeek: ", end="")
    reply_parts = []
    for chunk in response:
        # Guard: some stream chunks (e.g. a trailing usage chunk) carry an
        # empty `choices` list, and the final delta may have content=None.
        if chunk.choices and chunk.choices[0].delta.content:
            piece = chunk.choices[0].delta.content
            reply_parts.append(piece)
            print(piece, end="", flush=True)

    # Record the assistant turn so the next request includes it.
    history.append({"role": "assistant", "content": "".join(reply_parts)})

print("\n对话已结束")
