import subprocess

def ollama_run(model: str, prompt: str, executable: str = "ollama") -> str:
    """
    Invoke a model through the Ollama CLI (Windows-compatible).

    Args:
        model: Name of the model to run (e.g. "deepseek-r1:8b").
        prompt: Prompt text sent to the model.
        executable: Name or path of the ollama binary; overridable for testing.

    Returns:
        The model's stdout with surrounding whitespace stripped, or "" if the
        CLI call fails or the binary cannot be found.
    """
    # BUGFIX: `ollama run` takes the prompt as a positional argument; there is
    # no `--prompt` flag, so the original command always failed with a usage error.
    cmd = [executable, "run", model, prompt]

    try:
        # encoding="utf-8" matters on Windows: text=True alone decodes with the
        # locale code page (e.g. GBK) and can raise UnicodeDecodeError on
        # UTF-8 model output.
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            encoding="utf-8",
            check=True,
        )
    except subprocess.CalledProcessError as e:
        print("CLI 调用失败:", e)
        print("stderr:", e.stderr)
        return ""
    except FileNotFoundError as e:
        # The ollama binary is not installed / not on PATH.
        print("CLI 调用失败:", e)
        return ""
    return result.stdout.strip()

if __name__ == "__main__":
    # Demo entry point: ask the model to summarize LangChain (Chinese prompt).
    demo_model = "deepseek-r1:8b"
    demo_prompt = "请总结一下LangChain的作用"

    answer = ollama_run(demo_model, demo_prompt)
    print("模型输出:\n", answer)
