from llm.model_service import ModelService

# Embedding via local Ollama (bge-m3).
# NOTE(review): the original comment claimed "openai proxy", but the
# provider is clearly "ollama" — corrected here.
embed_params = dict(
    provider_name="ollama",
    model_name="bge-m3:latest",
    mode="embed",
    text="医学大数据平台",
)
vec = ModelService.call(**embed_params)
print("Embedding 向量长度:", len(vec))

# Chat completion via local Ollama (qwen2.5:3b).
chat_params = dict(
    provider_name="ollama",
    model_name="qwen2.5:3b",
    mode="chat",
    prompt="解释一下什么是嵌入模型",
)
reply2 = ModelService.call(**chat_params)
print("Ollama 回复:", reply2)