from langchain_ollama import OllamaLLM
from langchain_core.prompts import FewShotPromptTemplate, PromptTemplate

# Create the OllamaLLM client for the local DeepSeek-R1 7B model.
# NOTE(review): assumes an Ollama server is running on localhost:11434 — confirm before running.
llm = OllamaLLM(model="deepseek-r1:7b", base_url="http://localhost:11434")

# Template used to render each few-shot example as an "input/output" pair.
example_template = "输入: {input}\n输出: {output}"

# Few-shot translation examples. The prefix below already instructs the model
# to translate English into Chinese, so each example input is plain English
# text — matching the third original example and the actual
# format(text="How are you?") call. (The original second example contained a
# stray quote and a fused instruction: "How are you?'翻译成中文".)
examples = [
    {"input": "Hello", "output": "你好"},
    {"input": "How are you?", "output": "你好吗？"},
    {"input": "Thank you", "output": "谢谢!"},
]

# Few-shot prompt: instruction prefix, the rendered examples, then a suffix
# that presents the new {text} and leaves "输出:" open for the model to fill.
few_shot_prompt_template = FewShotPromptTemplate(
    examples=examples,
    example_prompt=PromptTemplate.from_template(example_template),
    prefix="请将以下英文翻译成中文：",
    suffix="输入: {text}\n输出:",
    input_variables=["text"],
)

# Show the template object itself, for debugging.
print(few_shot_prompt_template)

# Render the final prompt: prefix, then each example as "输入: ...\n输出: ...",
# then the suffix with the new English text filled in, ending with an open
# "输出:" line for the model to complete.
prompt = few_shot_prompt_template.format(text="How are you?")
print(prompt)

# llm.stream(...) returns an iterator of string chunks, not a single string;
# print each chunk as it arrives for incremental console output.
resp = llm.stream(prompt)
for chunk in resp:
    print(chunk, end="")
