from langchain_core.prompts import PromptTemplate, FewShotPromptTemplate
from langchain_openai import ChatOpenAI
from pydantic import SecretStr

from settings import DASHSCOPE_API_KEY

# 1. Define the LLM client (DashScope's OpenAI-compatible endpoint).
_llm_kwargs = dict(
    model="qwen-max",
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    api_key=SecretStr(DASHSCOPE_API_KEY),
    streaming=True,  # stream tokens instead of waiting for the full reply
)
llm = ChatOpenAI(**_llm_kwargs)

# 1. Format template used to render a single few-shot example.
_EXAMPLE_TEMPLATE = "国家：{country} -> 首都：{capital}"
example_prompt = PromptTemplate(
    template=_EXAMPLE_TEMPLATE,
    input_variables=["country", "capital"],
)

# 2. Pool of few-shot examples (country, capital) rendered into dicts
#    matching the example template's input variables.
_COUNTRY_CAPITAL_PAIRS = [
    ("法国", "巴黎"),
    ("日本", "东京"),
    ("巴西", "巴西利亚"),
]
examples = [
    {"country": country, "capital": capital}
    for country, capital in _COUNTRY_CAPITAL_PAIRS
]

# 3. Assemble the FewShotPromptTemplate:
#    prefix, then each formatted example (newline-separated),
#    then the suffix that carries the live query slot.
few_shot_prompt_template = FewShotPromptTemplate(
    example_prompt=example_prompt,
    examples=examples,
    example_separator="\n",
    prefix="请仿照示例，直接输出答案：",
    suffix="国家：{country} -> 首都：",
    input_variables=["country"],
)

# 4. Render the final prompt for the query country.
chat_prompt_detail = few_shot_prompt_template.format(country="加拿大")
# Fix: the original printed the rendered prompt twice (once bare, once
# labelled). One labelled print is enough.
print("-----chat_prompt_detail:", chat_prompt_detail)

# 5. Stream the model's answer, printing each chunk as it arrives.
print("-----Hold on, LLM 正在回答！-----")
response = llm.stream(chat_prompt_detail)
for chunk in response:
    # Print without a newline; flush=True so each token appears immediately
    # instead of sitting in stdout's buffer (fix: streamed output looked
    # frozen until the buffer filled).
    print(chunk.content, end="", flush=True)
# Terminate the streamed line so any following shell prompt/output starts clean.
print()
