from langchain_core.messages import SystemMessage
from langchain_core.prompts import (
    ChatPromptTemplate,
    FewShotChatMessagePromptTemplate,
)

from common.llm_builder import llm

# Example pool: (country, capital) pairs used as few-shot demonstrations.
_pairs = [
    ("法国", "巴黎"),
    ("日本", "东京"),
    ("巴西", "巴西利亚"),
]
examples = [{"country": c, "capital": cap} for c, cap in _pairs]

# Template that renders one example as a single human message.
_example_message = ("human", "国家：{country} -> 首都：{capital}")
example_prompt = ChatPromptTemplate.from_messages([_example_message])

# Build the few-shot prompt template from the fixed example pool.
# With a static `examples` list the template takes no input variables of its
# own: each example dict supplies the {country}/{capital} placeholders when
# the examples are rendered. The previous `input_variables=["country"]` was
# never used by the fixed-example path and only misstated the contract.
few_shot_prompt_template = FewShotChatMessagePromptTemplate(
    examples=examples,
    example_prompt=example_prompt,
)

# Final prompt: system instruction + few-shot examples + the actual query.
# The query must reuse the same "国家：... -> 首都：" shape the examples
# establish, so the model completes the pattern rather than receiving a bare
# country name that looks nothing like the demonstrations. (The old template
# sent just "{country}", breaking the few-shot pattern.)
chat_prompt_message = ChatPromptTemplate.from_messages(
    [
        SystemMessage(content="请仿照示例，直接输出答案："),
        few_shot_prompt_template,
        ("human", "国家：{country} -> 首都："),
    ]
)

# Render the prompt with the concrete question and show the resulting messages.
chat_prompt_detail = chat_prompt_message.invoke({
    "country": "中国"
})
print("-----chat_prompt_detail:", chat_prompt_detail)

# Stream the model's answer. flush=True makes each chunk appear immediately
# instead of sitting in the stdout buffer until the program exits.
print("-----Hold on, LLM 正在回答！-----")
for chunk in llm.stream(chat_prompt_detail):
    print(chunk.content, end="", flush=True)
print()  # terminate the streamed output with a newline
