import os

from langchain_core.prompts import ChatMessagePromptTemplate
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.prompts import FewShotPromptTemplate
from langchain_core.prompts import PromptTemplate
from langchain_openai import ChatOpenAI
from pydantic import SecretStr

# Create the chat model instance (remote call to an OpenAI-compatible endpoint).
# SECURITY: the API key was hard-coded here. Prefer the DASHSCOPE_API_KEY
# environment variable; the old literal is kept only as a fallback so existing
# runs keep working. This key has been committed to source — rotate it.
llm = ChatOpenAI(
    model_name="qwen3-max",
    temperature=0,  # deterministic output for reproducible demos
    openai_api_key=SecretStr(
        os.environ.get("DASHSCOPE_API_KEY", "sk-f4fb03bbc29b4f0995b60dec52645af0")
    ),
    openai_api_base="https://dashscope.aliyuncs.com/compatible-mode/v1",
    streaming=True,  # emit tokens incrementally instead of one final message
)

# --- Basic string prompt template ------------------------------------------
# A PromptTemplate fills named {placeholders} in a plain-text template.
prompt_template = PromptTemplate.from_template("今天的{something}真不错！")
# template + variables => final prompt string
prompt = prompt_template.format(something="天气")

# ---------------------------------------------------------------------------
# Chat prompt template built from (role, template) tuples: one system message
# setting the persona, one user message carrying the question.
chat_template_prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "你是一位{role}专家、擅长回答{domain}领域的问题。"),
        ("user", "用户问题：{question}。"),
    ]
)
# template + variables => final prompt (rendered as a single string)
prompt = chat_template_prompt.format(
    role="医疗", domain="健康", question="如何预防感冒？"
)

# ---------------------------------------------------------------------------
# Same chat prompt, built from explicit message-template objects instead of
# (role, template) tuples.
# System message template: sets the assistant persona.
system_message_template = ChatMessagePromptTemplate.from_template(
    role="system",
    template="你是一位{role}专家、擅长回答{domain}领域的问题。",
)
# User message template: carries the actual question.
user_message_template = ChatMessagePromptTemplate.from_template(
    role="user",
    template="用户问题：{question}。",
)
# Combine the individual message templates into one chat prompt template.
chat_template_prompt = ChatPromptTemplate.from_messages(
    [system_message_template, user_message_template]
)
# template + variables => list of formatted chat messages
prompt = chat_template_prompt.format_messages(
    role="医疗", domain="健康", question="如何预防感冒？"
)

# ===========================================================================
# Invoke the model. The formatted prompt could be streamed directly:
#   resp = llm.stream(prompt)
#   for chunk in resp:
#       print(chunk.text, end="", flush=True)
# Instead, compose the prompt template and the model into a runnable chain
# (LCEL pipe syntax) and stream the chain with the raw template variables.
chain = chat_template_prompt | llm
for chunk in chain.stream(
    {"role": "医疗", "domain": "健康", "question": "如何预防感冒？"}
):
    # Print each streamed chunk's text content as it arrives.
    print(chunk.content, end="", flush=True)

# --- Few-shot prompt template ++++++++++++++++++++++++++++++++++++++++++++++
# Template used to render each individual worked example.
example_template = "输入：{input}\n输出：{output}"

# Worked examples the model should imitate.
examples = [
    {"input": "将'hello'翻译成中文", "output": "你好"},
    {"input": "将'goodbye'翻译成中文", "output": "再见"},
]

# Final prompt layout: prefix + rendered examples + suffix.
few_shot_prompt_template = FewShotPromptTemplate(
    examples=examples,
    example_prompt=PromptTemplate.from_template(example_template),
    prefix="下面是一些英文到中文的翻译示例：",
    suffix="请根据上述示例，翻译以下内容：\n输入：{input}\n输出：",
    input_variables=["input"],
)

# Render the complete few-shot prompt for a new input.
prompt = few_shot_prompt_template.format(input="thank you")

# To run it against the model:
#   resp = llm.stream(prompt)
#   for chunk in resp:
#       print(chunk.text, end="", flush=True)