

import os
from sys import prefix

from langchain_core.prompts import (
    ChatMessagePromptTemplate,
    ChatPromptTemplate,
    FewShotPromptTemplate,
    PromptTemplate,
)
from langchain_openai import ChatOpenAI
from pydantic import SecretStr

# Initialize the LLM client (Qwen served through DashScope's OpenAI-compatible endpoint).
# SECURITY: an API key was previously hard-coded on this line and has therefore leaked
# into version control — rotate that key, then provide the new one via the
# DASHSCOPE_API_KEY environment variable instead of committing it to source.
llm: ChatOpenAI = ChatOpenAI(
    model='qwen3-max',
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    # Read the secret from the environment; empty string if unset so the
    # failure surfaces as an auth error at call time, not at import time.
    api_key=SecretStr(os.environ.get("DASHSCOPE_API_KEY", "")),
    streaming=True,  # emit tokens incrementally rather than one final message
)

# 创建prompt模板
# prompt_template: PromptTemplate = PromptTemplate.from_template("今天{something}真不错")



# 模板+变量=》提示词
# prompt = prompt_template.format(something="天气")
# chat_prompt = chat_prompt_template.format_messages(
#     role="编程", 
#     domain="前端", 
#     question="你擅长什么"
#     )

# Few-shot prompting: demonstrate the task format to the model with worked examples.
examples = [
    {"input": "将hello翻译成中文", "output": "你好"},
    {"input": "将goodbye翻译成中文", "output": "再见"},
]
# Template used to render each individual example inside the final prompt.
example_template = "输入:{input}\n输出:{output}"
few_shot_prompt_template: FewShotPromptTemplate = FewShotPromptTemplate(
    examples=examples,
    example_prompt=PromptTemplate.from_template(example_template),
    # Fixed typo in the instruction: "一下" -> "以下" ("the following").
    prefix="请将以下英文翻译成中文",
    suffix="输入:{text}\n输出:",
    input_variables=["text"],
)
print(few_shot_prompt_template)
few_shot_prompt = few_shot_prompt_template.format(text="Thank you")
print(few_shot_prompt)

# print("生成的prompt: {chat_prompt}")



# Call the API and stream the response to stdout.
try:
    # Compose prompt template and model into one LCEL runnable, then stream it.
    translation_chain = few_shot_prompt_template | llm
    for piece in translation_chain.stream({"text": "Thank you"}):
        # Skip empty deltas so only real token text reaches the terminal.
        if piece.content:
            print(piece.content, end="")
except Exception as e:
    # Top-level boundary for the script: report the failure instead of crashing.
    print(f"API调用失败: {e}")