from langchain_core.prompts import ChatPromptTemplate, ChatMessagePromptTemplate
from langchain_openai import ChatOpenAI
from pydantic import SecretStr

from settings import DASHSCOPE_API_KEY

# 1. Chat-model client: Qwen served through DashScope's
#    OpenAI-compatible endpoint, with token streaming enabled.
llm = ChatOpenAI(
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
    api_key=SecretStr(DASHSCOPE_API_KEY),
    model="qwen-max",
    streaming=True,
)

# 2. Conversation context: a role-parameterised system message followed
#    by a user message that carries the actual question.
messages = [
    ChatMessagePromptTemplate.from_template(
        role="system",
        template="你是一位{role}专家，擅长回答{domain}领域的问题",
    ),
    ChatMessagePromptTemplate.from_template(
        role="user",
        template="用户问题：{question}",
    ),
]

# 3. Combine the message templates into a single chat prompt template.
chat_prompt_template = ChatPromptTemplate.from_messages(messages)

# 4. Template + variables => concrete prompt messages.
#    invoke(...).to_messages() yields the same message list as
#    format_messages(...) with keyword arguments.
chat_prompt_detail = chat_prompt_template.invoke(
    {
        "role": "Python",
        "domain": "编程",
        "question": "如何编写一个闹钟程序？",
    }
).to_messages()
print("-----chat_prompt_detail:", chat_prompt_detail)

# 5. Stream the model's answer and echo each chunk as it arrives.
response = llm.stream(chat_prompt_detail)
print("-----Hold on, LLM 正在回答！-----")
for chunk in response:
    # Print each fragment without a newline so the answer reads as one
    # continuous reply. flush=True is essential: with end="" and a
    # line-buffered stdout, nothing would appear on screen until the
    # buffer fills or the program exits — defeating the streaming.
    print(chunk.content, end="", flush=True)
# Terminate the streamed answer with a newline so any subsequent
# output (or the shell prompt) starts on a fresh line.
print()
