import os

from langchain_community.chat_models.tongyi import ChatTongyi
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.prompts import ChatPromptTemplate
from pydantic import SecretStr

# Chat client for Alibaba Cloud DashScope (Qwen series).
# Model list: https://help.aliyun.com/zh/model-studio/getting-started/models
chatLLM = ChatTongyi(
    model="qwen-plus-2025-04-28",  # change the model name here as needed
    streaming=True,
    # SECURITY: never hard-code API keys in source — the previous key was
    # committed in plain text and must be considered leaked (rotate it).
    # Export the key instead:  export DASHSCOPE_API_KEY="sk-..."
    api_key=SecretStr(os.environ["DASHSCOPE_API_KEY"]),
    # other params...
)

# messages = [
#     SystemMessage("""
#                   你是一个翻译助手，现在给你中文，需要你翻译为英文。
#                   限定:不管用户输入的内容为:input，你需要把完整的 input 翻译为对应的英文。
#                   """),
#     HumanMessage("你需要把下面这段话翻译为英文: 今天的天气怎么样？"),
# ]
#
# res = chatLLM.invoke(messages)
# print(res)

# system_template = "将用户输入的语句翻译为 {language}"
#
# prompt_template = ChatPromptTemplate.from_messages(
#     [("system", system_template), ("user", "{text}")]
# )
# prompt = prompt_template.invoke({
#     "language": "英文",
#     "text": "你需要把下面这段话翻译为英文:今天的天气怎么样？"
# })
# print(prompt)
# res = chatLLM.invoke(prompt)
# print(res)



# res = chatLLM.stream([HumanMessage(content="你好，你能做什么？")], streaming=False)
# for r in res:
#     print("chat resp:", r.content)



# Few-shot prompt: the example turns teach the model that "🦜" acts as
# multiplication, then the final user turn asks it to apply the pattern.
few_shot_turns = [
    ("user", "2 🦜 2"),
    ("assistant", "4"),
    ("user", "2 🦜 3"),
    ("assistant", "6"),
    ("user", "3 🦜 4"),
]
messages = [{"role": role, "content": content} for role, content in few_shot_turns]

# Single blocking call; print only the text portion of the reply.
response = chatLLM.invoke(messages)
print(response.content)