from langchain_core.messages import SystemMessage, HumanMessage
from config.load_key import load_key
from langchain_deepseek import ChatDeepSeek
from langchain_core.prompts import ChatPromptTemplate

# llm = ChatDeepSeek(
#     model="deepseek-chat",
#     api_key=load_key("deepseek_api_key")
# )
# 1. 流式输出
# stream = llm.stream([
#     HumanMessage("你是谁？"),
# ])
# for chunk in stream:
#     print(chunk.text(), end="")

# 2. 提示词模板
# print("")
# prompt_template = ChatPromptTemplate.from_messages([
#     ("system", "Translate the following from English into {language}"),
#     ("user", "{text}"),
# ])
# prompt = prompt_template.invoke({"language": "Chinese", "text": "Hello, how are you?"})
# response = llm.invoke(prompt)
# print(response.content)

# 3. Custom parameters
#
# temperature, range [0, 2]: controls the randomness of the generated text.
# Higher values make the output more random and more likely to contain novel
# content; lower values make it more deterministic, following patterns seen
# in the training data.
from langchain_openai import ChatOpenAI

# NOTE(review): credentials and endpoint come from the project-level config
# via load_key — presumably a SiliconFlow OpenAI-compatible endpoint; cannot
# be verified from this file.
llm = ChatOpenAI(
    model="Qwen/Qwen2.5-7B-Instruct",
    api_key=load_key("siliconflow_api_key"),
    base_url=load_key("siliconflow_base_url"),
    temperature=1.2,  # deliberately high to demonstrate varied, creative output
)

# Invoke the model 5 times with the same prompt (asking for a cool smartphone
# name of at most 4 Chinese characters) to show how the high temperature
# produces different answers on each call.
for i in range(5):
    response = llm.invoke(
        [HumanMessage("给一款智能手机起一个炫酷的名字？返回字数4个汉字以内")]
    )
    print(f"{i}>>{response.content}")