import getpass
import os
from langchain_openai.chat_models.base import BaseChatOpenAI
from langchain_core.prompts import ChatPromptTemplate
# Enable LangSmith tracing for this run.
os.environ["LANGSMITH_TRACING"] = "true"
# SECURITY: never commit API keys to source control. Read the key from the
# environment; fall back to an interactive prompt so the script stays runnable.
if not os.environ.get("LANGSMITH_API_KEY"):
    os.environ["LANGSMITH_API_KEY"] = getpass.getpass("LangSmith API key: ")
"""
模型引入 输出 以及提示词模板建立输出
"""

# SECURITY: the API key must not be hard-coded in source. Read it from the
# environment; fall back to an interactive prompt so the script stays runnable.
if not os.environ.get("DEEPSEEK_API_KEY"):
    os.environ["DEEPSEEK_API_KEY"] = getpass.getpass("DeepSeek API key: ")

# Chat model pointed at DeepSeek's OpenAI-compatible endpoint.
llm = BaseChatOpenAI(
    model="deepseek-chat",
    openai_api_key=os.environ["DEEPSEEK_API_KEY"],
    openai_api_base="https://api.deepseek.com/v1",
    max_tokens=1024,  # cap on tokens generated per response
)

# Plain (single-shot) invocation
# response = llm.invoke([{"role": "user", "content": "Hello"}])

# Streaming output: print each token as it arrives
# for token in llm.stream([{"role": "user", "content": "Hello"}]):
#     print(token.content, end="|")

# Prompt template: build the message list from a system + user template
# system_template = "Translate the following from English into {language}"
#
# prompt_template = ChatPromptTemplate.from_messages(
#     [("system", system_template), ("user", "{text}")]
# )
#
# prompt = prompt_template.invoke({"language": "Chinese", "text": "hello"})
#
# response = llm.invoke(prompt)
# print(response.content)


