# from langchain_community.llms import Tongyi
from langchain_community.llms.tongyi import Tongyi
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.prompts import PromptTemplate,ChatPromptTemplate

# from langchain_openai.chat_models.base import BaseChatOpenAI
# from langchain.llms import OpenAI
# from getpass import getpass
# DASHSCOPE_API_KEY = getpass()
import os


# os.environ["DASHSCOPE_API_KEY"] = DASHSCOPE_API_KEY
# SECURITY: an API key was previously hard-coded (and therefore leaked) here.
# Rotate that key in the DashScope console. The key is now read from the
# DASHSCOPE_API_KEY environment variable; Tongyi also falls back to that
# variable itself when api_key is None.
model = Tongyi(
    model="qwen-plus",
    api_key=os.environ.get("DASHSCOPE_API_KEY"),
)


# ChatPromptTemplate demo: one templated system instruction plus one
# templated user turn.
system_template = "Translate the following from English into {language}"

chat_messages = [
    ("system", system_template),
    ("user", "{text}"),
]
prompt_template = ChatPromptTemplate.from_messages(chat_messages)

# Filling both variables yields a ChatPromptValue.
prompt = prompt_template.invoke({"language": "Italian", "text": "hi!"})

# Uncomment to inspect the prompt value:
# print(type(prompt))
# print(prompt)
# print(prompt.to_messages())



# String prompt demo: PromptTemplate renders to a StringPromptValue.
prompt_template = PromptTemplate.from_template("Tell me a joke about {topic}")
r = prompt_template.invoke({"topic": "cats"})

print('r', r)                        # text='Tell me a joke about cats'
print('type r', type(r))             # <class 'langchain_core.prompt_values.StringPromptValue'>
print(r.to_messages())               # [HumanMessage(content='Tell me a joke about cats', ...)]
print('r to string', r.to_string())  # Tell me a joke about cats


# Multi-turn ChatPromptTemplate demo: a fixed conversation prefix with two
# template variables ({name} in the system turn, {user_input} in the last turn).
chat_template = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful AI bot. Your name is {name}."),
    ("human", "Hello, how are you doing?"),
    ("ai", "I'm doing well, thanks!"),
    ("human", "{user_input}"),
])
r = chat_template.invoke({"name": "Bot", "user_input": "Tell me a joke about cats"})
print(r.to_messages())
# Expected output:
# [SystemMessage(content='You are a helpful AI bot. Your name is Bot.', ...),
#  HumanMessage(content='Hello, how are you doing?', ...),
#  AIMessage(content="I'm doing well, thanks!", ...),
#  HumanMessage(content='Tell me a joke about cats', ...)]

from langchain_core.messages import HumanMessage
from langchain_core.prompts import MessagesPlaceholder


# MessagesPlaceholder demo: splice a caller-supplied list of messages into
# the prompt under the "msgs" variable.
placeholder_template = ChatPromptTemplate.from_messages([
    ('system', "你是一个助手"),
    MessagesPlaceholder(variable_name="msgs"),
])
r = placeholder_template.invoke(
    {"msgs": [HumanMessage(content=" Tell me a joke about cats")]}
)
print(r.to_messages())
# Expected output:
# [SystemMessage(content='你是一个助手', ...),
#  HumanMessage(content=' Tell me a joke about cats', ...)]
# Non-streaming alternative:
# r1 = model.invoke(r)

# Stream the model's reply incrementally, separating chunks with '|'.
for piece in model.stream(r):
    print(piece, end='|')

