# from langchain_community.llms import Tongyi
from langchain_community.llms.tongyi import Tongyi
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.prompts import PromptTemplate

# from langchain_openai.chat_models.base import BaseChatOpenAI
# from langchain.llms import OpenAI
# from getpass import getpass
# DASHSCOPE_API_KEY = getpass()
import os


# Build the Tongyi (Qwen) model client.
# SECURITY FIX: the API key was previously hard-coded in this file — a leaked
# credential that should be rotated/revoked. It is now read from the
# DASHSCOPE_API_KEY environment variable; os.environ[...] fails loudly with a
# KeyError rather than silently sending an empty key.
model = Tongyi(
    model="qwen-plus",
    api_key=os.environ["DASHSCOPE_API_KEY"],
)

# A minimal translation request: the system message sets the task, the
# human message carries the text to translate.
messages = [
    SystemMessage("Translate the following from English into Italian"),
    HumanMessage("hi!"),
]

# Stream the response token by token; the "|" separator makes the chunk
# boundaries visible on stdout.
for token in model.stream(messages):
    print(token, end="|")