from langchain_ollama import ChatOllama
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
def main() -> None:
    """Translate a Chinese sentence into English via a remote Ollama model.

    Builds an LCEL pipeline (prompt -> chat model -> string parser) and
    prints the translated text. Requires an Ollama server reachable at
    the configured ``base_url``.
    """
    # Give the scheme explicitly — a bare "host:port" only works because the
    # ollama client happens to normalize it; "http://host:port" is unambiguous.
    model = ChatOllama(base_url="http://10.12.8.21:11434", model="qwen2.5:14b")

    # Parameterized prompt: {language} is the target language,
    # {text} is the source text to translate.
    system_template = "Translate the following into {language}:"
    prompt_template = ChatPromptTemplate.from_messages(
        [("system", system_template), ("user", "{text}")]
    )

    # LCEL chain: fill the prompt, call the model, extract the plain-text reply.
    chain = prompt_template | model | StrOutputParser()
    print(
        chain.invoke(
            {
                "language": "English",
                "text": "现在我们已经构建了一个应用程序，我们需要对其进行服务。",
            }
        )
    )


# Guard the entry point so importing this module does not trigger a network
# call to the model server.
if __name__ == "__main__":
    main()
