from langchain_core.callbacks.base import BaseCallbackHandler

class CustomCallbackHandler(BaseCallbackHandler):
    """Trace LLM and chain lifecycle events by printing them to stdout."""

    @staticmethod
    def _trace(*parts):
        # Single choke point for all trace output; mirrors print() exactly.
        print(*parts)

    def on_llm_start(self, serialized, prompts, **kwargs):
        """Log that an LLM call began, together with the prompts sent."""
        self._trace(">> LLM started")
        self._trace(">> prompts:", prompts)

    def on_llm_end(self, response, **kwargs):
        """Log the raw LLM response, then mark the call as finished."""
        self._trace(">> response:", response)
        self._trace(">> LLM ended")

    def on_chain_start(self, serialized, inputs, **kwargs):
        """Log that a chain run began, together with its inputs."""
        self._trace(">> Chain started")
        self._trace(">> Inputs:", inputs)

    def on_chain_end(self, outputs, **kwargs):
        """Log the chain's outputs, then mark the run as finished."""
        self._trace(">> Outputs:", outputs)
        self._trace(">> Chain ended")

from dotenv import load_dotenv, find_dotenv
from langchain_community.chat_models import ChatZhipuAI
from langchain.prompts.prompt import PromptTemplate

# Load API credentials (e.g. ZHIPUAI_API_KEY) from a local .env file, if any.
_ = load_dotenv(find_dotenv())

# Zhipu AI chat model; a high temperature (0.9) favors creative wording,
# which suits poetry generation.
chat = ChatZhipuAI(
    model="glm-4-plus",
    temperature=0.9,
)

# Callback handlers to attach to this invocation for tracing.
handlers = [CustomCallbackHandler()]

prompt = PromptTemplate.from_template("请写一句关于{topic}的诗。")

# Compose prompt -> model with the LCEL pipe operator.
chain = prompt | chat

# Pass the input as a mapping keyed by the template variable ("topic").
# This is the documented input form for a PromptTemplate runnable; passing a
# bare string relies on a single-variable coercion fallback and breaks as
# soon as the template gains a second variable.
result = chain.invoke({"topic": "春天"}, config={"callbacks": handlers})
print(result.content)
