'''
* This is the project for Brtc LLMOps Platform
* @Author Leon-liao <liaosiliang@alltman.com>
* @Description Demo of a custom callback handler attached to a streaming LLM chain
* @File: 17_callback_for_llm.py
* @Time: 2025/10/23
* @All Rights Reserved By Brtc
'''
from typing import Any
from uuid import UUID
import dotenv
from langchain_core.callbacks import BaseCallbackHandler, StdOutCallbackHandler
from langchain_core.output_parsers import StrOutputParser
from langchain_core.outputs import GenerationChunk, ChatGenerationChunk
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI

# Load environment variables (e.g. OPENAI_API_KEY) from a local .env file.
dotenv.load_dotenv()
class LLMOpsCallBackHandler(BaseCallbackHandler):
    """Custom LLMOps callback handler.

    Hooks into the LLM call lifecycle: logs the rendered prompts when a
    model invocation starts, and runs once per streamed token (intended
    as the place to do per-token cost accounting).
    """

    def on_llm_start(
        self,
        serialized: dict[str, Any],
        prompts: list[str],
        *,
        run_id: UUID,
        parent_run_id: UUID | None = None,
        tags: list[str] | None = None,
        metadata: dict[str, Any] | None = None,
        **kwargs: Any,
    ) -> Any:
        """Fired once when the LLM call begins; logs the final prompts sent to the model."""
        print("on_llm_start----> ", prompts)

    def on_llm_new_token(
        self,
        token: str,
        *,
        chunk: GenerationChunk | ChatGenerationChunk | None = None,
        run_id: UUID,
        parent_run_id: UUID | None = None,
        **kwargs: Any,
    ) -> Any:
        """Fired for every streamed token; placeholder for token/cost metering."""
        # NOTE(review): the token itself is not used yet — real accounting
        # would accumulate `token` / `chunk` usage here.
        print("开始统计算钱了")


def retriever_from_database(query: str) -> str:
    """Simulate a database retrieval step for the given user question.

    Args:
        query: The user's question being "looked up".

    Returns:
        A fixed, canned context string standing in for real retrieval results.
    """
    # Trace what we are "searching" for, mimicking a real retriever's logging.
    print("正在检索数据库，检索的问题是：", query)
    canned_context = "我叫吴彦祖， 今年18岁， 身价1000个亿， 钱花不完 好烦！！"
    return canned_context

# Build the prompt: answer the user's question, optionally grounded in the
# retrieved context.
# FIX: the original template opened <context> but never closed it, so the model
# could not tell where the retrieved context ended and the question began.
prompt = ChatPromptTemplate.from_template("""
请根据用户的问题回答, 可以参考对应的上下文进行生成
<context>
{context}
</context>
用户的提问是:{query}
""")
# Chat model (reads OPENAI_API_KEY from the environment loaded above).
llm = ChatOpenAI(model="gpt-4o-mini")
# Output parser: extracts the plain string content from the chat message.
parser = StrOutputParser()
# Chain: inject retrieved context -> format prompt -> call model -> parse text.
chain = (
    RunnablePassthrough.assign(context=lambda x: retriever_from_database(x["query"]))
    | prompt
    | llm
    | parser
)
# Stream the answer; both callback handlers fire during execution
# (StdOutCallbackHandler traces the chain, ours hooks the LLM events).
content = chain.stream(
    {"query": "你好我叫什么？请给我一些人生建议！"},
    config={"callbacks": [StdOutCallbackHandler(), LLMOpsCallBackHandler()]},
)

for one in content:
    print(one)