


 
from pydantic import BaseModel,FilePath,Field
from langchain_core.utils.pydantic import (
    PydanticBaseModel,
    TBaseModel,
)

from langchain_core.beta.runnables.context import Context
from langchain_core.runnables.passthrough import RunnablePassthrough
from langchain_core.prompts.prompt import PromptTemplate
from langchain_core.output_parsers.string import StrOutputParser
from langchain.schema import HumanMessage, SystemMessage
 
 
from langchain.chat_models import ChatOpenAI
from langchain.output_parsers import PandasDataFrameOutputParser, OutputFixingParser
from langchain_core.prompts import PromptTemplate
import pandas as pd

from langchain_core.example_selectors import (
    LengthBasedExampleSelector,
    MaxMarginalRelevanceExampleSelector,
    SemanticSimilarityExampleSelector,
)
from langchain_core.prompts import (
    AIMessagePromptTemplate,
    BaseChatPromptTemplate,
    BasePromptTemplate,
    ChatMessagePromptTemplate,
    ChatPromptTemplate,
    FewShotChatMessagePromptTemplate,
    FewShotPromptTemplate,
    FewShotPromptWithTemplates,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
    PipelinePromptTemplate,
    PromptTemplate,
    StringPromptTemplate,
    SystemMessagePromptTemplate,
    load_prompt,
)

from langchain._api import create_importer
from langchain.prompts.prompt import Prompt


from langchain.tools import StructuredTool
from pydantic import BaseModel


from langchain_core.callbacks import AsyncCallbackHandler
from langchain_core.prompts import ChatPromptTemplate

import asyncio

class CustomAsyncHandler(AsyncCallbackHandler):
    """Async callback handler that logs chat-model lifecycle events.

    Hooks into LangChain's async callback system to print diagnostics
    when a chat model starts, when it streams tokens, and when it
    finishes generating.
    """

    async def on_chat_model_start(self, serialized, messages, **kwargs):
        # `messages` is a batch: a list of message lists, one per prompt.
        # NOTE(review): len(messages) is therefore the batch size, not the
        # total number of messages — confirm that is the intended metric.
        print(f"异步模型启动，收到消息数: {len(messages)}")

    async def on_llm_new_token(self, token: str, **kwargs):
        # Print each streamed token inline, separated by '|'.
        print(f"流式Token: {token}", end="|")

    async def on_llm_end(self, response, **kwargs):
        # BUG FIX: the original printed len(response.generations[0]), which
        # is the number of candidate generations for the first prompt (a
        # list of Generation objects), not the "response length" the label
        # claims. Report the character length of the first generation's
        # text instead.
        print(f"\n异步处理完成，响应长度: {len(response.generations[0][0].text)}")

async def main():
    """Run a one-shot async chain invocation with streaming callbacks.

    Builds a prompt -> chat-model chain against the DeepSeek
    OpenAI-compatible endpoint and invokes it asynchronously with a
    custom async callback handler attached via the run config.
    """
    import os  # local import: only needed here to read the API key

    handler = CustomAsyncHandler()

    # SECURITY: the API key was hard-coded in source. Prefer the
    # DEEPSEEK_API_KEY environment variable; the original literal is kept
    # only as a backward-compatible fallback. Rotate this key — it has
    # been committed to source control and must be treated as leaked.
    api_key = os.environ.get(
        "DEEPSEEK_API_KEY", "sk-605e60a1301040759a821b6b677556fb"
    )

    llm = ChatOpenAI(
        model="deepseek-chat",
        temperature=0,
        openai_api_key=api_key,
        base_url="https://api.deepseek.com/v1",
    )

    prompt = ChatPromptTemplate.from_template("解释{concept}的概念")
    chain = prompt | llm

    # Pass the handler via the per-invocation config so callbacks fire for
    # every runnable in the chain, not just the model.
    await chain.ainvoke({"concept": "量子计算"}, config={"callbacks": [handler]})


if __name__ == "__main__":
    asyncio.run(main())
