from tools.chat_tools  import get_chat_tools
from models.json_response  import JsonData
from typing  import List,AsyncIterator,Dict,AsyncGenerator
from langchain.agents import create_openai_functions_agent,AgentExecutor
from langchain.prompts import ChatPromptTemplate,MessagesPlaceholder 
from langchain.tools import Tool
from core.llm import get_default_llm
from models.json_response import JsonData
import asyncio
from service.chat_service import ChatService
import logging

logger = logging.getLogger(__name__)

 
def create_chat_agent(tools:List[Tool]):
    """Build the teaching-assistant agent executor.

    Args:
        tools: LangChain tools the agent is allowed to invoke
            (e.g. search).

    Returns:
        An ``AgentExecutor`` configured with the OpenAI-functions agent,
        verbose logging, a cap of 3 tool iterations, and parsing-error
        recovery enabled.
    """
    instructions = """你是一个教学助手，你可以：
    1.进行日常对话和问答
    2.使用搜索工具获取最新消息
    3.记住与用户的对话历史
    4.主要根据资料回答
    请保持专业回答，友好且准确，如果用户问题有最新信息，请使用搜索工具，请结合资料回答。"""

    # Prompt layout: system instructions, conversation summary, retrieved
    # reference material, the user's question, then the scratchpad slot the
    # agent uses for tool-call bookkeeping.
    messages = [
        ("system", instructions),
        ("system", "以下是之前的对话摘要：{summary}"),
        ("system", "以下是用户问题的相关资料：{information}"),
        ("human", "{input}"),
        MessagesPlaceholder(variable_name="agent_scratchpad"),
    ]
    chat_prompt = ChatPromptTemplate.from_messages(messages)

    functions_agent = create_openai_functions_agent(
        llm=get_default_llm(),
        tools=tools,
        prompt=chat_prompt,
    )
    return AgentExecutor.from_agent_and_tools(
        agent=functions_agent,
        tools=tools,
        verbose=True,
        max_iterations=3,
        handle_parsing_errors=True,
    )

async def chat_with_agent(
    agent_executor: AgentExecutor,
    chat_service: ChatService,
    account_id: str,
    input_text: str,
    course_id: str,
) -> AsyncGenerator[str, None]:
    """Run one turn of the agent and yield the answer character by character.

    Fetches the conversation summary and retrieval context, streams the
    agent's execution, persists the final answer, then yields it one
    character at a time (with a small delay) for downstream streaming.

    Args:
        agent_executor: Executor produced by ``create_chat_agent``.
        chat_service: Service used for summary, retrieval and persistence.
        account_id: Identifier of the chatting user.
        input_text: The user's question.
        course_id: Course scope for the retrieval lookup.

    Yields:
        Single characters of the agent's final answer. On any error the
        generator logs and ends early (best-effort: callers see a truncated
        or empty stream rather than an exception).
    """
    try:
        summary = await chat_service.generate_summary(account_id)
        # NOTE(review): get_information is called synchronously here —
        # presumably it is a sync method; confirm it does not block the loop.
        information = chat_service.get_information(input_text, course_id)
        async for chunk in agent_executor.astream(
            {"input": input_text, "summary": summary, "information": information}
        ):
            # Only the chunk carrying the final answer has an "output" key.
            if "output" in chunk:
                response = chunk["output"]

                # Persist the exchange before streaming it back to the client.
                chat_service.save_chat_message(account_id, input_text, response)

                for token in response:
                    yield token
                    await asyncio.sleep(0.01)  # pace the stream
    except Exception:
        # logger.exception records the full traceback; plain error(e)
        # previously dropped it, making failures hard to diagnose.
        logger.exception("chat_with_agent failed for account %s", account_id)
        
async def generate_stream_response(
    chat_service: ChatService,
    account_id: str,
    message: str,
    course_id: str,
) -> AsyncIterator[str]:
    """Stream the agent's answer as Server-Sent Events (SSE) frames.

    Buffers the per-character stream from ``chat_with_agent`` and flushes a
    frame on sentence punctuation, newline, or once the buffer exceeds 5
    characters; any remainder is flushed at the end.

    Args:
        chat_service: Service passed through to ``chat_with_agent``.
        account_id: Identifier of the chatting user.
        message: The user's question.
        course_id: Course scope for retrieval.

    Yields:
        SSE frames of the form ``data: <json>\\n\\n``.
    """

    def _sse_frame(text: str) -> str:
        # One place formats the frame so both flush paths stay consistent.
        # Bug fix: the original appended a stray "]" after the JSON payload
        # ("data: {...}]\n\n"), producing invalid JSON in every SSE frame.
        payload = JsonData.stream_data(data=text)
        return f"data: {payload.model_dump_json()}\n\n"

    agent = create_chat_agent(get_chat_tools())
    current_chunk = ""
    async for token in chat_with_agent(agent, chat_service, account_id, message, course_id):
        current_chunk += token
        # Flush on sentence/line boundaries or when the buffer grows past 5 chars.
        if token in ["\n", "\r\n", "。", ".", "，"] or len(current_chunk) > 5:
            yield _sse_frame(current_chunk)
            current_chunk = ""
            await asyncio.sleep(0.1)  # throttle frame rate

    # Flush whatever is left after the token stream ends.
    if current_chunk:
        yield _sse_frame(current_chunk)

