import asyncio
import concurrent.futures
import json
from queue import Queue

from langchain_core.messages import AIMessageChunk
from langgraph.pregel.io import AddableUpdatesDict
from pydantic import BaseModel

from owl_ai.domain.graph.graph_compile import ChatFlowCompile
from owl_ai.mapper.ai_config_mapper import AgentConfigMapper

# Shared module-level pool (10 workers) for offloading blocking work.
# NOTE(review): nothing in this file references it — presumably used by
# external callers importing this module; confirm before removing.
thread_pool_executor = concurrent.futures.ThreadPoolExecutor(max_workers=10)


class GraphInvokeDTO(BaseModel):
    """Request payload for invoking a compiled graph."""

    # Id of the graph configuration to execute.
    graph_id: int
    # Invocation parameters; the "messages" key feeds the graph's start node.
    invoke_params: dict


class GraphInvokeService:
    """Compiles a configured graph and streams its execution as SSE lines."""

    @classmethod
    def stream(cls, dto: "GraphInvokeDTO", app):
        """
        Invoke the graph identified by ``dto.graph_id``.

        Args:
            dto: invocation request carrying the graph id and input params.
            app: application context forwarded into the graph input.

        Returns:
            An iterator of (stream_mode, chunk) tuples from the compiled graph.

        Raises:
            ValueError: if no graph config exists for ``dto.graph_id``.
        """
        graph_config = AgentConfigMapper.select_by_id(dto.graph_id)
        if graph_config is None:
            # ValueError is more precise than bare Exception and is still
            # caught by any caller that catches Exception.
            raise ValueError("graph config not found")

        # Compile the stored configuration into an executable graph.
        graph_app = ChatFlowCompile.compile(graph_config)
        # Build the graph input: app context plus the start-node messages.
        input_params = {
            "app": app,
            "node_params": {
                "__start__": {
                    "messages": dto.invoke_params.get("messages"),
                }
            }
        }

        return graph_app.stream(input=input_params, stream_mode=['messages'])

    @classmethod
    def streaming(cls, chunks):
        """
        Convert raw (stream_mode, chunk) tuples into SSE ``data:`` lines.

        Only ``AIMessageChunk`` payloads from "messages" events are emitted;
        "updates" events are skipped. A malformed chunk is logged and skipped
        so a single bad chunk cannot break the SSE connection (best-effort).
        """
        for chunk_type, chunk in chunks:
            try:
                if chunk_type == "updates":
                    if isinstance(chunk, AddableUpdatesDict):
                        continue
                elif chunk_type == "messages":
                    chunk_data = chunk[0]
                    metadata = chunk[1]

                    if isinstance(chunk_data, AIMessageChunk):
                        json_data = {
                            "nodeId": metadata.get('langgraph_node'),
                            # TODO: messageId is hard-coded; assign a real
                            # per-message id when one is available.
                            "messageId": 1,
                            "type": "messageChunk",
                            "content": chunk_data.content
                        }

                        yield f"data: {json.dumps(json_data, ensure_ascii=False)}\n\n"
                    else:
                        print(chunk_data)
            except Exception as e:
                # Was: print(chunk) — which dropped the exception entirely.
                # Log both the error and the offending chunk, keep streaming.
                print(f"streaming chunk failed: {e!r}, chunk={chunk}")

    @classmethod
    def streaming_events(cls, events):
        """
        Bridge an async event iterator into a synchronous SSE generator.

        A worker thread drives the async iteration and pushes serialized
        events into a bounded queue; this generator drains the queue until
        the ``None`` sentinel signals completion.
        """
        queue = Queue(maxsize=20000)

        async def pump():
            # The sentinel MUST be enqueued even when the async iteration
            # raises; otherwise the consuming loop below blocks forever on
            # queue.get(). (The original only enqueued it on success.)
            try:
                async for event in events:
                    queue.put(f'data: {json.dumps(event, ensure_ascii=False)}\n\n')
            finally:
                queue.put(None)

        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

        # Single dedicated worker: exactly one coroutine is submitted.
        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
            future = executor.submit(loop.run_until_complete, pump())
            try:
                while True:
                    event = queue.get()
                    if event is None:
                        break
                    yield event
                # Re-raise any exception from the async pump instead of
                # silently dropping it in the worker thread.
                future.result()
            finally:
                # Close the loop on all exit paths (the original leaked it
                # whenever the consumer loop raised).
                loop.close()
