from langgraph.graph.state import CompiledStateGraph
from langchain_core.messages import AIMessage
from jinja2 import Template
import plotly
import logging

# Use getLogger so this logger is registered in the logging hierarchy and
# picks up handler/level configuration (basicConfig, dictConfig, ...).
# Instantiating logging.Logger directly creates a detached logger with no
# parent, which silently ignores application-wide logging configuration.
logger = logging.getLogger(__name__)

# Jinja2 template that renders a tool invocation (tool name + its arguments)
# as a Markdown snippet for display in the chat transcript.
# NOTE(review): tool_args is interpolated with plain str() formatting, so the
# ```json fence is cosmetic — the content is a Python repr, not guaranteed
# valid JSON. Confirm whether downstream consumers ever parse it.
TOOL_TEMPLATE = Template("""
Called Tool Name: 
```text
{{ tool_name }}
```
With Args: 
```json
{{ tool_args }}
```
""")

def get_dict_key(dct, keys):
    """Walk *dct* along the path given by *keys* and return the nested value.

    Returns None as soon as any key along the path is absent. With an empty
    *keys* sequence, *dct* itself is returned unchanged.
    """
    node = dct
    for step in keys:
        if step not in node:
            # Path broken at this level: report absence instead of raising.
            return None
        node = node[step]
    return node

class GraphRunner:
    """Streams a compiled LangGraph run as a flat list of typed message dicts.

    Wraps a CompiledStateGraph and translates its ``astream_events`` stream
    into ``{"type": ..., "content": ...}`` dicts suitable for incremental
    rendering in a chat UI. Consecutive token chunks are merged in place so
    the consumer can update a single growing text message.
    """

    def __init__(self, graph_runnable: "CompiledStateGraph", config):
        """Store the compiled graph and the run config forwarded to it.

        Args:
            graph_runnable: compiled LangGraph to execute.
            config: run configuration passed through to ``astream_events``.
        """
        self.graph_runnable = graph_runnable
        self.config = config
        self.messages = []  # accumulated message dicts, in arrival order

    def add_message(self, message):
        """Append *message*, merging consecutive ``"chunk"`` messages.

        Streamed token chunks are concatenated onto the previous message so
        the UI sees one growing text block instead of many tiny ones.

        Raises:
            ValueError: if *message* is not a dict.
        """
        if not isinstance(message, dict):
            raise ValueError("Message must be a dict")

        # Merge only when both the incoming and the previous message are chunks;
        # an empty history always appends (short-circuit avoids IndexError).
        if self.messages and message["type"] == "chunk" == self.messages[-1]["type"]:
            self.messages[-1]["content"] += message["content"]
        else:
            self.messages.append(message)

    async def astream(self, st_messages):
        """Run the graph on *st_messages*, yielding indexed message dicts.

        Each yielded dict is ``{"index": i, "type": ..., "content": ...}``
        where *index* points into ``self.messages``. Because chunk merges
        reuse the last index, the consumer can update a message in place.

        Any exception from the underlying stream is converted into a single
        ``{"type": "error", ...}`` message instead of propagating, so the UI
        can display it.
        """
        try:
            async for message in self._astream(st_messages, self.config):
                self.add_message(message)
                index = len(self.messages) - 1
                yield {"index": index, **self.messages[index]}
        except GeneratorExit:
            # Consumer closed this generator: propagate so cleanup proceeds.
            # (GeneratorExit derives from BaseException, so the handler below
            # would not catch it anyway — this clause just makes that explicit.)
            raise
        except Exception as e:
            import traceback
            error_msg = f"❌ Error occurred during query processing: {str(e)}\n{traceback.format_exc()}"
            # NOTE(review): presumably meant to surface via Streamlit; left
            # disabled because `st` is not imported in this module.
            # st.error(error_msg)
            msg = {"type": "error", "content": error_msg}
            self.add_message(msg)
            index = len(self.messages) - 1
            yield {"index": index, **self.messages[index]}

    async def _astream(self, st_messages, config):
        """Translate the graph's event stream into typed message dicts.

        Yields dicts with one of the types:
          - ``"chunk"``: incremental model text from the "agent" node
          - ``"tool_call_start"``: rendered tool name and arguments
          - ``"tool_call_end"``: a finished tool's output content
          - ``"artifact"``: a plotly figure rebuilt from a JSON artifact
          - ``"usage_metadata"``: token-usage info from a finished model call
        """
        # Stream events from the graph_runnable asynchronously
        async for event in self.graph_runnable.astream_events({"messages": st_messages}, config):
            kind = event["event"]  # Determine the type of event received

            if kind == "on_chat_model_stream":
                # Only forward tokens produced by the "agent" node.
                if event["metadata"]["langgraph_node"] == "agent":
                    addition = event["data"]["chunk"].content  # new content chunk
                    yield {"type": "chunk", "content": addition}

            elif kind == "on_tool_start":
                # A tool is about to be called: render its name and arguments.
                tool_call_str = TOOL_TEMPLATE.render(tool_name=event['name'], tool_args=event['data'].get('input'))
                yield {"type": "tool_call_start", "content": tool_call_str}

            elif kind == "on_tool_end":
                # A tool finished; surface its output (and any plotly artifact).
                if 'output' in event['data']:
                    event_output = event['data'].get('output')
                    # ToolMessage-like outputs carry .content; plain values pass through.
                    if hasattr(event_output, "content"):
                        content = event_output.content
                    else:
                        content = event_output
                    yield {"type": "tool_call_end", "content": content}

                    if hasattr(event_output, "artifact") and event_output.artifact is not None:
                        # assumes the artifact is a plotly figure serialized as
                        # JSON — TODO confirm against the tool implementations
                        fig = plotly.io.from_json(event_output.artifact)
                        yield {"type": "artifact", "content": fig}

            elif kind == "on_chat_model_end":
                message = get_dict_key(event, ['data', 'output'])
                if isinstance(message, AIMessage):
                    meta_info = get_dict_key(message.model_dump(), ['usage_metadata'])
                    if meta_info:
                        yield {"type": "usage_metadata", "content": meta_info}
            else:
                # Unhandled event kinds are logged for debugging, not dropped silently.
                logger.debug(kind)
                