import json
import time
from typing import List, Optional

from langchain_core.messages import SystemMessage
from langchain_core.runnables import RunnableConfig
from langchain_openai.chat_models.base import BaseChatOpenAI
from langgraph.types import StreamWriter
from loguru import logger
from redis import Redis

from ..graph.base import BaseGraph
from ..models.graph import GraphRequest, GraphState, GraphNodeParam, AgentStepLog
from ..parser import JSONObjOutputParser
from ..utils.str import print_message
from ..config.agents import AgentActions, SUPERVISOR_ROUTE_RESULT


class SupervisorGraph(BaseGraph):
    """Supervisor graph that routes incoming requests to worker sub-graphs.

    Routing happens one of two ways:
      * directly, when the request names a specific agent (``request.agent_name``), or
      * via an LLM call that chooses the next worker from the registered ``nodes``.

    Each routing decision is streamed to the caller as an ``AgentStepLog`` tool
    action and returned as a ``goto`` node parameter for the graph runtime.
    """

    # LLM reserved for supervisor-specific prompting. Currently unused by this
    # class — routing uses ``self.llm_chat`` inherited from BaseGraph.
    # NOTE(review): annotation corrected to Optional since the default is None.
    llm_supervisor: Optional[BaseChatOpenAI] = None

    def __init__(self,
                 redis: Optional[Redis] = None,
                 nodes: Optional[List[BaseGraph]] = None,
                 agent_template: str = "agent_supervisor",
                 **kwargs):
        """
        Args:
            redis: Optional Redis client, forwarded to ``BaseGraph``.
            nodes: Worker graphs this supervisor can route to. Defaults to
                None instead of a mutable ``[]`` to avoid the shared
                mutable-default-argument pitfall; normalized to a fresh
                list below.
            agent_template: Prompt template name for the supervisor agent.
            **kwargs: Forwarded to ``BaseGraph.__init__``.
        """
        super().__init__(graph_name="agent_supervisor_",
                         agent_template=agent_template,
                         redis=redis,
                         **kwargs)
        # Falsy values (None or empty list) normalize to a new empty list.
        self.nodes = nodes if nodes else []

    async def start(self, state: GraphState, writer: StreamWriter, *, config: Optional[RunnableConfig] = None):
        """Entry point for the supervisor node.

        Routes directly when the request pins a specific agent, otherwise
        delegates the routing decision to the LLM.

        Default implementation of the start method that can be overridden by
        subclasses if needed.
        """
        request = GraphRequest.from_runnable_config(config)

        if request.agent_name:
            return await self._route_to_specific_agent(request.agent_name, state, writer)

        return await self._route_using_llm(state, writer, request)

    async def _route_to_specific_agent(self, agent_name: str, state: GraphState, writer: StreamWriter):
        """Route to a specific agent based on the provided agent name.

        Args:
            agent_name: ``graph_name`` of the target worker node.
            state: Current graph state (usages/ext are passed through).
            writer: Stream writer used to emit the routing step log.

        Returns:
            Dict with ``goto`` node param plus pass-through ``usages``/``ext``.

        Raises:
            ValueError: If no registered node matches ``agent_name``.
        """
        for node in self.nodes:
            if isinstance(node, BaseGraph) and node.graph_name == agent_name:
                # Graph names appear to end with "_" (e.g. "agent_supervisor_"),
                # so "{name}start" yields "...{name}_start" — TODO confirm.
                goto = GraphNodeParam(type=f"{agent_name}start")

                writer(AgentStepLog.build_tool_action(
                    meta={"finish": True, "usages": state.get_usages()},
                    action=AgentActions.AGENT_ROUTE.value,
                    output=json.dumps({"name": agent_name, "params": {}}, ensure_ascii=False)
                ))

                logger.info(f"Directly routing to specified agent: {goto.type}")
                return {
                    "goto": goto,
                    "usages": state.usages,
                    "ext": state.ext
                }

        error_msg = f"Specified agent '{agent_name}' does not exist"
        logger.error(error_msg)
        raise ValueError(error_msg)

    async def _route_using_llm(self, state: GraphState, writer: StreamWriter, request: GraphRequest):
        """Use LLM to decide which agent to route to next.

        Invokes ``self.llm_chat`` with the supervisor system prompt plus the
        conversation history, parses a JSON object with a ``next`` key, records
        token usage, and emits the routing decision to the stream writer.

        Returns:
            Dict with ``goto`` node param plus pass-through ``usages``/``ext``.
        """
        system_template = await self.get_system_template(request)
        messages = [SystemMessage(content=system_template)] + state.messages

        if self.verbose:
            print_message(messages)

        start_time = time.time()
        chain = self.llm_chat | JSONObjOutputParser()
        # NOTE(review): synchronous invoke() inside an async method blocks the
        # event loop for the duration of the LLM call — consider ainvoke() if
        # JSONObjOutputParser supports async; verify before changing.
        result, usage = chain.invoke(messages)
        # Fallback worker when the parser yields nothing — presumably the
        # general chat agent; TODO confirm this default is still desired.
        result = result if result else {'next': 'agent_gitcode_chat_'}
        next_worker = result.get("next", "END")

        state.add_usage(
            model=self.llm_chat.model_name,
            input_tokens=usage.get('input_tokens', 0),
            output_tokens=usage.get('output_tokens', 0)
        )
        # Expose the raw routing decision to downstream nodes via state.ext.
        state.ext[SUPERVISOR_ROUTE_RESULT] = result

        logger.info(f'SupervisorGraph next step={next_worker} AI result={result} elapsed_time={(time.time() - start_time):.3f}s')

        goto = GraphNodeParam(type=self.graph_end if next_worker == "END" else f"{next_worker}start")

        writer(AgentStepLog.build_tool_action(
            action=AgentActions.AGENT_ROUTE.value,
            meta={"finish": True, "usages": state.get_usages()},
            output=json.dumps({
                "name": next_worker,
                # Everything except the routing key is treated as params
                # for the chosen worker.
                "params": {k: v for k, v in result.items() if k != "next"}
            }, ensure_ascii=False)
        ))

        logger.info(f"Supervisor routing to graph: {goto.type}")
        return {
            "goto": goto,
            "usages": state.usages,
            "ext": state.ext
        }

    async def get_system_template(self, request, state=None):
        """Build the supervisor system prompt.

        Override of the base implementation that injects an ``agents_desc``
        block describing each registered worker (name + class docstring)
        before delegating to ``BaseGraph.get_system_template``.

        Args:
            request: The incoming GraphRequest; its ``kwargs`` are mutated
                to carry ``agents_desc``.
            state: Unused here; kept for signature compatibility with the
                base class — TODO confirm base signature.
        """
        agents_desc = []
        for node in self.nodes:
            if isinstance(node, BaseGraph):
                doc = node.__class__.__doc__ or ""
                agents_desc.append(f"worker: {node.graph_name}\nworker description: {doc.strip()}")

        request.kwargs["agents_desc"] = "\n\n".join(agents_desc)

        return await super().get_system_template(request)
