import importlib
import inspect
from typing import Dict, List, Optional, Any, Union
from dataclasses import dataclass, field

from redis.asyncio import Redis as AsyncRedis

from ...config import get_env_config
from ...config.agents import LLMType
from ..nodes.base import BaseNodes
from ..base import BaseGraph
from ...llms import LLMConfig, InsCodeModel
from ...mcps import MCPClientManager
from ...utils.redis import AsyncRedisClient


@dataclass
class NodeConfig:
    """Declarative configuration for a single node inside a graph.

    Attributes are populated from the JSON graph definition (see
    ``GraphConfig.from_json``) and consumed by
    ``GraphBuilder._create_node_instance``.
    """
    # "ClassName" or "ClassName.method_name"; resolved against the
    # node-name -> class map built by GraphBuilder.
    name: str
    # Discriminator used by GraphConfig.from_json ("node" vs "graph").
    type: str = "node"
    # Optional per-node LLM overrides keyed by role: "chat" / "vl" / "image".
    llm: Dict[str, str] = field(default_factory=dict)
    # Extra constructor kwargs forwarded to the node class.
    params: Dict[str, str] = field(default_factory=dict)


@dataclass
class GraphConfig:
    """Declarative configuration for a (possibly nested) graph.

    Instances are usually built from a JSON-like dict via :meth:`from_json`
    and consumed by ``GraphBuilder.build_graph``.
    """
    name: str
    type: str
    # Child nodes; a child may itself be a nested GraphConfig (sub-graph).
    nodes: List[Union[NodeConfig, 'GraphConfig']] = field(default_factory=list)
    template: Optional[str] = None
    # Node(s) prepended/appended around the graph's own nodes.
    prefix_node: Optional[Union[NodeConfig, List[NodeConfig]]] = None
    suffix_node: Optional[Union[NodeConfig, List[NodeConfig]]] = None
    # Graph-level LLM overrides keyed by role: "chat" / "vl" / "image".
    llm: Dict[str, str] = field(default_factory=dict)
    graph_config: Dict[str, Any] = field(default_factory=dict)
    parse_tool_args: bool = True
    # Whether the sub-graph loops back to its parent when it finishes.
    sub_graph_is_loop: bool = False

    @classmethod
    def from_json(cls, json_data: Dict[str, Any]) -> 'GraphConfig':
        """Build a GraphConfig (recursively) from its plain-dict JSON form.

        Args:
            json_data: Dict with at least ``name`` and ``type``; may carry
                ``nodes``, ``prefix_node``/``suffix_node``, ``template``,
                ``llm``, ``graph_config``, ``parse_tool_args`` and
                ``sub_graph_is_loop``.

        Returns:
            The populated GraphConfig.

        Raises:
            KeyError: If a required field (``name``, ``type``) is missing.
        """
        nodes: List[Union[NodeConfig, 'GraphConfig']] = []
        for node_data in json_data.get('nodes') or []:
            if node_data.get('type') == 'node':
                nodes.append(NodeConfig(
                    name=node_data['name'],
                    type=node_data['type'],
                    llm=node_data.get('llm', {}),
                    params=node_data.get('params', {})
                ))
            elif node_data.get('type') == 'graph':
                # Nested sub-graph: recurse.
                nodes.append(cls.from_json(node_data))

        def parse_node_config(node_data):
            # Accepts a single dict, or a list whose items are either bare
            # name strings or dicts with optional llm/params overrides.
            if isinstance(node_data, list):
                return [
                    NodeConfig(name=n, type="node", llm={}) if isinstance(n, str)
                    else NodeConfig(name=n['name'], type="node", llm=n.get('llm', {}), params=n.get('params', {}))
                    for n in node_data
                ]
            # Fix: honor 'params' on a single-dict node too; it was silently
            # dropped here, inconsistent with the list branch above.
            return NodeConfig(
                name=node_data['name'],
                type="node",
                llm=node_data.get('llm', {}),
                params=node_data.get('params', {})
            )

        prefix_node = parse_node_config(json_data['prefix_node']) if json_data.get('prefix_node') else None
        suffix_node = parse_node_config(json_data['suffix_node']) if json_data.get('suffix_node') else None

        return cls(
            name=json_data['name'],
            type=json_data['type'],
            template=json_data.get('template'),
            nodes=nodes,
            prefix_node=prefix_node,
            suffix_node=suffix_node,
            # Fix: fall back to empty dicts instead of passing None, which
            # previously violated the declared Dict field types and forced
            # every consumer to re-check for None.
            llm=json_data.get('llm') or {},
            graph_config=json_data.get('graph_config') or {},
            parse_tool_args=json_data.get('parse_tool_args', True),
            sub_graph_is_loop=json_data.get('sub_graph_is_loop', False)
        )


class GraphBuilder:
    """Builds ``BaseGraph`` instances (and their nodes) from a declarative
    ``GraphConfig``.

    Node and graph classes are discovered dynamically: the configured
    modules are imported for their side effects, then subclasses of
    ``BaseNodes`` / ``BaseGraph`` are collected into name -> class maps.
    """

    def __init__(
        self,
        graph_config: Optional[Union[GraphConfig, Dict[str, Any]]],
        redis: Optional[AsyncRedis] = None,
        nodes_module_path: str = ".nodes",
        graphs_module_path: str = "...graph"
    ):
        """
        Args:
            graph_config: Root graph configuration, either a ``GraphConfig``
                or its plain-dict (JSON) form.
            redis: Existing async Redis client; when omitted a new one is
                created from the ``redis`` section of the env config.
            nodes_module_path: Module imported to register node classes.
            graphs_module_path: Module imported to register graph classes.
        """

        # Reuse the caller's Redis client when provided, otherwise build one
        # from environment configuration.
        self.redis = redis if redis is not None else AsyncRedisClient(**get_env_config("redis")).redis
        self.graph_config = GraphConfig.from_json(graph_config) if isinstance(graph_config, dict) else graph_config

        # Builder-level default LLM clients (chat / vision-language / image).
        # Each may stay None when no config exists for that type; per-graph
        # and per-node configs can override them later.
        chat_config = LLMConfig.build_from_type(LLMType.LLM_CHAT)
        self.llm_chat = InsCodeModel(config=chat_config) if chat_config else None
        vl_config = LLMConfig.build_from_type(LLMType.LLM_VL)
        self.llm_vl = InsCodeModel(config=vl_config) if vl_config else None
        image_config = LLMConfig.build_from_type(LLMType.LLM_IMAGE)
        self.llm_image = InsCodeModel(config=image_config) if image_config else None

        # Name -> class maps used to resolve config entries to classes.
        self.node_name_class_map = self._load_node_classes(nodes_module_path)
        self.graph_name_class_map = self._load_graph_classes(graphs_module_path)

    def build_graph(
        self,
        config: Optional[Union[GraphConfig, Dict[str, Any]]] = None,
        mcp_client_manager: Optional[MCPClientManager] = None,
        parent_graph: Optional[BaseGraph] = None,
        **kwargs
    ) -> BaseGraph:
        """Recursively build a graph instance from configuration.

        Args:
            config: Graph config (object or dict); defaults to the builder's
                root ``self.graph_config``.
            mcp_client_manager: Shared MCP client manager, forwarded to
                sub-graphs and nodes.
            parent_graph: Parent graph for sub-graph builds.
                NOTE(review): this argument is accepted and passed down to
                recursive calls, but its value is never read in this method —
                confirm whether it is vestigial or consumed elsewhere.
            **kwargs: Extra kwargs forwarded to node creation.

        Returns:
            The constructed graph instance.
        """

        graph_config = config or self.graph_config

        # Accept the raw JSON form as well.
        if isinstance(graph_config, dict):
            graph_config = GraphConfig.from_json(graph_config)

        # Unknown graph names fall back to the generic BaseGraph.
        graph_class = self.graph_name_class_map.get(graph_config.name, BaseGraph)

        # Graph-level LLM overrides; fall back to the builder-wide defaults.
        llm_config = graph_config.llm or {}
        chat_config = LLMConfig.build_from_name(llm_config.get("chat")) if llm_config.get("chat") else None
        graph_llm_chat = InsCodeModel.from_config(chat_config) if chat_config else self.llm_chat
        vl_config = LLMConfig.build_from_name(llm_config.get("vl")) if llm_config.get("vl") else None
        graph_llm_vl = InsCodeModel.from_config(vl_config) if vl_config else self.llm_vl
        image_config = LLMConfig.build_from_name(llm_config.get("image")) if llm_config.get("image") else None
        graph_llm_image = InsCodeModel.from_config(image_config) if image_config else self.llm_image

        # Constructor kwargs for graph_class; extended in-place below
        # (nodes, prefix/suffix nodes, graph_config, template, language).
        graph_param = {
            "redis": self.redis,
            "llm_chat": graph_llm_chat,
            "llm_vl": graph_llm_vl,
            "llm_image": graph_llm_image,
            "parse_tool_args": graph_config.parse_tool_args,
            "sub_graph_is_loop": graph_config.sub_graph_is_loop,
            "mcp_client": mcp_client_manager
        }

        # Instantiate child nodes; nested "graph" entries recurse.
        graph_nodes = []
        for node_config in graph_config.nodes or []:
            if node_config.type == "node":
                node_instance = self._create_node_instance(
                    node_config,
                    llm_chat=graph_llm_chat,
                    llm_vl=graph_llm_vl,
                    llm_image=graph_llm_image,
                    mcp_client_manager=mcp_client_manager,
                    **kwargs
                )
                if node_instance:
                    graph_nodes.append(node_instance)
            elif node_config.type == "graph":
                # NOTE(review): a throwaway graph_class instance is created
                # here solely to pass as parent_graph, which the recursive
                # call never reads — confirm whether graph_class.__init__
                # side effects are relied upon before removing this.
                current_graph_instance = graph_class(**graph_param)
                graph_instance = self.build_graph(
                    config=node_config,
                    mcp_client_manager=mcp_client_manager,
                    parent_graph=current_graph_instance,
                    **kwargs
                )
                if graph_instance:
                    graph_nodes.append(graph_instance)

        if graph_nodes:
            graph_param["nodes"] = graph_nodes

        # Build prefix/suffix node instances; a single NodeConfig is
        # normalized to a one-element list first.
        for node_type in ["prefix_node", "suffix_node"]:
            node_config_list = getattr(graph_config, node_type, None)
            if not node_config_list:
                continue

            if not isinstance(node_config_list, list):
                node_config_list = [node_config_list]

            instances = []
            for item in node_config_list:
                node_instance = self._create_node_instance(
                    node_config=item,
                    llm_chat=graph_llm_chat,
                    llm_vl=graph_llm_vl,
                    llm_image=graph_llm_image
                )
                if node_instance:
                    instances.append(node_instance)

            if instances:
                graph_param[node_type] = instances

        if graph_config.graph_config:
            graph_param['graph_config'] = graph_config.graph_config

        if graph_config.template:
            graph_param["agent_template"] = graph_config.template

        # Template language comes from the chat model's extension config
        # when present; defensively guarded because graph_llm_chat may be
        # None or lack config/ext.
        graph_param["agent_template_language"] = graph_llm_chat.config.ext.get('language') if hasattr(graph_llm_chat,
                                                                                                      'config') and graph_llm_chat.config and graph_llm_chat.config.ext else None

        return graph_class(**graph_param)

    def _create_node_instance(
        self,
        node_config: NodeConfig,
        llm_chat: Optional[InsCodeModel] = None,
        llm_vl: Optional[InsCodeModel] = None,
        llm_image: Optional[InsCodeModel] = None,
        mcp_client_manager: Optional[MCPClientManager] = None,
        **kwargs
    ) -> Any:
        """
        Create a node instance (or a bound method of one).

        A node name of the form "Class.method" resolves to the bound method
        of a fresh Class instance; a plain "Class" resolves to the instance
        itself. Only constructor parameters the node class actually declares
        are passed (see _get_constructor_params).

        Args:
            node_config: Node configuration
            llm_chat: Default chat LLM client (overridable per node config)
            llm_vl: Default vision-language LLM client
            llm_image: Default image LLM client
            mcp_client_manager: MCP client manager
            **kwargs: Accepted for call-site compatibility; not used here.

        Returns:
            Node instance, or its bound method when a method name was given

        Raises:
            ValueError: If the name is missing, the class is unknown, or the
                requested method does not exist on the class.
        """
        node_name = node_config.name
        if not node_name:
            raise ValueError("Node config missing name field")

        # Split "Class.method" into its parts; plain names have no method.
        if '.' in node_name:
            class_name, method_name = node_name.split('.', 1)
        else:
            class_name, method_name = node_name, None

        if class_name not in self.node_name_class_map:
            raise ValueError(f"Node class not found: {class_name}")

        node_class = self.node_name_class_map[class_name]

        if method_name and not hasattr(node_class, method_name):
            raise ValueError(f"Node {class_name} has no method {method_name}")

        # Node-level LLM overrides; fall back to the graph-level clients.
        node_chat_config = LLMConfig.build_from_name(node_config.llm.get("chat")) if node_config.llm.get(
            "chat") else None
        node_llm_chat = InsCodeModel.from_config(node_chat_config) if node_chat_config else llm_chat
        node_vl_config = LLMConfig.build_from_name(node_config.llm.get("vl")) if node_config.llm.get("vl") else None
        node_llm_vl = InsCodeModel.from_config(node_vl_config) if node_vl_config else llm_vl
        node_image_config = LLMConfig.build_from_name(node_config.llm.get("image")) if node_config.llm.get(
            "image") else None
        node_llm_image = InsCodeModel.from_config(node_image_config) if node_image_config else llm_image

        # Only pass kwargs the node's __init__ actually declares; explicit
        # node_config.params win over the injected LLM/MCP defaults.
        param_keys = self._get_constructor_params(node_class)
        if param_keys:
            available_params = {
                "llm_chat": node_llm_chat,
                "llm_vl": node_llm_vl,
                "llm_image": node_llm_image,
                "mcp_client_manager": mcp_client_manager
            }
            params = {key: available_params[key] for key in param_keys if key in available_params}

            if node_config.params:
                for key, value in node_config.params.items():
                    if key in param_keys:
                        params[key] = value
            node_instance = node_class(**params)
        else:
            node_instance = node_class()

        return getattr(node_instance, method_name) if method_name else node_instance

    def _load_node_classes(self, nodes_module_path: str) -> Dict[str, type]:
        """
        Dynamically load node classes.

        The module is imported only for its registration side effect; the
        map is then built from BaseNodes' direct subclasses (note: indirect
        subclasses are not included).

        Args:
            nodes_module_path: Node module path

        Returns:
            Mapping from node class name to class
        """
        try:
            importlib.import_module(nodes_module_path)
        except Exception:
            # Best-effort import: classes already registered elsewhere are
            # still picked up below. NOTE(review): failures are swallowed
            # silently — consider logging the exception.
            pass

        node_name_class_map = {}
        for cls in BaseNodes.__subclasses__():
            node_name_class_map[cls.__name__] = cls

        return node_name_class_map

    def _load_graph_classes(self, graphs_module_path: str) -> Dict[str, type]:
        """
        Dynamically load graph classes.

        Same mechanism as _load_node_classes, but for BaseGraph's direct
        subclasses.

        Args:
            graphs_module_path: Graph module path

        Returns:
            Mapping from graph class name to class
        """
        try:
            importlib.import_module(graphs_module_path)
        except Exception:
            # Best-effort import; see _load_node_classes for rationale.
            pass

        graph_name_class_map = {}
        for cls in BaseGraph.__subclasses__():
            graph_name_class_map[cls.__name__] = cls

        return graph_name_class_map

    def _get_constructor_params(self, cls: type) -> List[str]:
        """Return the named parameters of ``cls.__init__``.

        Excludes ``self`` and variadic (*args/**kwargs) parameters, so the
        result is the set of keyword names that can safely be passed.
        """
        init = cls.__init__
        sig = inspect.signature(init)

        params = []
        for name, param in sig.parameters.items():
            if name == 'self':
                continue
            if param.kind in (param.VAR_POSITIONAL, param.VAR_KEYWORD):
                continue
            params.append(name)

        return params

    async def destroy(self):
        """
        Clean up GraphBuilder resources to prevent memory leaks.
        This method should be called when the builder is no longer needed.

        Closes the three LLM clients and the Redis connection (Redis close
        errors are logged but not raised), then drops cached references.
        Any other error is logged and re-raised.
        """
        try:
            from loguru import logger
            logger.debug("Starting cleanup for GraphBuilder")

            # Clean up chat LLM client
            if self.llm_chat:
                await self.llm_chat.close()
                self.llm_chat = None

            # Clean up vision LLM client
            if self.llm_vl:
                await self.llm_vl.close()
                self.llm_vl = None

            # Clean up image LLM client
            if self.llm_image:
                await self.llm_image.close()
                self.llm_image = None

            # Clean up Redis connection
            if self.redis:
                try:
                    await self.redis.aclose()
                    logger.debug("Redis connection closed successfully")
                except Exception as e:
                    # Redis close failure is non-fatal; the reference is
                    # still dropped in finally.
                    logger.warning(f"Error closing Redis connection: {e}")
                finally:
                    self.redis = None

            # Clear other references
            self.graph_config = None
            self.node_name_class_map = {}
            self.graph_name_class_map = {}

            logger.debug("GraphBuilder cleanup completed successfully")

        except Exception as e:
            from loguru import logger
            logger.error(f"Error during GraphBuilder cleanup: {e}")
            raise
