import asyncio
from abc import ABC
from functools import wraps
from typing import Type, List, Optional

from inspect import getmembers, ismethod

from ...llms import LLMConfig, InsCodeModel
from ...utils.str import get_message_content
from langchain_core.messages import HumanMessage, BaseMessage
from langgraph.types import interrupt
from pydantic import BaseModel
from loguru import logger


def tool_node(args_schema: Optional[Type[BaseModel]] = None,
              args_parser: Optional[dict[str, callable]] = None,
              stream: bool = True,
              interrupt: bool = False,
              execution_mode: str = "client"):
    """Decorator that marks a method as a graph tool node.

    Wraps the target (preserving sync/async calling convention) and tags the
    wrapper with the metadata attributes that ``BaseNodes.get_nodes`` /
    ``get_tool_methods`` discover via ``hasattr(method, 'is_tool')``.

    Args:
        args_schema: Optional pydantic model describing the tool's arguments.
        args_parser: Optional mapping of argument name -> parser callable.
        stream: Whether the node's output may be streamed.
        interrupt: Whether the node should interrupt for human feedback.
            NOTE: this parameter shadows the ``langgraph`` ``interrupt``
            import inside this decorator's scope only.
        execution_mode: Where the tool executes (defaults to ``"client"``).
    """
    def decorator(func):
        # Preserve the callable's sync/async nature with a thin pass-through.
        if asyncio.iscoroutinefunction(func):
            @wraps(func)
            async def wrapper(*call_args, **call_kwargs):
                return await func(*call_args, **call_kwargs)
        else:
            @wraps(func)
            def wrapper(*call_args, **call_kwargs):
                return func(*call_args, **call_kwargs)

        # Attach discovery metadata consumed by BaseNodes.get_nodes().
        metadata = {
            "is_tool": True,
            "args_schema": args_schema,
            "args_parser": args_parser or {},
            "stream": stream,
            "interrupt": interrupt,
            "execution_mode": execution_mode,
        }
        for attr_name, attr_value in metadata.items():
            setattr(wrapper, attr_name, attr_value)
        return wrapper

    return decorator


class BaseNodes(ABC):
    """Abstract base for a collection of graph nodes backed by LLM models.

    Subclasses define methods decorated with ``@tool_node``; those methods are
    discovered reflectively by :meth:`get_nodes` / :meth:`get_tool_methods`.
    Call :meth:`destroy` when the instance is no longer needed so the
    underlying LLM clients are released.
    """

    # Class-level defaults; instances overwrite these in __init__.
    llm_chat: InsCodeModel = None
    llm_vl: InsCodeModel = None
    llm_image: InsCodeModel = None
    verbose: bool = False
    parallel: bool = False

    def __init__(self, llm_chat: LLMConfig | InsCodeModel = None,
                 llm_vl: InsCodeModel | LLMConfig = None,
                 llm_image: InsCodeModel | LLMConfig = None,
                 verbose: bool = False,
                 parallel: bool = False,
                 **kwargs):
        """Build the LLM clients and store any extra config as attributes.

        Args:
            llm_chat: Chat model (config or prebuilt client).
            llm_vl: Vision-language model (config or prebuilt client).
            llm_image: Image-generation model (config or prebuilt client).
            verbose: Enable verbose behavior in subclasses.
            parallel: Enable parallel behavior in subclasses.
            **kwargs: Arbitrary extra attributes set directly on the instance.
        """
        self.llm_chat = InsCodeModel.build(llm_chat)
        self.llm_vl = InsCodeModel.build(llm_vl)
        self.llm_image = InsCodeModel.build(llm_image)
        self.verbose = verbose
        self.parallel = parallel

        for k, v in kwargs.items():
            setattr(self, k, v)

    def get_nodes(self) -> List[dict]:
        """Return metadata dicts for every ``@tool_node``-decorated method."""
        return [
            {
                "name": name,
                "func": method,
                "args_schema": method.args_schema,
                "args_parser": method.args_parser,
                "stream": method.stream,
                "interrupt": method.interrupt,
                # Older decorators may lack execution_mode; default to client.
                "execution_mode": getattr(method, 'execution_mode', 'client'),
            }
            for name, method in getmembers(self, predicate=ismethod)
            if hasattr(method, 'is_tool')
        ]

    def get_tool_methods(self) -> List[callable]:
        """Return the bound methods marked as tool nodes."""
        return [
            method
            for _, method in getmembers(self, predicate=ismethod)
            if hasattr(method, 'is_tool')
        ]

    def wait_human_feedback(self, interrupt_msg: str = "interrupt", **kwargs):
        """Pause the graph via langgraph's interrupt and wrap the human reply.

        Returns a state update whose ``messages`` contains the resumed value
        as a :class:`HumanMessage`.
        """
        value = interrupt(interrupt_msg)
        return {
            "messages": [HumanMessage(content=value)],
            **kwargs
        }

    def node_success(self, message: str | BaseMessage | List[BaseMessage] = None, **kwargs):
        """Build a success state update, normalizing ``message`` to a list.

        A falsy ``message`` yields an update without a ``messages`` key.
        ``force_goto`` is defaulted to None unless the caller sets it.
        """
        if "force_goto" not in kwargs:
            kwargs["force_goto"] = None
        if not message:
            return {**kwargs}

        if isinstance(message, str):
            messages = [HumanMessage(content=message)]
        elif isinstance(message, BaseMessage):
            messages = [message]
        else:
            messages = message

        return {
            "messages": messages,
            **kwargs
        }

    def node_failure(self, message: str | BaseMessage | List[BaseMessage] = None, **kwargs):
        """Build a failure state update, prefixing plain strings with an error tag.

        A falsy ``message`` yields an update without a ``messages`` key
        (previously this produced ``"messages": None``, which is never a
        valid message list — now consistent with :meth:`node_success`).
        """
        if "force_goto" not in kwargs:
            kwargs["force_goto"] = None
        # Bug fix: omit the messages key entirely for a falsy message instead
        # of emitting "messages": None.
        if not message:
            return {**kwargs}

        if isinstance(message, str):
            messages = [HumanMessage(content=f"Result error: {message}")]
        elif isinstance(message, BaseMessage):
            messages = [message]
        else:
            messages = message

        return {
            "messages": messages,
            **kwargs
        }

    async def destroy(self):
        """
        Clean up node resources to prevent memory leaks.
        This method should be called when the node is no longer needed.
        """
        try:
            # Bug fix: previously only llm_chat was closed, leaking the
            # llm_vl and llm_image clients built in __init__.
            for attr in ("llm_chat", "llm_vl", "llm_image"):
                llm = getattr(self, attr, None)
                if llm:
                    await llm.close()
                    setattr(self, attr, None)

            await self._cleanup_custom_resources()
        except Exception as e:
            logger.error(f"Error during node cleanup: {e}")

    async def _cleanup_custom_resources(self):
        """
        Override this method in subclasses to clean up custom resources.
        This is called by the destroy method.
        """
        pass

    def _extract_user_question(self, messages) -> str:
        """Return the content of the first HumanMessage flagged as a question.

        A message qualifies when its ``additional_kwargs`` has
        ``is_question=True``; returns ``""`` when none matches.
        """
        for msg in messages:
            if (isinstance(msg, HumanMessage) and
                msg.additional_kwargs and
                msg.additional_kwargs.get("is_question", False)):
                return get_message_content(msg)
        return ""