import re
from typing import Generator, List, Dict, Set, Iterator, Optional, Literal, AsyncIterator
from pydantic import BaseModel

from langchain_core.messages import BaseMessageChunk, AIMessage
from ..models.graph import GraphNodeParam, AgentStepLog, GraphState, Usage


class XmlToolResultParser:
    """Incrementally parse XML-style tool-call markup out of a streamed LLM response.

    The parser recognises a set of tool tag names (the built-in ``thinking`` and
    ``subagent_toolkit`` plus whatever is registered at construction time) and
    turns ``<tool>...</tool>`` spans into ``GraphNodeParam`` objects while
    passing plain text through as answer chunks.
    """

    # Built-in tag names that are always recognised in addition to registered tools.
    tool_thinking: str = "thinking"
    tool_subagent: str = "subagent_toolkit"

    def __init__(self, tools: Dict[str, dict]):
        """Register the tools whose XML tags should be recognised.

        Args:
            tools: mapping of tool name -> info dict with optional keys
                ``schema`` (a pydantic model class or an already-structured
                parameter spec) and ``stream`` (whether partial tool results
                may be emitted before the closing tag; defaults to ``True``).
        """
        # NOTE: these are deliberately per-instance. The previous implementation
        # extended a class-level list, so tool names accumulated across every
        # parser instance created in the process.
        self.tools_names: List[str] = [self.tool_thinking, self.tool_subagent, *tools.keys()]
        self.tools_params: Dict[str, List] = {}
        self.tools_stream: Dict[str, bool] = {}
        for name, info in tools.items():
            self.tools_stream[name] = info.get("stream", True)
            self.tools_params[name] = self._parse_schema_params(info.get("schema"))

    def _parse_schema_params(self, schema):
        """Turn a pydantic model class into a structured parameter spec.

        Returns a list whose items are either a plain field name (scalar field)
        or a one-entry dict ``{field_name: [sub_field, ...]}`` for fields that
        are nested models or lists of nested models. A non-model ``schema`` is
        returned unchanged (it is assumed to already be such a spec).
        """
        if not (isinstance(schema, type) and issubclass(schema, BaseModel)):
            return schema
        params = []
        for field_name, field_info in schema.__fields__.items():
            field_type = field_info.annotation
            if hasattr(field_type, '__origin__') and field_type.__origin__ is list:
                # List field: expose the element model's fields when it is a model.
                inner_type = field_type.__args__[0] if field_type.__args__ else None
                if inner_type and isinstance(inner_type, type) and issubclass(inner_type, BaseModel):
                    params.append({field_name: list(inner_type.__fields__.keys())})
                else:
                    params.append(field_name)
            elif isinstance(field_type, type) and issubclass(field_type, BaseModel):
                params.append({field_name: list(field_type.__fields__.keys())})
            else:
                params.append(field_name)
        return params

    async def parse_stream(self, chunks: "AsyncIterator[BaseMessageChunk]",
                           state: "GraphState",
                           model: str = "default") -> "AsyncIterator[tuple[AgentStepLog, str]]":
        """Consume a model response stream and yield ``(AgentStepLog, raw_text)`` pairs.

        Text outside tool tags is emitted as answer chunks; recognised tool tags
        become tool / sub-agent action logs. Token usage reported by the stream
        is recorded on ``state`` and emitted as a usage action at the end.

        Args:
            chunks: async iterator of message chunks from the model.
            state: graph state used to accumulate token usage.
            model: model identifier used when recording usage metadata.
        """

        def build_agent_step_log(output, finish: bool = False,
                                 finish_reason: Optional[Literal["stop", "length"]] = "stop",
                                 usages: Optional[List["Usage"]] = None,
                                 response_id: Optional[str] = None,
                                 line: str = '') -> "AgentStepLog":
            # Dispatch the step output to the matching AgentStepLog constructor.
            _usages = [u.__dict__ for u in (usages or [])]
            meta = {"finish": finish, "finish_reason": finish_reason, "usages": _usages}

            if isinstance(output, str):
                return AgentStepLog.build_answer(chunk=output, meta=meta, response_id=response_id, response_content=line)
            if output.type == self.tool_subagent:
                return AgentStepLog.build_subagent_action(action=output.type, output=output, meta=meta, response_id=response_id, response_content=line)
            return AgentStepLog.build_tool_action(action=output.type, output=output, meta=meta, response_id=response_id, response_content=line)

        def clean_content(content: str) -> str:
            """Normalise provider-specific tool-call markers into plain XML tags."""
            content = re.sub(r'<\|tool_calls_section_begin\|>_([^>]+)', r'<\1', content)
            content = re.sub(r'<\|tool_calls_section_begin\|>', '', content)
            content = re.sub(r'<\|tool_calls_section_end\|>', '', content)
            content = re.sub(r'<\|tool_call_begin\|>_function>([^:>]+):\d+', r'<\1>', content)
            content = re.sub(r'<\|tool_call_begin\|>([^>]+)>', r'<\1>', content)
            content = re.sub(r'<\|tool_call_end\|>', '', content)
            # Convert <functions.tool_name:id format to <tool_name>
            content = re.sub(r'<functions\.([^:>]+):\d+', r'<\1>', content)
            content = re.sub(r'<_function>([^:>]+):\d+', r'<\1>', content)
            content = re.sub(r'<\|tool_call_argument_begin\|>', '', content)
            content = re.sub(r'<\|tool_call_argument_end\|>', '', content)

            for tool_name in self.tools_names:
                content = content.replace(f'<_{tool_name}>', f'<{tool_name}>')
                content = content.replace(f'_{tool_name}>', f'<{tool_name}>')

            return content

        last_line = ""
        processed: Set[str] = set()

        last_usage_metadata = None
        current_response_id = None

        async for chunk in chunks:
            # Accumulate the whole response so far and re-normalise it each time;
            # parse() works over the full accumulated text.
            last_line += chunk.content
            last_line = clean_content(last_line)

            if hasattr(chunk, 'id') and chunk.id:
                current_response_id = chunk.id
            if hasattr(chunk, 'usage_metadata') and chunk.usage_metadata:
                last_usage_metadata = chunk.usage_metadata

            for tool_result, is_content_end in self.parse(content=last_line):
                if not tool_result:
                    continue
                # Non-streaming tools are only emitted once their closing tag arrived.
                if isinstance(tool_result, GraphNodeParam) and not self.tools_stream.get(tool_result.type, True):
                    if not is_content_end:
                        continue

                # De-duplicate by rendered result + completion flag so the same
                # partial result is not emitted twice across chunks.
                result_str = f"{tool_result}_{is_content_end}"
                if result_str in processed:
                    continue

                yield build_agent_step_log(tool_result, is_content_end, usages=state.usages, response_id=current_response_id, line=last_line), last_line
                processed.add(result_str)

        if last_usage_metadata and 'input_tokens' in last_usage_metadata and 'output_tokens' in last_usage_metadata:
            state.add_usage_meta(model, last_usage_metadata)
            yield AgentStepLog.build_usage_action(usage=[u.__dict__ for u in state.usages], response_id=current_response_id), last_line

        if last_line:
            tool_names = self.content_contain_tools(last_line)
            if not tool_names:
                if last_line not in processed:
                    yield build_agent_step_log(last_line, True, usages=state.usages, response_id=current_response_id, line=last_line), last_line
                    processed.add(last_line)
            else:
                # Find where the last closing tool tag ends; anything after it
                # is trailing answer text (or a truncated tool call).
                last_end_idx = max(last_line.rfind(f"</{name}>") for name in tool_names)
                if last_end_idx >= 0:
                    remaining = last_line[last_end_idx + last_line[last_end_idx:].find('>') + 1:].strip()
                    if remaining and remaining not in processed:
                        # Handle a response truncated by the model's length limit.
                        remaining_tool_names = self.content_contain_tools(remaining)
                        if remaining_tool_names:
                            # A tool tag still open in the remainder means the
                            # stream was cut off mid-call: emit it with reason "length".
                            tool_result, _ = next(self.parse(remaining))
                            if tool_result and (not isinstance(tool_result, GraphNodeParam) or tool_result.params):
                                yield build_agent_step_log(tool_result, True, "length", usages=state.usages, response_id=current_response_id, line=last_line), last_line
                        else:
                            yield build_agent_step_log(remaining, True, usages=state.usages, response_id=current_response_id, line=last_line), last_line

    def parse(self, content: str, buffer_size: int = 30) -> "Generator[tuple[GraphNodeParam | str | None, bool], None, None]":
        """Yield ``(result, is_complete)`` pairs parsed from ``content``.

        ``result`` is a plain string for answer text, a ``GraphNodeParam`` for a
        tool call, or ``None`` when nothing is emittable yet. The trailing
        ``buffer_size`` characters are always held back so a tag that is still
        being streamed is never emitted as answer text.
        """
        if len(content) < buffer_size:
            yield None, False
            return

        tool_names = self.content_contain_tools(content)
        if not tool_names:
            # Pure answer text: emit everything except the held-back tail.
            yield content[:len(content) - buffer_size], False
            return

        remaining_content = content
        last_content_end = False
        for tool_name in tool_names:
            # Text before the opening tag is emitted as a finished answer chunk.
            prefix_content, cut_start_idx = self._parse_tool_prefix_content(tool_name, remaining_content)
            if prefix_content:
                yield prefix_content, True
            if cut_start_idx is not None:
                remaining_content = remaining_content[cut_start_idx:]

            for tool_result, content_end in self._parse_content(tool_name, remaining_content):
                if not tool_result or (isinstance(tool_result, GraphNodeParam) and not tool_result.params):
                    continue
                yield tool_result, content_end
                last_content_end = content_end
                if content_end:
                    # Drop the consumed tool span before looking for the next tag.
                    symbol_end = f"</{tool_name}>"
                    end_idx = remaining_content.find(symbol_end)
                    if end_idx != -1:
                        remaining_content = remaining_content[end_idx + len(symbol_end):].strip()

        if last_content_end and len(remaining_content) > buffer_size:
            # Trailing answer text after the last complete tool call.
            tool_names = self.content_contain_tools(remaining_content)
            if not tool_names:
                yield remaining_content[:len(remaining_content) - buffer_size], False

    def _parse_tool_prefix_content(self, tool_name: str, content: str) -> tuple[Optional[str], Optional[int]]:
        """Return (text before the tool's opening tag, tag start index), or (None, None).

        Only returns a prefix when the opening tag is present and enough content
        has arrived to plausibly hold a complete tag pair.
        """
        symbol_start = f"<{tool_name}>"
        symbol_end = f"</{tool_name}>"
        if symbol_start in content and len(content) >= len(symbol_start) + len(symbol_end):
            start_idx = content.find(symbol_start)
            if start_idx > 0:
                prefix_content = content[:start_idx].strip()
                if prefix_content:
                    return prefix_content, start_idx
        return None, None

    def _parse_content(self, tool_name: str, content: str) -> "Generator[tuple[GraphNodeParam | str | None, bool], None, None]":
        """Parse one ``<tool_name>...</tool_name>`` span out of ``content``.

        Yields a single ``(result, content_end)`` pair: the raw inner text for
        param-less tools (e.g. ``thinking``), or a ``GraphNodeParam`` populated
        from ``<param>...</param>`` sub-tags. ``content_end`` is ``False`` while
        the closing tag has not arrived yet.
        """
        symbol_start = f"<{tool_name}>"
        symbol_end = f"</{tool_name}>"
        if symbol_start not in content or len(content) < len(symbol_start) + len(symbol_end):
            yield None, False
            return

        params = []
        if tool_name != self.tool_thinking:
            if tool_name in self.tools_params:
                params = self.tools_params[tool_name]
            elif tool_name == self.tool_subagent:
                params = ["name"]

        start_idx = content.find(symbol_start)
        content = content[start_idx + len(symbol_start):]

        if symbol_end in content:
            content_end = True
            content = content[:content.find(symbol_end)]
        else:
            # Closing tag not seen yet: hold back enough characters that a
            # partially-streamed closing tag is never leaked into the result.
            content_end = False
            content = content[:-len(symbol_end)]

        if not params:
            yield content, content_end
            return

        tool_result = GraphNodeParam(type=tool_name, params={})

        for param in params:
            # ``param`` is either "name" (scalar) or {"name": [sub_fields]} (nested).
            param_name = param if isinstance(param, str) else next(iter(param.keys()))
            param_start = f"<{param_name}>"
            param_end = f"</{param_name}>"

            if param_start not in content:
                continue

            # Collect every occurrence of the param tag (repeated params allowed).
            param_values = []
            search_content = content

            while param_start in search_content:
                start_idx = search_content.find(param_start)
                end_idx = search_content.find(param_end, start_idx)

                if end_idx == -1:
                    # Unterminated param: take what has streamed so far, minus
                    # room for a partial closing tag.
                    param_content = search_content[start_idx + len(param_start):-len(param_end)].strip()
                    if param_content:
                        param_values.append(param_content)
                    break
                else:
                    param_content = search_content[start_idx + len(param_start):end_idx].strip()
                    if param_content:
                        param_values.append(param_content)
                    search_content = search_content[end_idx + len(param_end):]

            if param_values:
                if isinstance(param, dict):
                    sub_fields = param[param_name]

                    def parse_nested_value(value):
                        # Extract declared sub-fields from a nested value; fall
                        # back to the raw string when no sub-tags are present.
                        result = {}
                        for sub_field in sub_fields:
                            sub_start = f"<{sub_field}>"
                            sub_end = f"</{sub_field}>"
                            if sub_start in value:
                                sub_start_idx = value.find(sub_start)
                                sub_end_idx = value.find(sub_end, sub_start_idx)
                                if sub_end_idx != -1:
                                    sub_content = value[sub_start_idx + len(sub_start):sub_end_idx].strip()
                                    result[sub_field] = sub_content
                                else:
                                    sub_content = value[sub_start_idx + len(sub_start):].strip()
                                    result[sub_field] = sub_content
                        return result if result else value

                    parsed_values = [parse_nested_value(value) for value in param_values]
                else:
                    parsed_values = param_values

                tool_result.add_param(param_name, parsed_values[0] if len(parsed_values) == 1 else parsed_values)

        yield tool_result, content_end

    def content_contain_tools(self, content: str, direct_break: bool = False, except_thinking: bool = False) -> List[str]:
        """Return the recognised tool tag names opened in ``content``, in order.

        Args:
            content: text to scan for ``<tool_name ...>`` opening tags.
            direct_break: unused; kept for backward compatibility with callers.
            except_thinking: when True, drop the ``thinking`` tag from the result.
        """
        escaped_tools = '|'.join(re.escape(name) for name in self.tools_names)
        pattern = rf'<({escaped_tools})(?:\s[^>]*)?>'
        matches = re.findall(pattern, content)
        if except_thinking:
            matches = [m for m in matches if m != self.tool_thinking]
        return matches