import json
import logging
from collections.abc import Generator
from typing import Any
from urllib.parse import urljoin

import requests
from dify_plugin import Tool
from dify_plugin.entities.tool import ToolInvokeMessage


class DatTool(Tool):
    """Query a DAT agent over its SSE streaming endpoint and yield the
    aggregated result as a single JSON tool message."""

    def _invoke(self, tool_parameters: dict[str, Any]) -> Generator[ToolInvokeMessage]:
        """
        Invoke the tool.

        POSTs ``question`` to the ``ask/stream`` SSE endpoint, parses the
        event stream, accumulates per-event results, and yields one JSON
        message containing the aggregated result.

        Args:
            tool_parameters: expects ``question`` (required), optional
                ``agent_name`` (default ``"default"``) and ``conversation_id``.

        Raises:
            ValueError: if ``question`` is missing.
            RuntimeError: on non-200 responses, request/transport errors,
                or unsupported HITL events.
        """
        base_url = self.runtime.credentials.get("base_url")
        timeout = self.runtime.credentials.get("timeout")

        conversation_id = tool_parameters.get("conversation_id")
        agent_name = tool_parameters.get("agent_name", "default")
        question = tool_parameters.get("question")
        if not question:
            raise ValueError("Please fill in the question")

        url = urljoin(base_url.rstrip('/') + '/', 'ask/stream')

        # Renamed from `data` to avoid shadowing by the per-event `data` below.
        payload: dict[str, Any] = {
            'agent_name': agent_name,
            'question': question,
        }
        if conversation_id:
            payload["conversation_id"] = conversation_id

        headers = {
            'Accept': 'text/event-stream; charset=utf-8',
            'Cache-Control': 'no-cache',
            'Connection': 'keep-alive',
        }

        # POST with stream=True so the SSE response is consumed incrementally.
        try:
            with requests.post(url, json=payload, stream=True, headers=headers, timeout=int(timeout)) as response:
                if response.status_code != 200:
                    error_msg = f"Request failed, status code: {response.status_code}, response text: {response.text}"
                    logging.error(error_msg)
                    # BUG FIX: the original `return RuntimeError(...)` merely ended
                    # the generator and discarded the error — raise it instead.
                    raise RuntimeError(error_msg)

                # Force UTF-8 decoding of the event stream.
                response.encoding = 'utf-8'

                result: dict[str, Any] = {}
                events: list[dict[str, Any]] = []
                answer = ""
                buffer = ""
                for chunk in response.iter_content(chunk_size=1024, decode_unicode=True):
                    if not chunk:
                        continue
                    if isinstance(chunk, bytes):
                        chunk = chunk.decode('utf-8')
                    buffer += chunk
                    # Complete SSE events are delimited by a blank line ("\n\n").
                    while "\n\n" in buffer:
                        event_data, buffer = buffer.split("\n\n", 1)
                        logging.info("====================================")
                        logging.info("%s", event_data)
                        logging.info("====================================")

                        event_type = None
                        data: Any = None
                        for line in event_data.strip().split('\n'):
                            if line.startswith('event:'):
                                event_type = line.split(':', 1)[1].strip()
                            elif line.startswith('data:'):
                                data_str = line.split(':', 1)[1].strip()
                                try:
                                    data = json.loads(data_str)
                                except json.JSONDecodeError:
                                    # Non-JSON payloads are kept as raw text and
                                    # skipped by the dict guard below.
                                    data = data_str

                        # BUG FIX: the original reset conversation_id to "" on every
                        # event, discarding the caller-supplied / previously captured
                        # id. The outer `conversation_id` is reused instead.
                        # The isinstance guard prevents AttributeError when the SSE
                        # payload was not valid JSON (data left as a str above).
                        if event_type and isinstance(data, dict) and data:
                            if not conversation_id:
                                conversation_id = data.get('conversation_id')

                            if event_type == "other":
                                # "other" events wrap a sub-event; unwrap it.
                                sub_event_type = data.get('sub_event')
                                sub_data = {k: v for k, v in data.items() if k != 'sub_event'}
                                events.append({"event": sub_event_type, "data": sub_data})
                            elif event_type != "ping":
                                events.append({"event": event_type, "data": data})

                            if event_type == "sql_generate":
                                result['semantic_sql'] = data.get('semantic_sql')
                            elif event_type == "semantic_to_sql":
                                result['query_sql'] = data.get('query_sql')
                            elif event_type == "sql_execute":
                                result['query_data'] = json.dumps(data.get('query_data', []))
                            elif event_type == "agent_answer":
                                # BUG FIX: `or ""` avoids TypeError when the
                                # 'answer' key is missing (None).
                                answer += data.get('answer') or ""
                            elif event_type == "agent_answer_end":
                                events.append({"event": "complete_agent_answer", "data": {"answer": answer}})
                                answer = ""
                            elif event_type in {"hitl_ai_request", "hitl_tool_approval"}:
                                raise RuntimeError("HITL (Human-in-the-loop) is not supported.")
                            elif event_type == "error":
                                result['error'] = data.get('error')
                            elif event_type == "finished":
                                result['status'] = data.get('status')

                        result['conversation_id'] = conversation_id
                        result['events'] = events

                logging.info("result: %s", result)
                yield self.create_json_message(result)
        except requests.exceptions.RequestException as e:
            error_msg = f"Request exception: {e}"
            logging.error(error_msg)
            # BUG FIX: raise (with cause) instead of returning the exception object.
            raise RuntimeError(error_msg) from e
