import time
from typing import Any, Dict, List
from urllib import parse, request, error

from .base_node import BaseNode


class HttpRequestNode(BaseNode):
    """
    HTTP request node (single responsibility: issue an HTTP request per the
    node configuration and produce a standardized output).

    Responsibilities:
    - Supports method, headers, query parameters and a retry mechanism
    - Output structure is controlled via outputConfig (statusCode/headers/body flags)
    - Downstream nodes continue through the default outgoing edge
    """
    def execute(self, graph: 'FlowGraph', context: Dict[str, Any], log: callable) -> List[str]:
        """
        Execute the HTTP request and return the default successor nodes.

        Args:
            graph ('FlowGraph'): Flow graph object (must provide get_outgoing_edges)
            context (Dict[str, Any]): Current execution context (kept for interface parity)
            log (callable): Logging callback accepting a single message string

        Returns:
            List[str]: IDs of the next nodes to execute (default outgoing edges)
        """
        # Merge input into data for execution convenience; input overrides static data.
        effective_data: Dict[str, Any] = {**self.data}
        if isinstance(self.input, dict):
            effective_data.update(self.input)
        data = effective_data
        method = (data.get('method') or 'GET').upper()
        url = data.get('url') or ''
        timeout = int(data.get('timeout') or 30)
        # Clamp so a negative retryCount still yields at least one attempt
        # (previously the loop body was skipped entirely with no log and no output).
        retry = max(0, int(data.get('retryCount') or 0))
        headers = {h.get('key'): h.get('value') for h in data.get('headers', []) if h.get('key')}
        query_params = [(q.get('key'), q.get('value')) for q in data.get('queryParams', []) if q.get('key')]
        output_cfg = data.get('outputConfig', {})

        if query_params:
            qstr = parse.urlencode(query_params)
            # Append with '&' if the configured URL already carries a query string.
            delimiter = '&' if '?' in url else '?'
            url = f"{url}{delimiter}{qstr}"

        if not url:
            log(f"[HTTP] Node {self.id}: URL is empty, skipping request.")
            # Reflect basic output structure for downstream nodes to reference
            self.output = {"statusCode": None, "headers": {}, "body": None}
        else:
            req = request.Request(url=url, method=method, headers=headers)
            last_err = None
            for attempt in range(retry + 1):
                try:
                    log(f"[HTTP] {method} {url} (attempt {attempt+1}/{retry+1})")
                    with request.urlopen(req, timeout=timeout) as resp:
                        status = resp.getcode()
                        body_bytes = resp.read()
                        body_text = body_bytes.decode('utf-8', errors='replace')
                        result: Dict[str, Any] = {}
                        if output_cfg.get('statusCode'):
                            result['statusCode'] = status
                        if output_cfg.get('headers'):
                            result['headers'] = dict(resp.getheaders())
                        if output_cfg.get('body'):
                            result['body'] = body_text
                        # set node output only; per-node outputs are recorded by BaseRunner
                        self.output = result
                        log(f"[HTTP] Node {self.id} success: status={status}")
                        break
                # TimeoutError covers socket timeouts raised during the read,
                # which are not URLError subclasses and previously escaped the loop.
                except (error.URLError, TimeoutError) as e:
                    last_err = e
                    # Only announce a retry (and back off) when one will actually happen.
                    if attempt < retry:
                        log(f"[HTTP] Node {self.id} error: {e}. Retrying...")
                        time.sleep(0.3)
            else:
                # All attempts failed: publish a well-formed output structure so
                # downstream references behave the same as the empty-URL branch.
                log(f"[HTTP] Node {self.id} failed after {retry+1} attempts: {last_err}")
                self.output = {"statusCode": None, "headers": {}, "body": None}
        return [e.target for e in graph.get_outgoing_edges(self.id) if e.source_handle in (None, 'default')]