import json
from contextlib import suppress
from pathlib import Path
from typing import Any, Dict

from flow_engine.core.graph import FlowGraph
from flow_engine.execution.runner import FlowRunner
from flow_engine.nodes.node_factory import NodeFactory


def _log_flow_structure(flow_def: Dict[str, Any]) -> None:
    """Log node/edge counts and which nodes are expected to execute externally.

    A node counts as an external-execution candidate when its type is not found
    in NodeFactory's TYPE_MAP. Best-effort: any analysis failure is logged and
    swallowed so trace logging never breaks the trigger path.
    """
    try:
        nodes = (flow_def or {}).get("nodes") or []
        edges = (flow_def or {}).get("edges") or []
        print(f"[Trace] Flow loaded. nodes={len(nodes)}, edges={len(edges)}")
        # Identify which nodes are expected to run externally based on NodeFactory mapping
        internal_map = getattr(NodeFactory, "TYPE_MAP", {}) or {}
        internal_types = {str(t).lower() for t in internal_map}
        external_candidates = [
            {
                "id": n.get("id"),
                "type": n.get("type"),
                "name": n.get("data", {}).get("name"),
            }
            for n in nodes
            if (n.get("type") or "").lower() not in internal_types
        ]
        if external_candidates:
            print(f"[Trace] Nodes expected to use external execution (by nodeType): {external_candidates}")
        else:
            print("[Trace] No nodes detected for external execution by nodeType mapping.")
    except Exception as log_ex:
        print(f"[Trace] Failed to analyze flow nodes for external execution: {log_ex}")


def _log_execution_trace(result: Any) -> int:
    """Print a per-step summary of a runner result and return the trace length.

    Returns:
        Number of trace entries, or 0 when the result has no trace or when
        logging fails before the trace could be measured (best-effort).
    """
    trace_len = 0
    try:
        trace = result.get("trace", []) if isinstance(result, dict) else []
        trace_len = len(trace)
        print(f"[Trace] Execution finished. trace_len={trace_len}")
        for i, step in enumerate(trace):
            if isinstance(step, dict):
                # Trace entries from different engines use different key names;
                # probe the known variants.
                node_id = step.get("nodeId") or step.get("node_id") or step.get("id")
                node_type = step.get("nodeType") or step.get("type")
                mode = (
                    "external" if step.get("external") is True
                    else "internal" if step.get("external") is False
                    else step.get("mode") or "unknown"
                )
                resp = step.get("response") or {}
                code = resp.get("code")
                msg = resp.get("message") or resp.get("msg")
                next_node = step.get("next") or step.get("nextNode")
                print(f"[Trace] Step[{i}] node_id={node_id}, node_type={node_type}, exec_mode={mode}, code={code}, message={msg}, next={next_node}")
            else:
                # Non-structured trace entry (likely plain string)
                try:
                    print(f"[Trace] Step[{i}] raw={step}")
                except Exception:
                    print(f"[Trace] Step[{i}] raw=<unprintable>")
        # Context external markers (guard result type)
        context = result.get("context") if isinstance(result, dict) else None
        if isinstance(context, dict):
            ext_keys = [k for k in context.keys() if str(k).endswith("_external")]
            if ext_keys:
                print(f"[Trace] Context contains external markers: {ext_keys}")
            else:
                print("[Trace] No explicit external markers found in context.")
        else:
            print("[Trace] Result has no context dictionary to inspect.")
    except Exception as ex:
        print(f"[Trace] Failed to print detailed execution trace: {ex}")
    return trace_len


def trigger_demo_flow_by_event(event: Dict[str, Any]) -> None:
    """
    Trigger the demo flow (examples/show_message_flow.json) when the start node is configured
    with triggerType='event' and the eventCode matches.

    - Loads the example flow JSON
    - Constructs FlowGraph and FlowRunner
    - Passes the incoming event into context as {'event': event}
    - Executes the flow; logs minimal info

    Args:
        event: Incoming event payload; 'eventCode' is read for the final log line.
    """
    # suppress(Exception) keeps trace printing best-effort: a failure to render
    # or emit a log line must never abort flow triggering.
    with suppress(Exception):
        print(f"[Trace] trigger_demo_flow_by_event called. event={event}")

    # Repository root is two directories above this file; the demo flow lives
    # under examples/ — TODO confirm layout if this module is relocated.
    project_root = Path(__file__).resolve().parents[2]
    flow_path = project_root / "examples" / "show_message_flow.json"
    with suppress(Exception):
        print(f"[Trace] Loading flow definition from: {flow_path}")

    with open(flow_path, "r", encoding="utf-8") as f:
        flow_def = json.load(f)

    _log_flow_structure(flow_def)

    graph = FlowGraph(flow_def)
    runner = FlowRunner(graph, max_steps=1000)
    with suppress(Exception):
        print("[Trace] Runner created: FlowRunner (engine=flow). max_steps=1000")

    ctx = {"event": event}
    with suppress(Exception):
        print("[Trace] Starting flow execution with context keys:", list(ctx.keys()))

    result = runner.run(ctx)

    # The helper always returns an int, so trace_len is always bound here
    # (previously this relied on a fragile "'trace_len' in locals()" check).
    trace_len = _log_execution_trace(result)
    print(f"[Flow] Triggered flow from eventCode={event.get('eventCode')}, trace_len={trace_len}")

# --- Extended: trigger DB flows by event ---
import asyncio
from typing import List, Optional
from ..dao import list_flows, create_run, update_run
from ..managers.engine_manager import create_runner, register_runner, unregister_runner
from ..utils import DEFAULT_ENGINE


def _start_node_matches_event(flow_def: Dict[str, Any], event: Dict[str, Any]) -> bool:
    """
    Check if the flow's start node is configured with triggerType='event' and matches the incoming event.
    Mirrors StartNode matching logic: eventCode/eventId equals, and optional eventSource equals when both present.
    """
    try:
        nodes = (flow_def or {}).get('nodes') or []
        start_nodes = [n for n in nodes if (n.get('type') or '').lower() == 'start']
        if not start_nodes:
            return False
        data = (start_nodes[0].get('data') or {})
        trigger_type = (data.get('triggerType') or '').lower()
        if trigger_type != 'event':
            return False
        trigger = data.get('trigger') or {}
        expected_code = trigger.get('eventId') or trigger.get('eventCode')
        expected_source = trigger.get('eventSource')
        actual_code = event.get('eventCode') or event.get('eventId')
        actual_source = event.get('source') or event.get('eventSource')
        if not expected_code:
            return False
        if actual_code != expected_code:
            return False
        # 不用校验事件来源
        # if expected_source and actual_source and expected_source != actual_source:
        #     return False
        return True
    except Exception:
        return False


async def trigger_db_flows_by_event(event: Dict[str, Any]) -> Dict[str, Any]:
    """
    Query flows from DB and run all flows whose start node is configured with triggerType='event' and
    matches the incoming event (eventCode/eventId and optional eventSource).

    Scheduling is done asynchronously per matched flow to avoid blocking Kafka consumer handler.
    Returns a summary with matched count and scheduled run IDs.

    Args:
        event: Incoming event payload; 'eventCode'/'eventId' drive matching.

    Returns:
        {"matched": <count of flows whose start node matched>,
         "scheduled_run_ids": [<run ids that were successfully scheduled>]}
    """
    try:
        flows = list_flows()
    except Exception as ex:
        # DB unavailable: report nothing matched instead of propagating into
        # the event-consumer path.
        print(f"[Flow] Failed to list flows from DB: {ex}")
        return {"matched": 0, "scheduled_run_ids": []}

    # Keep only flows whose start node is event-triggered and matches this event.
    matched: List[Dict[str, Any]] = []
    for f in flows:
        flow_def = f.get('flow_json') or {}
        if _start_node_matches_event(flow_def, event):
            matched.append(f)

    run_ids: List[int] = []
    for f in matched:
        try:
            flow_id = int(f.get('id'))
            engine = DEFAULT_ENGINE  # respect default engine setting
            ctx = {"event": event}
            # create run record and runner
            # Order matters: persist the run first to obtain run_id, then build
            # the runner and register it BEFORE the task starts executing.
            run_dict = create_run(flow_id, status='pending', max_steps=1000, context_json=ctx)
            run_id = int(run_dict.get('id'))
            graph = FlowGraph(f['flow_json'])
            runner = create_runner(graph, engine, 1000, None, None, flow_id=flow_id, run_id=run_id)
            register_runner(run_id, runner)

            # Default arguments capture this iteration's values, avoiding the
            # late-binding closure pitfall when several flows match.
            async def _exec(run_id: int = run_id, runner_obj=runner, ctx_obj: Dict[str, Any] = ctx):
                try:
                    # NOTE(review): runner_obj.run(...) appears to be a plain
                    # synchronous call, so this task may block the event loop
                    # while the flow runs — confirm whether FlowRunner.run
                    # yields internally or should be offloaded to a thread.
                    # ctx is copied so concurrent runs don't share a mutable dict.
                    runner_obj.run(dict(ctx_obj))
                except Exception as e:
                    from datetime import datetime, timezone
                    # Mark the run failed with a naive-UTC finished timestamp.
                    update_run(run_id, status='error', finished_at=datetime.now(timezone.utc).replace(tzinfo=None).isoformat(), error=str(e))
                finally:
                    # Always drop the runner from the registry, success or failure.
                    unregister_runner(run_id)

            asyncio.create_task(_exec())
            run_ids.append(run_id)
            print(f"[Flow] Scheduled flow run: flow_id={flow_id}, run_id={run_id}, eventCode={event.get('eventCode')}")
        except Exception as ex:
            # One flow failing to schedule must not prevent the others.
            print(f"[Flow] Failed to schedule flow id={f.get('id')}: {ex}")

    return {"matched": len(matched), "scheduled_run_ids": run_ids}