from typing import Any, Dict, Optional
import json
from dataclasses import dataclass
import uuid
import time
from pathlib import Path
from .event_trigger_service import trigger_db_flows_by_event,trigger_demo_flow_by_event

@dataclass
class BinlogRecordInDTO:
    """Python counterpart of Java BinlogRecordInDTO for Canal/Kafka binlog handling.

    Mirrors the Java class field-for-field; every attribute is Optional so a
    partially-populated Canal payload can still be represented.
    """
    data: Optional[Dict[str, Any]] = None
    database: Optional[str] = None
    es: Optional[int] = None
    sqlType: Optional[Dict[str, int]] = None
    old: Optional[Dict[str, Any]] = None
    table: Optional[str] = None
    ts: Optional[int] = None
    type: Optional[str] = None  # expected: 'insert' | 'update' | 'delete'

    # Wire-format key names shared by from_payload and to_dict.
    _FIELDS = ("data", "database", "es", "sqlType", "old", "table", "ts", "type")

    @classmethod
    def from_payload(cls, payload: Dict[str, Any]) -> "BinlogRecordInDTO":
        """Build a DTO from a raw payload dict; keys absent from the payload become None."""
        return cls(**{name: payload.get(name) for name in cls._FIELDS})

    def to_dict(self) -> Dict[str, Any]:
        """Serialize back to a plain dict using the wire-format key names."""
        return {name: getattr(self, name) for name in self._FIELDS}

    def _type_is(self, expected: str) -> bool:
        # Case-insensitive comparison of the operation type; tolerates None.
        return (self.type or "").lower() == expected

    def is_insert(self) -> bool:
        """True when this record represents an INSERT operation."""
        return self._type_is("insert")

    def is_update(self) -> bool:
        """True when this record represents an UPDATE operation."""
        return self._type_is("update")

    def is_delete(self) -> bool:
        """True when this record represents a DELETE operation."""
        return self._type_is("delete")


@dataclass
class AbstractEvent:
    """Python version of AbstractEvent<T> for event-driven DSL."""
    id: str
    eventCode: str
    version: str
    entity: str
    time: int
    data: Any = None
    trace: Optional[str] = None
    extensions: Optional[Dict[str, Any]] = None
    source: Optional[str] = None

    def to_dict(self) -> Dict[str, Any]:
        """Flatten to a plain dict; payloads exposing to_dict() are serialized recursively."""
        payload = self.data
        if hasattr(payload, "to_dict"):
            payload = payload.to_dict()
        return {
            "id": self.id,
            "eventCode": self.eventCode,
            "version": self.version,
            "entity": self.entity,
            "time": self.time,
            "data": payload,
            "trace": self.trace,
            "extensions": self.extensions,
            "source": self.source,
        }


@dataclass
class Partner:
    """Flexible Partner representation to carry binlog row fields."""
    attrs: Dict[str, Any]

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "Partner":
        """Wrap a row dict; a falsy input collapses to an empty attribute map."""
        return cls(attrs=d if d else {})

    def to_dict(self) -> Dict[str, Any]:
        """Return a shallow copy so callers cannot mutate internal state."""
        return dict(self.attrs) if self.attrs else {}


@dataclass
class RegisterAccountEvent(AbstractEvent):
    """Event for registering account, maps Java RegisterAccountEvent."""
    # Construct with explicit values; no default-field overrides here, which
    # keeps the base dataclass field ordering intact.

    @classmethod
    def from_binlog(cls, binlog: Dict[str, Any]) -> "RegisterAccountEvent":
        """Create an event from a raw binlog dict; row fields become the Partner payload."""
        # id must match ^[a-zA-Z0-9]+$ with length<=32, hence the hyphen-free hex uuid;
        # event time is milliseconds since epoch.
        row = binlog.get("data") or {}
        return cls(
            id=uuid.uuid4().hex,
            eventCode="REGISTER_ACCOUNT",
            version="1.0",
            entity="io.aicn.lowcode.message.entity.prm.editor.Partner4Editor",
            time=int(time.time() * 1000),
            data=Partner.from_dict(row),
            source="db",
        )


def _coerce_payload(message: Any) -> Optional[Dict[str, Any]]:
    """Best-effort decode of a Kafka message into a JSON object; None on failure."""
    if isinstance(message, dict):
        return message
    if isinstance(message, (bytes, bytearray)):
        try:
            return json.loads(message.decode("utf-8"))
        except Exception:
            return None
    if isinstance(message, str):
        try:
            return json.loads(message)
        except Exception:
            return None
    return None


def _build_record_dto(
    payload: Dict[str, Any],
    op_type: str,
    data: Optional[Dict[str, Any]],
    old: Optional[Dict[str, Any]],
) -> Optional["BinlogRecordInDTO"]:
    """Map one aligned (data, old) row pair to a DTO; returns None for unknown op types."""
    if op_type == "INSERT":
        row, prev = data, None
    elif op_type == "UPDATE":
        row, prev = data, old
    elif op_type == "DELETE":
        # Some DELETE payloads carry the removed row under 'data' instead of 'old'.
        row, prev = None, old if old is not None else data
    else:
        return None
    return BinlogRecordInDTO(
        data=row,
        old=prev,
        database=payload.get("database"),
        table=payload.get("table"),
        ts=payload.get("ts"),
        es=payload.get("es"),
        sqlType=payload.get("sqlType"),
        type=op_type.lower(),
    )


async def binlog_handler(message: Any) -> None:
    """Handle Canal-style binlog JSON payload.

    Expected JSON keys: 'type', 'data' (list of dicts), 'old' (list of dicts),
    plus optional 'database', 'table', 'ts', 'pkNames'. Each data row is turned
    into a BinlogRecordInDTO and forwarded to deal_binlog; unknown operation
    types are skipped. All errors are logged, never raised.
    """
    try:
        payload = _coerce_payload(message)
        if payload is None:
            print("[Kafka] binlog_handler: payload not JSON, ignoring.")
            return
        data_list = payload.get("data") or []
        old_list = payload.get("old") or []
        op_type = str(payload.get("type") or "").upper()
        if (not data_list) and (op_type != "DELETE"):
            # For DELETE, some payloads may only provide 'old'
            print("[Kafka] binlog_handler: data is empty, ignoring.")
            return
        # Iterate over data rows (falling back to 'old' rows for data-less
        # DELETEs); align 'old' entries by index when present.
        for i in range(len(data_list) or len(old_list)):
            data = data_list[i] if i < len(data_list) else None
            old = old_list[i] if i < len(old_list) else None
            dto = _build_record_dto(payload, op_type, data, old)
            if dto is None:
                continue  # unknown operation type
            await deal_binlog(dto.to_dict())
    except Exception as e:
        print(f"[Kafka] binlog_handler error: {e}")


async def canal_handle_message(binlog: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    """Transform binlog into event when table matches, else passthrough.

    - If database.table == 'prm.partner' and row data is present, build a
      RegisterAccountEvent and return its dict form.
    - Otherwise return the binlog unchanged.
    - Returns None only if event construction itself fails.
    """
    record = binlog or {}
    database = record.get("database")
    table = record.get("table")
    data = record.get("data")
    # Guard explicitly against missing metadata: previously a None database or
    # table raised TypeError during string concatenation, and the broad except
    # silently dropped the record instead of passing it through as documented.
    if database and table and f"{database}.{table}" == "prm.partner" and data:
        try:
            return RegisterAccountEvent.from_binlog(record).to_dict()
        except Exception:
            return None
    return binlog


async def deal_binlog(binlog: Dict[str, Any]) -> None:
    """Process a single binlog record: canal handle -> event notify if data present."""
    record = await canal_handle_message(binlog)
    if record is None:
        print("[Kafka] dealBinLog: handler returned None.")
    elif record.get("data") is None:
        # For DELETE without data, still emit a notice
        print("[Kafka] dealBinLog: delete operation processed.")
    else:
        try:
            await event_notify_matching(record)
        except Exception as e:
            print(f"[Kafka] Event Router Error eventNotifyService: {e}")







async def event_notify_matching(binlog: Dict[str, Any]) -> None:
    """Route or log events/binlog.

    - For event dicts (with 'eventCode'), log eventCode and entity, then
      attempt to trigger the DB-defined flows for that event.
    - For raw binlog dicts, log operation type and table.
    """
    is_event = isinstance(binlog, dict) and "eventCode" in binlog
    if not is_event:
        # Fallback: original binlog logging
        print(
            f"[Kafka] event_notify_matching: {binlog.get('type')} "
            f"{binlog.get('database')}.{binlog.get('table')}"
        )
        return
    print(
        f"[Kafka] event_notify_matching: EVENT {binlog.get('eventCode')} entity={binlog.get('entity')} id={binlog.get('id')}"
    )
    try:
        # trigger_demo_flow_by_event(binlog)
        summary = await trigger_db_flows_by_event(binlog)
        print(f"[Flow] DB-trigger summary: matched={summary.get('matched')} run_ids={summary.get('scheduled_run_ids')}")
    except Exception as ex:
        print(f"[Flow] Failed to trigger DB flows from event: {ex}")

