from contextlib import contextmanager
from typing import Any, Dict, List, Optional, Tuple

import psycopg2
import psycopg2.extras
from psycopg2.extensions import adapt
from psycopg2.pool import ThreadedConnectionPool

from spider.util import logger
from cause_inference.config import infer_config
from cause_inference.model import TopoNode, TopoEdge
from cause_inference.exceptions import DBException


def connect_to_postgres(dsn):
    """Open and return a new psycopg2 connection for *dsn*.

    Raises:
        DBException: wrapping any failure of the underlying connect attempt.
    """
    try:
        return psycopg2.connect(dsn)
    except Exception as ex:
        raise DBException('Connect to postgres error because {}'.format(ex)) from ex


# Pool helpers
#
# Module-level connection pool shared by init_pg_pool() / get_pg_conn_from_pool().
# Stays None until init_pg_pool() has been called successfully, so the
# annotation must be Optional (the original claimed a non-None pool type).
_pg_pool: Optional[ThreadedConnectionPool] = None


def init_pg_pool(dsn, minconn=1, maxconn=5, enforce_age=True):
    """Create (or return the already-created) module-level connection pool.

    Args:
        dsn: libpq connection string passed to psycopg2.
        minconn: minimum number of pooled connections.
        maxconn: maximum number of pooled connections.
        enforce_age: when True, borrow a connection and verify the Apache
            AGE extension is installed before handing the pool out.

    Returns:
        The shared ThreadedConnectionPool instance.

    Raises:
        DBException: if the pool cannot be created, or AGE is required
            but not installed.
    """
    global _pg_pool
    if _pg_pool:
        return _pg_pool
    try:
        _pg_pool = ThreadedConnectionPool(minconn, maxconn, dsn)
    except Exception as ex:
        raise DBException('Unable to create pg pool: {}'.format(ex)) from ex
    if enforce_age:
        # Acquire the probe connection BEFORE entering try/finally: the
        # original code referenced `conn` in its finally clause, which raised
        # NameError (masking the real error) whenever getconn() itself failed.
        conn = _pg_pool.getconn()
        try:
            if not is_age_installed(conn):
                raise DBException('AGE extension is required but not installed on the Postgres server')
        finally:
            _pg_pool.putconn(conn)
    return _pg_pool


@contextmanager
def get_pg_conn_from_pool():
    """Context manager yielding a pooled connection.

    The connection is always returned to the pool on exit, even when the
    body raises.

    Raises:
        DBException: if init_pg_pool() has not been called yet.
    """
    pool = _pg_pool
    if pool is None:
        raise DBException('PG connection pool is not initialized')
    borrowed = pool.getconn()
    try:
        yield borrowed
    finally:
        pool.putconn(borrowed)


def connect_to_postgres_enforce_age(dsn):
    """Connect to Postgres and, unless disabled in config, require AGE.

    The check is skipped when pg_conf['use_age'] is explicitly False.

    Raises:
        DBException: on connection failure, or when AGE is required but missing.
    """
    connection = connect_to_postgres(dsn)
    if infer_config.pg_conf.get('use_age', True):
        if not is_age_installed(connection):
            raise DBException('Apache AGE extension is required but not installed on the target Postgres server')
    return connection


def is_age_installed(conn) -> bool:
    """Return True when the 'age' extension is registered in pg_extension.

    This is a best-effort probe: any failure (bad connection, query error,
    unexpected result shape) is reported as "not installed" rather than
    raised.
    """
    query = "SELECT count(*) as cnt FROM pg_extension WHERE extname = 'age'"
    try:
        with conn.cursor() as cur:
            cur.execute(query)
            row = cur.fetchone()
            return row[0] > 0
    except Exception:
        return False


def run_cypher(conn, graph, cypher_sql) -> List[Dict[str, Any]]:
    """Execute *cypher_sql* against AGE graph *graph*, returning dict rows.

    The graph name and cypher text are both passed as bind parameters to
    the cypher() table function.
    """
    with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
        cur.execute("SELECT * FROM cypher(%s, %s)", (graph, cypher_sql))
        return list(cur.fetchall())


def query_all(conn, sql, params=None):
    """Run *sql* with optional *params* and return all rows as dict rows."""
    with conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor) as cur:
        cur.execute(sql, params)
        return list(cur.fetchall())


def query_recent_topo_ts(conn, ts) -> int:
    """Return the newest recorded topology timestamp that is <= *ts*.

    Raises:
        DBException: when no snapshot at or before *ts* exists.
    """
    rows = query_all(conn, 'SELECT max(ts) as last_ts FROM timestamps WHERE ts <= %s', (ts,))
    last_ts = rows[0]['last_ts'] if rows else None
    if last_ts is None:
        raise DBException('Can not find topological graph at the abnormal timestamp {}'.format(ts))
    return int(last_ts)


def query_topo_entities(conn, ts, query_options=None) -> List[TopoNode]:
    """Query ObserveEntity vertices at snapshot *ts* via AGE/cypher.

    Args:
        conn: open psycopg2 connection; Apache AGE must be installed.
        ts: topology snapshot timestamp to match (n.ts).
        query_options: optional equality filters. Only the keys
            'entity_id', 'type', 'machine_id', 'level', 'ts' are honored;
            the legacy Arango key '_key' is accepted as an alias for
            'entity_id'.

    Returns:
        List of TopoNode built from the matching vertices.

    Raises:
        DBException: when AGE is disabled in config, not installed on the
            server, or the cypher query fails.
    """
    # Copy before aliasing so the caller's dict is never mutated
    # (the original popped '_key' from the caller's own object).
    query_options = dict(query_options or {})
    if '_key' in query_options and 'entity_id' not in query_options:
        query_options['entity_id'] = query_options.pop('_key')

    # AGE is required for entity queries in inference. Validate AGE presence.
    use_age = infer_config.pg_conf.get('use_age', True)
    if not use_age:
        raise DBException('Apache AGE is required for topology queries.')
    if not is_age_installed(conn):
        raise DBException('Apache AGE extension is required but not installed on the Postgres server')

    # Build the cypher WHERE clause; adapt() SQL-quotes every user-supplied
    # value, and unknown option keys are silently ignored.
    allowed_keys = ('entity_id', 'type', 'machine_id', 'level', 'ts')
    wheres = [f"n.ts = {int(ts)}"]
    for key, value in query_options.items():
        if key not in allowed_keys:
            continue
        wheres.append(f"n.{key} = {adapt(value).getquoted().decode()}")
    where = ' AND '.join(wheres)
    cypher = f"MATCH (n:ObserveEntity) WHERE {where} RETURN DISTINCT n.entity_id as entity_id, n.attrs as attrs, n.type as type, n.machine_id as machine_id, n.ts as ts"
    try:
        rows = run_cypher(conn, 'topo', cypher)
    except Exception as ex:
        raise DBException(ex) from ex
    return [
        TopoNode(
            id=r.get('entity_id'),
            entity_id=r.get('entity_id'),
            entity_type=r.get('type'),
            machine_id=r.get('machine_id'),
            timestamp=r.get('ts'),
            raw_data=r.get('attrs') or {}
        )
        for r in rows
    ]


def query_subgraph(conn, ts, start_entity_id, edge_collection, depth=1, query_options=None) \
        -> Tuple[Dict[str, TopoNode], Dict[str, TopoEdge]]:
    """Query the subgraph reachable from *start_entity_id* via AGE/cypher.

    Traverses up to *depth* Relation hops from the start entity at snapshot
    *ts*, keeping only paths whose every edge type appears in
    *edge_collection*.

    Args:
        conn: open psycopg2 connection; Apache AGE must be installed.
        ts: topology snapshot timestamp.
        start_entity_id: entity_id of the traversal root.
        edge_collection: iterable of allowed relation type strings.
        depth: maximum traversal depth (>= 1).
        query_options: accepted for interface compatibility; currently unused.

    Returns:
        Tuple of (nodes, edges): nodes maps entity_id -> TopoNode, edges
        maps "from->to:type" keys -> TopoEdge.

    Raises:
        DBException: when AGE is disabled/missing or the node query fails.
    """
    query_options = query_options or {}
    # AGE is required for subgraph queries; traversal uses cypher only.
    use_age = infer_config.pg_conf.get('use_age', True)
    if not use_age:
        raise DBException('Apache AGE is required for subgraph queries.')
    if not is_age_installed(conn):
        raise DBException('Apache AGE extension is required but not installed on the Postgres server')

    # All values interpolated into cypher are SQL-quoted via adapt().
    start_e = adapt(start_entity_id).getquoted().decode()
    type_list = ','.join([adapt(t).getquoted().decode() for t in edge_collection])

    # Pass 1: collect entity_ids reachable within `depth` hops.
    cypher_nodes = f"MATCH (s:ObserveEntity {{entity_id: {start_e}, ts: {int(ts)}}}) MATCH p=(s)-[r:Relation*1..{int(depth)}]-(v) WHERE v.ts = {int(ts)} AND ALL(x IN relationships(p) WHERE x.type IN [{type_list}]) RETURN DISTINCT v.entity_id as entity_id, v.attrs as attrs, v.type as type, v.machine_id as machine_id, v.ts as ts"
    reachable = {r.get('entity_id') for r in run_cypher(conn, 'topo', cypher_nodes)}

    # Pass 2: enumerate the relationships on those paths (skip when nothing
    # is reachable — the edge query could not match anyway).
    if reachable:
        cypher_edges = f"MATCH (s:ObserveEntity {{entity_id: {start_e}, ts: {int(ts)}}}) MATCH p=(s)-[r:Relation*1..{int(depth)}]-(v) WHERE v.ts = {int(ts)} AND ALL(x IN relationships(p) WHERE x.type IN [{type_list}]) UNWIND relationships(p) AS e RETURN DISTINCT startNode(e).entity_id as from_id, endNode(e).entity_id as to_id, e.type as etype"
        edge_rows = run_cypher(conn, 'topo', cypher_edges)
    else:
        edge_rows = []

    nodes: Dict[str, TopoNode] = {}
    edges: Dict[str, TopoEdge] = {}
    ids = set()
    for row in edge_rows:
        from_id = row.get('from_id')
        to_id = row.get('to_id')
        etype = row.get('etype')
        ids.add(from_id)
        ids.add(to_id)
        key = f"{from_id}->{to_id}:{etype}"
        edges.setdefault(key, TopoEdge(id=key, type=etype, from_id=from_id, to_id=to_id))

    # Pass 3: fetch node details for every edge endpoint, always including
    # the start entity (so an isolated start still yields one node).
    ids.add(start_entity_id)
    if ids:
        placeholders = ','.join([adapt(i).getquoted().decode() for i in ids])
        cy_nodes = f"MATCH (n:ObserveEntity) WHERE n.entity_id IN [{placeholders}] AND n.ts = {int(ts)} RETURN DISTINCT n.entity_id as entity_id, n.attrs as attrs, n.type as type, n.machine_id as machine_id, n.ts as ts"
        try:
            node_rows = run_cypher(conn, 'topo', cy_nodes)
        except Exception as ex:
            raise DBException(ex) from ex
        for r in node_rows:
            nid = r.get('entity_id')
            nodes.setdefault(nid, TopoNode(
                id=nid,
                entity_id=nid,
                entity_type=r.get('type'),
                machine_id=r.get('machine_id'),
                timestamp=r.get('ts'),
                raw_data=r.get('attrs') or {}
            ))

    return nodes, edges


def query_cross_host_edges_detail(conn, edge_type, ts) -> List[TopoEdge]:
    """Return edges of *edge_type* at snapshot *ts* that cross host boundaries.

    An edge "crosses hosts" when its endpoint vertices carry different
    machine_id values. Each returned TopoEdge has from_node/to_node
    attributes populated with endpoint details.

    Args:
        conn: open psycopg2 connection; Apache AGE must be installed.
        edge_type: relation type string to match (e.type).
        ts: topology snapshot timestamp (matched against e.ts).

    Raises:
        DBException: when AGE is disabled/missing or the cypher query fails.
    """
    use_age = infer_config.pg_conf.get('use_age', True)
    if not use_age:
        raise DBException('Apache AGE is required for cross-host edge queries.')
    if not is_age_installed(conn):
        raise DBException('Apache AGE extension is required but not installed on the Postgres server')
    # edge_type is SQL-quoted via adapt() before interpolation into cypher.
    edge_type_safe = adapt(edge_type).getquoted().decode()
    cypher = f"MATCH (f:ObserveEntity)-[e:Relation]->(t:ObserveEntity) WHERE e.ts = {int(ts)} AND f.machine_id != t.machine_id AND e.type = {edge_type_safe} RETURN DISTINCT startNode(e).entity_id as from_id, endNode(e).entity_id as to_id, e.type as type, f.attrs as f_attrs, f.machine_id as f_machine, t.attrs as t_attrs, t.machine_id as t_machine"
    try:
        rows = run_cypher(conn, 'topo', cypher)
    except Exception as ex:
        raise DBException(ex) from ex
    res = []
    for r in rows:
        from_id = r.get('from_id')
        to_id = r.get('to_id')
        etype = r.get('type')
        # The cypher RETURN exposes no 'id' or 'ts' columns, so the original
        # code always produced id='None' and timestamp=None. Use the stable
        # "from->to:type" key (consistent with query_subgraph) and the queried
        # snapshot ts for the edge's endpoints.
        edge = TopoEdge(
            id=f"{from_id}->{to_id}:{etype}",
            type=etype,
            from_id=from_id,
            to_id=to_id
        )
        edge.from_node = TopoNode(
            id=from_id,
            entity_id=from_id,
            entity_type=None,
            machine_id=r.get('f_machine'),
            # NOTE(review): node ts is not returned by the query; the edge is
            # matched at e.ts == ts, so the snapshot ts is assumed here.
            timestamp=ts,
            raw_data=r.get('f_attrs') or {}
        )
        edge.to_node = TopoNode(
            id=to_id,
            entity_id=to_id,
            entity_type=None,
            machine_id=r.get('t_machine'),
            timestamp=ts,
            raw_data=r.get('t_attrs') or {}
        )
        res.append(edge)
    return res
