from typing import Any, Optional, Dict
import time
from tenacity import retry, stop_after_attempt, wait_exponential, retry_if_exception_type
import pymysql
from intelli_port.commons.config import load_settings
from intelli_port.commons.logging import json_logger
from intelli_port.commons.observability import db_query_latency_ms, db_errors_total, db_slow_queries_total, db_queries_total, vector_op_latency_ms, vector_errors_total, graph_op_latency_ms, graph_errors_total
from intelli_port.data_clients.redis import get_redis as _get_redis
from intelli_port.data_clients.milvus import get_milvus as _get_milvus
from intelli_port.data_clients.neo4j import get_neo4j as _get_neo4j
from intelli_port.data_clients.mysql import get_mysql as _get_mysql, get_mysql_read as _get_mysql_read


def mysql_client() -> Any:
    """Return the configured MySQL URI from settings (a connection string, not a live client)."""
    return load_settings().mysql_uri


def redis_client():
    """Return the Redis client provided by the data-clients factory."""
    return _get_redis()


def milvus_client():
    """Return the Milvus client provided by the data-clients factory."""
    return _get_milvus()


def neo4j_client():
    """Return the Neo4j driver provided by the data-clients factory."""
    return _get_neo4j()


def mysql_connect() -> Optional[pymysql.connections.Connection]:
    """Acquire a connection from the primary MySQL pool; None when no pool is configured."""
    pool = _get_mysql()
    return pool.acquire() if pool else None


def mysql_connect_read() -> Optional[pymysql.connections.Connection]:
    """Acquire a connection from the read pool, falling back to the primary pool."""
    read_pool = _get_mysql_read()
    if not read_pool:
        # No dedicated read pool configured — use the primary pool instead.
        return mysql_connect()
    return read_pool.acquire()


@retry(reraise=False, stop=stop_after_attempt(int(getattr(load_settings(), "http_retry_attempts", 3))), wait=wait_exponential(multiplier=0.2, min=0.2, max=float(getattr(load_settings(), "http_retry_backoff_max", 2.0))), retry=retry_if_exception_type((pymysql.err.OperationalError, pymysql.err.InterfaceError)), retry_error_callback=lambda _rs: {"rows": [], "affected": 0})
def mysql_execute_write(query: str, params: Optional[Any] = None, fetch: str = "all") -> Dict[str, Any]:
    """Execute a write statement on the primary pool and commit.

    Args:
        query: SQL statement to execute.
        params: Optional parameters passed to the driver.
        fetch: "one" to fetch a single row, "all" (default) to fetch all rows,
            anything else to skip fetching.

    Returns:
        ``{"rows": [...], "affected": <rowcount>}``; on failure (or when no
        pool is configured) ``{"rows": [], "affected": 0}``.

    Transient connection errors (OperationalError/InterfaceError) are
    re-raised so the tenacity decorator retries; once attempts are exhausted
    ``retry_error_callback`` returns the empty result, preserving the
    always-returns-a-dict contract. NOTE(review): the retry policy is
    resolved from settings once, at import time.
    """
    conn = None
    cur = None
    try:
        t0 = time.time()
        conn = mysql_connect()
        if conn is None:
            return {"rows": [], "affected": 0}
        cur = conn.cursor()
        try:
            # Best-effort per-session statement timeout; never fails the call.
            s = load_settings()
            max_ms = int(getattr(s, "mysql_max_exec_time_ms", 0))
            if max_ms > 0:
                cur.execute(f"SET SESSION MAX_EXECUTION_TIME = {max_ms}")
        except Exception:
            pass
        cur.execute(query, params or ())
        affected = cur.rowcount
        rows = []
        if fetch == "one":
            r = cur.fetchone()
            # `is not None`: a falsy-but-valid row such as (0,) must be kept.
            rows = [r] if r is not None else []
        elif fetch == "all":
            rows = cur.fetchall()
        conn.commit()
        cur.close()
        cur = None
        elapsed_ms = max(0, (time.time() - t0) * 1000)
        db_query_latency_ms.labels(name="mysql").observe(elapsed_ms)
        db_queries_total.labels(name="mysql").inc()
        try:
            # Slow-query accounting is best-effort and must never fail the call.
            s = load_settings()
            if elapsed_ms > int(getattr(s, "mysql_slow_threshold_ms", 500)):
                db_slow_queries_total.labels(name="mysql").inc()
                stmt_snippet = (query or "")[:120]
                json_logger().info("db.slow_query", extra={"service": "api", "latency_ms": round(elapsed_ms, 2), "statement": stmt_snippet})
        except Exception:
            pass
        _get_mysql().release(conn)
        conn = None  # prevent double-release if a later step raises
        return {"rows": rows, "affected": affected}
    except Exception as e:
        db_errors_total.labels(name="mysql", type=e.__class__.__name__).inc()
        db_queries_total.labels(name="mysql").inc()
        try:
            if cur is not None:
                cur.close()
        except Exception:
            pass
        try:
            if conn:
                # Roll back so a dirty connection is never returned to the pool.
                conn.rollback()
        except Exception:
            pass
        try:
            if conn:
                _get_mysql().release(conn)
        except Exception:
            pass
        if isinstance(e, (pymysql.err.OperationalError, pymysql.err.InterfaceError)):
            raise  # let tenacity retry transient connection failures
        return {"rows": [], "affected": 0}


@retry(reraise=False, stop=stop_after_attempt(int(getattr(load_settings(), "http_retry_attempts", 3))), wait=wait_exponential(multiplier=0.2, min=0.2, max=float(getattr(load_settings(), "http_retry_backoff_max", 2.0))), retry=retry_if_exception_type((pymysql.err.OperationalError, pymysql.err.InterfaceError)), retry_error_callback=lambda _rs: {"rows": [], "affected": 0})
def mysql_execute_read(query: str, params: Optional[Any] = None, fetch: str = "all") -> Dict[str, Any]:
    """Execute a read statement on the read pool (primary as fallback).

    Args:
        query: SQL statement to execute.
        params: Optional parameters passed to the driver.
        fetch: "one" to fetch a single row, "all" (default) to fetch all rows,
            anything else to skip fetching.

    Returns:
        ``{"rows": [...], "affected": <rowcount>}``; on failure (or when no
        pool is configured) ``{"rows": [], "affected": 0}``.

    Transient connection errors (OperationalError/InterfaceError) are
    re-raised so the tenacity decorator retries; once attempts are exhausted
    ``retry_error_callback`` returns the empty result, preserving the
    always-returns-a-dict contract. NOTE(review): the retry policy is
    resolved from settings once, at import time.
    """
    conn = None
    cur = None
    try:
        t0 = time.time()
        conn = mysql_connect_read()
        if conn is None:
            return {"rows": [], "affected": 0}
        cur = conn.cursor()
        try:
            # Best-effort per-session statement timeout, mirroring
            # mysql_execute_write for consistency; never fails the call.
            s = load_settings()
            max_ms = int(getattr(s, "mysql_max_exec_time_ms", 0))
            if max_ms > 0:
                cur.execute(f"SET SESSION MAX_EXECUTION_TIME = {max_ms}")
        except Exception:
            pass
        cur.execute(query, params or ())
        affected = cur.rowcount
        rows = []
        if fetch == "one":
            r = cur.fetchone()
            # `is not None`: a falsy-but-valid row such as (0,) must be kept.
            rows = [r] if r is not None else []
        elif fetch == "all":
            rows = cur.fetchall()
        cur.close()
        cur = None
        elapsed_ms = max(0, (time.time() - t0) * 1000)
        db_query_latency_ms.labels(name="mysql").observe(elapsed_ms)
        db_queries_total.labels(name="mysql").inc()
        try:
            # Slow-query accounting is best-effort and must never fail the call.
            s = load_settings()
            if elapsed_ms > int(getattr(s, "mysql_slow_threshold_ms", 500)):
                db_slow_queries_total.labels(name="mysql").inc()
                stmt_snippet = (query or "")[:120]
                json_logger().info("db.slow_query", extra={"service": "api", "latency_ms": round(elapsed_ms, 2), "statement": stmt_snippet})
        except Exception:
            pass
        # Release to the same pool the connection was acquired from
        # (read pool when configured, otherwise the primary pool).
        pool = _get_mysql_read() or _get_mysql()
        pool.release(conn)
        conn = None  # prevent double-release if a later step raises
        return {"rows": rows, "affected": affected}
    except Exception as e:
        db_errors_total.labels(name="mysql", type=e.__class__.__name__).inc()
        db_queries_total.labels(name="mysql").inc()
        try:
            if cur is not None:
                cur.close()
        except Exception:
            pass
        try:
            if conn:
                # Roll back so a dirty connection is never returned to the pool.
                conn.rollback()
        except Exception:
            pass
        try:
            if conn:
                pool = _get_mysql_read() or _get_mysql()
                pool.release(conn)
        except Exception:
            pass
        if isinstance(e, (pymysql.err.OperationalError, pymysql.err.InterfaceError)):
            raise  # let tenacity retry transient connection failures
        return {"rows": [], "affected": 0}


class mysql_transaction:
    """Context manager yielding a cursor on a primary-pool connection.

    Commits on clean exit, rolls back when the body raised, then closes the
    cursor and returns the connection to the pool. ``__enter__`` yields
    ``None`` when no pool is available, so callers must check the cursor.
    Cleanup is best-effort: failures during commit/rollback/close/release
    are swallowed, matching the module's error-handling style.
    """

    def __init__(self, isolation: str | None = None, read_only: bool = False):
        # isolation: e.g. "READ COMMITTED"; interpolated into a SET SESSION
        # statement in __enter__ — callers must not pass untrusted input here.
        self.conn = None
        self.cur = None
        self.isolation = isolation
        self.read_only = read_only

    def __enter__(self):
        self.conn = mysql_connect()
        if not self.conn:
            return None
        self.cur = self.conn.cursor()
        try:
            # Session setup is best-effort; a failure here does not abort entry.
            if self.isolation:
                self.cur.execute(f"SET SESSION TRANSACTION ISOLATION LEVEL {self.isolation}")
            if self.read_only:
                self.cur.execute("SET SESSION TRANSACTION READ ONLY")
            s = load_settings()
            max_ms = int(getattr(s, "mysql_max_exec_time_ms", 0))
            if max_ms > 0:
                self.cur.execute(f"SET SESSION MAX_EXECUTION_TIME = {max_ms}")
        except Exception:
            pass
        return self.cur

    def __exit__(self, exc_type, exc, tb):
        if self.cur is not None:
            try:
                if exc_type is None:
                    self.conn.commit()
                else:
                    self.conn.rollback()
            except Exception:
                # A failed commit must not pool a dirty connection:
                # best-effort rollback before releasing.
                try:
                    self.conn.rollback()
                except Exception:
                    pass
            finally:
                # Close the cursor even when commit/rollback raised
                # (previously skipped on failure, leaking the cursor).
                try:
                    self.cur.close()
                except Exception:
                    pass
        try:
            if self.conn:
                _get_mysql().release(self.conn)
        except Exception:
            pass


def milvus_exec(op: str, **kwargs) -> Any:
    """Invoke method *op* on the Milvus client with **kwargs.

    Returns the operation's result, or None when no client is configured or
    the call raised (the error is counted in vector_errors_total). Latency
    is recorded only for successful calls.
    """
    client = _get_milvus()
    if not client:
        return None
    started = time.time()
    try:
        result = getattr(client, op)(**kwargs)
    except Exception as exc:
        vector_errors_total.labels(name="milvus", op=op, type=exc.__class__.__name__).inc()
        return None
    vector_op_latency_ms.labels(name="milvus", op=op).observe(max(0, (time.time() - started) * 1000))
    return result


def neo4j_exec(query: str, params: Optional[Dict[str, Any]] = None) -> Any:
    """Run a Cypher *query* in a fresh Neo4j session and return result.data().

    Returns None when no driver is configured or the call raised (the error
    is counted in graph_errors_total). Latency is recorded only for
    successful calls.
    """
    driver = _get_neo4j()
    if not driver:
        return None
    started = time.time()
    try:
        with driver.session() as session:
            records = session.run(query, params or {}).data()
    except Exception as exc:
        graph_errors_total.labels(name="neo4j", op="run", type=exc.__class__.__name__).inc()
        return None
    graph_op_latency_ms.labels(name="neo4j", op="run").observe(max(0, (time.time() - started) * 1000))
    return records