from typing import List
import json

import psycopg2
import psycopg2.extras
from psycopg2.extensions import adapt
from psycopg2.pool import ThreadedConnectionPool
from contextlib import contextmanager

from spider.util import logger
from spider.dao import BaseDao
from spider.dao import ObserveEntityDao
from spider.dao import RelationDao
from spider.entity_mgt import ObserveEntity
from spider.entity_mgt import Relation
from spider.exceptions import StorageConnectionException

_TIMESTAMP_TABLE_NAME = 'timestamps'
_OBSERVE_ENTITY_TABLE_NAME = 'observe_entities'
_EDGE_TABLE_NAME_PREFIX = 'edges'


def _get_edge_table_name(edge_type, ts):
    return '{}_{}'.format(edge_type, ts)


class PgBaseDaoImpl(BaseDao):
    """Base PostgreSQL DAO: owns the primary connection, an optional thread pool,
    and bootstraps the base tables/indexes (plus the AGE extension when available).
    """

    def __init__(self, db_conf):
        """
        Args:
            db_conf: mapping with keys:
                url: libpq DSN used for all connections.
                use_age: if True, the Apache AGE extension is mandatory (default False).
                minconn / maxconn: bounds for the optional pool (defaults 1 / 5).

        Raises:
            StorageConnectionException: when the database is unreachable, or when
                use_age is set and the AGE extension cannot be enabled.
        """
        self.dsn = db_conf.get('url')
        self.conn = None
        self.use_age = db_conf.get('use_age', False)
        self.pool = None
        self.minconn = db_conf.get('minconn', 1)
        self.maxconn = db_conf.get('maxconn', 5)
        self.init_connection()

    def init_connection(self):
        """Open the primary connection and pool, then bootstrap schema and AGE."""
        if self.conn is None:
            try:
                self.conn = psycopg2.connect(self.dsn)
                # autocommit so DDL and single-statement writes need no explicit commit
                self.conn.autocommit = True
            except Exception as ex:
                raise StorageConnectionException(ex)
        # init pool if asked; a pool failure is non-fatal (we fall back to self.conn)
        if self.minconn and self.maxconn and self.pool is None:
            try:
                self.pool = ThreadedConnectionPool(self.minconn, self.maxconn, dsn=self.dsn)
            except Exception as ex:
                logger.logger.warning('Failed to init PG pool: {}'.format(ex))
        self._create_schema()
        self._init_age()

    def _create_schema(self):
        """Create base tables and query indexes if missing (best-effort, logged)."""
        with self.conn.cursor() as cur:
            try:
                cur.execute(f"CREATE TABLE IF NOT EXISTS { _TIMESTAMP_TABLE_NAME } (ts bigint PRIMARY KEY)")
                cur.execute(f"""
                CREATE TABLE IF NOT EXISTS { _OBSERVE_ENTITY_TABLE_NAME } (
                    entity_id text,
                    ts bigint,
                    type text,
                    level text,
                    machine_id text,
                    attrs jsonb,
                    PRIMARY KEY (entity_id, ts)
                ) PARTITION BY RANGE (ts)
                """)
                cur.execute("""
                CREATE TABLE IF NOT EXISTS edges (
                    id bigserial PRIMARY KEY,
                    from_id text,
                    to_id text,
                    type text,
                    layer text,
                    ts bigint,
                    attrs jsonb
                )
                """)
                # indexes useful for time-window and per-entity queries
                index_ddls = (
                    f"CREATE INDEX IF NOT EXISTS idx_obs_ts ON { _OBSERVE_ENTITY_TABLE_NAME } (ts)",
                    f"CREATE INDEX IF NOT EXISTS idx_obs_type_ts ON { _OBSERVE_ENTITY_TABLE_NAME } (type, ts)",
                    f"CREATE INDEX IF NOT EXISTS idx_obs_machine_ts ON { _OBSERVE_ENTITY_TABLE_NAME } (machine_id, ts)",
                    f"CREATE INDEX IF NOT EXISTS idx_obs_entity_id ON { _OBSERVE_ENTITY_TABLE_NAME } (entity_id)",
                    f"CREATE INDEX IF NOT EXISTS idx_obs_attrs_gin ON { _OBSERVE_ENTITY_TABLE_NAME } USING GIN (attrs)",
                    "CREATE INDEX IF NOT EXISTS idx_edges_ts ON edges (ts)",
                    "CREATE INDEX IF NOT EXISTS idx_edges_from_ts ON edges (from_id, ts)",
                    "CREATE INDEX IF NOT EXISTS idx_edges_to_ts ON edges (to_id, ts)",
                    "CREATE INDEX IF NOT EXISTS idx_edges_type_ts ON edges (type, ts)",
                    "CREATE INDEX IF NOT EXISTS idx_edges_from_to_type_ts ON edges (from_id, to_id, type, ts)",
                    "CREATE INDEX IF NOT EXISTS idx_edges_to_type_ts ON edges (to_id, type, ts)",
                )
                for ddl in index_ddls:
                    cur.execute(ddl)
            except Exception as ex:
                logger.logger.error('Failed to create tables/indexes: {}'.format(ex))

    def _init_age(self):
        """Attempt to enable the AGE extension and create the 'topo' graph.

        Best-effort unless self.use_age is set, in which case failure to enable
        AGE raises StorageConnectionException.
        """
        try:
            with self.conn.cursor() as cur:
                # check if AGE extension exists
                try:
                    cur.execute("SELECT count(*) FROM pg_extension WHERE extname = 'age'")
                    cnt = cur.fetchone()[0]
                except Exception:
                    cnt = 0
                if cnt == 0:
                    # attempt to create extension if possible
                    try:
                        cur.execute("CREATE EXTENSION IF NOT EXISTS age")
                        try:
                            cur.execute("SELECT create_graph('topo')")
                        except Exception:
                            # graph may already exist
                            pass
                    except Exception as ex:
                        logger.logger.debug('AGE extension not available or cannot be created: {}'.format(ex))
                        if self.use_age:
                            raise StorageConnectionException('AGE extension required but not available: {}'.format(ex))
        except StorageConnectionException:
            # mandatory-AGE failure must reach the caller
            raise
        except Exception as ex:
            # probing is best-effort; leave a trace instead of failing startup
            logger.logger.debug('AGE probe failed: {}'.format(ex))

    def _add_timestamp(self, ts_sec) -> bool:
        """Record ts_sec in the timestamps table.

        Returns:
            True on success, False on any storage error (logged).
        """
        conn = self.pool.getconn() if self.pool else self.conn
        try:
            with conn.cursor() as cur:
                cur.execute(f"CREATE TABLE IF NOT EXISTS { _TIMESTAMP_TABLE_NAME } (ts bigint PRIMARY KEY)")
                cur.execute(f"INSERT INTO { _TIMESTAMP_TABLE_NAME } (ts) VALUES (%s) ON CONFLICT DO NOTHING",
                            (ts_sec,))
            if conn is not self.conn:
                # pooled connections are not autocommit; persist the insert
                conn.commit()
            return True
        except Exception as ex:
            logger.logger.error(ex)
            return False
        finally:
            if conn is not self.conn:
                # always return the pooled connection, even on error
                self.pool.putconn(conn)


class PgObserveEntityDaoImpl(PgBaseDaoImpl, ObserveEntityDao):
    """DAO persisting ObserveEntity batches into PostgreSQL (mirrored to AGE when present)."""

    def __init__(self, db_conf):
        super().__init__(db_conf)

    def add_all(self, ts_sec, observe_entities: List[ObserveEntity]) -> bool:
        """Insert one snapshot of entities at timestamp ts_sec.

        Args:
            ts_sec: snapshot timestamp in seconds.
            observe_entities: entities to persist; an empty batch is a no-op success.

        Returns:
            True on success, False on any storage error (logged).
        """
        if not observe_entities:
            return True

        if not self._add_timestamp(ts_sec):
            return False

        if not self._ensure_entity_partition(ts_sec):
            return False

        rows = []
        for e in observe_entities:
            attrs = e.attrs if hasattr(e, 'attrs') else {}
            machine_id = attrs.get('machine_id') if attrs else None
            rows.append((e.id, ts_sec, e.type, e.level, machine_id, json.dumps(attrs)))

        # NOTE: the insert must run regardless of whether a pool is configured;
        # previously it was only executed on the no-pool code path.
        conn = self.pool.getconn() if self.pool else self.conn
        try:
            with conn.cursor() as cur:
                psycopg2.extras.execute_values(
                    cur,
                    f"INSERT INTO { _OBSERVE_ENTITY_TABLE_NAME } (entity_id, ts, type, level, machine_id, attrs) "
                    f"VALUES %s ON CONFLICT (entity_id, ts) DO UPDATE SET attrs = EXCLUDED.attrs",
                    rows,
                    template=None,
                    page_size=100)
            if conn is not self.conn:
                # pooled connections are not autocommit; persist the batch
                conn.commit()
        except Exception as ex:
            logger.logger.error(ex)
            return False
        finally:
            if conn is not self.conn:
                # always return the pooled connection, even on error
                self.pool.putconn(conn)

        # optionally mirror into the AGE graph if installed (best-effort)
        self._merge_age_nodes(ts_sec, observe_entities)

        logger.logger.debug('Total {} documents created.'.format(len(rows)))
        return True

    def _ensure_entity_partition(self, ts_sec) -> bool:
        """Create the partitioned entity table and the daily partition covering ts_sec.

        Returns:
            True on success (partition-creation errors are ignored), False when
            the parent table cannot be created.
        """
        from datetime import datetime, timezone
        with self.conn.cursor() as cur:
            try:
                # keep this DDL identical to the bootstrap definition — including
                # PARTITION BY RANGE — so the daily partition below can attach
                cur.execute(f"""
                CREATE TABLE IF NOT EXISTS { _OBSERVE_ENTITY_TABLE_NAME } (
                    entity_id text,
                    ts bigint,
                    type text,
                    level text,
                    machine_id text,
                    attrs jsonb,
                    PRIMARY KEY (entity_id, ts)
                ) PARTITION BY RANGE (ts)
                """)
                # compute the UTC day window [start_ts, end_ts) containing ts_sec
                dt = datetime.fromtimestamp(ts_sec, tz=timezone.utc)
                start_ts = int(datetime(dt.year, dt.month, dt.day, tzinfo=timezone.utc).timestamp())
                end_ts = start_ts + 86400
                part_name = '{}_{}'.format(_OBSERVE_ENTITY_TABLE_NAME, dt.strftime('%Y%m%d'))
                try:
                    cur.execute(f"CREATE TABLE IF NOT EXISTS {part_name} "
                                f"PARTITION OF { _OBSERVE_ENTITY_TABLE_NAME } "
                                f"FOR VALUES FROM ({start_ts}) TO ({end_ts})")
                except Exception:
                    # ignore partition creation errors
                    pass
            except Exception as ex:
                logger.logger.error(ex)
                return False
        return True

    def _merge_age_nodes(self, ts_sec, observe_entities):
        """Best-effort MERGE of entities as AGE graph nodes; never raises."""
        try:
            with self.conn.cursor() as cur:
                cur.execute("SELECT count(*) FROM pg_extension WHERE extname = 'age'")
                if cur.fetchone()[0] == 0:
                    return
                # create graph if not exists
                try:
                    cur.execute("SELECT create_graph('topo')")
                except Exception:
                    pass
                for e in observe_entities:
                    attrs_json = json.dumps(e.attrs or {})
                    machine_id_val = (e.attrs.get('machine_id') if e.attrs else '')
                    # cypher() cannot take parameters inside the query text, so
                    # values are SQL-quoted via psycopg2's adapt() before interpolation
                    eid_q = adapt(e.id).getquoted().decode()
                    attrs_q = adapt(attrs_json).getquoted().decode()
                    type_q = adapt(e.type).getquoted().decode()
                    level_q = adapt(e.level).getquoted().decode()
                    machine_q = adapt(machine_id_val).getquoted().decode()
                    cy = (f"MERGE (n:ObserveEntity {{entity_id: {eid_q}, ts: {int(ts_sec)}}}) "
                          f"SET n.type = {type_q}, n.level = {level_q}, "
                          f"n.machine_id = {machine_q}, n.attrs = {attrs_q}")
                    try:
                        cur.execute("SELECT * FROM cypher(%s, %s)", ('topo', cy))
                    except Exception:
                        pass
        except Exception:
            pass


class PgRelationDaoImpl(PgBaseDaoImpl, RelationDao):
    """DAO persisting Relation batches as rows in the generic 'edges' table."""

    def __init__(self, db_conf):
        super().__init__(db_conf)

    def add_all(self, ts_sec, relations: List[Relation]) -> bool:
        """Insert one snapshot of relations at timestamp ts_sec.

        Args:
            ts_sec: snapshot timestamp in seconds.
            relations: relations to persist; an empty batch is a no-op success.

        Returns:
            True on success, False on any storage error (logged).
        """
        if not relations:
            return True

        if not self._add_timestamp(ts_sec):
            return False

        if not self._ensure_edges_table():
            return False

        # batch inserts; attrs is currently always empty
        rows = [(r.sub_entity.id, r.obj_entity.id, r.type, r.layer, ts_sec, json.dumps({}))
                for r in relations]

        # NOTE: the insert must run regardless of whether a pool is configured;
        # previously it was only executed on the no-pool code path.
        conn = self.pool.getconn() if self.pool else self.conn
        try:
            with conn.cursor() as cur:
                psycopg2.extras.execute_values(
                    cur,
                    "INSERT INTO edges (from_id, to_id, type, layer, ts, attrs) VALUES %s",
                    rows,
                    template=None,
                    page_size=100)
            if conn is not self.conn:
                # pooled connections are not autocommit; persist the batch
                conn.commit()
        except Exception as ex:
            logger.logger.error(ex)
            return False
        finally:
            if conn is not self.conn:
                # always return the pooled connection, even on error
                self.pool.putconn(conn)

        # optionally mirror into the AGE graph if installed (best-effort)
        self._merge_age_edges(ts_sec, relations)

        logger.logger.debug('Total {} edges created.'.format(len(rows)))
        return True

    def _ensure_edges_table(self) -> bool:
        """Create the generic edges table if missing; True on success."""
        with self.conn.cursor() as cur:
            try:
                cur.execute("""
                CREATE TABLE IF NOT EXISTS edges (
                    id bigserial PRIMARY KEY,
                    from_id text,
                    to_id text,
                    type text,
                    layer text,
                    ts bigint,
                    attrs jsonb
                )
                """)
            except Exception as ex:
                logger.logger.error(ex)
                return False
        return True

    def _merge_age_edges(self, ts_sec, relations):
        """Best-effort mirroring of relations into the AGE graph; never raises."""
        try:
            with self.conn.cursor() as cur:
                cur.execute("SELECT count(*) FROM pg_extension WHERE extname = 'age'")
                if cur.fetchone()[0] == 0:
                    return
                for r in relations:
                    sid_q = adapt(r.sub_entity.id).getquoted().decode()
                    oid_q = adapt(r.obj_entity.id).getquoted().decode()
                    type_q = adapt(r.type).getquoted().decode()
                    # use generic relationship label 'Relation' with a 'type' property
                    # instead of dynamic label names for safety
                    cy = (f"MATCH (a {{entity_id: {sid_q}, ts: {int(ts_sec)}}}), "
                          f"(b {{entity_id: {oid_q}, ts: {int(ts_sec)}}}) "
                          f"MERGE (a)-[e:Relation {{ts: {int(ts_sec)}, type: {type_q}}}]->(b)")
                    try:
                        cur.execute("SELECT * FROM cypher(%s, %s)", ('topo', cy))
                    except Exception:
                        pass
        except Exception:
            pass
