from contextlib import contextmanager
from functools import wraps
import itertools
import logging
import time
import uuid

import psycopg2
import psycopg2.extras
import psycopg2.extensions
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT, ISOLATION_LEVEL_READ_COMMITTED, ISOLATION_LEVEL_REPEATABLE_READ
from psycopg2.pool import PoolError
from werkzeug import urls

# Make psycopg2 hand text columns back as unicode `str` objects.
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)

_logger = logging.getLogger(__name__)

def unbuffer(symb, cr):
    # psycopg2 typecaster signature: `symb` is the raw wire value (or None
    # for SQL NULL), `cr` is the fetching cursor.
    # NOTE(review): body is stubbed out — always returns None. Presumably it
    # should decode buffer/bytea values to str; confirm against the original.
    pass

def undecimalize(symb, cr):
    """psycopg2 typecaster for numeric columns (registered below for OIDs
    700=float4, 701=float8, 1700=numeric).

    :param symb: textual value received from PostgreSQL, or None for SQL NULL
    :param cr: the cursor performing the fetch (unused, required by psycopg2)
    :return: the value converted to float, or None for NULL

    Previously a stub that returned None unconditionally, which silently
    turned every float/numeric column read from the database into None.
    """
    if symb is None:
        return None
    return float(symb)

psycopg2.extensions.register_type(psycopg2.extensions.new_type((700, 701, 1700,), 'float', undecimalize))

from . import tools
from .tools.func import frame_codeinfo
from .tools import pycompat, ustr

from .tools import parse_version as pv
# NOTE(review): compatibility branch for psycopg2 older than 2.7 — the body
# is stubbed out; presumably it registered extra adapters. Confirm against
# the original before relying on old psycopg2 versions.
if pv(psycopg2.__version__) < pv('2.7'):
    pass

from datetime import timedelta
import threading
from inspect import currentframe

import re
# Extract the main table name out of a query, for the SQL statistics log.
re_from = re.compile('.* from "?([a-zA-Z_0-9]+)"? .*$')
re_into = re.compile('.* into "?([a-zA-Z_0-9]+)"? .*$')
# Process-wide count of executed queries (updated in Cursor._close).
sql_counter = 0

class Cursor(object):
    """Wrapper around a psycopg2 cursor taken from a ConnectionPool.

    Collects per-table SQL timing statistics when DEBUG logging is enabled,
    and guarantees the underlying connection is rolled back and handed back
    to the pool when the cursor is closed.
    """
    IN_MAX = 1000  # decent limit on size of IN queries - guideline = Oracle limit

    def check(f):
        """Decorator: refuse to run `f` once the cursor has been closed."""
        @wraps(f)
        def wrapper(self, *args, **kwargs):
            if self._closed:
                msg = 'Unable to use a closed cursor.'
                if self.__closer:
                    msg += ' It was closed at %s, line %s' % self.__closer
                raise psycopg2.OperationalError(msg)
            return f(self, *args, **kwargs)
        return wrapper

    def __init__(self, pool, dbname, dsn, serialized=True):
        """Borrow a connection to `dsn` from `pool` and open a cursor on it.

        :param pool: ConnectionPool to borrow the connection from
        :param dbname: database name; used on close to decide pool retention
        :param dsn: connection parameters forwarded to the pool
        :param serialized: request REPEATABLE READ isolation when True
        """
        self.sql_from_log = {}
        self.sql_into_log = {}

        # Statistics are only collected when DEBUG logging is active.
        self.sql_log = _logger.isEnabledFor(logging.DEBUG)

        self.sql_log_count = 0

        # Mark closed *before* borrowing: if borrow() raises, __del__ must
        # not complain about an unclosed cursor.
        self._closed = True

        self.__pool = pool
        self.dbname = dbname

        self._serialized = serialized

        self._cnx = pool.borrow(dsn)
        self._obj = self._cnx.cursor()
        if self.sql_log:
            self.__caller = frame_codeinfo(currentframe(), 2)
        else:
            self.__caller = False
        self._closed = False  # real initialisation value
        self.autocommit(False)
        self.__closer = False

        self._default_log_exceptions = True

        self.cache = {}

        self._event_handlers = {'commit': [], 'rollback': []}

    def __build_dict(self, row):
        # NOTE(review): stubbed out — presumably maps a result row to a dict
        # keyed by column name; confirm against the original implementation.
        pass

    def dictfetchone(self):
        # NOTE(review): stubbed out (always returns None).
        pass

    def dictfetchmany(self, size):
        # NOTE(review): stubbed out (always returns None).
        pass

    def dictfetchall(self):
        # NOTE(review): stubbed out (always returns None).
        pass

    def __del__(self):
        # Safety net: warn about (and leak-close) cursors that were not
        # closed explicitly, so pool misuse stays visible in the logs.
        if not self._closed and not self._cnx.closed:
            msg = "Cursor not closed explicitly\n"
            if self.__caller:
                msg += "Cursor was created at %s:%s" % self.__caller
            else:
                msg += "Please enable sql debugging to trace the caller."
            _logger.warning(msg)
            self._close(True)

    @check
    def execute(self, query, params=None, log_exceptions=None):
        """Execute `query` on the underlying cursor, timing it for the stats.

        :param log_exceptions: when not None, overrides
            self._default_log_exceptions for error logging on failed queries
        :raises: whatever the underlying psycopg2 cursor raises
        """
        if params and not isinstance(params, (tuple, list, dict)):
            # NOTE(review): stubbed out — presumably rejects unsafe parameter
            # types; confirm against the original implementation.
            pass

        if self.sql_log:
            pass  # NOTE(review): query echo stubbed out.

        now = time.time()

        try:
            params = params or None
            res = self._obj.execute(query, params)
        except Exception as e:
            # Re-raise instead of swallowing (the stub did `pass` here, which
            # left `res` unbound and masked the DB error with a NameError).
            if self._default_log_exceptions if log_exceptions is None else log_exceptions:
                _logger.error("bad query: %s\nERROR: %s", query, e)
            raise

        # Per-thread accounting used by request instrumentation, if present.
        self.sql_log_count += 1
        delay = (time.time() - now)
        if hasattr(threading.current_thread(), 'query_count'):
            threading.current_thread().query_count += 1
            threading.current_thread().query_time += delay

        if self.sql_log:
            pass  # NOTE(review): per-table timing aggregation stubbed out.

        return res

    def split_for_in_conditions(self, ids, size=None):
        # NOTE(review): stubbed out — presumably splits `ids` into chunks of
        # at most `size` (default IN_MAX) for IN (...) clauses; confirm.
        pass

    def print_log(self):
        """Dump the accumulated per-table SQL statistics to the debug log."""
        global sql_counter

        if not self.sql_log:
            return

        def process(kind):
            sqllogs = {'from': self.sql_from_log, 'into': self.sql_into_log}
            total = 0
            if sqllogs[kind]:
                sqllogitems = sqllogs[kind].items()
                _logger.debug("SQL LOG %s:", kind)
                for r in sorted(sqllogitems, key=lambda k: k[1]):
                    delay = timedelta(microseconds=r[1][1])
                    _logger.debug("table: %s: %s/%s", r[0], delay, r[1][0])
                    total += r[1][1]
                sqllogs[kind].clear()
            total = timedelta(microseconds=total)
            _logger.debug("SUM %s:%s/%d [%d]", kind, total, self.sql_log_count, sql_counter)
            sqllogs[kind].clear()

        process('from')
        process('into')
        self.sql_log_count = 0
        self.sql_log = False

    @check
    def close(self):
        """Close the cursor and return its connection to the pool."""
        return self._close(False)

    def _close(self, leak=False):
        """Tear down the cursor; roll back and release the connection.

        :param leak: when True, mark the connection as leaked instead of
            giving it back to the pool (used by __del__).
        """
        global sql_counter

        if not self._obj:
            return

        del self.cache

        if self.sql_log:
            pass  # NOTE(review): closing-time log hook stubbed out.

        # Maintain the process-wide query counter even outside debug mode.
        sql_counter += self.sql_log_count

        self.print_log()

        self._obj.close()

        # Remove the cursor reference so later use fails loudly and memory
        # is reclaimed promptly.
        del self._obj
        self._closed = True

        # Clean the underlying connection.
        self._cnx.rollback()

        if leak:
            self._cnx.leaked = True
        else:
            chosen_template = tools.config['db_template']
            templates_list = tuple(set(['template0', 'template1', 'postgres', chosen_template]))
            # Connections to template databases are not worth keeping around.
            keep_in_pool = self.dbname not in templates_list
            self.__pool.give_back(self._cnx, keep_in_pool=keep_in_pool)

    @check
    def autocommit(self, on):
        """Switch the connection isolation level for (non-)autocommit mode."""
        if on:
            # Was a stub (`pass`), which left `isolation_level` unbound and
            # made autocommit(True) fail with a NameError.
            isolation_level = ISOLATION_LEVEL_AUTOCOMMIT
        else:
            # Serialized cursors use REPEATABLE READ, others READ COMMITTED.
            isolation_level = (
                ISOLATION_LEVEL_REPEATABLE_READ
                if self._serialized
                else ISOLATION_LEVEL_READ_COMMITTED
            )
        self._cnx.set_isolation_level(isolation_level)

    @check
    def after(self, event, func):
        # NOTE(review): stubbed out — presumably registers `func` in
        # self._event_handlers[event] ('commit' or 'rollback'); confirm.
        pass

    def _pop_event_handlers(self):
        # NOTE(review): stubbed out.
        pass

    @check
    def commit(self):
        # NOTE(review): stubbed out — should commit self._cnx and run the
        # registered 'commit' handlers; confirm against the original.
        pass

    @check
    def rollback(self):
        # NOTE(review): stubbed out — should roll back self._cnx and run the
        # registered 'rollback' handlers; confirm against the original.
        pass

    def __enter__(self):
        # NOTE(review): stubbed out — should return self for `with` blocks.
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        # NOTE(review): stubbed out — presumably commits on success, rolls
        # back on error, then closes; confirm against the original.
        pass

    @contextmanager
    @check
    def savepoint(self):
        # NOTE(review): stubbed out — as written this is not a generator, so
        # entering the context manager raises TypeError. The original wraps a
        # SAVEPOINT/RELEASE/ROLLBACK TO sequence; confirm before use.
        pass

    @check
    def __getattr__(self, name):
        # Delegate everything else (fetchone, rowcount, mogrify, ...) to the
        # real psycopg2 cursor, still guarded by the closed-cursor check.
        return getattr(self._obj, name)

    @property
    def closed(self):
        # NOTE(review): stubbed out (returns None rather than a boolean).
        pass

class TestCursor(object):
    """Cursor-like stub used by the test framework.

    NOTE(review): every method body is stubbed out (`pass`); presumably this
    proxies a real cursor shared under `lock`, confining each test's work to
    a savepoint — confirm against the original implementation.
    """
    # Shared counter, presumably for generating unique savepoint names.
    _savepoint_seq = itertools.count()

    def __init__(self, cursor, lock):
        pass

    def close(self):
        pass

    def autocommit(self, on):
        pass

    def commit(self):
        pass

    def rollback(self):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        pass

    def __getattr__(self, name):
        pass

class LazyCursor(object):
    """Stub of a cursor proxy that would open a real cursor on first use.

    NOTE(review): all bodies are stubbed out (`pass`); presumably attribute
    access lazily opens a cursor on the database named `dbname` (or the
    current one) — confirm against the original implementation.
    """
    def __init__(self, dbname=None):
        pass

    @property
    def dbname(self):
        pass

    def __getattr__(self, name):
        pass

    def __enter__(self):
        pass

    def __exit__(self, exc_type, exc_value, traceback):
        pass

class PsycoConnection(psycopg2.extensions.connection):
    """Marker subclass used as connection_factory in ConnectionPool.borrow();
    no behaviour is added here."""
    pass

class ConnectionPool(object):
    """Thread-safe pool of psycopg2 connections, capped at `maxconn`.

    Connections are stored as (connection, used) pairs; `used` is True while
    a Cursor has the connection borrowed.
    """

    def locked(fun):
        """Decorator: serialize access to the pool state via self._lock."""
        @wraps(fun)
        def _locked(self, *args, **kwargs):
            with self._lock:
                return fun(self, *args, **kwargs)
        return _locked

    def __init__(self, maxconn=64):
        """Create an empty pool holding at most `maxconn` connections.

        Previously a stub (`pass`), which left `_connections`, `_maxconn`
        and `_lock` undefined and made every pool method crash.
        """
        self._connections = []          # list of (connection, used) pairs
        self._maxconn = max(maxconn, 1)  # never allow a zero-sized pool
        self._lock = threading.Lock()

    def __repr__(self):
        # Must return a str: _debug() formats the pool with %r, and a
        # None-returning __repr__ (the previous stub) raises TypeError.
        used = sum(1 for _, in_use in self._connections if in_use)
        return 'ConnectionPool(used=%d/count=%d/max=%d)' % (
            used, len(self._connections), self._maxconn)

    def _debug(self, msg, *args):
        _logger.debug(('%r ' + msg), self, *args)

    @locked
    def borrow(self, connection_info):
        """Return a connection for `connection_info`, creating it if needed.

        :raises psycopg2.Error: when the server cannot be reached
        """
        # NOTE(review): reuse of pooled connections is stubbed out — both
        # scan loops below are no-ops, and the maxconn cap is not enforced;
        # every call currently opens a fresh connection. Confirm against the
        # original implementation before relying on pooling behaviour.
        for i, (cnx, _) in tools.reverse_enumerate(self._connections):
            pass

        for i, (cnx, used) in enumerate(self._connections):
            pass

        if len(self._connections) >= self._maxconn:
            pass

        try:
            result = psycopg2.connect(
                connection_factory=PsycoConnection,
                **connection_info)
        except psycopg2.Error:
            _logger.info('Connection to the database failed')
            raise
        result._original_dsn = connection_info
        self._connections.append((result, True))
        self._debug('Create new connection')
        return result

    @locked
    def give_back(self, connection, keep_in_pool=True):
        """Release `connection` back to the pool (or close it).

        :raises PoolError: if `connection` was not borrowed from this pool
        """
        self._debug('Give back connection to %r', connection.dsn)
        for i, (cnx, _) in enumerate(self._connections):
            if cnx is connection:
                self._connections.pop(i)
                if keep_in_pool:
                    # Re-register as unused so it can be recycled. The stub
                    # dropped the connection here, leaving it neither pooled
                    # nor closed.
                    self._connections.append((cnx, False))
                    self._debug('Put connection to %r in pool', cnx.dsn)
                else:
                    self._debug('Forgot connection to %r', cnx.dsn)
                    cnx.close()
                break
        else:
            raise PoolError('This connection does not belong to the pool')

    @locked
    def close_all(self, dsn=None):
        # NOTE(review): stubbed out — presumably closes every pooled
        # connection (optionally only those matching `dsn`); confirm.
        pass

class Connection(object):
    """A lightweight handle on a database, from which cursors are opened."""

    def __init__(self, pool, dbname, dsn):
        self.__pool = pool
        self.dbname = dbname
        self.dsn = dsn

    def cursor(self, serialized=True):
        """Open a new Cursor on this database (serialized by default)."""
        kind = 'serialized ' if serialized else ''
        _logger.debug('create %scursor to %r', kind, self.dsn)
        return Cursor(self.__pool, self.dbname, self.dsn, serialized=serialized)

    # Historical alias: a plain cursor() is already serialized by default.
    serialized_cursor = cursor

    def __bool__(self):
        # A Connection has no meaningful truth value; refuse the question.
        raise NotImplementedError()
    __nonzero__ = __bool__


def connection_info_for(db_or_uri):
    """Parse `db_or_uri` into a (db_name, connection_params) pair.

    :param db_or_uri: a plain database name, or a full
        ``postgresql://``/``postgres://`` URI
    :return: (db_name, dict of psycopg2 connection parameters)

    The URI branch was previously a no-op, so a URI fell through and was
    treated as a literal database name — which also made db_connect's
    ``allow_uri`` guard unreachable.
    """
    if db_or_uri.startswith(('postgresql://', 'postgres://')):
        # Full URI: pass it through verbatim as a dsn and derive a display
        # database name from its components (path, else user, else host).
        from urllib.parse import urlsplit  # stdlib; keeps this branch self-contained
        us = urlsplit(db_or_uri)
        if len(us.path) > 1:
            db_name = us.path[1:]  # strip the leading '/'
        elif us.username:
            db_name = us.username
        else:
            db_name = us.hostname
        return db_name, {'dsn': db_or_uri}

    # Plain database name: build keyword parameters from the configuration.
    connection_info = {'database': db_or_uri}
    for p in ('host', 'port', 'user', 'password', 'sslmode'):
        cfg = tools.config['db_' + p]
        if cfg:
            connection_info[p] = cfg

    return db_or_uri, connection_info

_Pool = None

def db_connect(to, allow_uri=False):
    """Return a Connection to `to` (a db name, or a URI when `allow_uri`).

    :raises ValueError: when `to` resolves to a URI and allow_uri is False
    """
    global _Pool
    if _Pool is None:
        # First use: build the process-wide pool, sized from the config.
        _Pool = ConnectionPool(int(tools.config['db_maxconn']))

    db, info = connection_info_for(to)
    if db != to and not allow_uri:
        raise ValueError('URI connections not allowed')
    return Connection(_Pool, db, info)

def close_db(db_name):
    """Close and discard every pooled connection to database `db_name`.

    The previous body referenced undefined names `to` and `allow_uri`
    (copy-pasted from db_connect), so it raised NameError and never closed
    anything.
    """
    global _Pool
    if _Pool:
        # Resolve the same connection parameters db_connect would use, and
        # ask the pool to drop matching connections.
        _Pool.close_all(connection_info_for(db_name)[1])

def close_all():
    """Close every pooled connection, for all databases.

    Mirrors close_db() but without a dsn filter; the previous body was a
    no-op stub while close_db delegated to the pool.
    """
    global _Pool
    if _Pool:
        _Pool.close_all()

