"""
This module implements the new caching functionality, inserting it dynamically
into the existing django.db.models.query.QuerySet class.
"""
__all__ = ['CachingQuerySet']

from django.core.cache import cache
from django.db import backend, connection
from django.db.models.query import QuerySet, GET_ITERATOR_CHUNK_SIZE
from django.db.models.manager import Manager
import qshash
import registry
import replacements

# Python 2.3 compatibility: the ``set`` builtin appeared in 2.4.
# NOTE: the old ``hasattr(__builtins__, 'set')`` check was unreliable --
# inside an imported module ``__builtins__`` is a dict, not a module, so
# the attribute probe always failed and sets.Set was used even on 2.4+.
# Probing the name directly works in every context.
try:
    set
except NameError:
    from sets import Set as set

######################
# NEW QUERYSET CLASS #
######################

class CachingQuerySet(QuerySet):
    """
    A QuerySet with integrated caching support.

    Instances are produced by the cache()/cache_related()/cache_set()
    methods this module injects into QuerySet; they carry a ``_qscache``
    options object (cloned by those methods) describing what to cache,
    under which key format, and for how long.
    """

    # True when cache_set() assigned a key for the whole result set.
    has_set_cache = property(lambda s: bool(s._qscache.queryset_key))
    # True when that whole-set entry is currently present in the cache.
    is_cached = property(lambda s: s._qscache.queryset_key in cache)

    def invalidate(self):
        """Delete the cached set (only meaningful after cache_set())."""
        assert self.has_set_cache
        cache.delete(self._qscache.queryset_key)

    def iterator(self):
        """Loop through the items, consulting and filling the cache."""
        # Do we already have a cached set?
        if self.has_set_cache and self.is_cached:
            # TODO: Wow, iterating over this is a bad idea
            for row in cache.get(self._qscache.queryset_key):
                yield row
            # Bare return ends the generator; ``raise StopIteration``
            # would break under PEP 479 (Python 3.7+).
            return

        if self._qscache.individual:
            # cache() is used (and possibly still cache_set())
            pks = self.pk_iterator()

            type_id = registry.get_model_id(self.model)
            key_format = self._qscache.format

            # This currently stores all the rows in memory rather than
            # iterating; TODO: Use GET_ITERATOR_CHUNK_SIZE
            misses = {}  # pk -> index of the missed row within result_set
            result_set = []
            # See which pks are already cached, and track misses
            for ctr, pk in enumerate(pks):
                row = cache.get(key_format % (type_id, pk))
                result_set.append(row)
                if row is None:  # cache.get() returns None on a miss
                    # TODO: Save iterator position rather than ctr
                    misses[pk] = ctr
            # Retrieve the misses
            if misses:
                missed = self.get_cache_misses(misses)
                # This is not ideal; use an ordered dict for misses, and
                # something other than a list for result_set?
                for miss_obj in missed:
                    # Put the misses back in the result set and cache
                    pk = miss_obj._get_pk_val()
                    result_set[misses[pk]] = miss_obj
                    cache.set(key_format % (type_id, pk), miss_obj,
                            self._qscache.timeout)
            # Is cache_set() also in effect?  Honor the configured
            # timeout here too (it was previously only applied to the
            # individual-object entries).
            if self.has_set_cache:
                cache.set(self._qscache.queryset_key, result_set,
                        self._qscache.timeout)
            # Finally, loop through the results.
            for result in result_set:
                yield result
        elif self.has_set_cache:
            # Cache the result of this normal database lookup
            set_cache = list(super(CachingQuerySet, self).iterator())
            cache.set(self._qscache.queryset_key, set_cache,
                    self._qscache.timeout)
            # And yield the values as usual
            for row in set_cache:
                yield row
        else:
            # Normal behavior
            for row in super(CachingQuerySet, self).iterator():
                yield row

    def pk_iterator(self):
        """Yield only the primary-key values matched by this query."""
        select, sql, params = self._get_sql_clause()
        meta = self.model._meta

        # We're only interested in the primary key value right now
        pk_field = '%s.%s' % (backend.quote_name(meta.db_table),
                backend.quote_name(meta.pk.name))

        # Perform the database query
        cursor = connection.cursor()
        cursor.execute("SELECT " + pk_field + sql, params)
        while True:
            rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
            if not rows:
                return  # end of results (PEP 479-safe generator exit)
            for row in rows:
                yield row[0] # return the pk

    def get_cache_misses(self, ids):
        """
        Yield model objects for the missed primary keys in ``ids``.

        ``ids`` is the caller's mapping of missed pks; similar to
        in_bulk().  Rows whose pk is not in ``ids`` are skipped so the
        caller only gets back the misses it asked for.
        """
        # XXX: Temporary restrictions (should be moved elsewhere!!)
        h = hash(self)
        assert not h & qshash.DISTINCT, "SQL Distinct not accounted for yet."
        assert not h & qshash.SELECT_EXTRA, "extra(select) not supported yet."
        assert not h & qshash.SELECT_RELATED, "Relations not yet supported."

        select, sql, params = self._get_sql_clause()
        meta = self.model._meta

        # Prepare for the reconstruction loop
        type_id = registry.get_model_id(self.model)
        key_format = self._qscache.format
        quoted_table = backend.quote_name(meta.db_table)
        # pk_field was previously never defined in this method (NameError)
        pk_field = '%s.%s' % (quoted_table, backend.quote_name(meta.pk.name))
        columns = ['%s.%s' % (quoted_table,
                backend.quote_name(f.column)) for f in meta.fields]
        select_all = 'SELECT %s FROM %s' % (','.join(columns), meta.db_table)
        # Single-row lookup used to reconstruct each miss below (the old
        # code referenced an undefined ``select_single``/built an unused
        # ``select_bulk``).
        select_single = select_all + ' WHERE ' + pk_field + ' = %s'

        # Perform the pk query.  A second cursor handles the per-row
        # reconstruction queries; re-using the outer cursor would clobber
        # its pending result set mid-iteration.
        cursor = connection.cursor()
        single_cursor = connection.cursor()
        cursor.execute("SELECT " + pk_field + sql, params)

        while True:
            rows = cursor.fetchmany(GET_ITERATOR_CHUNK_SIZE)
            if not rows:
                return  # done (PEP 479-safe generator exit)
            for row in rows:
                row_id = row[0]
                # Only hand back the pks the caller actually missed;
                # yielding other rows would KeyError in the caller.
                if row_id not in ids:
                    continue
                # Okay, so is this row already cached (e.g. filled in by
                # a concurrent request since the caller's check)?
                cache_key = key_format % (type_id, row_id)
                row = cache.get(cache_key)
                if row is None:
                    # Missed; reconstruct this row
                    single_cursor.execute(select_single, [row_id])
                    row = self.model(*single_cursor.fetchone())
                    # TODO: select(extra) stuff should go here
                    cache.set(cache_key, row, self._qscache.timeout)
                yield row

########################
# NEW QUERYSET METHODS #
########################

def cache_(self, timeout=None, prefix=None): # TODO: "smart" arg
    """
    Causes individual model objects to be automatically cached.

    timeout -- optional cache timeout override
    prefix -- optional prefix for the per-object key format
    """
    options = self._qscache.clone()
    # Apply the requested overrides, if any
    if prefix is not None: # TODO: Allow specifying the actual format instead?
        options.format = prefix + '%d,%d'
    if timeout is not None:
        options.timeout = timeout
    # Hand back a caching clone of this QuerySet
    return self._clone(klass=CachingQuerySet, _qscache=options,
            _hash=self._hash | qshash.CACHE)

def cache_related(self, fields, timeout=None, prefix=None):
    """
    Causes model objects in the specified relation(s) to be cached.

    fields can be a field name or a list of field names.

    Raises TypeError when fields is neither a string nor an iterable.
    """
    qscache = self._qscache.clone()
    # Add the new fields to the existing set
    if isinstance(fields, basestring):
        qscache.relations.add(fields)
    elif hasattr(fields, '__iter__'):
        qscache.relations.update(fields)
    else:
        # A real exception instead of ``assert False``: asserts are
        # stripped under ``python -O``, which would silently accept a
        # bogus argument here.
        raise TypeError("Invalid fields argument for cache_related()!")
    # Deal with options
    if prefix is not None:
        qscache.format = prefix + '%d,%d' # TODO: See above in cache()
    if timeout is not None:
        qscache.timeout = timeout
    # Create the new QuerySet
    return self._clone(klass=CachingQuerySet, _qscache=qscache,
            _hash=self._hash | qshash.CACHE_RELATION)

# XXX: Rename? It's not actually a "set" per se -- but it caches QuerySets...
def cache_set(self, cache_key, timeout=None): # TODO: smart, depth args
    """
    Similar to caching the resulting data from a QuerySet.

    Accepts the cache_key under which the data will be stored.
    """
    options = self._qscache.clone()
    # Record the key (and optional timeout) for the whole result set
    options.queryset_key = cache_key
    if timeout is not None:
        options.timeout = timeout
    # Let the registry know, so the set can be updated/invalidated later
    registry.register_set(self, cache_key)
    # Hand back a caching clone of this QuerySet
    return self._clone(klass=CachingQuerySet, _qscache=options,
            _hash=self._hash | qshash.CACHE)

########################
# NEW METHOD INJECTION #
########################

# Graft the caching API onto the stock QuerySet class, so every queryset
# in the project (and anything cloned from one) picks up these methods.
QuerySet.cache = cache_
QuerySet.cache_related = cache_related
QuerySet.cache_set = cache_set

# Add a few Manager proxy methods too.
def proxy_creator(method):
    """Build a Manager method that delegates *method* to its QuerySet."""
    def proxy(self, *args, **kwargs):
        queryset = self.get_query_set()
        return getattr(queryset, method)(*args, **kwargs)
    return proxy
# Proxy the same API through managers, e.g. Model.objects.cache(...).
Manager.cache = proxy_creator('cache')
Manager.cache_related = proxy_creator('cache_related')
Manager.cache_set = proxy_creator('cache_set')

