#!/usr/bin/env python
#
# Copyright 2010 Sebible Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A hack against django.db.models.query to build a working QuerySet class for django_ldap"""


from django.db.models.query import QuerySet as DjangoQuerySet, ValuesQuerySet as DjangoValuesQuerySet, ValuesListQuerySet as DjangoValuesListQuerySet
from django.db.models import signals, sql
from django.db.models.sql.where import EverythingNode, NothingNode, AND, OR
from django.db.models.sql.constants import *
from django.db.models.sql.datastructures import EmptyResultSet, FullResultSet, MultiJoin
from django.db.models.fields import Field as FieldBase
from django.db.models.query_utils import QueryWrapper
from django.utils import tree
from django.conf import settings

import datetime
import copy
import ldap

import logging
logger = logging.getLogger('django_ldap.query')

from django_ldap.backend import connection
from django_ldap import exceptions

class Field(FieldBase):
    """docstring for Field"""
    def get_db_prep_lookup(self, lookup_type, value):
        "Returns field's value prepared for database lookup."
        if hasattr(value, 'as_sql'):
            sql, params = value.as_sql()
            return QueryWrapper(('(%s)' % sql), params)
        if lookup_type in ('regex', 'iregex', 'month', 'day', 'search'):
            return [value]
        elif lookup_type in ('exact', 'gt', 'gte', 'lt', 'lte'):
            return [self.get_db_prep_value(value)]
        elif lookup_type in ('range', 'in'):
            return [self.get_db_prep_value(v) for v in value]
        elif lookup_type in ('contains', 'icontains'):
            return ["%s" % connection.ops.prep_for_like_query(value)] # hacked. The original was: return ["%%%s%%" % connection.ops.prep_for_like_query(value)]
        elif lookup_type == 'iexact':
            return [connection.ops.prep_for_iexact_query(value)]
        elif lookup_type in ('startswith', 'istartswith'):
            return ["%s" % connection.ops.prep_for_like_query(value)] # hacked. The original was: return ["%s%%" % connection.ops.prep_for_like_query(value)]
        elif lookup_type in ('endswith', 'iendswith'):
            return ["%s" % connection.ops.prep_for_like_query(value)] # hacked. The original was: return ["%%%s" % connection.ops.prep_for_like_query(value)]
        elif lookup_type == 'isnull':
            return []
        elif lookup_type == 'year':
            try:
                value = int(value)
            except ValueError:
                raise ValueError("The __year lookup type requires an integer argument")

            if self.get_internal_type() == 'DateField':
                return connection.ops.year_lookup_bounds_for_date_field(value)
            else:
                return connection.ops.year_lookup_bounds(value)

        raise TypeError("Field has invalid lookup: %s" % lookup_type)

class WhereNode(tree.Node):
    """
    Used to represent the SQL where-clause.

    The class is tied to the Query class that created it (in order to create
    the corret SQL).

    The children in this tree are usually either Q-like objects or lists of
    [table_alias, field_name, field_class, lookup_type, value]. However, a
    child could also be any class with as_sql() and relabel_aliases() methods.
    """
    default = AND

    def add(self, data, connector):
        """
        Add a node to the where-tree. If the data is a list or tuple, it is
        expected to be of the form (alias, col_name, field_obj, lookup_type,
        value), which is then slightly munged before being stored (to avoid
        storing any reference to field objects). Otherwise, the 'data' is
        stored unchanged and can be anything with an 'as_sql()' method.
        """
        # Because of circular imports, we need to import this here.
        from django.db.models.base import ObjectDoesNotExist

        if not isinstance(data, (list, tuple)):
            super(WhereNode, self).add(data, connector)
            return

        alias, col, field, lookup_type, value = data
        try:
            if field:
                params = field.get_db_prep_lookup(lookup_type, value)
                db_type = field.db_type()
            else:
                # This is possible when we add a comparison to NULL sometimes
                # (we don't really need to waste time looking up the associated
                # field object).
                params = Field().get_db_prep_lookup(lookup_type, value)
                db_type = None
        except ObjectDoesNotExist:
            # This can happen when trying to insert a reference to a null pk.
            # We break out of the normal path and indicate there's nothing to
            # match.
            super(WhereNode, self).add(NothingNode(), connector)
            return
        if isinstance(value, datetime.datetime):
            annotation = datetime.datetime
        else:
            annotation = bool(value)
        super(WhereNode, self).add((alias, col, db_type, lookup_type,
                annotation, params), connector)

    def as_sql(self, qn=None):
        """
        Returns the SQL version of the where clause and the value to be
        substituted in. Returns None, None if this node is empty.

        If 'node' is provided, that is the root of the SQL generation
        (generally not needed except by the internal implementation for
        recursion).
        """
        if not qn:
            qn = connection.ops.quote_name
        if not self.children:
            return None, []
        result = []
        result_params = []
        empty = True
        for child in self.children:
            try:
                if hasattr(child, 'as_sql'):
                    sql, params = child.as_sql(qn=qn)
                else:
                    # A leaf node in the tree.
                    sql, params = self.make_atom(child, qn)
            except EmptyResultSet:
                if self.connector == AND and not self.negated:
                    # We can bail out early in this particular case (only).
                    raise
                elif self.negated:
                    empty = False
                continue
            except FullResultSet:
                if self.connector == OR:
                    if self.negated:
                        empty = True
                        break
                    # We match everything. No need for any constraints.
                    return '', []
                if self.negated:
                    empty = True
                continue
            empty = False
            if sql:
                ############### begin hack ###############
                # the orginal was:
                #result.append(sql)
                #result_params.extend(params)
                if sql == '(pk=%s)' and not self.negated:
                    return sql, params
                else:
                    result.append(sql)
                    result_params.extend(params)
                ############### end hack #################
        if empty:
            raise EmptyResultSet

        conn = '' # hacked. The original was: conn = ' %s ' % self.connector
        sql_string = conn.join(result)
        if sql_string:
            if self.negated:
                sql_string = '(!%s)' % sql_string # hacked. The original was: sql_string = 'NOT (%s)' % sql_string
            ############### begin hack ###############
            # the orginal was:
            #elif len(self.children) != 1:
            #    sql_string = '(%s)' % sql_string
            ############### end hack #################
        return sql_string, result_params


    def make_atom(self, child, qn):
        """
        Turn a tuple (table_alias, column_name, db_type, lookup_type,
        value_annot, params) into valid SQL.

        Returns the string for the SQL fragment and the parameters to use for
        it.
        """
        table_alias, name, db_type, lookup_type, value_annot, params = child
        if table_alias:
            lhs = '%s' % qn(name) # hacked. The original was: lhs = '%s.%s' % (qn(table_alias), qn(name))
        else:
            lhs = qn(name)
        field_sql = connection.ops.field_cast_sql(db_type) % lhs

        if value_annot is datetime.datetime:
            cast_sql = connection.ops.datetime_cast_sql()
        else:
            cast_sql = '%s'

        if isinstance(params, QueryWrapper):
            extra, params = params.data
        else:
            extra = ''

        if lookup_type in connection.operators:
            ############### begin hack ###############
            # The original was:
            # format = "%s %%s %s" % (connection.ops.lookup_cast(lookup_type),
            #         extra)
            format = "(%s%%s%s)" % (connection.ops.lookup_cast(lookup_type),
                    extra)
            ############### end hack ###############
            return (format % (field_sql,
                    connection.operators[lookup_type] % cast_sql), params)

        if lookup_type == 'in':
            if not value_annot:
                raise EmptyResultSet
            if extra:
                return ('%s IN %s' % (field_sql, extra), params)
            return ('%s IN (%s)' % (field_sql, ', '.join(['%s'] * len(params))),
                    params)
        elif lookup_type in ('range', 'year'):
            return ('%s BETWEEN %%s and %%s' % field_sql, params)
        elif lookup_type in ('month', 'day'):
            return ('%s = %%s' % connection.ops.date_extract_sql(lookup_type,
                    field_sql), params)
        elif lookup_type == 'isnull':
            return ('%s IS %sNULL' % (field_sql,
                (not value_annot and 'NOT ' or '')), ())
        elif lookup_type == 'search':
            return (connection.ops.fulltext_search_sql(field_sql), params)
        elif lookup_type in ('regex', 'iregex'):
            return connection.ops.regex_lookup(lookup_type) % (field_sql, cast_sql), params

        raise TypeError('Invalid lookup_type: %r' % lookup_type)


    def relabel_aliases(self, change_map, node=None):
        """
        Relabels the alias values of any children. 'change_map' is a dictionary
        mapping old (current) alias values to the new values.
        """
        if not node:
            node = self
        for pos, child in enumerate(node.children):
            if hasattr(child, 'relabel_aliases'):
                child.relabel_aliases(change_map)
            elif isinstance(child, tree.Node):
                self.relabel_aliases(change_map, child)
            else:
                if child[0] in change_map:
                    node.children[pos] = (change_map[child[0]],) + child[1:]

class Query(sql.Query):
    """docstring for ldap"""

    def get_ldap_options(self):
        return self.model._ldap_options

    def results_iter(self):
        from ldap.filter import escape_filter_chars
        conn=self.connection.cursor()
        filter, params = self.where.as_sql()
        logger = logging.getLogger('django_ldap.query.search')
        
        if filter == '(pk=%s)':
            try:
                logger.debug('Get object %s' % params[0])
                results = conn.search('(objectClass=*)', params[0], ldap.SCOPE_BASE)
                yield results[0]
            except exceptions.LdapBackendNoSuchObjectException:
                pass
        else:
            if params:
                params = map(escape_filter_chars, params)
                ldap_filter = filter % tuple(params)
                ldap_filter = '(&%s%s)' % (ldap_filter, self.get_opts().filter)
            else:
                ldap_filter = self.get_opts().filter

            logger.debug('Search under %s with filter: %s' % (self.get_opts().base, ldap_filter))

            results = conn.search(ldap_filter, self.get_opts().base, ldap.SCOPE_SUBTREE)
            for data in results:
                yield data

    def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
            can_reuse=None):
        """
        Add a single filter to the query. The 'filter_expr' is a pair:
        (filter_string, value). E.g. ('name__contains', 'fred')

        If 'negate' is True, this is an exclude() filter. It's important to
        note that this method does not negate anything in the where-clause
        object when inserting the filter constraints. This is because negated
        filters often require multiple calls to add_filter() and the negation
        should only happen once. So the caller is responsible for this (the
        caller will normally be add_q(), so that as an example).

        If 'trim' is True, we automatically trim the final join group (used
        internally when constructing nested queries).

        If 'can_reuse' is a set, we are processing a component of a
        multi-component filter (e.g. filter(Q1, Q2)). In this case, 'can_reuse'
        will be a set of table aliases that can be reused in this filter, even
        if we would otherwise force the creation of new aliases for a join
        (needed for nested Q-filters). The set is updated by this method.
        """
        arg, value = filter_expr
        parts = arg.split(LOOKUP_SEP)
        if not parts:
            raise FieldError("Cannot parse keyword query %r" % arg)

        # Work out the lookup type and remove it from 'parts', if necessary.
        if len(parts) == 1 or parts[-1] not in self.query_terms:
            lookup_type = 'exact'
        else:
            lookup_type = parts.pop()

        # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
        # uses of None as a query value.
        if value is None:
            if lookup_type != 'exact':
                raise ValueError("Cannot use None as a query value")
            lookup_type = 'isnull'
            value = True
        elif callable(value):
            value = value()

        ############### begin hack ###############
        opts = self.get_ldap_options() # hacked. the orginal was opts = self.get_meta()
        field_name = parts[0] in opts.attr_mapping.keys() and opts.attr_mapping[parts[0]] or parts[0]
        ############### end hack ###############
        alias = self.get_initial_alias()
        allow_many = trim or not negate

        ############### begin hack ###############
        # don't usr multi table search in ldap
        # try:
        #     field, target, opts, join_list, last = self.setup_joins(parts, opts,
        #             alias, True, allow_many, can_reuse=can_reuse)
        # except MultiJoin, e:
        #     self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]))
        #     return
        # final = len(join_list)
        # penultimate = last.pop()
        # if penultimate == final:
        #     penultimate = last.pop()
        # if trim and len(join_list) > 1:
        #     extra = join_list[penultimate:]
        #     join_list = join_list[:penultimate]
        #     final = penultimate
        #     penultimate = last.pop()
        #     col = self.alias_map[extra[0]][LHS_JOIN_COL]
        #     for alias in extra:
        #         self.unref_alias(alias)
        # else:
        #     col = target.column
        # alias = join_list[-1]
        #
        # if final > 1:
        #     # An optimization: if the final join is against the same column as
        #     # we are comparing against, we can go back one step in the join
        #     # chain and compare against the lhs of the join instead. The result
        #     # (potentially) involves one less table join.
        #     join = self.alias_map[alias]
        #     if col == join[RHS_JOIN_COL]:
        #         self.unref_alias(alias)
        #         alias = join[LHS_ALIAS]
        #         col = join[LHS_JOIN_COL]
        #         join_list = join_list[:-1]
        #         final -= 1
        #         if final == penultimate:
        #             penultimate = last.pop()
        #
        # if (lookup_type == 'isnull' and value is True and not negate and
        #         final > 1):
        #     # If the comparison is against NULL, we need to use a left outer
        #     # join when connecting to the previous model. We make that
        #     # adjustment here. We don't do this unless needed as it's less
        #     # efficient at the database level.
        #     self.promote_alias(join_list[penultimate])
        #
        # if connector == OR:
        #     # Some joins may need to be promoted when adding a new filter to a
        #     # disjunction. We walk the list of new joins and where it diverges
        #     # from any previous joins (ref count is 1 in the table list), we
        #     # make the new additions (and any existing ones not used in the new
        #     # join list) an outer join.
        #     join_it = iter(join_list)
        #     table_it = iter(self.tables)
        #     join_it.next(), table_it.next()
        #     for join in join_it:
        #         table = table_it.next()
        #         if join == table and self.alias_refcount[join] > 1:
        #             continue
        #         self.promote_alias(join)
        #         if table != join:
        #             self.promote_alias(table)
        #         break
        #     for join in join_it:
        #         self.promote_alias(join)
        #     for table in table_it:
        #         # Some of these will have been promoted from the join_list, but
        #         # that's harmless.
        #         self.promote_alias(table)
        ############### end hack ###############
        self.where.add((alias, field_name, None, lookup_type, value), connector) # hacked. the original was: self.where.add((alias, col, field, lookup_type, value), connector)

        ############### begin hack ###############
        # don't use multi table search
        # the original was:
        # if negate:
        #     for alias in join_list:
        #         self.promote_alias(alias)
        #     if final > 1 and lookup_type != 'isnull':
        #         for alias in join_list:
        #             if self.alias_map[alias] == self.LOUTER:
        #                 j_col = self.alias_map[alias][RHS_JOIN_COL]
        #                 entry = Node([(alias, j_col, None, 'isnull', True)])
        #                 entry.negate()
        #                 self.where.add(entry, AND)
        #                 break
        ############### end hack ###############

        ############### begin hack ###############
        # don't use reuse feature in ldap
        # if can_reuse is not None:
        #     can_reuse.update(join_list)
        ############### end hack ###############

#    def __str__(self):
#        """
#        Returns the query as a string of SQL with the parameter values
#        substituted in.
#
#        Parameter values won't necessarily be quoted correctly, since that is
#        done by the database interface at execution time.
#        """
#        sql, params = self.where.as_sql()
#        return "%s, %s" % (sql, params)

    def _setup_query(self):
        self.field_names = []

    def add_fields(self, field_names, allow_m2m=True):
        for field_name in field_names:
            if not field_name in self.field_names:
                self.field_names.append(field_name)
    
    def get_opts(self):
        return self.model._ldap_options

class QuerySet(DjangoQuerySet):
    def __init__(self, model, query=None):
        super(QuerySet, self).__init__(model, query and query or Query(model, connection, WhereNode))

    def get_opts(self):
        return self.model._ldap_options

    def values(self, *fields):
        return self._clone(klass=ValuesQuerySet, setup=True, _fields=fields)

    def values_list(self, *fields, **kwargs):
        flat = kwargs.pop('flat', False)
        if kwargs:
            raise TypeError('Unexpected keyword arguments to values_list: %s'
                    % (kwargs.keys(),))
        if flat and len(fields) > 1:
            raise TypeError("'flat' is not valid when values_list is called with more than one field.")
        return self._clone(klass=ValuesListQuerySet, setup=True, flat=flat,
                _fields=fields)

    def iterator(self):
        """
        An iterator over the results from applying this QuerySet to the
        database.
        """
        fill_cache = self.query.select_related
        if isinstance(fill_cache, dict):
            requested = fill_cache
        else:
            requested = None
        max_depth = self.query.max_depth
        extra_select = self.query.extra_select.keys()
        index_start = len(extra_select)
        for row in self.query.results_iter():
            if fill_cache:
                obj, _ = get_cached_row(self.model, row, index_start,
                        max_depth, requested=requested)
            else:
                obj = self.model(**row) # hacked. the original was: obj = self.model(*row[index_start:])
            for i, k in enumerate(extra_select):
                setattr(obj, k, row[i])
            yield obj


    def count(self):
        """
        Performs a SELECT COUNT() and returns the number of records as an
        integer.

        If the queryset is already cached (i.e. self._result_cache is set) this
        simply returns the length of the cached results set to avoid multiple
        SELECT COUNT(*) calls.
        """
        # if self._result_cache is not None:
        #     return len(self._result_cache)
        #
        # return self.query.get_count()
        if self._result_cache is not None:
            return len(self._result_cache)
        return self.__len__()

    def delete(self):
        qs = self._clone()
        delete_objects(qs)

    def update(self, **kwargs):
        """
        Updates all elements in the current QuerySet, setting all the given
        fields to the appropriate values.
        """
        assert self.query.can_filter(), \
                "Cannot update a query once a slice has been taken."
        data = kwargs.copy()
        if 'dn' in kwargs.keys():
            data.pop('dn')
        conn = self.query.connection.cursor()
        qs = self._clone()

        logger = logging.getLogger('django_ldap.query.update')
        logger.debug('Update %s with attr: %s' % ([obj.pk for obj in qs], data))

        for obj in qs:
            result = conn.modify(dn=obj.pk, attrs=data)
        self._result_cache = None
        return True

    def getMemberOf(self, DN, historyDN=[], rdn={'member':'ppMember','parent':'ppMemberOf'}):
        """
        Find ancester of a DN recursively
        """
        memberOf = []
        if DN not in historyDN:
            conn = connection.cursor()
            results = conn.search(self.options['base'], backend.SUBTREE, '%s=%s' % (rdn['member'], DN))
            historyDN.append(DN)
            for result in results['results']:
                memberOf.append(result['dn'])
                if result.has_key(rdn['parent']):
                    memberOf.extend(self.getMemberOf(result['dn'], historyDN, rdn))
        return memberOf


class ValuesQuerySet(QuerySet, DjangoValuesQuerySet):
    def iterator(self):
        for row in self.query.results_iter():
            yield dict([(k, row[k][0]) for k in self.field_names])

    def _clone(self, klass=None, setup=False, **kwargs):
        """
        Cloning a ValuesQuerySet preserves the current fields.
        """
        c = super(ValuesQuerySet, self)._clone(klass, **kwargs)
        c._fields = self._fields[:]
        c.field_names = self.field_names
        c.extra_names = self.extra_names
        if setup and hasattr(c, '_setup_query'):
            c._setup_query()
        return c

class ValuesListQuerySet(ValuesQuerySet, DjangoValuesListQuerySet):
    def iterator(self):
        if self.flat and len(self.field_names) == 1:
            for row in self.query.results_iter():
                yield row[self.field_names[0]][0]
        else:
            for row in self.query.results_iter():
                yield tuple([row[f][0] for f in self.field_names])

    def _clone(self, *args, **kwargs):
        clone = super(ValuesListQuerySet, self)._clone(*args, **kwargs)
        clone.flat = self.flat
        return clone

def delete_objects(seen_objs):
    """
    Iterate through a list of seen classes, and remove any instances that are
    referred to.
    """
    conn = connection.cursor()
    
    logger = logging.getLogger('django_ldap.query.delete')
    logger.debug('Delete %s' % [record.pk for record in seen_objs])
    
    for record in seen_objs:
        conn.delete(record.pk)


def insert_query(model, values, return_id=False, raw_values=False):
    """
    Inserts a new record for the given model. This provides an interface to
    the InsertQuery class and is how Model.save() is implemented. It is not
    part of the public API.
    """
    conn = connection.cursor()
    data = copy.copy(values)
    if data.has_key('dn'):
        dn = data.pop('dn')[0]
    
    logger = logging.getLogger('django_ldap.query.insert')
    logger.debug('Insert %s with attrs: %s' % (dn, data))

    conn.insert(dn, attrs=data)
    return dn

def get_all_attributes(name):
    conn = connection.cursor()
    atts = set()
    if isinstance(name, basestring):
        return atts.update(conn.get_all_attributes(name))
    elif isinstance(name, list):
        for item in name:
            atts.update(conn.get_all_attributes(item))
        return atts
