# Copyright (C) 2007-2008, Open Society Institute
#                Thomas Moroz: tmoroz@sorosny.org
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published
# by the Free Software Foundation.  You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
# 
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

import os
import glob
import time
import logging
import operator
import cPickle
import weakref
import xapian
import inspect

from signal import SIGTERM, SIGINT
try:
    from signal import SIGHUP, SIGUSR1, SIGUSR2
except ImportError:
    # Windows doesn't have these (but also doesn't care what the exact
    # numbers are)
    SIGHUP = 1
    SIGUSR1 = 10
    SIGUSR2 = 12

import threading

from datetime import date, datetime
from dateutil.tz import gettz, tzutc
from lemur.xapian.sei import DatabaseClosedError
from lemur.xapian.sei import DocumentStore, DocumentPiece
from lemur.xapian.sei import Term, SortableValue
from lemur.xapian.sei import flatten_integer
from lemur.xapian.sei import RangeRestrictions

try:
    from zope.interface import implements
    from enfold.xapian.interface import IStore, ISearch
except ImportError:
    IStore = ISearch = None

logger = logging.getLogger('enfold.xapian.store')

# All datetimes are normalized against a single UTC tzinfo instance.
UTC = tzutc()

# Error classification consumed by the retry_on_error decorator below:
#   retry_errors  - transient; call again with no recovery action.
#   reopen_errors - on-disk database changed; reopen, then call again.
#   reset_errors  - connection-level failure; reconnect, then call again.
#   fail_errors   - fatal; log and re-raise immediately.
# NOTE(review): xapian.DocNotFoundError appears in both retry_errors and
# fail_errors; retry_on_error tests the fail tuple first, so the retry
# branch for DocNotFoundError looks unreachable -- confirm intent.
retry_errors = (xapian.DocNotFoundError,)
reopen_errors = (xapian.DatabaseModifiedError,)
reset_errors = (xapian.NetworkError, xapian.DatabaseError, DatabaseClosedError)
fail_errors = (xapian.DatabaseCorruptError, xapian.DocNotFoundError)
                    
def _buildFacade(spec, docstring):
    """Build a facade function, matching the decorated method in signature.

    Note that defaults are replaced by None, and _curried will reconstruct
    these to preserve mutable defaults.

    """
    args = inspect.formatargspec(formatvalue=lambda v: '=None', *spec)
    callargs = inspect.formatargspec(formatvalue=lambda v: '', *spec)
    return 'def _facade%s:\n    """%s"""\n    return _curried%s' % (
        args, docstring, callargs)

class retry_on_error(object):
    """Method decorator: retry Xapian calls, recovering per error class.

    The wrapped method is attempted up to 3 times.  Which recovery
    action runs is decided by the first exception tuple that matches:

      * ``fail``   -- log and re-raise immediately (no retry);
      * ``reopen`` -- call ``reopen()`` on the method's instance, retry;
      * ``reset``  -- call ``reset()`` on the instance, retry;
      * ``retry``  -- retry with no recovery action.

    The decorated method's instance (``args[0]``) is therefore expected
    to provide ``reopen()``/``reset()``.  A facade function with the
    original signature is returned so introspection keeps working.

    NOTE(review): xapian.DocNotFoundError is in both fail_errors and
    retry_errors; the fail branch is checked first, so the retry branch
    for it appears unreachable -- confirm intent.
    """

    def __init__(self, reopen=reopen_errors,
                 reset=reset_errors,
                 retry=retry_errors,
                 fail=fail_errors):
        self.reopen = reopen
        self.reset = reset
        self.retry = retry
        self.fail = fail

    def __call__(self, callable):
        # NB: the 'callable' parameter shadows the builtin of that name.
        spec = inspect.getargspec(callable)
        args, defaults = spec[0], spec[3]

        # Split the signature into strictly-positional arguments and
        # (name, default) pairs for the defaulted tail.
        arglen = len(args)
        if defaults is not None:
            defaults = zip(args[arglen - len(defaults):], defaults)
            arglen -= len(defaults)

        func_name = callable.func_name

        def _curried(*args, **kw):
            # Reconstruct keyword arguments
            if defaults is not None:
                # The facade passes defaulted parameters positionally,
                # with None meaning "not supplied"; substitute the real
                # default so mutable defaults are preserved.
                args, kwparams = args[:arglen], args[arglen:]
                for positional, (key, default) in zip(kwparams, defaults):
                    if positional is None:
                        kw[key] = default
                    else:
                        kw[key] = positional

            for i in range(0, 3):
                e = None
                try:
                    return callable(*args, **kw)
                except self.fail, e:
                    logger.exception('Failure calling %s.', 
                                     func_name)
                    raise
                except self.reopen, e:
                    logger.exception('Failure calling %s, reopen database.', 
                                     func_name)
                    # args[0] is the decorated method's 'self'.
                    target = args[0]
                    target.reopen()
                except self.reset, e:
                    logger.exception('Failure calling %s, reconnecting.', 
                                     func_name)
                    target = args[0]
                    target.reset()
                except self.retry, e:
                    # Do nothing, just retry.
                    pass

            raise RuntimeError('Given up after 3 tries (%s)' % e)
        
        # Build a facade, with a reference to our locally-scoped _curried
        facade_globs = dict(_curried=_curried)
        exec _buildFacade(spec, callable.__doc__) in facade_globs
        return facade_globs['_facade']

class Index(object):
    """Configuration for a single indexed field.

    name      -- the field name in indexed documents.
    splitter  -- optional object whose split(text) yields words; when
                 None, text values fall back to whitespace/hyphen
                 splitting (see Store.process_value).
    stopwords -- optional object whose process(words) filters words.
    sortable  -- store the value so results can be sorted on it.
    exact     -- index values as exact (keyword) terms.
    weight    -- optional integer term weight for the field.
    """

    def __init__(self, name, splitter=None, stopwords=None,
                 sortable=False, exact=False, weight=None):
        self.name = name
        self.weight = weight
        self.sortable = sortable
        self.exact = exact
        self.splitter = splitter
        self.stopwords = stopwords

class TextIndex(Index):
    """Index for free text: split into words, never indexed exact."""

    # Values must be byte strings or unicode objects; flatten()
    # normalizes both to utf-8 encoded byte strings.
    accepts = (str, unicode)

    def __init__(self, name, splitter=None, stopwords=None,
                 sortable=False, startswith=False, weight=None):
        super(TextIndex, self).__init__(name, splitter=splitter,
                                        stopwords=stopwords,
                                        sortable=sortable, exact=False,
                                        weight=weight)
        # When set, the first word of a value is additionally indexed
        # under the '<name>startswith' field (see Store.process_value).
        self.startswith = startswith

    def flatten(self, value):
        """Return `value` as a utf-8 encoded byte string."""
        if isinstance(value, unicode):
            return value.encode('utf-8')
        return str(value)

class KeywordIndex(Index):
    """Index for atomic keyword values: exact terms, sortable by default."""

    def __init__(self, name, sortable=True, exact=True, weight=None):
        super(KeywordIndex, self).__init__(name, splitter=None,
                                           stopwords=None,
                                           sortable=sortable, exact=exact,
                                           weight=weight)

def timedelta_to_utime(t):
    """Convert a timedelta (since the epoch) to whole unix-time seconds.

    Microseconds are deliberately dropped; only days and seconds count.
    """
    return t.days * 86400 + t.seconds

class DateIndex(Index):
    """Sortable, exact index for dates, flattened to unix-time strings."""

    # Accept date/datetime; also Zope 2 DateTime when it is importable.
    accepts = (datetime, date)
    try:
        from DateTime.DateTime import DateTime
        accepts = accepts + (DateTime,)
    except ImportError:
        pass

    epoch_date = date(1970, 1, 1)
    epoch_datetime = datetime(1970, 1, 1, 0, 0, 0, tzinfo=UTC)

    def __init__(self, name, weight=None):
        super(DateIndex, self).__init__(name, splitter=None,
                                        stopwords=None, sortable=True,
                                        exact=True, weight=weight)

    def flatten(self, value):
        """Return `value` as a string of whole seconds since the epoch.

        datetime is tested before date because datetime subclasses date.
        NOTE(review): naive datetimes will raise TypeError when
        subtracted from the tz-aware epoch -- presumably all values are
        tz-aware; confirm against callers.
        """
        if isinstance(value, datetime):
            return str(timedelta_to_utime(value - self.epoch_datetime))
        if isinstance(value, date):
            return str(timedelta_to_utime(value - self.epoch_date))
        if hasattr(value, 'timeTime'):  # Zope 2 DateTime duck-typing
            return str(value.timeTime())
        raise ValueError('Cannot flatten %r' % value)

class Result(object):
    """One search hit: a relevance score plus lazily-unpickled data.

    ``r`` is a (pickled_data, relevance) pair; the pickle is loaded only
    on first field access.  ``results`` is the owning result set, used
    by context() for text sampling.
    """

    def __init__(self, r, results):
        data, relevance = r[0], r[1]
        self._data = data
        self._relevance = relevance
        self._info = None  # Unpickled payload; filled in by _init().
        self._results = results

    def _init(self):
        # Unpickle the document payload exactly once.
        if self._info is None:
            self._info = cPickle.loads(self._data)

    def __getitem__(self, key):
        self._init()
        return self._info[key]

    def __setitem__(self, key, value):
        self._init()
        self._info[key] = value

    def __iter__(self):
        self._init()
        for key in self._info.keys():
            yield key

    def get(self, key, default=None):
        """Dict-style get(): return self[key], or `default` on KeyError."""
        try:
            return self[key]
        except KeyError:
            return default

    def keys(self):
        self._init()
        return self._info.keys()

    def values(self):
        self._init()
        return self._info.values()

    def items(self):
        self._init()
        return self._info.items()

    def relevance(self):
        """Return the relevance score assigned by the search engine."""
        return self._relevance

    def context(self, fname='body', maxlen=600, hl=None):
        """Return a sample of the `fname` field via the result set."""
        return self._results.makeSample(self.get(fname, ''), maxlen, hl)

class Results(object):
    """Lazy, slice-able view over the results of a xapian search.

    The search runs once for the initial [start, end) window and is
    re-fetched on demand when other ranges are requested.  Instances
    support len(), integer indexing, slicing and iteration, yielding
    Result objects.  ``max_items == -1`` means "no explicit limit".
    """

    def __init__(self, store, query, max_items=-1, partial=False,
                 combiner=DocumentStore.OP_OR,
                 sort_on=None, ascending=True,
                 cmpfn_generator=None,
                 range_restrictions=None,
                 start=0, end=1):
        self.store = store
        self.query = query
        self.partial = partial
        self.combiner = combiner
        self.sort_on = sort_on
        self.ascending = ascending
        self.cmpfn_generator = cmpfn_generator
        self.range_restrictions = range_restrictions
        self.max_items = max_items
        self.size = None  # Estimated match count, cached by __len__.
        self.fetch(start=start, end=end)

    def reopen(self):
        # Recovery hook used by the retry_on_error decorator.
        self.store.read_store.reopen()

    def reset(self):
        # Recovery hook used by the retry_on_error decorator.
        self.store.read_store.reset()

    @retry_on_error()
    def fetch(self, start, end):
        """Fetch the [start, end) result window, with error recovery."""
        return self._fetch(start, end)

    def _fetch(self, start, end):
        # Perform (or re-perform) the search for the given window.
        logger.debug('Query %r', self.query)
        t0 = time.clock()
        self.results = self.store.read_store.store.performSearch(
            self.query, start, end,
            partial=self.partial,
            combiner=self.combiner,
            sort_on=self.sort_on,
            ascending=self.ascending,
            cmpfn_generator=self.cmpfn_generator,
            range_restrictions=self.range_restrictions)
        self.size = None # Reset, based on the new result set.
        logger.debug('Took %.6fs to query %r, %s:%s',
                     time.clock() - t0, self.query, start, end)

    @retry_on_error()
    def __len__(self):
        """Return (and cache) the estimated number of matches.

        The underlying mset is grown to at least max_items (or 100)
        entries first, since estimatedResultCount() can be off for
        small windows.
        """
        if self.size is not None:
            return self.size

        logger.debug('Estimating result count for %r', self.query)
        t0 = time.clock()

        at_least = 100
        if not self.max_items == -1:
            at_least = self.max_items

        # Make sure estimated is at least a minimum size.
        estimated = self.results.estimatedResultCount()
        estimated = max(estimated, at_least)

        # Make a mset of at least a minimum size, to avoid issues with
        # estimatedResultCount possibly being off.
        self.results._makeMset(estimated)
        self.size = self.results._mset.get_matches_estimated()
        logger.debug('Took %.6fs to estimate results for %r',
                     time.clock() - t0, self.query)
        return self.size

    def __max(self):
        """Return the upper bound on how many results to iterate."""
        max_items = self.max_items
        if not max_items == -1:
            return max_items

        # No explicit limit: use the estimated count, but always allow
        # at least 1 in case estimatedResultCount() is wrong.
        return max(self.__len__(), 1)

    @retry_on_error()
    def __iterate(self, start, end, step):
        """Yield Result objects for the indexes in xrange(start, end, step).

        Each individual result fetch is retried up to 3 times across
        DatabaseModifiedError; IndexError/DocNotFoundError terminate
        the iteration.
        NOTE(review): retry_on_error wraps a generator function here, so
        it only guards generator *creation*; errors raised during
        iteration escape the decorator -- confirm this is intended.
        """
        if start == end:
            raise StopIteration

        # Refetch results, now that we know what range we are
        # interested in.  (The original assert here was a parenthesized
        # tuple -- always true, so it never checked anything.  It was
        # removed rather than enabled: bstart is legitimately -1 for
        # reverse slices that reach index 0.)
        bstart, bend = min(max(start, 0), end), max(start, end)
        # NOTE(review): 'and' means a window that matches on either
        # boundary is not refetched -- confirm 'or' was not intended.
        if (self.results._start_index != bstart and
            self.results._end_index != bend):
            self._fetch(bstart, bend)

        logger.debug('Fetching results %s:%s for %r',
                     bstart, bend, self.query)
        t0 = time.clock()

        for i in xrange(start, end, step):
            count = 3
            r = None
            while count:
                try:
                    r = self.results.getResult(i)
                    break
                except xapian.DatabaseModifiedError:
                    # Refetch and retry
                    self.reopen()
                    self._fetch(bstart, bend)
                    count -= 1
                except (IndexError, xapian.DocNotFoundError):
                    raise StopIteration
            if r is None:
                if count == 0:
                    raise xapian.DatabaseModifiedError(
                        "Database being modified too often, given up "
                        "trying to fetch result %d" % i)
            else:
                yield Result(r, self.results)

        logger.debug('Took %.6fs to fetch results %s:%s for %r',
                     time.clock() - t0, bstart, bend, self.query)

        raise StopIteration

    def __getitem__(self, s):
        """Support results[i] and results[i:j:k].

        Bounds are normalized against the (estimated) result count and
        delegated to __iterate.  Raises IndexError for an out-of-range
        integer index.
        """
        count = self.__max()

        single = False
        if isinstance(s, slice):
            logger.debug('Fetch result slice %s:%s:%s (%s) for %r',
                         s.start, s.stop, s.step, count, self.query)
            start = s.start or 0
            end = s.stop
            step = s.step
        elif isinstance(s, (int, long)):
            # 'long' included: Py2 callers may index with longs.
            logger.debug('Fetch result %s (%s) for %r', s, count, self.query)
            single = True
            start = s
            end = s + 1
            step = 1

        # Normalize open and negative bounds against the result count.
        # NOTE(review): an open-ended slice stop becomes count - 1,
        # which excludes the final result, while __iter__ goes up to
        # count.  Looks like an off-by-one; left as-is pending
        # confirmation.
        if end is None:
            end = count - 1
        if end < 0:
            end = count + end
            if end < 0:
                end = 0
        if start is None:
            start = 0
        if start < 0:
            start = count + start
            if start < 0:
                start = 0
        if not step:
            step = 1

        # Re-orient the bounds so they agree with the step direction.
        if step < 0 and start < end:
            end, start = max(start - 1, -1), end
            assert (start >= end), (start, end, step, count, s)
        elif step > 0 and start > end:
            end, start = start, end
            assert (start <= end), (start, end, step, count, s)

        if step > 0:
            assert (start <= end), (start, end, step, count, s)
        elif step < 0:
            assert (start >= end), (start, end, step, count, s)

        res = list(self.__iterate(start, end, step))
        if not single:
            return res
        if not res:
            raise IndexError(s)
        return res[0]

    def __iter__(self):
        return self.__iterate(0, self.__max(), 1)

class WritableDocumentStore(object):
    """Write side of the store: owns the single writable xapian connection.

    xapian allows only one writer at a time, so the writable connection
    is opened lazily (per path) via the ``store`` property, cached in
    ``self.local``, and opening retries while the database lock is held
    elsewhere.
    """

    def __init__(self, path, lang='en', indexes=None):
        self.path = path
        self.lang = lang
        self.local = {}       # path -> cached writable DocumentStore
        self.indexes = None   # sequence of (name, Index); set via reset()
        self.init_lock = threading.Lock()
        self.reset(steal_lock=False, indexes=indexes)
        # Open once to write out field/value metadata, then close that
        # connection explicitly so the write lock is released (the
        # original dropped the reference and relied on refcount GC).
        self.init_lock.acquire()
        try:
            self.open().close()
        finally:
            self.init_lock.release()
        self.close()

    def reset(self, path=None, steal_lock=False, indexes=None):
        """Close any cached writable connection and update settings.

        path/indexes replace the current values when given; steal_lock
        removes stale 'db_lock*' files (only safe when we are the sole
        process opening the database).
        """
        self.init_lock.acquire()
        try:
            # Atomically remove and close the cached connection, if any.
            s = self.local.pop(self.path, None)
            if s is not None:
                logger.debug('Closing writable xapian database connection')
                s.close()
            if path is not None:
                self.path = path
            if indexes is not None:
                self.indexes = indexes
            if steal_lock:
                # Cleanup stale locks. We assume we are the only
                # process opening the database.
                for fname in glob.glob(os.path.join(self.path, 'db_lock*')):
                    logger.info('Removing stale lock file %r', fname)
                    os.unlink(fname)
        finally:
            self.init_lock.release()

    def open(self):
        """Open the writable database, retrying while it is locked.

        Retries up to 20 times with quadratically growing sleeps (max
        wait ~40s for the last retry); re-raises the final
        DatabaseLockError if every attempt fails.  On success, the
        configured indexes' values, fields and flatteners are
        registered on the freshly opened store before returning it.
        """
        max_tries = count = 20
        s = None
        last_error = None
        while count:
            try:
                s = DocumentStore(self.path,
                                  language_code=self.lang,
                                  read_only=False)
                logger.debug('Writable database at %r opened successfully',
                             self.path)
                break
            except xapian.DatabaseLockError as e:
                last_error = e
                count -= 1
                timeout = (max_tries - count) * (max_tries - count) * 0.1
                logger.debug('Database is locked, will sleep for '
                             '%.2fs and then retry (retry %d of %d)', timeout,
                             max_tries - count, max_tries)
                time.sleep(timeout) # XXX Could this be replaced by a
                                    # FileWatcherThread or something?
        if s is None:
            # Explicitly re-raise the last lock error.  (The original
            # bare 'raise' here depended on Python 2 keeping the handled
            # exception alive after the except block.)
            raise last_error

        if self.indexes is not None:
            v = s._values
            rv = s._revvalues
            f = s._fields
            rf = s._revfields

            # Make sure that new fields and values are added before
            # anything else happens.
            register = s.registerFlattener
            for name, index in self.indexes:
                if index.sortable:
                    # Allocate the next value slot for sortable fields.
                    if name not in v:
                        i = len(v)
                        v[name] = i
                        rv[i] = name
                        s._values_modified = True

                if hasattr(index, 'flatten') and hasattr(index, 'accepts'):
                    # Teach the store how to flatten this index's types.
                    for kind in index.accepts:
                        register(kind, index.flatten)

                if index.exact and name not in s._exact:
                    s._exact.add(name)
                    s._fields_modified = True

                if index.weight is not None:
                    assert isinstance(index.weight, int), type(index.weight)
                    s.setFieldWeight(name, index.weight)

                if name not in f:
                    # Allocate a term prefix for the new field.
                    i = len(f)
                    p = s._genPrefix(len(f))
                    f[name] = p
                    rf[p] = name
                    s._fields_modified = True

            # Write out field and value definitions.
            s._writeValues()
            s._writeFields()

        return s

    @property
    def store(self):
        """Return the cached writable store, opening one if necessary."""
        s = self.local.get(self.path)
        if s is not None and s._db is not None:
            return s

        self.init_lock.acquire()
        try:
            self.local[self.path] = self.open()
            return self.local[self.path]
        finally:
            self.init_lock.release()

    def add(self, data, pieces):
        """Index `pieces` for the document `data` (keyed by data['uid'])."""
        uid = data.get('uid', None)
        if not pieces:
            logger.warning('No pieces to index for %s', uid)
        else:
            self.store.addDocument(pieces, uid,
                                   data=cPickle.dumps(data, -1))

    def remove(self, uid):
        """Delete the document with the given uid."""
        self.store.deleteDocument(uid)

    def flush(self):
        """Flush pending writes to disk."""
        self.store.flush()

    def close(self):
        """Close the writable connection (alias for reset())."""
        self.reset()

class LocalStore(threading.local):
    """Thread-local slot for a per-thread read-only DocumentStore."""
    # Each thread sees its own `store`; None until that thread opens one.
    store = None

class ReadOnlyDocumentStore(object):
    """Read side of the store: one read-only connection per thread.

    Connections live in a ``threading.local``, so every thread opens,
    reopens and closes its own DocumentStore independently.
    """

    def __init__(self, path, lang='en'):
        self.path = path
        self.lang = lang
        self.local = LocalStore()
        self.reset()

    def reset(self, path=None):
        """Drop the current thread's connection; optionally switch path."""
        if path is not None:
            self.path = path
        self.close()

    @property
    def store(self):
        """Return this thread's open DocumentStore, opening one if needed."""
        cached = self.local.store
        if cached is not None and cached._db is not None:
            return cached

        cached = DocumentStore(self.path,
                               language_code=self.lang,
                               read_only=True)
        self.local.store = cached
        return cached

    def reopen(self):
        """Reopen this thread's connection to see database changes."""
        cached = self.local.store
        if cached is None:
            return
        if cached._db is None:
            # Underlying db handle is gone; discard the wrapper so the
            # next `store` access opens a fresh DocumentStore.
            self.reset()
            return
        # Thread-local storage means we are in the owning thread, so no
        # mutex is needed -- just never call this from another thread.
        logger.debug('Reopening read-only xapian database')
        cached.reopen()

    def close(self):
        """Close this thread's connection, if one is open."""
        cached = self.local.store
        if cached is None:
            return
        self.local.store = None
        logger.debug('Closing read-only xapian database connection')
        cached.close()

    def close_all(self):
        # Cannot reach other threads' connections from here; no-op.
        pass

class Store(object):
    """Facade combining a writable and a read-only xapian document store.

    ``path`` is either a plain filesystem path (shared by writer and
    reader) or a comma-separated triple ``<write>,<read>,<meta>`` where
    <write>/<read> are 'local' or a 'remote...' spec and <meta> is the
    path argument appended to each.

    Documents are dicts keyed by 'uid'; the configured Index objects
    decide which fields are indexed and how (text, keyword, sortable,
    date ranges).
    """

    if IStore is not None:
        implements(IStore, ISearch)

    def __init__(self, path, lang='en', indexes=(),
                 encoding='utf-8'):
        # Classify the configured indexes by kind for fast lookup.
        self.text_indexes = {}
        self.keyword_indexes = {}
        self.sortable_indexes = {}
        self.indexes = []

        seen = {}
        for index in indexes:
            seen[index.name] = None
            if isinstance(index, KeywordIndex):
                self.keyword_indexes[index.name] = index
            elif isinstance(index, TextIndex):
                self.text_indexes[index.name] = index
            if index.sortable:
                self.sortable_indexes[index.name] = index
            self.indexes.append((index.name, index))

        # A 'body' text index always exists, listed first.
        if not 'body' in seen:
            body = TextIndex('body')
            self.text_indexes['body'] = body
            self.indexes.insert(0, (body.name, body))

        # Add startswith indexes.  (Safe under Python 2: .values()
        # returns a list copy, so mutating the dict in the loop is ok.)
        for index in self.text_indexes.values():
            if index.startswith:
                self.text_indexes[index.name + 'startswith'] = index
                self.indexes.append((index.name + 'startswith', index))

        self.encoding = encoding
        self.read_store = None
        self.write_store = None
        self.registered = False
        self.init_lock = threading.Lock()
        self.register()
        self.reset(path, lang)

    def reopen(self):
        """Reopen the read store to pick up committed changes."""
        self.read_store.reopen()

    def reset(self, path=None, lang='en'):
        """(Re)create both underlying stores, parsing the path spec."""
        if path is not None:
            self.path = path
        if lang is not None:
            self.lang = lang
        self.cleanup()
        self.init_lock.acquire()
        try:
            write_path = read_path = self.path
            parts = [s.strip() for s in self.path.split(',')]
            if len(parts) > 1:
                # Triple form: '<write>,<read>,<meta>'.
                assert len(parts) == 3, parts
                write_path, read_path, meta = parts

                if write_path.startswith('remote'):
                    write_path = ' '.join((parts[0], parts[2]))
                elif write_path == 'local':
                    write_path = parts[2]
                else:
                    raise ValueError(write_path)

                if read_path.startswith('remote'):
                    read_path = ' '.join((parts[1], parts[2]))
                elif read_path == 'local':
                    read_path = parts[2]
                else:
                    raise ValueError(read_path)

            self.write_store = WritableDocumentStore(write_path, self.lang,
                                                     self.indexes)
            self.read_store = ReadOnlyDocumentStore(read_path,
                                                    self.lang)
        finally:
            self.init_lock.release()


    # Signal Handlers
    def register(self):
        """Register cleanup on SIGTERM/SIGINT via Zope's SignalHandler."""
        if self.registered:
            return
        try:
            from Signals.Signals import SignalHandler
        except ImportError:
            logger.exception('Could not import SignalHandler from Zope')
            return
        SignalHandler.registerHandler(SIGTERM, self.cleanup)
        SignalHandler.registerHandler(SIGINT, self.cleanup)
        # XXX Disabled. We can't call 'close_all' here as it might
        # interfere with other threads that are still running and that
        # could cause a crash. Not sure what else we could do here,
        # maybe just call reopen?
        #
        # SignalHandler.registerHandler(SIGHUP, self.restart)
        # SignalHandler.registerHandler(SIGUSR2, self.restart)
        self.registered = True

    def cleanup(self):
        """Close both stores and drop the references to them."""
        self.init_lock.acquire()
        try:
            ws = self.write_store
            if ws is not None:
                ws.close()
                self.write_store = None
            rs = self.read_store
            if rs is not None:
                rs.close_all()
                self.read_store = None
        finally:
            self.init_lock.release()

    close = cleanup

    def restart(self):
        self.reset()

    def __iter__(self):
        """Iterate over all indexed documents (read store)."""
        return self.read_store.store.getAllDocumentIterator()

    # Indexing
    def prepare_data(self, data):
        """Merge `data` over any existing document with the same uid.

        Returns a new dict; raises ValueError when no uid is supplied.
        """
        uid = data.get('uid', None)
        if not uid:
            raise ValueError(
                'You must pass in the uid of the object to be indexed.')
        if not isinstance(data, dict):
            data = dict(data) # Copy.
        assert isinstance(data, dict), 'Not a dict: %r' % data
        existing = self.get(uid)
        if existing is not None:
            existing = dict(existing) # Copy
            existing.update(data)
            data = existing
        return data

    def add(self, data, pieces=None, prepare=True):
        """Index a document; computes pieces from `data` unless given."""
        if prepare:
            data = self.prepare_data(data)
        if pieces is None:
            pieces = self.process(data)
        self.write_store.add(data, pieces)

    def remove(self, uid):
        """Remove the document with the given uid from the index."""
        if not uid:
            raise ValueError(
                'You must pass in the uid of the object to be removed.')
        self.write_store.remove(uid)

    @retry_on_error()
    def exists(self, uid):
        """Return True when a document with this uid is indexed."""
        if not uid:
            raise ValueError(
                'You must pass in the uid of the object to be indexed.')
        store = self.read_store.store
        idterm = "Q" + store._normId(uid)
        if not store._db.term_exists(idterm):
            return False
        return True

    @retry_on_error()
    def get(self, uid, default=None):
        """Return the stored data dict for `uid`, or `default`."""
        store = self.read_store.store
        docid = store._getXapianDocId(uid)
        if docid is None:
            return default
        data = store._db.get_document(docid).get_data()
        return cPickle.loads(data)

    def flush(self):
        """Commit pending writes, then release the writer and reopen
        the reader so searches see the new data."""
        try:
            self.write_store.flush()
        finally:
            self.write_store.close()
            self.reopen()

    def process(self, data):
        """Turn a data dict into the list of index pieces for xapian."""
        pieces = list()
        for name, index in self.indexes:
            values = data.get(name, '')
            if not values:
                continue
            if isinstance(values, tuple):
                values = list(values)
            elif not isinstance(values, list):
                values = [values]
            for v in values:
                pieces.extend(self.process_value(v, name, index))
        return pieces

    def process_value(self, value, name, index):
        """Return the index pieces for one value of one field."""
        pieces = []

        # XXX We assume value is either utf-8 encoded, or is a unicode
        # object.
        if isinstance(value, str):
            value = value.decode('utf-8')

        if name in self.sortable_indexes:
            # For sortable indexes, we add the value as-is.
            sort_value = value
            if name in self.text_indexes:
                # Unless it's a text value, then we lowercase it.
                sort_value = sort_value.lower()
            pieces.append(SortableValue(sort_value, field_name=name))

        if name in self.keyword_indexes:
            # For keyword indexes, we add the value as a 'Term'.
            pieces.append(Term(value, field_name=name))
        elif name in self.text_indexes:
            if index.splitter is not None:
                words = index.splitter.split(value)
            else:
                # Default split: whitespace, then hyphens.
                words = []
                for sub in map(lambda x: x.split('-'), value.split()):
                    words.extend(sub)

            # Process stopwords for DocumentPiece
            if index.stopwords is not None:
                words = index.stopwords.process(words)

            for idx, word in enumerate(filter(None, words)):
                if not idx and index.startswith:
                    # Index the first term as a 'startswith' index,
                    # with a lowercase SortableValue too, so you can
                    # sort on it, case-insensitive.
                    pieces.append(SortableValue(word.lower(),
                                                field_name=name + 'startswith'))
                    pieces.append(DocumentPiece(word,
                                                field_name=name + 'startswith'))
                pieces.append(DocumentPiece(word, field_name=name))

        return pieces

    def buildRestrictions(self, **kw):
        """Build RangeRestrictions from {date_index: {'min':…, 'max':…}}.

        Only DateIndex fields are considered; returns None when no
        restriction applies.
        """
        rules = []
        for name, index in self.indexes:
            values = kw.get(name, None)
            if values and isinstance(index, DateIndex):
                lo = values.get('min')
                hi = values.get('max')
                if lo is not None:
                    lo = index.flatten(lo)
                if hi is not None:
                    hi = index.flatten(hi)
                rules.append((name, lo, hi))
        if not rules:
            return None
        restrictions = RangeRestrictions()
        for rule in rules:
            restrictions.add(*rule)
        return restrictions

    # Searching
    def buildQuery(self, **kw):
        """Build a query string from field=value(s) keyword arguments.

        Keyword-index values are quoted; 'body' terms are unprefixed;
        everything is AND-ed ('+') except multiple values of the same
        keyword field, which are OR-ed.
        """
        query = []
        for name, index in self.indexes:
            sub = []
            values = kw.get(name, '')
            if values and isinstance(index, DateIndex):
                raise ValueError('Cannot search on DateIndex: %r' % name)
            if values:
                if isinstance(values, tuple):
                    values = list(values)
                if not isinstance(values, list):
                    values = [values]
            # NOTE(review): keyword_indexes is keyed by index.name,
            # which is not lowercased at registration -- mixed-case
            # keyword index names would never match here; confirm.
            is_term = name.lower() in self.keyword_indexes
            for v in values:
                if is_term:
                    v = '"%s"' % v
                if name.lower() in ('body',):
                    extra = v
                else:
                    extra = '%s:%s' % (name, v)
                sub.append(extra)

            extend = True
            if is_term:
                if len(sub) > 1:
                    # OR together multiple values of one keyword field.
                    extend = False
                    query.append('(' + ' OR '.join(sub) + ')')
            if extend:
                query.extend(sub)
        if len(query) == 1:
            query = query[0]
        else:
            query = '+' + ' +'.join(query)
        if isinstance(query, unicode):
            query = query.encode('utf-8')
        return query

    def search(self, query, max_items=-1, partial=False,
               combiner=DocumentStore.OP_OR,
               sort_on=None, ascending=True,
               cmpfn_generator=None,
               range_restrictions=None,
               start=0, end=1):
        """Run `query` and return a lazy Results wrapper over the hits."""

        logger.debug('Searching for %r, partial=%r, combiner=%r, '
                     'max_items=%r.', query, partial, combiner, max_items)

        return Results(self, query, max_items=max_items,
                       partial=partial,
                       combiner=combiner,
                       sort_on=sort_on, ascending=ascending,
                       cmpfn_generator=cmpfn_generator,
                       range_restrictions=range_restrictions,
                       start=start, end=end)