import os
import glob
import time
import logging
import cPickle

from signal import SIGTERM, SIGINT
try:
    from signal import SIGHUP, SIGUSR1, SIGUSR2
except ImportError:
    # Windows doesn't have these (but also doesn't care what the exact
    # numbers are)
    SIGHUP = 1
    SIGUSR1 = 10
    SIGUSR2 = 12

from thread import get_ident
from threading import local, Lock
from datetime import date, datetime
from dateutil.tz import gettz, tzutc
from lemur.xapian.sei import DocumentStore, DocumentPiece
from lemur.xapian.sei import Term, SortableValue
from lemur.xapian.sei import flatten_integer

try:
    from zope.interface import implements
    from enfold.xapian.interface import IStore, ISearch
except ImportError:
    IStore = ISearch = None

logger = logging.getLogger('enfold.xapian.store')

UTC = tzutc()

class Index(object):
    """Base configuration record describing one searchable field."""

    def __init__(self, name, splitter=None, stopwords=None, sortable=False):
        """Remember the field name plus its optional text-processing helpers.

        splitter/stopwords are objects used by Store.process(); sortable
        marks the field as usable for result ordering.
        """
        self.name = name
        self.sortable = sortable
        self.splitter = splitter
        self.stopwords = stopwords

class TextIndex(Index):
    """Full-text field: values are split into words and indexed as terms.

    ``accepts`` lists the Python types the store's flattener registry
    maps onto :meth:`flatten` (see Store.reset).
    """

    accepts = (str, unicode)

    # NOTE: the former __init__ merely forwarded every argument to
    # Index.__init__ with an identical signature and defaults, so it was
    # removed; the inherited constructor behaves the same.

    def flatten(self, value):
        """Return *value* as a UTF-8 encoded byte string for indexing."""
        if isinstance(value, unicode):
            return value.encode('utf-8')
        return str(value)

class KeywordIndex(Index):
    """Atomic-value field: each value is indexed whole as a single term.

    Unlike the base class, keyword fields default to being sortable.
    """

    def __init__(self, name, sortable=True):
        super(KeywordIndex, self).__init__(name, splitter=None,
                                           stopwords=None, sortable=sortable)

def timedelta_to_utime(t):
    """Convert a timedelta *t* to whole seconds.

    Microseconds are deliberately discarded, matching the integer-second
    representation used for sortable date values.
    """
    return t.days * 86400 + t.seconds

class DateIndex(Index):
    """Sortable date/datetime field flattened to integer epoch seconds."""

    accepts = (datetime, date)
    try:
        # Optionally also accept Zope 2 DateTime objects when available.
        from DateTime.DateTime import DateTime
        accepts = accepts + (DateTime,)
    except ImportError:
        pass

    epoch_date = date(1970, 1, 1)
    epoch_datetime = datetime(1970, 1, 1, 0, 0, 0, tzinfo=UTC)

    def __init__(self, name):
        Index.__init__(self, name, splitter=None,
                       stopwords=None, sortable=True)

    def flatten(self, value):
        """Return *value* as a string of whole seconds since the epoch.

        Raises ValueError for unsupported types.
        """
        # Test datetime BEFORE date: datetime is a subclass of date, so
        # the original combined isinstance(value, (datetime, date)) check
        # also swallowed plain dates and then raised TypeError subtracting
        # an aware datetime from a date, leaving the date branch below
        # unreachable.
        if isinstance(value, datetime):
            # NOTE(review): epoch_datetime is UTC-aware, so naive
            # datetimes still raise TypeError here -- callers presumably
            # pass tz-aware values; confirm.
            delta = value - self.epoch_datetime
            return str(timedelta_to_utime(delta))
        elif isinstance(value, date):
            delta = value - self.epoch_date
            return str(timedelta_to_utime(delta))
        elif hasattr(value, 'timeTime'): # Zope 2 DateTime
            return str(value.timeTime())
        raise ValueError('Cannot flatten %r' % value)

class Result(object):
    """Lazy wrapper around one search hit.

    The raw hit is a ``(pickled_data, relevance)`` pair; the pickled
    document mapping is only deserialized on first field access.
    """

    def __init__(self, r):
        self._data, self._relevance = r[0], r[1]
        self._info = None  # decoded lazily by _init()

    def _init(self):
        # Unpickle the stored document mapping exactly once.
        if self._info is None:
            self._info = cPickle.loads(self._data)

    def __getitem__(self, key):
        self._init()
        return self._info[key]

    def __setitem__(self, key, value):
        self._init()
        self._info[key] = value

    def __iter__(self):
        self._init()
        return iter(self._info.keys())

    def get(self, key, default=None):
        """Dict-style get: return *default* when *key* is absent."""
        try:
            return self[key]
        except KeyError:
            return default

    def keys(self):
        self._init()
        return self._info.keys()

    def values(self):
        self._init()
        return self._info.values()

    def items(self):
        self._init()
        return self._info.items()

    def relevance(self):
        """Return the raw relevance score supplied by the engine."""
        return self._relevance

class Results(object):
    """Lazy, sliceable view over an engine result set.

    *results* is the search-result object from the document store (it
    provides ``getResults``/``getResult`` and ``_makeMset``); *max_items*
    caps how many hits are exposed (-1 means no cap).
    """

    def __init__(self, results, max_items=-1):
        self.results = results
        self.max_items = max_items
        self.size = None  # cached estimated match count

    def __len__(self):
        if self.size is not None:
            return self.size
        # Make a mset of at least 10, to avoid issues with
        # estimatedResultCount being wrong.
        self.results._makeMset(10)
        self.size = self.results._mset.get_matches_estimated()
        return self.size

    def __max(self):
        """Return how many results may actually be fetched."""
        count = self.__len__()
        count = max(count, 1)  # Fetch *at least* 1, in case
                               # estimatedResultCount is wrong.
        max_items = self.max_items
        if max_items == -1:
            max_items = count
        count = min(count, max_items)
        return count

    def __iterate(self, start, end, step):
        # Yield Result wrappers for the span [start, end) with *step*.
        if step == 1:
            # Fast path: one batched fetch from the engine.
            for r in self.results.getResults(start, end):
                yield Result(r)
        else:
            for i in xrange(start, end, step):
                try:
                    r = self.results.getResult(i)
                except IndexError:
                    # Estimated counts can overshoot; skip missing rows.
                    pass
                else:
                    yield Result(r)
        # NOTE: the former trailing ``raise StopIteration`` was removed:
        # it is redundant at the end of a generator and turns into a
        # RuntimeError under PEP 479 (Python 3.7+).

    def __getitem__(self, s):
        """Support both integer indexing and slicing.

        Integers return a single Result (IndexError when out of range);
        slices return a generator of Results.  Raises TypeError for any
        other key type (previously this crashed with UnboundLocalError).
        """
        count = self.__max()

        single = False
        if isinstance(s, slice):
            start = s.start or 0
            end = s.stop
            step = s.step
        elif isinstance(s, int):
            single = True
            start = s
            end = s + 1
            step = 1
        else:
            raise TypeError('indices must be integers or slices, got %r' % s)

        # Normalize missing/negative bounds against the fetchable count.
        if not end:
            end = count
        if end < 0:
            end = count + end
        if start < 0:
            start = count + start
        if not step:
            step = 1
        if step < 0 and start < end:
            # Negative step: walk the same span backwards.
            end, start = start - 1, end

        iterator = self.__iterate(start, end, step)
        if single:
            hits = list(iterator)
            if not hits:
                raise IndexError(s)
            return hits[0]
        return iterator

    def __iter__(self):
        for r in self.results.getResults(0, self.__max()):
            yield Result(r)

class WritableDocumentStore(object):
    """Owner of the single read-write connection to the document store.

    Only one process is expected to hold the write lock; ``reset`` can
    steal stale db locks left behind by a crashed writer.
    """

    def __init__(self, path, lang='en', indexes=None):
        # path: filesystem directory of the database.
        # lang: language code handed to DocumentStore (stemming).
        # indexes: optional sequence of (name, Index) pairs whose field
        # and sort-value slots are registered at open time.
        self.path = path
        self.lang = lang
        self.store = None
        self.init_lock = Lock()
        # First open: assume we are the only writer and clear stale locks.
        self.reset(steal_lock=True, indexes=indexes)

    def reset(self, path=None, steal_lock=False, indexes=None):
        """Close and re-open the writable store, registering *indexes*.

        When *steal_lock* is true, db_lock* files under the database
        directory are deleted first -- safe only if no other process has
        the database open.
        """
        if path is not None:
            self.path = path
        self.init_lock.acquire()
        try:
            if self.store is not None:
                self.store.close()
            if steal_lock:
                # Cleanup stale locks. We assume we are the only
                # process opening the database.
                for fname in glob.glob(os.path.join(self.path, 'db_lock*')):
                    logger.info('Removing stale lock file %r', fname)
                    os.unlink(fname)
            self.store = s = DocumentStore(self.path,
                                       language_code=self.lang,
                                       read_only=False)

            if indexes is not None:
                # Private DocumentStore maps: name->slot (and reverse)
                # for sortable values, name->prefix (and reverse) for
                # fields.
                v = s._values
                rv = s._revvalues
                f = s._fields
                rf = s._revfields

                # Make sure that new fields and values are added before
                # anything else happens.
                for name, index in indexes:
                    if index.sortable:
                        if name not in v:
                            # Allocate the next free value-slot number.
                            i = len(v)
                            v[name] = i
                            rv[i] = name
                            s._values_modified = True
                    if name not in f:
                        i = len(f)
                        p = s._genPrefix(len(f))
                        f[name] = p
                        rf[p] = name
                        s._fields_modified = True

                # Write out field and value definitions.
                s._writeValues()
                s._writeFields()

        finally:
            self.init_lock.release()

    def close(self):
        """Close the underlying store; safe to call repeatedly."""
        self.init_lock.acquire()
        try:
            if self.store is not None:
                self.store.close()
                self.store = None
        finally:
            self.init_lock.release()

class ReadOnlyDocumentStore(object):
    """Per-thread read-only connections to the document store.

    Each thread gets its own DocumentStore keyed by thread id.  After a
    write, ``dirty`` marks every open connection; a dirty connection is
    reopened once it is older than *stale* seconds, so readers eventually
    see new documents without reopening on every search.
    """

    def __init__(self, path, lang='en', stale=30):
        self.path = path
        self.lang = lang
        self.local = {}       # thread id -> DocumentStore
        self.is_dirty = {}    # thread id -> True once a write occurred
        self.last_mod = None  # time.time() of the most recent dirty()
        self.stale = stale    # seconds a dirty connection may linger
        self.init_lock = Lock()
        self.reset()

    def reset(self, path=None):
        """Drop every thread's connection, optionally switching *path*."""
        if path is not None:
            self.path = path
        self.close()

    def prepare(self):
        """Ensure the calling thread has a sufficiently fresh connection.

        Must be called before reading ``store`` (see Store.search).
        """
        tid = get_ident()
        if tid in self.is_dirty:
            # This thread's connection has seen a write; replace it once
            # it has been out of date longer than the staleness window.
            # (last_mod is always set before is_dirty entries appear --
            # see dirty().)
            if time.time() > self.last_mod + self.stale:
                try:
                    self.is_dirty.pop(tid)
                except KeyError:
                    pass
                try:
                    store = self.local.pop(tid)
                    store.close()
                except KeyError:
                    pass

        if not tid in self.local:
            self.init_lock.acquire()
            try:
                # See if someone beat us to it.
                if tid in self.local:
                    return
                self.local[tid] = DocumentStore(self.path,
                                                language_code=self.lang,
                                                read_only=True)
            finally:
                self.init_lock.release()

    @property
    def store(self):
        # The current thread's connection, or None when prepare() has
        # not yet run in this thread.
        return self.local.get(get_ident())

    def dirty(self):
        """Record a write: mark every open connection as out of date."""
        self.last_mod = time.time()
        for tid in self.local.keys()[:]:
            self.is_dirty[tid] = True

    def close(self):
        """Close and forget all per-thread connections."""
        while self.local:
            tid, store = self.local.popitem()
            store.close()
        self.is_dirty.clear()
    

class Store(object):
    """High-level indexing/search facade over the document store.

    Combines one WritableDocumentStore (indexing) with per-thread
    ReadOnlyDocumentStore connections (searching), and wires Zope signal
    handlers so the stores are closed or reopened cleanly.
    """

    if IStore is not None:
        implements(IStore, ISearch)

    def __init__(self, path, lang='en', indexes=(),
                 encoding='utf-8', stale=10):
        # Bucket the Index objects by kind so process()/buildQuery() can
        # look behavior up by field name.
        self.text_indexes = {}
        self.keyword_indexes = {}
        self.sortable_indexes = {}
        self.indexes = []  # ordered (name, Index) pairs

        seen = {}
        for index in indexes:
            seen[index.name] = None
            if isinstance(index, KeywordIndex):
                self.keyword_indexes[index.name] = index
            elif isinstance(index, TextIndex):
                self.text_indexes[index.name]  = index
            if index.sortable:
                self.sortable_indexes[index.name] = index
            self.indexes.append((index.name, index))

        # A default free-text 'body' field is always present; buildQuery
        # searches it without a field prefix.
        if not 'body' in seen:
            body = TextIndex('body')
            self.text_indexes['body'] = body
            self.indexes.insert(0, ((body.name, body)))

        self.encoding = encoding
        self.read_store = None
        self.write_store = None
        self.registered = False
        self.stale = stale
        self.init_lock = Lock()
        self.register()
        self.reset(path, lang)

    def reset(self, path=None, lang='en'):
        """(Re)open both stores and register the index flatteners."""
        if path is not None:
            self.path = path
        if lang is not None:
            self.lang = lang
        self.cleanup()
        self.init_lock.acquire()
        try:
            self.write_store = WritableDocumentStore(self.path, self.lang, 
                                                     self.indexes)
            self.read_store = ReadOnlyDocumentStore(self.path,
                                                    self.lang,
                                                    stale=self.stale)
        finally:
            self.init_lock.release()

        # Teach the write store how to turn values of each accepted
        # Python type into indexable strings (see Index.flatten).
        register = self.write_store.store.registerFlattener
        for name, index in self.indexes:
            if not (hasattr(index, 'flatten') and hasattr(index, 'accepts')):
                continue
            for kind in index.accepts:
                register(kind, index.flatten)


    # Signal Handlers
    def register(self):
        """Hook Zope's SignalHandler so shutdown/restart close the stores.

        No-op when already registered or when Zope's Signals package is
        unavailable.
        """
        if self.registered:
            return
        try:
            from Signals.Signals import SignalHandler
        except ImportError:
            logger.exception('Could not import SignalHandler from Zope')
            return
        # NOTE(review): cleanup/restart take no arguments, so
        # SignalHandler presumably invokes handlers with none -- confirm.
        SignalHandler.registerHandler(SIGTERM, self.cleanup)
        SignalHandler.registerHandler(SIGINT, self.cleanup)
        SignalHandler.registerHandler(SIGHUP, self.restart)
        SignalHandler.registerHandler(SIGUSR2, self.restart)
        self.registered = True

    def cleanup(self):
        """Close both stores; safe to call repeatedly."""
        self.init_lock.acquire()
        try:
            if self.read_store is not None:
                self.read_store.close()
                self.read_store = None
            if self.write_store is not None:
                self.write_store.close()
                self.write_store = None
        finally:
            self.init_lock.release()
    close = cleanup
            
    def restart(self):
        """Signal handler: reopen both stores in place."""
        self.reset()

    # Indexing
    def add(self, data):
        """Index one document.

        *data* is a mapping that must contain a true 'uid' key; a copy of
        the whole mapping is pickled and stored with the index terms so
        search results can return it.  Raises ValueError without a uid.
        """
        uid = data.get('uid', None)
        if not uid:
            msg = 'You must pass in the uid of the object to be indexed.'
            raise ValueError, msg
        pieces = list()
        for name, index in self.indexes:
            values = data.get(name, '')
            if not values:
                continue
            # Normalize to a list so single values and sequences are
            # indexed the same way.
            if isinstance(values, tuple):
                values = list(values)
            elif not isinstance(values, list):
                values = [values]
            for v in values:
                if isinstance(v, str):
                    # NOTE(review): byte strings are decoded as UTF-8
                    # regardless of self.encoding -- confirm intent.
                    v = v.decode('utf-8')
                pieces.extend(self.process(v, name, index))
        if not pieces:
            logger.warning('No pieces to index for %s' % uid)
        else:
            data = dict(data) # Copy.
            assert isinstance(data, dict), 'Not a dict: %r' % data
            self.write_store.store.addDocument(pieces, uid,
                                               data=cPickle.dumps(data, -1))
            # Invalidate per-thread readers so they eventually see it.
            self.read_store.dirty()

    def remove(self, uid):
        """Delete the document stored under *uid*; ValueError when falsy."""
        if not uid:
            msg = 'You must pass in the uid of the object to be indexed.'
            raise ValueError, msg
        self.write_store.store.deleteDocument(uid)
        self.read_store.dirty()

    def flush(self):
        """Flush pending writes and invalidate the per-thread readers."""
        self.write_store.store.flush()
        self.read_store.dirty()

    def process(self, value, name, index):
        """Return the list of document pieces to index for one field value."""
        pieces = []

        if self.sortable_indexes.has_key(name):
            # For sortable indexes, we add the value as-is.
            pieces.append(SortableValue(value, field_name=name))

        if self.keyword_indexes.has_key(name):
            # For keyword indexes, we add the value as a 'Term'.
            pieces.append(Term(value, field_name=name))
            pieces.append(DocumentPiece(value, field_name=name))
        elif self.text_indexes.has_key(name):
            if index.splitter is not None:
                words = index.splitter.split(value)
            else:
                words = value.split()

            # Don't process stopwords for DocumentPiece
            for word in filter(None, words):
                pieces.append(DocumentPiece(word, field_name=name))

            if index.stopwords is not None:
                words = index.stopwords.process(words)
            # Then after stopword processing, add all the remaining
            # words as 'Terms'
            for word in filter(None, words):
                pieces.append(Term(word, field_name=name))

        return pieces
        
    # Searching
    def buildQuery(self, **kw):
        """Build a query string from field=value(s) keyword arguments.

        Keyword-index values are quoted; 'body' values are unprefixed;
        everything else becomes name:value.  Terms are AND-ed with '+',
        except multiple values for one keyword field, which are OR-ed
        inside parentheses.  DateIndex fields cannot be queried here.
        """
        query = []
        for name, index in self.indexes:
            sub = []
            values = kw.get(name, '')
            if values and isinstance(index, DateIndex):
                raise ValueError('Cannot search on DateIndex: %r' % name)
            if values:
                if isinstance(values, tuple):
                    values = list(values)
                if not isinstance(values, list):
                    values = [values]
            # NOTE(review): keyword_indexes is keyed by the index's
            # original name, so this lower() lookup misses mixed-case
            # names -- confirm field names are always lowercase.
            is_term = self.keyword_indexes.has_key(name.lower())
            for v in values:
                if is_term:
                    v = '"%s"' % v
                if name.lower() in ('body',):
                    extra = v
                else:
                    extra = '%s:%s' % (name, v)
                sub.append(extra)

            extend = True
            if is_term:
                if len(sub) > 1:
                    extend = False
                    query.append('(' + ' OR '.join(sub) + ')')
            if extend:
                query.extend(sub)
        if len(query) == 1:
            return query[0]
        return '+' + ' +'.join(query)

    def search(self, query, max_items=-1, partial=False,
               combiner=DocumentStore.OP_OR,
               sort_on=None, ascending=True,
               cmpfn_generator=None,
               range_restrictions=None):
        """Run *query* against this thread's read store.

        Returns a lazy Results wrapper capped at *max_items* (-1 = no
        cap); the remaining keyword arguments are passed through to
        performSearch unchanged.
        """
        self.read_store.prepare()
        logger.info('Searching for %r, partial=%r, combiner=%r, '
                    'max_items=%r.', query, partial, combiner, max_items)
        res = self.read_store.store.performSearch(
            query, partial=partial, combiner=combiner,
            sort_on=sort_on, ascending=ascending,
            cmpfn_generator=cmpfn_generator,
            range_restrictions=range_restrictions)
        return Results(res, max_items)
