'''
This module provides a storage layer for Eyespy.

Design criteria for the storage layer should assume that the underlying storage
system is durable but may exhibit temporary high latency.

All storage systems should implement some kind of journalling support so that
sudden failure can be easily recovered.

The only thing that the storage layer should do is manage a simple
key/value database of objects in a journalled format.  In the case of an object
update, we prevent storage corruption by keeping *all* versions of an object.

Note that in no place do we ever try to optimize the changes made to an object
when serializing the list of objects to disk.  We always want to be able to
reconstruct the *exact* list of changes that were made up to a specific journal point.
'''

import simplejson, datetime
import os
import types
import time
import tempfile

class DateDecoder(object):
    '''
    JSON object_hook that revives the tagged dicts produced by DateEncoder
    back into datetime.datetime / datetime.date instances.
    '''
    decode_map = {
        '__datetime__' : lambda obj: datetime.datetime(*obj['__datetime__']),
        '__date__' : lambda obj: datetime.date(*obj['__date__'])
    }
    @classmethod
    def decode(cls, obj):
        # Revive any dict carrying one of our sentinel keys; every other
        # object passes through untouched.
        for marker, builder in cls.decode_map.items():
            if marker in obj:
                return builder(obj)
        return obj
            

class DateEncoder(simplejson.JSONEncoder):
    '''
    JSONEncoder subclass that serializes datetime.datetime / datetime.date
    values as tagged dicts which DateDecoder.decode can later revive.
    '''
    encode_map = {
        datetime.datetime: lambda obj: {'__datetime__': obj.timetuple()[:6]},
        datetime.date: lambda obj: {'__date__': obj.timetuple()[:3]}
    }
    def default(self, obj):
        # Translate the supported date types into tagged dicts; defer
        # everything else to the base class (which raises TypeError).
        try:
            encoder = self.encode_map[obj.__class__]
        except KeyError:
            return simplejson.JSONEncoder.default(self, obj)
        return encoder(obj)

class FSStorage(object):
    '''
    FSStorage implementations can run on any locally mounted filesystem.

    The only thing we assume is that a closed file is durable between process
    runs.
    '''

    def __init__(self, path, cls_def):
        '''
        Prepare the journal directory under `path` and open the first
        journal file for this process.

        :param path: root directory for this storage instance (``~`` is expanded)
        :param cls_def: accepted for interface compatibility; not used here
        '''
        normpath = os.path.expanduser(os.path.normpath(path))
        self.object_journals = normpath + "/obj_journals"
        # Test for the journal subdirectory itself, not just the parent:
        # the parent may already exist while obj_journals does not, and
        # the original parent-only check would then skip makedirs and
        # break the open() in next_journal().
        if not os.path.exists(self.object_journals):
            # Store a journal of new/updated blobs
            os.makedirs(self.object_journals)

        self.current_journal = None
        self.next_journal()

    def next_journal(self):
        '''
        Close the existing journal file handle if it's open and
        return the next one for this process.

        Journal filenames are "<index>-<timestamp>.log" where both parts
        are 8-digit zero-padded uppercase hex: the index continues from
        the highest existing journal, and the timestamp is the current
        Unix time.

        ex: 00000001-044ACF24.log
        '''
        if self.current_journal:
            self.current_journal.flush()
            self.current_journal.close()

        existing_journals = sorted(os.listdir(self.object_journals))
        timestamp = hex(int(time.time()))[2:].upper().zfill(8)
        if existing_journals:
            # Continue the sequence from the highest existing index.
            last_journal = existing_journals[-1]
            next_index = int(last_journal.split('-')[0], 16) + 1
        else:
            next_index = 1
        next_log = "%s-%s.log" % (hex(next_index).upper()[2:].zfill(8), timestamp)

        # 'a' replaces the invalid mode string 'wa' (rejected by Python 3's
        # open()); the file name is always new, so append and write are
        # equivalent here.
        self.current_journal = open(self.object_journals + "/" + next_log, 'a')
        return self.current_journal

    def add_documents(self, json_docs):
        '''
        Append one document (dict) or a list of documents to the current
        journal as a single compact JSON list, then rotate to a fresh
        journal file.
        '''
        # isinstance(..., list) replaces the Python-2-only types.ListType.
        if not isinstance(json_docs, list):
            json_docs = [json_docs]

        json = simplejson.dumps(json_docs, cls=DateEncoder, separators=(',',':'))

        # write to disk and flush immediately; the trailing newline keeps
        # each record on its own line for the line-oriented reader below.
        self.current_journal.write(json + "\n")
        self.current_journal.flush()
        self.next_journal()

    def document_generator(self, last_read_journal_index=0):
        '''
        Generator yielding every JSON object stored in journals whose index
        is strictly greater than `last_read_journal_index`, earliest
        journal first.
        '''
        # Indexes are zero-padded hex, so lexicographic comparison of the
        # filename prefix matches numeric comparison of the index.  Hoist
        # the threshold out of the loop instead of recomputing it per file.
        threshold = hex(last_read_journal_index).upper()[2:].zfill(8)
        sorted_names = sorted(
            fname for fname in os.listdir(self.object_journals)
            if fname.split('-')[0] > threshold)

        for fname in sorted_names:
            # Close each journal deterministically rather than leaking the
            # handle to the garbage collector.
            journal = open(self.object_journals + "/" + fname)
            try:
                lines = journal.readlines()
            finally:
                journal.close()
            for line in lines:
                # Tolerate blank lines (e.g. a journal that was rotated
                # before anything was written to it).
                if not line.strip():
                    continue
                json_list = simplejson.loads(line, object_hook=DateDecoder.decode)
                for json_obj in json_list:
                    yield json_obj

##############

class IndexBuilder(object):
    '''
    Builds a search index from a storage backend.

    NOTE(review): the original docstring trails off ("either reconstruct a
    new") -- the intended second capability is not recoverable from this
    file; confirm against the rest of the project.
    '''
    def rebuild_index(self, storage):
        '''
        Rebuild the Xapian index using a given storage system and return gzip
        compressed index.

        NOTE(review): despite the docstring, nothing is returned yet and the
        result is never gzip-compressed here.  `Indexer` is also not imported
        anywhere in this module -- presumably it lives in an indexing module
        elsewhere in Eyespy; confirm and add the import, otherwise this
        raises NameError at call time.
        '''


        # Build the index in a throwaway temp directory.
        index_home = tempfile.mkdtemp()
        index_name = "xap_index"

        indexer = Indexer(index_home, index_name)
        # Feed every journalled document through the indexer.
        indexer.index_dociter(storage.document_generator())









