import collections
import logging
import os
import uuid
try:
    import cPickle as pickle
except ImportError:
    import pickle
    
try:
    import dobbin
    import transaction
    from dobbin import database
    HAS_DOBBIN = True
except ImportError:
    HAS_DOBBIN = False
    
_marker = object()
logger = logging.getLogger('store.storage')

class BaseStorage(object):
    """Common machinery for batch stores that keep one file per batch.

    Subclasses must provide an ``extension`` class attribute (e.g.
    ``'.pickle'``) plus ``save``/``rollover``/``close``/``open_storage``.
    """

    def __init__(self, storage_path, get_next_batch_id=None):
        """ Create the storage
          - storage_path
            Path to the directory where the storage's file should be kept

          - get_next_batch_id
            Callable that returns the next batch id to use. The last batch
            id will be passed in as a single argument, or None if this is
            the first id to be requested.
        """
        self.storage_path = storage_path
        self._last_batch_id = None
        self.get_next_batch_id = get_next_batch_id

    def clean_key(self, key):
        """Return key, substituting a unique '__NOKEY...' placeholder when
        the caller supplied none (the ``_marker`` sentinel)."""
        if key is _marker:
            key = '__NOKEY%s' % str(uuid.uuid4())
        return key

    def batch_path(self, batch_id):
        """Full path of the file holding ``batch_id``."""
        return os.path.join(self.storage_path, batch_id) + self.extension

    def list(self, batches=None):
        """Yield the full paths of stored batch files.

        If ``batches`` is a non-empty sequence of batch ids, only those
        batches are yielded.
        """
        # Set lookup instead of the old dict-of-1 / has_key (removed in py3).
        wanted = set(b + self.extension for b in (batches or []) if b)
        for dirpath, dirnames, filenames in os.walk(self.storage_path):
            for name in filenames:
                # If it doesn't look like one of our files, skip it
                if not name.endswith(self.extension):
                    continue
                # If we're only selecting from some batches, check this is
                # in the set we're allowed
                if wanted and name not in wanted:
                    continue
                yield os.path.join(dirpath, name)

    def clean_result(self, key, value):
        # Keys generated by clean_key() are always plain str, so a str
        # check is sufficient (basestring no longer exists on Python 3).
        # A '__NOKEY' key was synthesised, not supplied by the job: yield
        # just the value. Anything else was provided by the user, so yield
        # the (key, value) pair.
        if isinstance(key, str) and key.startswith('__NOKEY'):
            return value
        else:
            return key, value

    def results(self, batches=None):
        """Yield every stored result, optionally restricted to ``batches``."""
        for full_path in self.list(batches):
            batch = self.open_storage(full_path)
            for key, value in batch.items():
                yield self.clean_result(key, value)

    def parse_batches(self, batch_spec):
        """Expand a comma-separated batch spec ('1,3..5,*') into a set of ids."""
        batch_ids = []
        comma_sep = batch_spec.split(',')
        # Materialise as a list: the ranges below may iterate it several
        # times (a generator would be exhausted after the first range, and
        # a ValueError mid-comprehension would leave it half-consumed).
        # Strip the full extension rather than splitext(), which only
        # removes the last component of e.g. '.pickle.gz'.
        available = [os.path.basename(p)[:-len(self.extension)]
                     for p in self.list()]

        for bit in comma_sep:
            bit = bit.strip()
            if not bit:
                continue
            if bit.find('..') > 0:
                # It's a range of batch numbers
                start, end = bit.split('..')
                try:
                    start = int(start)
                    end = int(end)
                    matched = [x for x in available if start <= int(x) <= end]
                    logger.info('Matched %s batches using integer comparison' % len(matched))
                except ValueError:
                    matched = [x for x in available if start <= x <= end]
                    logger.info('Matched %s batches using alpha comparison' % len(matched))
                batch_ids.extend(matched)
            elif bit.strip() == '*':
                batch_ids = available
                break
            else:
                # It's just an id by itself
                batch_ids.append(bit)
        return set(batch_ids)

    def delete(self):
        """Remove every batch file belonging to this storage."""
        for each in self.list():
            os.unlink(each)


class PickleStorage(BaseStorage):
    """ PickleStorage is good for a large number of small batches. It also
    supports optional batch compression (see CompressedPickleStorage). """

    extension = '.pickle'

    def __init__(self, storage_path, get_next_batch_id=None):
        super(PickleStorage, self).__init__(
            storage_path, get_next_batch_id=get_next_batch_id)
        # Items buffered in memory until the next rollover().
        self._batch = {}

    def open(self, *args, **kwargs):
        # File factory hook; subclasses override this to change file type.
        return open(*args, **kwargs)

    def open_storage(self, name):
        """Load and return the batch dict pickled in the file ``name``."""
        f = self.open(name, 'rb')
        try:
            batch = pickle.load(f)
        finally:
            f.close()
        return batch

    def save(self, value, key=_marker):
        """Buffer value under key; nothing hits disk until rollover()."""
        key = self.clean_key(key)
        self._batch[key] = value

    def rollover(self):
        """Write the buffered batch to a new file and start an empty batch.

        Returns the id of the batch written, or None when nothing was
        buffered.
        """
        if not self._batch:
            return None
        batch_id = self.get_next_batch_id(self._last_batch_id)
        f = self.open(self.batch_path(batch_id), 'wb')
        try:
            pickle.dump(self._batch, f, pickle.HIGHEST_PROTOCOL)
        finally:
            # Must close explicitly: the original leaked the handle, so
            # buffered bytes (and the gzip trailer in the compressed
            # subclass) could be lost.
            f.close()
        self._batch = {}
        self._last_batch_id = batch_id
        return batch_id

    def close(self):
        """ For PickleStorage, closing is the same as rollover() since
        rollover doesn't actually create a new store.
        """
        return self.rollover()
        
        
class CompressedPickleStorage(PickleStorage):
    """PickleStorage variant whose batch files are gzip-compressed."""

    extension = '.pickle.gz'

    def open(self, *args, **kwargs):
        # Import lazily so the plain pickle path never pays for gzip.
        import gzip
        return gzip.open(*args, **kwargs)

        
class DobbinStorage(BaseStorage):
    """ DobbinStorage is more memory efficient where there are large
    numbers of items in a batch. Supports transactions. """

    extension = '.dobbin'

    # Lazily-created dobbin Database for the batch currently being written.
    _db = None

    def __init__(self, storage_path, get_next_batch_id=None):
        if not HAS_DOBBIN:
            # Call-style raise: the old ``raise X, msg`` form is a
            # SyntaxError on Python 3.
            raise ImportError('dobbin not available.')
        super(DobbinStorage, self).__init__(storage_path, get_next_batch_id)

    def save(self, value, key=_marker):
        """Store value under key, opening a new batch database on first use."""
        if self._db is None:
            batch_id = self.get_next_batch_id(self._last_batch_id)
            self._db = database.Database(self.batch_path(batch_id))
            self._last_batch_id = batch_id
            if self._db.root is None:
                # Brand-new database: install a persistent dict as its root.
                root = dobbin.persistent.PersistentDict()
                self._db.elect(root)
                transaction.commit()
            # Check the root out so it can be mutated in this transaction.
            dobbin.persistent.checkout(self._db.root)
        key = self.clean_key(key)
        self._db.root[key] = value

    def rollover(self):
        """Commit the open transaction and detach from the current database."""
        transaction.commit()
        self._db = None
        return self._last_batch_id

    def close(self):
        # Closing and rollover are identical for this backend.
        return self.rollover()

    def open_storage(self, name):
        """Open an existing batch database and return its root mapping."""
        db = database.Database(name)
        return db.root
        
class InMemoryStorage(BaseStorage):
    """Keeps every batch in a plain in-memory mapping; nothing is written
    to disk. Batch "paths" are simply batch ids."""

    def __init__(self, *args, **kw):
        super(InMemoryStorage, self).__init__(*args, **kw)
        self._current_batch_id = None
        # delete() doubles as the initialiser for the batch mapping.
        self.delete()

    def batch_path(self, batch_id):
        # No filesystem involved: the "path" is the batch id itself.
        return batch_id

    def list(self, batches=None):
        """Return the set of known batch ids, optionally filtered.

        ``batches=None`` avoids the mutable-default-argument pitfall of
        the original ``batches=[]`` signature.
        """
        wanted = [b for b in (batches or []) if b is not None]
        known = set(self._batches.keys())
        if wanted:
            return set(wanted).intersection(known)
        return known

    def results(self, batches=None):
        """Yield every stored result, optionally restricted to ``batches``."""
        for batch in self.list(batches):
            # Iterate items() directly instead of keys() + re-lookup.
            for key, value in self._batches[batch].items():
                yield self.clean_result(key, value)

    def delete(self):
        # defaultdict(dict) lets save() write to a batch id lazily.
        self._batches = collections.defaultdict(dict)

    def save(self, value, key=_marker):
        """Store value under key, starting the first batch on demand."""
        if self._current_batch_id is None:
            self.rollover()
        key = self.clean_key(key)
        self._batches[self._current_batch_id][key] = value

    def rollover(self):
        """Start a fresh batch; returns the id of the batch just finished
        (None if this is the first rollover)."""
        new_id = self.get_next_batch_id(self._current_batch_id)
        previous_id = self._current_batch_id
        self._batches[new_id] = {}
        self._current_batch_id = new_id
        return previous_id

    def close(self):
        # Nothing to flush; just report the batch currently in use.
        return self._current_batch_id
        
