import riak
import types
from property import Property
import threading
import logging
import datetime

class UndefinedPropertyError(Exception):
    """Raised when a model attribute is read that is neither a declared
    Property nor present in the instance's local data."""

class ModelType(type):
    """Metaclass for Model classes.

    Collects every :class:`Property` declared on the class (and on any
    Model base class, recognised via the ``__model__`` marker) into a
    ``_fields`` dict, and removes the Property objects from the class body
    so that instance-level ``__getattr__``/``__setattr__`` can manage field
    values themselves.
    """
    def __init__(cls, name, bases, attrs):
        # start with fields inherited from any Model base classes
        fields = {}
        for base in bases:
            if hasattr(base, '__model__'):
                fields.update(base._fields)
                
        # mangle the class
        # remove any fields so that __setattr__/__getattr__ work in instances
        
        for k,v in attrs.iteritems():
            if isinstance(v, Property):
                # tell the property its attribute name, record it, then strip
                # it off the class. delattr mutates cls, not the attrs dict
                # being iterated, so this loop is safe.
                v._set_id(k)
                fields[k] = v
                delattr(cls, k)
                continue
            
        type.__init__(cls, name, bases, attrs)
        
        # expose the collected Property objects on the class for fast
        # name -> Property lookups
        cls._fields = {}
        for k,v in fields.iteritems():
            cls._fields[k] = v
            
    def __getattr__(self, attr):
        # class-level attribute access falls back to the declared Property
        # objects, e.g. MyModel.some_field returns the Property itself
        # (the metaclass deleted it from the class body above)
        if attr in self._fields:
            return self._fields[attr]
        raise AttributeError(attr)
        
# module-level logger for connection/storage messages; per-instance messages
# go through self.logger, which is set up in Model.__init__
logger = logging.getLogger('riakmodels.model')
#logger.setLevel(logging.INFO)

# NOTE(review): imported after ModelType is defined -- presumably multiquery
# imports from this module and this ordering avoids a circular import; verify
# before moving this to the top of the file.
from multiquery import MultiQuery

class Model(object):
    ''' A generic model class for riak. Similar to Appengine models interfaces.
    
        Arguments:
                key(:class:`str`): A unique key for the model
                
        Keyword Arguments:
            Key/value pairs are stored as model property key/values.
            
        Properties:
            key: required - a unique key for the object
            
    '''
    # used to identify model classes in the metaclass
    __model__ = True
    
    # NOTE(review): _connection is never read or written elsewhere in this
    # file -- presumably intended for per-thread connections; verify callers.
    _connection = threading.local()
    _client = None
    __metaclass__ = ModelType
    
    # used to store local modifications
    _data = None
    
    # a RiakObject instance
    _riak_obj = None
    
    # a RiakBucket instance
    _bucketinst = None
    
    # a string bucket name
    _bucket = None
    
    _host = None
    
    # the objects key
    key = None
    
    # these are keys that we avoid in __get/setattr__
    # NOTE(review): 'set' appears twice in this list -- harmless for the
    # membership tests below, but worth de-duplicating.
    __reserved__ = [
        '_data', 'key', '_has_field', '_get_field', '_set_field', 'get', 'set',
        '_prepare_for_storage', 'spec', 'set','_client','_bucket','_bucketinst','_get_bucket',
        'put','database','connect','serialize','_riak_obj','logger','query'
    ]
    
    def __init__(self, key, **kwargs):
        # Ordering matters: __setattr__ routes non-reserved names into
        # self._data, so _data must exist before the kwargs loop. 'key' is
        # in __reserved__, so the first assignment goes to __dict__.
        self.key = key
        self._data = {}
            
        if kwargs:
            for k, v in kwargs.iteritems():
                setattr(self, k, v)
        self.logger = logging.getLogger('riakmodels.Model[%s]' % self.__class__.__name__)
            
    @classmethod
    def _new_binary(cls, key, data):
        ''' Create (but do not store) a binary riak object in this bucket. '''
        return cls._get_bucket().new_binary(key, data)
        
    @classmethod
    def _get_binary(cls, key):
        ''' Fetch the raw data of a binary riak object by key. '''
        return cls._get_bucket().get_binary(key).get_data()
        
    @classmethod
    def new_client(cls, host):
        ''' Build a new RiakClient using the protocol-buffers transport on
            port 8087. '''
        return riak.RiakClient(host, port=8087, transport_class=riak.RiakPbcTransport)
        
    @classmethod
    def connect(cls, host="localhost"):
        ''' Connect this model class (and its subclasses sharing the
            attribute) to a riak host; stores the client on the class. '''
        logger.debug("Connecting to %s", host)
        cls._host = host
        cls._client = cls.new_client(cls._host)
       
    @classmethod
    def get_client(cls):
        ''' Return the class-level RiakClient set up by connect(). '''
        return cls._client
        
    # NOTE(review): several classmethods below use 'self' for the class
    # object; conventionally this should be named 'cls'.
    @classmethod
    def get_bucket_name(self):
        ''' Bucket name: the explicit _bucket override, else the class name. '''
        if self._bucket is not None:
            return self._bucket
            
        return self.__name__
        
    @classmethod  
    def _get_bucket(self):
        ''' Return the RiakBucket for this model, creating and caching it on
            the class on first use. '''
        if self._bucketinst is None:
            self._bucketinst = self.get_client().bucket(self.get_bucket_name())
            #self._bucketinst.set_encoder("application/json", )
        return self._bucketinst
        
    @classmethod  
    def _has_field(self, name):
        ''' True if name is a declared Property on this model. '''
        return name in self._fields
    
    @classmethod    
    def _get_field(self, name):
        ''' Return the declared Property for name, or None. '''
        return self._fields.get(name) 
        
    @classmethod  
    def _set_field(self, name, value):
        # NOTE(review): this looks broken -- Model defines no __setitem__,
        # so subscript assignment on self should raise TypeError, and
        # get_tag() is not used anywhere else in this file. Verify whether
        # this method has any callers; it may be dead code.
        field = self._get_field(name)
        self[field.get_tag()] = value
        
    def __contains__(self, k):
        # membership is against the locally stored data, keyed by field id
        return k in self._data
        
    def __setattr__(self, name, value):
        # Declared fields and unknown names both land in self._data (via
        # _set_field_value); only reserved infrastructure names go to the
        # real instance __dict__.
        if self._has_field(name):
            return self._set_field_value(name, value)
            
        if name in self.__reserved__:
            self.__dict__[name] = value
            return
        
        return self._set_field_value(name, value)
            
    def __getattr__(self, name):
        # only called for names not found through normal lookup
        if name in self._data:
            val = self._get_field_value(name)        
            return val
        # NOTE(review): a missing name raises KeyError here instead of
        # AttributeError, which breaks hasattr() and getattr(..., default).
        return self.__dict__[name]
   
    def __repr__(self):
        return '%s(%s, %s)' % (self.__class__.__name__, self.key, self._data)
        
    @classmethod
    def purge(self):
        ''' Empties the bucket. '''
        bucket = self._get_bucket()
        for key in bucket.get_keys():
            obj = bucket.get(key)
            obj.delete()
            
    def _get_field_value(self, name):
        ''' Return the (decoded) value for a field or ad-hoc data key.

            Raises UndefinedPropertyError for names that are neither
            declared Properties nor present in _data.
        '''
        if not self._has_field(name):
            # ad-hoc values set via __setattr__ are returned as stored
            if name in self._data:
                return self._data[name]
                
            raise UndefinedPropertyError("Property %s not defined." % name)
            
        field = self._get_field(name)
        field_id = field.get_id()
        field_type = field.get_type()
        field_repeats = field.get_repeated()
        field_default = field.get_default()
        
        if field.required is True:
            # ensure required are set
            if field_id not in self._data:
                # NOTE(review): self.get_id() is not defined anywhere in this
                # file, so this error path may itself raise. Verify.
                raise Exception("Required field %s not set in model %s (id:%s)" % (name, self.__class__.__name__, self.get_id()))
                
            # NOTE(review): dead assignment -- val is recomputed below.
            val = self._data.get(field_id)
        if field_id not in self._data:
            # repeated fields default to an empty list, others to the
            # field's declared default
            if field_repeats:
                return []
                
            # result a default value
            return field_default
         
        val = self._data.get(field_id)
        return field.decode(val)
        
    @classmethod
    def _prepare_object(self, key, _riak_obj):
        ''' Wrap a fetched RiakObject into a model instance.

            NOTE(review): returns None when the stored data is empty/falsy;
            callers must handle that.
        '''
        _data = _riak_obj.get_data()
        if _data:
            c = self(key)
            c._riak_obj = _riak_obj
            c._data = _data
            return c
            
    @classmethod
    def get(self, key):
        ''' Fetch a model instance by key, or None if it does not exist. '''
        # make it look like the record
        _riak_obj = self._get_bucket().get(key)
        
        if _riak_obj.exists() is False:
            return None
            
        return self._prepare_object(key, _riak_obj)
        
    def _prepare_for_storage(self, name, field, value):
        ''' Validate and encode a field value.
        
            TODO: move to field
        '''
        field_type = field.get_type()
        field_repeats = field.get_repeated()
        
        # basic repeated field validation
        if field_repeats:
            if not isinstance(value, list):
                raise ValueError("Cannot set %s. Value must be a list of %s. Got %s: %s" % (name, field_type, type(value), value))
         
        val = field.encode(value)
        return val        
        
    def _set_field_value(self, name, value):
        ''' Store a value in _data, encoding it if name is a declared field.

            NOTE(review): None values are silently dropped -- there is no way
            to unset a stored field through attribute assignment.
        '''
        if value is None:
            return
            
        if not self._has_field(name):
            # ad-hoc (undeclared) values are stored as-is under the name
            self._data[name] = value
            return value
        
        field = self._get_field(name)
        val = self._prepare_for_storage(name, field, value)
        self._data[field.get_id()] = val
        return val
        
    def add_link(self, other, tag=None):
        ''' Adds a link to other.
                
            Arguments:
                other(:class:`riakmodels.api.Model`): An instance of a model to link to.
                
                tag(:class:`str`): A string used to differentiate links.
        '''
        # lazily create our backing RiakObject so links can be added before
        # the first put()
        if self._riak_obj is None:
            self._riak_obj = self._get_bucket().new(self.key)
            
        if isinstance(other, Model):
            other_riak_obj = getattr(other, '_riak_obj', None)
            
            if other_riak_obj is None:
                raise ValueError("other is not stored.")
                
            self._riak_obj.add_link(other_riak_obj, tag=tag)
        else:
            raise ValueError("other must be a Model")
                
    def get_links(self):
        ''' Return the links of the backing RiakObject, or [] if the model
            has no backing object yet. '''
        if self._riak_obj is None:
            return []
        
        return self._riak_obj.get_links()
        
    def put(self):   
        ''' Persist this model: merge with any existing stored data, apply
            defaults and auto-timestamps, add secondary indexes, then store.
        '''
        is_new = False
        
        # use the existing RiakObject if in the db. merge with self._data 
        # otherwise, create a new RiakObject
        if self._riak_obj is None:
            self._riak_obj = self._get_bucket().get(self.key)
            if self._riak_obj.exists() is False:
                logger.debug('Creating new object %s', self.key)
                is_new = True
                self._riak_obj = self._get_bucket().new(self.key)
            else:
                logger.debug('Using existing object %s', self.key)
                # local modifications win over stored values
                d = self._riak_obj.get_data()
                if d is not None:
                    d.update(self._data)
                    self._data = d
                
            
        # go over fields that need defaults or secondary indexes
        # NOTE(review): this assumes field.get_id() equals the _fields key
        # (set by the metaclass via _set_id), and that has_default /
        # autonowadd / autonow are plain attributes, not methods -- verify
        # against the Property class. 'id' also shadows the builtin.
        for id, field in self._fields.iteritems():
            if id not in self._data and field.has_default:
                default = field.get_default()
                self.logger.debug('Setting default on %s: %s', id, default)
                self._set_field_value(id, default)
              
            # handle datetime defaults
            if field.autonowadd and is_new:
                self.logger.debug('Autonowadd %s', id)
                self._set_field_value(id, datetime.datetime.utcnow())
                
            if field.autonow:
                self.logger.debug('Autonow %s', id)
                self._set_field_value(id, datetime.datetime.utcnow())
         
            if id not in self._data:
                continue
                
            # add secondary indexes
            if field.get_indexed():
                val = self._get_field_value(id)
                # NOTE(review): the loop rebinds 'val'; harmless here because
                # encode_index_values(val) is evaluated before the loop body.
                for key, val in zip(field.get_index_keys(), field.encode_index_values(val)):
                    self.logger.debug('Adding index %s: %s', key, val)
                    self._riak_obj.add_index(key, val)
        
        self.logger.debug('Setting data: %s', self._data)
        self._riak_obj.set_data(self._data)
        self._riak_obj.store()
    
    @classmethod
    def _get_index_key(self, id, type):
        ''' Build a secondary-index key name, e.g. "age_int". '''
        return '%s_%s' % (id, type)
   
    def get_indexes(self):
        # NOTE(review): raises AttributeError when _riak_obj is still None
        # (model never stored/fetched) -- unlike get_links, which returns [].
        return self._riak_obj.get_indexes()
        
    def serialize(self, symbolic=False):
        ''' Return the raw stored data dict (keyed by field id). '''
        if symbolic:
            raise NotImplementedError
            
        return self._data
        
    @classmethod
    def secondary(self, field_name, *args):
        ''' XXX: Make sure to use leveldb for secondary indexes.
        
            Returns:
                a :class:`riak.mapreduce.RiakMapReduce`.
        
            https://github.com/basho/riak-ruby-client/wiki/Secondary-Indexes
        '''
        if field_name not in self._fields:
            raise Exception("undefined field")
            
        field = self._fields.get(field_name)
        
        if not field.get_indexed():
            raise Exception("field not indexed")
            
        # NOTE(review): fragile validation -- compares the class name string
        # instead of isinstance(arg, str), and only checks the FIRST index
        # type against every argument. Verify multi-index fields.
        for arg in args:
            atype = arg.__class__.__name__
            
            if atype != 'str' and field.get_index_types()[0] == 'bin':
                raise Exception("%s must be str. got %s" % (arg, atype))
            
        index_key = field.get_index_keys()[0]
        
        for link in self.get_client().index(self.get_bucket_name(), index_key, *args).run():
            yield self._prepare_object(link.get_key(), link.get())
    
    @classmethod
    def multiquery(cls):
        ''' Returns a :class:`riakmodels.api.MultiQuery` object.
        
        '''
        # give the query class its own client
        return MultiQuery(cls, cls.new_client(cls._host), cls.get_bucket_name())
    
    @classmethod
    def mapreduce(self,):
        ''' 
            Returns:
                a :class:`riak.mapreduce.RiakMapReduce`
        '''
        query = self.get_client().add(self.get_bucket_name())
        # Then, you supply a Javascript map function as the code to be executed.
        return query
        
        #query.map(mapcode)
        ##query.reduce("function(values) { return values.sort(); }")
        ## map: Riak.mapValuesJson
        ## reduce: Riak.reduceSort
        #logger.debug('Mapreduce %s',  self.get_bucket_name())
        #iterable = query.run()
        #if iterable is None:
        #    raise StopIteration
        #    
        #for result in iterable:
        #    # Print the key (``v.key``) and the value for that key (``data``).
        #    yield result
