from google.appengine.api import memcache

from itertools import imap as map, chain
import logging

#See: https://bitbucket.org/jespern/django-piston/wiki/Documentation#!throttling


class NonInstantiatable(type):
    '''Metaclass whose instances (classes) cannot be instantiated.

    Every callable found in the class body is converted to a classmethod,
    so a class created with this metaclass acts purely as a namespace of
    class-level operations.
    '''
    def __new__(cls, name, bases, dict_):
        # Convert all instance methods to class methods.
        # ``items()`` instead of the Python-2-only ``iteritems()``:
        # identical behavior under Python 2 (same keys/values; only the
        # values are replaced, so the snapshot matches) while remaining
        # valid under Python 3.
        for key, value in dict_.items():
            if hasattr(value, '__call__'):
                dict_[key] = classmethod(value)
        return type.__new__(cls, name, bases, dict_)

    def __call__(self, *args, **kwargs):
        # Invoked when an instantiation of a created class is attempted.
        raise NotImplementedError("The class %s cannot be instantiated." % (self.__name__,))
                

class RateMeasurerMeta(NonInstantiatable):
    '''Instances of this metaclass are used to measure the rate of an \
    event represented by a key. Simply call the touch method on a key \
    each time the event occurs. The touch method will return the rate.

    Each key is backed by three rotating memcache counters (key + '0',
    '1', '2'); the rotation period and counter lifetime are derived from
    the class attribute ``time_delta``, which created classes must
    define (directly or via their bases). Since this metaclass derives
    from NonInstantiatable, all methods below become class methods of
    the created class.
    '''
    def __init__(self, name, bases, dict_):
        super(RateMeasurerMeta, self).__init__(name, bases, dict_)
        
        # Absolute time modulo. Time the memcache variables will be used
        # to count occurrences of the event, whose rate is being measured.
        self._abs_time_mod = self.time_delta / 3.0
        
        # Time the memcache variables will last.
        self._mem_time_delta = 2.0 * self._abs_time_mod
        
        # Namespace used for the memcache variables.
        self._namespace = self.__module__ + '.' + name
     
    def _get_index(self):
        # Returns the current index. The index is a number from 
        # 0, 1, 2 that is appended at the end of the key to get
        # the three names of the memcache variables used to store
        # the number of occurrences of the event. The index advances
        # every _abs_time_mod seconds, cycling through the 3 counters.
        from time import time
        return int((time() // self._abs_time_mod) % 3)
    
    def _get_all_names_from_key(self, key, _indexes=('0', '1', '2')):
        # Lazy iterator of the three counter names key+'0', key+'1',
        # key+'2'. Note ``map`` is itertools.imap here (see the module
        # imports), so the result is single-pass.
        return map(key.__add__, _indexes)
            
    def _get_all_shards_from_key(self, key):
        # Lazy iterator of the shard keys key+'$0' .. key+'$N-1', where
        # N is taken from the created class's ``sharded_keys`` mapping.
        return map((key + '$').__add__, map(str, xrange(self.sharded_keys[key])))
        
    def _get_all_shard_names_from_key(self, key):
        # All counter names of all shards of the key (3 per shard).
        result = []
        for shard in self._get_all_shards_from_key(key):
            result.extend(self._get_all_names_from_key(shard))
        return result
        
    def occurrences_to_rate(self, occurrences):
        '''Takes as parameter the number of occurrences that took place within \
        at least 2/3 time_delta and at most time_delta. \
        Returns an approximation of the rate.''' 
        # 1.2 == 1 / (5/6): divide by the midpoint of the
        # [2/3 * time_delta, time_delta] counting window.
        return 1.2 * occurrences / self.time_delta  
        
    def touch(self, key, _extra_names=()):
        '''Remeasures the rate and returns the new value. \
        For a key representing a very frequent event sharded_touch \
        should be used instead''' 
        # The three counter names for this key; the one belonging to the
        # current time slot is popped out, the remaining two hold the
        # recent history.
        prev_names = list(self._get_all_names_from_key(key))
        index = self._get_index()
        name = prev_names.pop(index)
        namespace = self._namespace
        # Count this occurrence. incr returns a falsy value when the
        # counter does not exist yet, in which case a fresh expiring
        # counter is added.
        occurrences = memcache.incr(name, namespace=namespace)
        if not occurrences:
            # NOTE(review): if a concurrent request creates the counter
            # between incr and add, this add fails and the occurrence is
            # lost -- confirm best-effort counting is acceptable here.
            memcache.add(name, 1, namespace=namespace, time=self._mem_time_delta)
            occurrences = 1
            
        #
        # Rate = this slot's count plus the other slots' counts (and any
        # extra shard counters passed by sharded_touch).
        result = self.occurrences_to_rate(occurrences + 
            sum(memcache.get_multi(chain(prev_names, _extra_names), namespace=namespace).itervalues()))
        #logging.debug('%s.touch(%r)=%r' % (self.__name__, key, result))
        return result
        #
        
    def sharded_touch(self, key):
        '''Like touch, but used for very frequent events.'''
        # Increment one randomly chosen shard; the counters of all the
        # other shards are summed into the rate via touch's _extra_names.
        from random import randrange
        other_shards = list(self._get_all_shards_from_key(key))
        index = randrange(0, self.sharded_keys[key])
        shard = other_shards.pop(index)
        extra_names = []
        for k in other_shards:
            extra_names.extend(self._get_all_names_from_key(k))
        return self.touch(shard, extra_names)
        
    def measure(self, key, _all_names=None):
        # Read-only rate: sums the key's counters without incrementing.
        # _all_names overrides the default counter names (used by
        # sharded_measure). Note the truthiness test means an empty
        # _all_names list would also trigger the fallback.
        all_names = _all_names or self._get_all_names_from_key(key)
        
        #
        result = self.occurrences_to_rate(sum(memcache.get_multi(all_names, namespace=self._namespace).itervalues()))
        #logging.debug('%s.measure(%r)=%r' % (self.__name__, key, result))
        return result
        #
        
    def sharded_measure(self, key):
        # Read-only rate over every shard of a sharded key.
        return self.measure(None, self._get_all_shard_names_from_key(key))
        
    def reset(self, key, _all_names=None):
        # Delete the key's counters; returns delete_multi's result.
        all_names = _all_names or self._get_all_names_from_key(key)
        return memcache.delete_multi(all_names, namespace=self._namespace)  
        
    def sharded_reset(self, key):
        # Delete the counters of every shard of a sharded key.
        return self.reset(None, self._get_all_shard_names_from_key(key))
        
        
class IntrusionDetectorMeta(type):
    '''Metaclass of Intrusion Detector.

    When a detector class is created, its nested RateMeasurer class is
    rebuilt as an instance of RateMeasurerMeta, turning its body into
    the class-level rate-measuring operations used by the detector.
    '''
    def __init__(self, name, bases, dict_):
        super(IntrusionDetectorMeta, self).__init__(name, bases, dict_)

        # The bases of the rebuilt RateMeasurer are the RateMeasurer
        # classes of those detector bases that were themselves built by
        # this metaclass, so settings such as time_delta are inherited.
        measurer_bases = []
        for base in bases:
            if isinstance(base, IntrusionDetectorMeta):
                measurer_bases.append(base.RateMeasurer)

        # A class __dict__ may be a read-only proxy; hand RateMeasurerMeta
        # a plain dict copy in that case.
        measurer_dict = self.RateMeasurer.__dict__
        if not isinstance(measurer_dict, dict):
            measurer_dict = dict(measurer_dict)

        # Replace the nested class with the RateMeasurerMeta-built one.
        self.RateMeasurer = RateMeasurerMeta(name + '_RM',
                                             tuple(measurer_bases),
                                             measurer_dict)

        # Namespace to place the memcache variables representing the
        # strength of the security measures to be taken (i.e. delay level).
        self._namespace = '%s.%s' % (self.__module__, name)
                                 
        
class IntrusionDetectorBase(object):
    '''Base for intrusion detectors. Useful for defending against brute force and DoS.

    An instance is created per request with the "scopes" the request
    belongs to (e.g. username, password, IP address). Scopes whose event
    rate exceeds their limit can be written to memcache as "caution
    scopes" for caution_duration seconds; is_caution_scope tells whether
    a scope is currently flagged.
    '''
    # The metaclass rebuilds the nested RateMeasurer below with
    # RateMeasurerMeta at class-creation time.
    __metaclass__ = IntrusionDetectorMeta
    
    # If a suspicious event occurs with a rate greater than max_rate, then be cautious.
    # A value of 0.1 means if more than 1 attempt per 8 or 12 seconds (depending on the 
    # distribution of the attempts), then take some security measures. 
    max_rate = 0.1
    
    # The time the caution scope name will be stored in memcache. For instance if the rate of
    # failed logins from a client is too big, then all attempts within the next caution_duration
    # seconds will need to be handled with caution.
    caution_duration = 30.0
    
    class RateMeasurer:
        # Max time over which the occurrences of a suspicious event will be counted in order
        # to compute its rate. The min time is 2/3 of the max time (as mentioned above).
        time_delta = 60.0
        
    def __init__(self, **scopes):
        # scopes example:
        # username = 'u_adrian' 
        # password = 'p_unsafe'
    
        # Store scope names as attributes of the instance for easy access.
        self.__dict__.update(scopes)
        
        # Scopes that will be rate limited.
        self._rate_limited = scopes.values()
        # Optional per-scope limits set via set_rate_limit; scopes
        # without an entry are limited by max_rate.
        self._rate_limits = {}
        
        # The caution scopes will not change during the processing of a request,
        # so they are retrieved only once from memcache.
        self._caution_scopes = set(memcache.get_multi(self._rate_limited, namespace=self.__class__._namespace))
        
    def get_rate_exceeded_scopes(self):
        # Return just the scopes that have exceeded their rate limit
        # (touch also counts this occurrence). dict.get with a default
        # is used -- not ``get(scope) or self.max_rate`` -- so that an
        # explicit limit of 0.0 (falsy) is honored instead of silently
        # falling back to max_rate; subclasses rely on a 0.0 limit
        # (see LoginIntrusionDetector.set_rate_limit(ip_address, 0.0)).
        return [scope for scope in self._rate_limited
                if self.RateMeasurer.touch(scope) > self._rate_limits.get(scope, self.max_rate)]
    
    def set_rate_limit(self, scope, rate):
        # Override max_rate for a single scope (e.g. 0.0 to flag any activity).
        self._rate_limits[scope] = rate
        
    def is_caution_scope(self, scope):
        # Return True when caution should be exercised with a client that belongs to the scope.
        # Returns None when scope is falsy.
        if scope:
            result = scope in self._caution_scopes
            if not result:
                # Scope was not flagged at construction time; re-check
                # memcache and cache a positive answer for this request.
                result = memcache.get(scope, namespace=self.__class__._namespace) or False
                if result:
                    self._caution_scopes.add(scope)
            return result
            
    def add_caution_scopes(self, scopes):
        # new_scopes is only the scopes that are not already in memcache
        new_scopes = (scope for scope in scopes if scope not in self._caution_scopes)
        # Add the scopes to memcache. The return value is a list of scopes that were not added because they weren't
        # accepted by memcache.
        return memcache.add_multi(dict.fromkeys(new_scopes, True), time=self.caution_duration, namespace=self.__class__._namespace)
        
        
class LoginIntrusionDetector(IntrusionDetectorBase):
    ''' Steps:
    (1) Instantiate in the login view, before starting the login process.
    (2) Call caution_check.
    (3) Call caution_on_retry when the suspicious event occurs (i.e. correct user but wrong password).
    '''
    # Strategies to remember:
    # - If this does not go against the security policy, allow a "remember me" option
    #   so that distributed attacks become more obvious.
    # - Exclude very low rate users from security measures when appropriate.
    # - Sometimes use the same security measure, so that attackers cannot know what actually occurs.
    
    class RateMeasurer:
        # This body is rebuilt by IntrusionDetectorMeta into a
        # RateMeasurerMeta class; time_delta is inherited from the base
        # detector's RateMeasurer.
        
        # Key for failed login attempts irrespective of IP address, username or password.
        failed_key = 'f'
        # Key for all login attempts.
        all_key = 'a'
        
        # Mapping of frequent keys to number of shards.
        sharded_keys = {# This is enough for 1.5 million logins per hour as of 09/10.
                        all_key: 8,
                        failed_key: 8}
        
    # Cautious handling of client is required when the ratio of failed to all attempts is greater than this.                
    max_failed_to_all = 0.05 
    
    # Maximum non suspicious rate for login attempts with same username or password, etc...
    max_rate = 0.1
    
    def __init__(self, ip_address, username, password):
        # Scope keys are prefixed ('w', 'q', 'i', 'u', 'p') so the
        # different scope kinds cannot collide in memcache.
        super(LoginIntrusionDetector, self).__init__(ip_username='w%s|%s' % (ip_address, username),
                                                     ip_password='q%s|%s' % (ip_address, password),
                                                     ip_address='i%s' % (ip_address,),
                                                     username='u%s' % (username,),
                                                     password='p%s' % (password,))
        # 5 memcache variables will be used per client and most of them will be shared. 
        # For each "scope" variable we will have 3 short-lived variables that will be used to calculate the rate.                                          
        
        # The ip_address rate limit must be smaller than max_rate, so that for instance a distributed attack
        # to know a user's password won't be able to succeed by lowering the rate of each distinct-IP client and 
        # making them look as usual login attempts.
        self.set_rate_limit(self.ip_address, 0.0)
                                                     
        # Measure the rate of all login attempts.
        RM = self.RateMeasurer
        self.all_rate = RM.sharded_touch(RM.all_key)

        # Scope to which all login attempts belong.
        self.all_attempts = 'a'
        
    def caution_on_retry(self):
        # Flag as cautious every scope whose rate limit is exceeded.
        scopes = self.get_rate_exceeded_scopes()
        RM = self.RateMeasurer
        # Used for alerting about possible distributed attacks.
        # The overall failed-login rate is compared against a fraction
        # (max_failed_to_all) of the overall attempt rate.
        if RM.sharded_touch(RM.failed_key) > self.max_failed_to_all * self.all_rate:
            scopes.append(self.all_attempts) 
        self.add_caution_scopes(scopes)
    
    def caution_check(self):
        # Start from gravest condition. 
        ip_user_cautious = self.is_caution_scope(self.ip_username)
        ip_pwd_cautious = self.is_caution_scope(self.ip_password)
        if ip_user_cautious or ip_pwd_cautious:
            raise IpRateExceededCredentialIntrusion
        else:
            ip_cautious = self.is_caution_scope(self.ip_address)
            user_cautious = self.is_caution_scope(self.username)
            pwd_cautious = self.is_caution_scope(self.password)
            if (user_cautious or pwd_cautious) and ip_cautious:
                raise RateExceededCredentialIntrusion
            else:
                all_attempts_cautious = self.is_caution_scope(self.all_attempts)
                if all_attempts_cautious and ip_cautious:
                    # NOTE(review): the graver conditions above raise
                    # their exceptions, but this one RETURNS the class --
                    # confirm whether ``raise`` was intended here, and
                    # whether callers rely on the return value.
                    return RateExceededOverallAttemptsIntrusion


class Intrusion(Exception):
    '''Root of the intrusion-detection exception hierarchy.'''

class RateExceededCredentialIntrusion(Intrusion):
    '''Raised when a username or password scope is cautious together with a cautious IP scope.'''

class IpRateExceededCredentialIntrusion(Intrusion):
    '''Raised when an (IP, username) or (IP, password) scope is cautious -- the gravest condition.'''

class RateExceededOverallAttemptsIntrusion(Intrusion):
    '''Signals that the overall attempt rate and the client's IP scope are both cautious.

    Rebased on Intrusion (was Exception) for consistency with the other
    intrusion exceptions; still an Exception, so existing handlers keep
    working.
    '''
  
  
#----------------------------Used for testing---------------------------------
#
#def rate_measurer_test(tdelta, event_key):
#    class TestRateMeasurer(object):
#        __metaclass__ = RateMeasurerMeta
#        time_delta = tdelta
# 
#    TestRateMeasurer.touch(event_key)
