# Bartleby version 0.1
# experimental type safety for python
# Copyright 2008, Ionous. All Rights Reserved.
# http://www.ionous.net

""" 
bartleby exists to add type checking to python's functions, classes, and class methods
    in case you'd prefer not to have hard to find type related errors

    similar to the Typechecking module for Python (http://oakwinter.com/code/typecheck/)

 checkparams:
    i believe bartleby's @checkparams has some advantages over @accepts, namely:
        . provides a more natural location for parameter definitions
        . allows for optional parameters
        . allows for procedural type checking (ex. ranges) 
        . allows for per parameter comments that ( with some work ) might be able to be sucked into doc strings
    that said bart works well in conjunction with @returns

    @returns(str)
    @checkparams
    def example( x=Type(str, "hello") ):
        return x

 typedobject:
    additionally: the same 'Type' classes used for def()s work with 'typedobject's as well
    typedobjects allow you to predeclare your class members just like you are able to in many other languages

    class ExampleClass(typedobject):
        okay= Type(str, "default")

 future:
    . tuple pattern / type checking

    . primitives types to shrink decl space / improve readability (ala. the typecheck module)
        . String, Number, etc. ( ex. def example( name=String, age=Number, dist=String("far") ) )
    
    . at some point, i'd like to extend bart to have offline checking of files,
    running a special python interpreter ( or something ) that checks types statically
    ( if the term compiler scares you; think unit test for types )

  more examples:
    class ListRange(Type):
        def __init__( self, min=None, max=None):
            assert min and max, "this *is* for ranges after all"
            self.min, self.max= min, max
            
        def verify( self, name, value ):
            super( ListRange, self ).verify( name, value )
            assert value in range( self.min, self.max )

    @returns(str)    
    def example2( x=Type(str, "hello"), y=ListRange(int, min=10, max=20) ):
        paramcheck( example2, locals() )
        return "x:%s y:%s" % (x, y)
    print example2(y=6)

    class ExampleClass(typedobject):
        okay= Type(str, "default")
 
    g= ExampleClass()
    g.okay="instance"               # <- instance variable!
    g.okay= 5                       # <- not okay, wrong type.

    g.also_okay= Dynamic(int, 23)   # <- not declared, but passing a dynamic obj
    g.not_okay= 5                   # <- not declared, and not dynamic: so not okay.

    g.strict_typedobject= True
    g.not_okay= Dynamic(int, 23)    # <- not declared, and g is now strict
    
    # prints: started 'default' now 'instance'
    print "started '%s' now '%s'" % (ExampleClass.okay, g.okay)
"""
   
class Type(object):
    """ wraps a python type with a default value, optional doc text, and an
    optional procedural check function; instances are meant to be used as
    default parameter values ( see checkparams/paramcheck ) and as
    predeclared typedobject members.

    note: Type() does not actually return a Type instance; __new__ returns
    either a Variable ( an instance of the wrapped type carrying a 'typeclass'
    backlink ) or an Undefined marker when no usable default was given.
    """
    class Unspecified(object):
        """ differentiation b/t defaults of None and no defaults at all
            the default value of type is Unspecified,
            for functions and class methods that means the value is *required*
            for class members, members with Unspecified defaults are initialized as Undefined
        """
        def __new__(cls):
            raise Exception("unspecified is a marker only")
    
    class Undefined(object):
        """ essentially: a typed version of None;

        for instance:
            class Example():
                a= Type(int) # type default is 'Unspecified' so value becomes 'Undefined'

                @checkparams
                def m1( self, foo=Type(int) ):
                    foo= "hello" # this is okay! for now: locals can always be overridden(!)

                @checkparams    
                def m2( self, foo=Type(int,Type.Undefined) ):
                    print isundefined(foo)       # prints 'True'
                    print "yes" if foo else "no" # prints 'no'
            
            e= Example()
            print e.a            # 'Undefined'
            isundefined(e.a)     # True
            e.a= None            # error 'a' is of type 'int'
            e.a= 5               # rock on
            e.a= Type.Undefined  # error: can't go back again
            e.a= Type.Undefined()# error: can't go back again
            e.m1()               # error b/c 'foo' is unspecified
            e.m2()               # okay b/c 'foo' is explicitly undefined
        """
        def __init__( self, typeclass ):
            # bypass our own __setattr__ guard below to record the owning typeclass
            object.__setattr__( self, 'typeclass', typeclass )
        # __getattr__ not needed, default behavior will already raise an exception
        def __setattr__( self, name, value ):
            raise AttributeError
        def __delattr__( self, name ):
            raise AttributeError
        def __str__( self ):
            return 'Undefined'
        def __nonzero__( self ):
            # undefined values test false, just like None
            return False
        def verify( self, name, value ):
            # delegate so an Undefined placeholder still enforces its declared type
            self.typeclass.verify( name, value )

    class ParamCheckException(Exception):
        """internal exception for paramcheck();
        would put this in paramcheck() but it needs a consistent appearance across recursive calls
        """
        pass
   
    def verify( self, name, value ):
        """ raise AssertionError unless 'value' is an instance of the wrapped type;
        you can inherit your type verification as well --
        to avoid having to override 'init', see the range example in the module doc
        """
        assert isinstance(value,self.vartype),"'%s' should be of %s but is %s <value '%s'>" % (name, self.vartype, type(value),value )
        # then pass off to any additional function
        return not self.check or self.check( name, value )

    @property
    def vartype( self ):
        """the underlying / desired type; ex. the 'int' in Type(int)"""
        # Variable derives directly from the desired type, so it's the first base
        return self.varclass.__bases__[0]

    def optional(self):
        """if a default value was specified, then by definition you are optional"""
        return self.default is not Type.Unspecified

    def __init__( self ):
        """Type init is admittedly a little strange b/c it doesn't match new.
        any parameters to your derived init should be passed as keywords, and used as such in your init.
        """
        pass

    @staticmethod
    def _classlike( vartype ):
        # true for new-style classes ( instances of type ) and old-style 'classobj' classes
        return isinstance(vartype, type) or type(vartype).__name__== 'classobj'
        
    def __new__( cls, vartype, default=Unspecified, doc=None, check=None, **keywords ):
        """
            vartype: either an actual python type, or an import+name path (ex. 'datetime.datetime')
            default: initial value; Unspecified makes the parameter required,
                     Type.Undefined gives it a typed 'no value yet' placeholder
            doc: optional per parameter comment, stored stringified on the typeclass
            check: a Type.verify()-like function run after the isinstance test
            keywords: handed off to a derived class's __init__
        """
        # strlike? resolve a dotted 'module.name' path to the actual type
        if not Type._classlike(vartype):
            name,t= str(vartype).rsplit('.',1)
            mod = __import__(name)
            components = name.split('.')
            for comp in components[1:]:
                mod = getattr(mod, comp)
            vartype= getattr(mod,t)

        # didnt resolve to a type?
        # ( bug fix: the format argument was missing, so the message always read literally '%r' )
        assert Type._classlike(vartype), "first parameter of Type() should be a type but is %r" % (vartype,)
        
        # can this be done better via metaclass or the like?
        # it needs to derive from desired type, but yet implement 'Type' like methods
        # in order to allow the parameter to be used as the expected type when not specified
        class Variable( vartype ):
            def __new__( cls, typeclass, value ):
                # verify first so a bad value never gets constructed at all
                typeclass.verify( "newvalue", value )
                return super( Variable, cls ).__new__( cls, value )
            def __init__( self, typeclass, value ):
                self.typeclass= typeclass
            def verify( self, name, value ):
                self.typeclass.verify( name, value )

        # create the type class object
        typeclass= super( Type, cls ).__new__( cls )

        # have to call init manually b/c we aren't returning it which would get it called automatically
        # to alleviate derived Type implementations, record the needed parameters and call init only with what remains
        typeclass.varclass= Variable
        typeclass.default= default
        typeclass.doc= str(doc) # cast to a string to make sure its okay before anyone uses it
        typeclass.check= check
        typeclass.__init__( **keywords )
        
        # unspecifed and undefined instances both become 'undefined'
        # at the type level, however, typeclass remembers which was which
        if typeclass.default is Type.Unspecified or typeclass.default is Type.Undefined:
            return Type.Undefined( typeclass )
        else:
        # return a new instance linked to the type
            return Variable( typeclass, typeclass.default )

def isundefined(typevar):
    """determine in a safe manner if an instance is undefined;
    an isinstance test never trips Undefined's locked-down attribute access
    """
    undefined_class= Type.Undefined
    return isinstance( typevar, undefined_class )

class Dynamic(Type):
    """marker type for typedobject (below);
    a Dynamic wrapped value always verifies, whatever its actual contents
    """
    def verify( self, name, value ):
        # intentionally no isinstance test: dynamic means anything goes
        return True

class typedobject(object):
    """base class for classes with predeclared members
        only variables derived from the same real type as existing members can be assigned
        *except* for wrapped Dynamic assignments
    """
    class Safe:
        """any new attribute can be assigned; only (Dynamic?) and Type variables will be type checked"""
        pass
    class Strict:
        """only attributes set with wrapped Dynamic objects can be assigned"""
        pass
    class Solid:
        """no new members can be assigned to solid objects"""
        pass
    class Sealed(Solid):
        """future: sealed means solid + no derivation"""
        pass 

    # default assignment policy; instances may rebind this per object ( see the __main__ demo )
    typedobject_type= Safe

    def __new__( cls, *args, **kargs ):
        """ i might prefer life as a metaclass
            i think this could work by having the metaclass version *inject* typeobject into the chain
            probably could keep all the code here ( tho maybe as static methods ) and reuse the code
            for both typedobject derived and typedmeta
        """
        # new the return object
        # NOTE(review): forwarding *args/**kargs to object.__new__ errors on newer
        # pythons when __init__ isn't overridden -- confirm the target python version
        ret= super( typedobject, cls ).__new__( cls, *args, **kargs )
        # get the class level members 
        props=  cls.__dict__
        # search for Type members ( anything whose 'typeclass' attribute is a Type )
        for name, prop in props.iteritems():
            typeclass= getattr( prop, 'typeclass', None )
            # copy the Type'd Variables from the class down onto the instance,
            # so each instance starts out with its own typed default
            if isinstance( typeclass, Type ):
                super( typedobject,ret ).__setattr__( name, prop )
        return ret

    def __setattr__( self, name, value ):
        """ dynamic values can be assigned to both non-existent and dynamic attributes;
            but they cannot override fixed attributes; you have to delete them first
        """
        try:
            attr= getattr( self, name )
        except AttributeError:
        # assignment to a new attribute:
        # allowed unless Solid, or Strict without a Dynamic-wrapped value
            typeclass= getattr( value, 'typeclass', None )
            assert self.typedobject_type is not typedobject.Solid, "attributes can't be assigned to solid classes"
            assert self.typedobject_type is not typedobject.Strict or isinstance( typeclass, Dynamic ), "'%s' neither dynamic nor already in '%s'" % ( name, self )
        else:
        # assignment to an existing attribute:
            typeclass= getattr( attr, 'typeclass', None )
            if isinstance( typeclass, Type ):
                # ASSERT: value creation raises error if value is of unexpected type
                # ( keep assignment bound to the original type )
                value= typeclass.varclass( typeclass, value ) 
        # got this far? it's a go.
        return super(typedobject, self).__setattr__( name, value )

def _runcheck( fname, specs, callparms ):
    """verify each spec'd parameter of 'fname' against the values actually supplied.

    specs: name -> Type'd default object; callparms: the caller's name -> value dict.
    raises AssertionError when a required parameter is absent, or a supplied
    value fails its Type verification.
    """
    for pname, pspec in specs.iteritems():
        # a missing key falls back to the spec object itself, which lets us
        # tell "never passed" apart from any real value ( including None );
        # with paramcheck() callparams *always* has the name, not so with @checkparams
        supplied= callparms.get( pname, pspec )
        if supplied is not pspec:
            pspec.verify( pname, supplied ) # raises assertion if not so.
        else:
            # the parameter never showed up: only legal when a default exists
            assert pspec.typeclass.optional(), "call to '%s' missing required parameter '%s'" % (fname,pname)

def checkparams(defn):
    """@checkparams
    decorator flavor of parameter checking; i think this is much cleaner than
    paramcheck (below), but perhaps a little slower.
    ( possible tho that building a locals() dictionary is just as slow as the callfunc build out )
    thanks to the thread http://www.faqts.com/knowledge_base/view.phtml/aid/5666 for good info on how decorators work

    defn: the function to wrap; its defaulted parameters whose defaults carry a
          'typeclass' that is a Type get verified on every call.
    returns: a wrapper with the same calling convention as defn.
    """
    import functools

    # func_code/func_defaults were renamed __code__/__defaults__ ( aliases since 2.6 );
    # prefer the new names when present so the decorator keeps working on newer pythons
    if hasattr( defn, '__code__' ):
        code, defaults= defn.__code__, defn.__defaults__
    else:
        code, defaults= defn.func_code, defn.func_defaults
    # bug fix: a function with *no* defaults stores None, not an empty tuple,
    # which used to crash len() at decoration time
    defaults= defaults or ()

    # skip any initial unnamed args ( ex. 'self' ) that carry no default
    skipargs= code.co_argcount-len( defaults )
    # chop off *args, and **keywords; then pull out just the named ones
    argnames= code.co_varnames[ skipargs:code.co_argcount ]
    # parameter dict out of those arguments that have a 'typeclass' attribute that's an instance of Type
    specs= dict( [ (name,spec) for name,spec in zip( argnames, defaults ) if isinstance( getattr(spec,'typeclass',None),Type) ] )
    
    # no need ( unlike paramcheck() ) to store the params on the function object;
    # wraps keeps defn's name/doc visible on the wrapper
    @functools.wraps( defn )
    def callfunc( *arglist,**keywords ):
        # restore names to args that were passed in-order ( skipping initial unnamed args )
        callparms= dict( zip( argnames, arglist[skipargs:] ) )
        # and add in those actually named
        callparms.update( keywords )
        # now check
        _runcheck( defn.__name__, specs, callparms )
        # then, if all is well: call the original function
        return defn( *arglist, **keywords )
    
    return callfunc        

def paramcheck( func, callparms, specstorage='im_paramcheck' ):
    """the original non decorator way of checking parameters; requires a kind of odd syntax;
        def example( x=Type(str, "hello") ):
            paramcheck( example, locals() )
            return x

        func: the function being checked ( the one you are currently inside of )
        callparms: that function's locals(), captured before any other locals exist
        specstorage: attribute name used to cache the spec dict on the function object

        how it works: the first ever call for 'func' recursively invokes func() with
        no arguments so the default ( Type'd ) parameter values show up in callparms;
        the recursive call records them ( stage 2 ) then bails out via ParamCheckException.
    """
    # reliably pull the underlying function object ( whether a method or not )
    owner= getattr( func, 'im_func', func )

    # try to get the specsd default parameters
    specs= getattr( owner, specstorage, None )

    # stage 1: specs doesn't even exist; check has never been called for the passed function
    if specs is None:
        specs= {}
        setattr( owner, specstorage, specs )
        try:
            func()   # (recursive) call parameterless to get the default parameters
        except Type.ParamCheckException:
            pass
        # and we're back

    # stage 2: specs is empty; this is a recursive call from stage 1
    elif not specs:
        # fill specs with default parameters ( skipping any bound 'self' )
        for p in callparms:
            if p != 'self':
                specs[p]= callparms[p]
        # gotta love exceptions: this will stop the function from actually running
        raise Type.ParamCheckException

    # gotta love recursion; specs is good to go.
    _runcheck( func.__name__, specs, callparms ) 

if __name__ == '__main__':
    # smoke tests / demo; each 'intentional failure' section asserts that a bad
    # call or assignment raises AssertionError
    @checkparams
    def example( x=Type(str, "hello") ):
        return x

    @checkparams
    def example2( x=Type(str), y=Type(int,10) ):
        return "%s.%d" % (x,y)

    # wrong type ( int for str ) must be rejected
    try: example(5)
    except AssertionError,e: print "success: intentional failure caught: %s" % e
    else: raise Exception("failed: expected call to fail")

    # a list of the right element type is still the wrong type
    try: example([5])
    except AssertionError,e: print "success: intentional failure caught: %s" % e
    else: raise Exception("failed: expected call to fail")    

    assert example("test") == "test", "return should have been the same"
    print "success: call succeeded and returned what was passed"

    # derived Type with procedural ( range ) verification on top of the isinstance test
    class ListRange(Type):
        def __init__( self, min, max):
            assert min and max, "this *is* for ranges after all"
            self.min, self.max= min, max
            
        def verify( self, name, value ):
            super( ListRange, self ).verify( name, value )
            assert value in range( self.min, self.max ), "%s not in [%s, %s)" % (value, self.min, self.max)

    @checkparams
    def rangetest( x= ListRange( int, min=5, max=10) ):
        return x

    assert rangetest(5) == 5, "return should have been the same"
    try: rangetest(15)
    except AssertionError,e: print "success: intentional failure caught: %s" % e
    else: raise Exception("failed: expected call to fail")

    # the 'check' keyword: an extra verify-like callable run after the isinstance test
    def makesureimlessthan5(name,value):
        assert value<5, "%s: %s should be less than 5 " % (name,value)

    @checkparams
    def checktest( x= Type( int, 4, check= makesureimlessthan5 ) ):
        return x

    try: print checktest(10)
    except AssertionError,e: print "success: intentional failure caught: %s" % e
    else: raise Exception("failed: expected call to fail")    
    

    # typedobject demo: predeclared, type checked members
    class ExamplePerson(typedobject):
        name= Type(str, "default")
 
    g= ExamplePerson()
    g.name="instance"               # <- instance variable!

    try: g.name= 5            # <- not okay, wrong type.
    except AssertionError,e: print "success: intentional failure caught: %s" % e
    else: raise Exception("failed: expected call to fail")        

    g.also_okay= 5                  # <- not declared, but by default this is okay

    g.typedobject_type= typedobject.Strict
    try: g.now_not_okay= 5               # <- now dynamic assignment isn't okay
    except AssertionError,e: print  "success: intentional failure caught: %s" % e
    else: raise Exception("failed expected call to fail")
    
    g.dynamic_var= Dynamic(int, 23) # <- not declared but assigned with a Dynamic type

    g.typedobject_type= typedobject.Solid
    try: g.solid_dynamic_var= Dynamic(int, 23) # <- not valid when Solid
    except AssertionError,e: print  "success: intentional failure caught: %s" % e
    else: raise Exception("failed expected call to fail")

    # prints: started 'default' now 'instance'
    assert ExamplePerson.name=="default"
    assert g.name=="instance"
    print "started '%s' now '%s'" % (ExamplePerson.name, g.name)

    class Example(typedobject):
       # NOTE(review): 'list' shadows the builtin as an attribute name; worse,
       # Type(list,None) verifies its default and isinstance(None,list) is false,
       # so this class body raises AssertionError when it executes -- needs a fix
       cache= Type(list, Type.Undefined)
       list= Type(list,None)

    e= Example()
    print e.cache
       
