def askopenfile(mode="r", **options):
    "Ask for a filename to open, and return the opened file"
    filename = askopenfilename(**options)
    if filename:
        return open(filename, mode)
    return None


def askopenfiles(mode="r", **options):
    "Ask for multiple filenames to open, and return the opened files"
    files = askopenfilenames(**options)
    if files:
        ofiles = []
        for filename in files:
            ofiles.append(open(filename, mode))
        files = ofiles
    return files


def asksaveasfile(mode="w", **options):
    "Ask for a filename to save as, and return the opened file"
    filename = asksaveasfilename(**options)
    if filename:
        return open(filename, mode)
    return None

def spaced_coordinate(name, keys, ordered=True):
    def validate(self):
        if set(keys) != set(self):
            raise ValueError('{} needs keys {} and got {}'.format(
                type(self).__name__, keys, tuple(self)))

    new_class = type(name, (Coordinate,), {
        'default_order': keys if ordered else None,
        '_validate': validate,
    })
    return new_class

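# Hypothetical usage sketch for spaced_coordinate, assuming Coordinate is a
# dict-like base class whose keys are the coordinate axes (not shown here):
#
#   XYZ = spaced_coordinate('XYZ', ('x', 'y', 'z'))
#   point = XYZ(x=1.0, y=2.0, z=3.0)
#   point._validate()  # raises ValueError if any of x/y/z is missing
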
def norm(self, order=2):
    return (sum(val ** order for val in abs(self).values())) ** (1 / order)

def jsonify(o, max_depth=-1, parse_enums=PARSE_KEEP):
    if max_depth == 0:
        return o
    max_depth -= 1
    if isinstance(o, dict):
        keyattrs = getattr(o.__class__, '_altnames', {})

        def _getter(key, value):
            key = keyattrs.get(key, key)
            other = getattr(o, key, value)
            if callable(other):
                other = value
            if isinstance(key, Enum):
                key = key.name
            return key, jsonify(other, max_depth=max_depth,
                                parse_enums=parse_enums)

        return dict(_getter(key, value) for key, value in six.iteritems(o))
    elif isinstance(o, list):
        return [jsonify(x, max_depth=max_depth, parse_enums=parse_enums)
                for x in o]
    elif isinstance(o, tuple):
        # tuple(...) instead of a bare generator expression, so the tuple
        # branch returns a concrete value like the dict and list branches
        return tuple(jsonify(x, max_depth=max_depth, parse_enums=parse_enums)
                     for x in o)
    elif isinstance(o, Enum):
        o = _parse_enum(o, parse_enums=parse_enums)
    return o

def copy_files(self):
    files = [u'LICENSE', u'CONTRIBUTING.rst']
    this_dir = dirname(abspath(__file__))
    for _file in files:
        sh.cp('{0}/templates/{1}'.format(this_dir, _file),
              '{0}/'.format(self.book.local_path))
    if self.book.meta.rdf_path:
        sh.cp(self.book.meta.rdf_path, '{0}/'.format(self.book.local_path))
    if 'GITenberg' not in self.book.meta.subjects:
        if not self.book.meta.subjects:
            self.book.meta.metadata['subjects'] = []
        self.book.meta.metadata['subjects'].append('GITenberg')
        self.save_meta()

def _collate_data(collation, first_axis, second_axis):
    if first_axis not in collation:
        collation[first_axis] = {}
        collation[first_axis]["create"] = 0
        collation[first_axis]["modify"] = 0
        collation[first_axis]["delete"] = 0
    first = collation[first_axis]
    first[second_axis] = first[second_axis] + 1
    collation[first_axis] = first


def extract_changesets(objects):
    def add_changeset_info(collation, axis, item):
        if axis not in collation:
            collation[axis] = {}
        first = collation[axis]
        first["id"] = axis
        first["username"] = item["username"]
        first["uid"] = item["uid"]
        first["timestamp"] = item["timestamp"]
        collation[axis] = first

    changeset_collation = {}
    for node in objects.nodes.values():
        _collate_data(changeset_collation, node['changeset'], node['action'])
        add_changeset_info(changeset_collation, node['changeset'], node)
    for way in objects.ways.values():
        _collate_data(changeset_collation, way['changeset'], way['action'])
        add_changeset_info(changeset_collation, way['changeset'], way)
    for relation in objects.relations.values():
        _collate_data(changeset_collation, relation['changeset'],
                      relation['action'])
        add_changeset_info(changeset_collation, relation['changeset'],
                           relation)
    return changeset_collation

def to_str(obj):
    if isinstance(obj, str):
        return obj
    if isinstance(obj, unicode):  # Python 2 only; 'unicode' does not exist in 3
        return obj.encode('utf-8')
    return str(obj)

def get_managed_zone(self, zone):
    if zone.endswith('.in-addr.arpa.'):
        return self.reverse_prefix + '-'.join(zone.split('.')[-5:-3])
    return self.forward_prefix + '-'.join(zone.split('.')[:-1])

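# Worked examples for get_managed_zone (assuming forward_prefix and
# reverse_prefix are plain string attributes on the instance):
#
#   'example.com.'           -> forward_prefix + 'example-com'
#   '2.0.192.in-addr.arpa.'  -> reverse_prefix + '0-192'
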
async def get_records_for_zone(self, dns_zone, params=None):
    managed_zone = self.get_managed_zone(dns_zone)
    url = f'{self._base_url}/managedZones/{managed_zone}/rrsets'
    if not params:
        params = {}
    if 'fields' not in params:
        params['fields'] = ('rrsets/name,rrsets/kind,rrsets/rrdatas,'
                            'rrsets/type,rrsets/ttl,nextPageToken')
    next_page_token = None
    records = []
    while True:
        if next_page_token:
            params['pageToken'] = next_page_token
        response = await self.get_json(url, params=params)
        records.extend(response['rrsets'])
        next_page_token = response.get('nextPageToken')
        if not next_page_token:
            break
    logging.info(f'Found {len(records)} rrsets for zone "{dns_zone}".')
    return records


async def is_change_done(self, zone, change_id):
    zone_id = self.get_managed_zone(zone)
    url = f'{self._base_url}/managedZones/{zone_id}/changes/{change_id}'
    resp = await self.get_json(url)
    return resp['status'] == self.DNS_CHANGES_DONE


async def publish_changes(self, zone, changes):
    zone_id = self.get_managed_zone(zone)
    url = f'{self._base_url}/managedZones/{zone_id}/changes'
    resp = await self.request('post', url, json=changes)
    return json.loads(resp)['id']

def leave(self, reason=None, message=None):
    return self._async_session.leave(reason=reason, log_message=message)


def call(self, procedure, *args, **kwargs):
    return self._async_session.call(procedure, *args, **kwargs)


def register(self, endpoint, procedure=None, options=None):
    def proxy_endpoint(*args, **kwargs):
        return self._callbacks_runner.put(partial(endpoint, *args, **kwargs))

    return self._async_session.register(proxy_endpoint, procedure=procedure,
                                        options=options)


def publish(self, topic, *args, **kwargs):
    return self._async_session.publish(topic, *args, **kwargs)


def subscribe(self, handler, topic=None, options=None):
    def proxy_handler(*args, **kwargs):
        return self._callbacks_runner.put(partial(handler, *args, **kwargs))

    return self._async_session.subscribe(proxy_handler, topic=topic,
                                         options=options)

def b58encode(val, charset=DEFAULT_CHARSET):
    def _b58encode_int(int_, default=bytes([charset[0]])):
        if not int_ and default:
            return default
        output = b''
        while int_:
            int_, idx = divmod(int_, base)
            output = charset[idx:idx + 1] + output
        return output

    if not isinstance(val, bytes):
        raise TypeError(
            "a bytes-like object is required, not '%s', "
            "use .encode('ascii') to encode unicode strings"
            % type(val).__name__)
    if isinstance(charset, str):
        charset = charset.encode('ascii')
    base = len(charset)
    if not base == 58:
        raise ValueError('charset base must be 58, not %s' % base)
    pad_len = len(val)
    val = val.lstrip(b'\0')
    pad_len -= len(val)
    p, acc = 1, 0
    for char in deque(reversed(val)):
        acc += p * char
        p = p << 8
    result = _b58encode_int(acc, default=False)
    prefix = bytes([charset[0]]) * pad_len
    return prefix + result


def b58decode(val, charset=DEFAULT_CHARSET):
    def _b58decode_int(val):
        output = 0
        for char in val:
            output = output * base + charset.index(char)
        return output

    if isinstance(val, str):
        val = val.encode()
    if isinstance(charset, str):
        charset = charset.encode()
    base = len(charset)
    if not base == 58:
        raise ValueError('charset base must be 58, not %s' % base)
    pad_len = len(val)
    val = val.lstrip(bytes([charset[0]]))
    pad_len -= len(val)
    acc = _b58decode_int(val)
    result = deque()
    while acc > 0:
        acc, mod = divmod(acc, 256)
        result.appendleft(mod)
    prefix = b'\0' * pad_len
    return prefix + bytes(result)

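# A minimal round-trip sketch for the Base58 helpers above, assuming
# DEFAULT_CHARSET is the usual Bitcoin alphabet as bytes and that
# collections.deque is imported at module level:
#
#   from collections import deque
#   DEFAULT_CHARSET = b'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
#   encoded = b58encode(b'\x00hello')   # leading zero bytes become '1' padding
#   assert b58decode(encoded) == b'\x00hello'
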
def wait_for_edge(self):
    GPIO.remove_event_detect(self._pin)
    GPIO.wait_for_edge(self._pin, self._edge)

def request_finished_callback(sender, **kwargs):
    logger = logging.getLogger(__name__)
    level = settings.AUTOMATED_LOGGING['loglevel']['request']
    user = get_current_user()
    uri, application, method, status = get_current_environ()

    excludes = settings.AUTOMATED_LOGGING['exclude']['request']
    if status and status in excludes:
        return
    if method and method.lower() in excludes:
        return

    if not settings.AUTOMATED_LOGGING['request']['query']:
        uri = urllib.parse.urlparse(uri).path

    logger.log(
        level,
        ('%s performed request at %s (%s %s)'
         % (user, uri, method, status)).replace("  ", " "),
        extra={
            'action': 'request',
            'data': {
                'user': user,
                'uri': uri,
                'method': method,
                'application': application,
                'status': status,
            }
        })


def request_exception(sender, request, **kwargs):
    if not isinstance(request, WSGIRequest):
        logger = logging.getLogger(__name__)
        level = CRITICAL if request.status_code <= 500 else WARNING
        logger.log(level, '%s exception occurred (%s)',
                   request.status_code, request.reason_phrase)
    else:
        logger = logging.getLogger(__name__)
        logger.log(WARNING, 'WSGIResponse exception occurred')

def source_start(base='', book_id='book'):
    repo_htm_path = "{book_id}-h/{book_id}-h.htm".format(book_id=book_id)
    possible_paths = [
        "book.asciidoc",
        repo_htm_path,
        "{}-0.txt".format(book_id),
        "{}-8.txt".format(book_id),
        "{}.txt".format(book_id),
        "{}-pdf.pdf".format(book_id),
    ]
    for path in possible_paths:
        fullpath = os.path.join(base, path)
        if os.path.exists(fullpath):
            return path
    return None

def pretty_dump(fn):
    @wraps(fn)
    def pretty_dump_wrapper(*args, **kwargs):
        response.content_type = "application/json; charset=utf-8"
        return json.dumps(fn(*args, **kwargs),
                          indent=4,
                          separators=(',', ': '))

    return pretty_dump_wrapper


def decode_json_body():
    raw_data = request.body.read()
    try:
        return json.loads(raw_data)
    except ValueError as e:
        raise HTTPError(400, e.__str__())


def handle_type_error(fn):
    @wraps(fn)
    def handle_type_error_wrapper(*args, **kwargs):
        def any_match(string_list, obj):
            # any() rather than filter(), so the truth test below also holds
            # on Python 3, where filter() objects are always truthy
            return any(x in obj for x in string_list)

        try:
            return fn(*args, **kwargs)
        except TypeError as e:
            message = e.__str__()
            str_list = [
                "takes exactly",
                "got an unexpected",
                "takes no argument",
            ]
            if fn.__name__ in message and any_match(str_list, message):
                raise HTTPError(400, message)
            raise

    return handle_type_error_wrapper


def json_to_params(fn=None, return_json=True):
    def json_to_params_decorator(fn):
        @handle_type_error
        @wraps(fn)
        def json_to_params_wrapper(*args, **kwargs):
            data = decode_json_body()
            if type(data) in [tuple, list]:
                args = list(args) + data
            elif type(data) == dict:
                allowed_keys = set(data.keys()) - set(kwargs.keys())
                for key in allowed_keys:
                    kwargs[key] = data[key]
            elif type(data) in PRIMITIVE_TYPES:
                args = list(args)
                args.append(data)
            if not return_json:
                return fn(*args, **kwargs)
            return encode_json_body(fn(*args, **kwargs))

        return json_to_params_wrapper

    if fn:
        return json_to_params_decorator(fn)
    return json_to_params_decorator

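# Hypothetical bottle usage of json_to_params: a POST body of
# {"user": "alice"} is unpacked into the handler's keyword arguments
# (encode_json_body and PRIMITIVE_TYPES are assumed to come from the
# same module as the decorators above):
#
#   @post('/greet')
#   @json_to_params
#   def greet(user):
#       return {"message": "hello %s" % user}
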
def json_to_data(fn=None, return_json=True):
    def json_to_data_decorator(fn):
        @handle_type_error
        @wraps(fn)
        def get_data_wrapper(*args, **kwargs):
            kwargs["data"] = decode_json_body()
            if not return_json:
                return fn(*args, **kwargs)
            return encode_json_body(fn(*args, **kwargs))

        return get_data_wrapper

    if fn:
        return json_to_data_decorator(fn)
    return json_to_data_decorator


def form_to_params(fn=None, return_json=True):
    def forms_to_params_decorator(fn):
        @handle_type_error
        @wraps(fn)
        def forms_to_params_wrapper(*args, **kwargs):
            kwargs.update(dict(request.forms))
            if not return_json:
                return fn(*args, **kwargs)
            return encode_json_body(fn(*args, **kwargs))

        return forms_to_params_wrapper

    if fn:
        return forms_to_params_decorator(fn)
    return forms_to_params_decorator

def fetch(sequence, time='hour'):
    import gzip
    import io
    import requests

    if time not in ['minute', 'hour', 'day']:
        raise ValueError('The supplied type of replication file does not exist.')
    sqn = str(sequence).zfill(9)
    url = "https://planet.osm.org/replication/%s/%s/%s/%s.osc.gz" % (
        time, sqn[0:3], sqn[3:6], sqn[6:9])
    content = requests.get(url)
    if content.status_code == 404:
        raise EnvironmentError('Diff file cannot be found.')
    # io.BytesIO instead of the Python 2-only StringIO module, since the
    # downloaded gzip payload is bytes
    content = io.BytesIO(content.content)
    data_stream = gzip.GzipFile(fileobj=content)
    return data_stream

def m2m_callback(sender, instance, action, reverse, model, pk_set, using,
                 **kwargs):
    if validate_instance(instance) and settings.AUTOMATED_LOGGING['to_database']:
        if action in ["post_add", 'post_remove']:
            modification = [model.objects.get(pk=x) for x in pk_set]

            if 'al_chl' in instance.__dict__.keys() and instance.al_chl:
                changelog = instance.al_chl
            else:
                changelog = ModelChangelog()
                changelog.information = ModelObject()
                changelog.information.value = repr(instance)
                changelog.information.type = ContentType.objects.get_for_model(instance)
                changelog.information.save()
                changelog.save()

            for f in modification:
                obj = ModelObject()
                obj.value = repr(f)
                try:
                    obj.type = ContentType.objects.get_for_model(f)
                except Exception:
                    logger = logging.getLogger(__name__)
                    logger.debug('Could not determine the type of the modification.')
                obj.save()
                if action == 'post_add':
                    changelog.inserted.add(obj)
                else:
                    changelog.removed.add(obj)

            changelog.save()
            instance.al_chl = changelog

            if 'al_evt' in instance.__dict__.keys():
                target = instance.al_evt
            else:
                target = Model()
                target.user = get_current_user()
                target.action = 2 if action == 'post_add' else 2
                target.save()
                ct = ContentType.objects.get_for_model(instance).app_label
                target.application = Application.objects.get_or_create(name=ct)[0]
                target.information = ModelObject()
                target.information.value = repr(instance)
                target.information.type = ContentType.objects.get_for_model(instance)
                target.information.save()
                instance.al_evt = target

            target.modification = changelog
            target.save()

def font(self, name, properties):
    size, slant, weight = properties
    return (name, (self.ty(size), slant, weight))

# Legacy-style asyncio coroutine (pre async/await syntax); presumably
# decorated with @asyncio.coroutine in the original module.
def async_update(self):
    _LOGGER.debug('Calling update on Alarm.com')
    response = None
    if not self._login_info:
        yield from self.async_login()
    try:
        with async_timeout.timeout(10, loop=self._loop):
            response = yield from self._websession.get(
                self.ALARMDOTCOM_URL + '{}/main.aspx'.format(
                    self._login_info['sessionkey']),
                headers={'User-Agent': 'Mozilla/5.0 '
                                       '(Windows NT 6.1; '
                                       'WOW64; rv:40.0) '
                                       'Gecko/20100101 '
                                       'Firefox/40.1'})
        _LOGGER.debug('Response from Alarm.com: %s', response.status)
        text = yield from response.text()
        _LOGGER.debug(text)
        tree = BeautifulSoup(text, 'html.parser')
        try:
            self.state = tree.select(self.ALARM_STATE)[0].get_text()
            _LOGGER.debug('Current alarm state: %s', self.state)
            self.sensor_status = tree.select(self.SENSOR_STATUS)[0].get_text()
            _LOGGER.debug('Current sensor status: %s', self.sensor_status)
        except IndexError:
            self.state = None
            self.sensor_status = None
            self._login_info = None
            yield from self.async_update()
    except (asyncio.TimeoutError, aiohttp.ClientError):
        _LOGGER.error("Can not load login page from Alarm.com")
        return False
    finally:
        if response is not None:
            yield from response.release()

def create_api_handler(self):
    try:
        self.github = github3.login(username=config.data['gh_user'],
                                    password=config.data['gh_password'])
    except KeyError as e:
        raise config.NotConfigured(e)
    logger.info("ratelimit remaining: {}".format(
        self.github.ratelimit_remaining))
    if hasattr(self.github, 'set_user_agent'):
        self.github.set_user_agent('{}: {}'.format(self.org_name,
                                                   self.org_homepage))
    try:
        self.org = self.github.organization(self.org_name)
    except github3.GitHubError:
        logger.error("Possibly the github ratelimit has been exceeded")
        logger.info("ratelimit: " + str(self.github.ratelimit_remaining))

def _validate_func_args(func, kwargs):
    args, varargs, varkw, defaults = inspect.getargspec(func)
    if set(kwargs.keys()) != set(args[1:]):
        raise TypeError("decorator kwargs do not match %s()'s kwargs"
                        % func.__name__)


def enclosing_frame(frame=None, level=2):
    frame = frame or sys._getframe(level)
    while frame.f_globals.get('__name__') == __name__:
        frame = frame.f_back
    return frame

def get_event_consumer(config, success_channel, error_channel, metrics,
                       **kwargs):
    builder = event_consumer.GPSEventConsumerBuilder(
        config, success_channel, error_channel, metrics, **kwargs)
    return builder.build_event_consumer()


def get_enricher(config, metrics, **kwargs):
    builder = enricher.GCEEnricherBuilder(config, metrics, **kwargs)
    return builder.build_enricher()


def get_gdns_publisher(config, metrics, **kwargs):
    builder = gdns_publisher.GDNSPublisherBuilder(config, metrics, **kwargs)
    return builder.build_publisher()

def normalize_allele_name(raw_allele, omit_dra1=False, infer_class2_pair=True):
    cache_key = (raw_allele, omit_dra1, infer_class2_pair)
    if cache_key in _normalized_allele_cache:
        return _normalized_allele_cache[cache_key]
    parsed_alleles = parse_classi_or_classii_allele_name(
        raw_allele, infer_pair=infer_class2_pair)
    species = parsed_alleles[0].species
    normalized_list = [species]
    if omit_dra1 and len(parsed_alleles) == 2:
        alpha, beta = parsed_alleles
        if alpha == _DRA1_0101:
            parsed_alleles = [beta]
    for parsed_allele in parsed_alleles:
        if len(parsed_allele.allele_family) > 0:
            normalized_list.append("%s*%s:%s" % (
                parsed_allele.gene,
                parsed_allele.allele_family,
                parsed_allele.allele_code))
        else:
            normalized_list.append("%s%s" % (
                parsed_allele.gene, parsed_allele.allele_code))
    normalized = "-".join(normalized_list)
    _normalized_allele_cache[cache_key] = normalized
    return normalized

def getVersion(data):
    data = data.splitlines()
    return next((v for v, u in zip(data, data[1:])
                 if len(v) == len(u) and allSame(u) and hasDigit(v)
                 and "." in v))


def split_species_prefix(name, seps="-:_ "):
    species = None
    name_upper = name.upper()
    name_len = len(name)
    for curr_prefix in _all_prefixes:
        n = len(curr_prefix)
        if name_len <= n:
            continue
        if name_upper.startswith(curr_prefix.upper()):
            species = curr_prefix
            name = name[n:].strip(seps)
            break
    return (species, name)

def formatFlow(s):
    result = ""
    shifts = []
    pos = 0
    nextIsList = False

    def IsNextList(index, maxIndex, buf):
        if index == maxIndex:
            return False
        if buf[index + 1] == '<':
            return True
        if index < maxIndex - 1:
            if buf[index + 1] == '\n' and buf[index + 2] == '<':
                return True
        return False

    maxIndex = len(s) - 1
    for index in range(len(s)):
        sym = s[index]
        if sym == "\n":
            lastShift = shifts[-1]
            result += sym + lastShift * " "
            pos = lastShift
            if index < maxIndex:
                if s[index + 1] not in "<>":
                    result += " "
                    pos += 1
            continue
        if sym == "<":
            if not nextIsList:
                shifts.append(pos)
            else:
                nextIsList = False
            pos += 1
            result += sym
            continue
        if sym == ">":
            shift = shifts[-1]
            result += '\n'
            result += shift * " "
            pos = shift
            result += sym
            pos += 1
            if IsNextList(index, maxIndex, s):
                nextIsList = True
            else:
                del shifts[-1]
                nextIsList = False
            continue
        result += sym
        pos += 1
    return result

def train(self, training_set, iterations=500):
    if len(training_set) > 2:
        self.__X = np.matrix([example[0] for example in training_set])
        if self.__num_labels == 1:
            self.__y = np.matrix([example[1] for example in
                                  training_set]).reshape((-1, 1))
        else:
            eye = np.eye(self.__num_labels)
            self.__y = np.matrix([eye[example[1]] for example in training_set])
    else:
        self.__X = np.matrix(training_set[0])
        if self.__num_labels == 1:
            self.__y = np.matrix(training_set[1]).reshape((-1, 1))
        else:
            eye = np.eye(self.__num_labels)
            self.__y = np.matrix([eye[index] for sublist in training_set[1]
                                  for index in sublist])

    self.__m = self.__X.shape[0]
    self.__input_layer_size = self.__X.shape[1]
    self.__sizes = [self.__input_layer_size]
    self.__sizes.extend(self.__hidden_layers)
    self.__sizes.append(self.__num_labels)

    initial_theta = []
    for count in range(len(self.__sizes) - 1):
        epsilon = np.sqrt(6) / np.sqrt(self.__sizes[count]
                                       + self.__sizes[count + 1])
        initial_theta.append(
            np.random.rand(self.__sizes[count + 1],
                           self.__sizes[count] + 1) * 2 * epsilon - epsilon)
    initial_theta = self.__unroll(initial_theta)

    self.__thetas = self.__roll(fmin_bfgs(self.__cost_function,
                                          initial_theta,
                                          fprime=self.__cost_grad_function,
                                          maxiter=iterations))


def predict(self, X):
    return self.__cost(self.__unroll(self.__thetas), 0, np.matrix(X))


def __cost(self, params, phase, X):
    params = self.__roll(params)
    a = np.concatenate((np.ones((X.shape[0], 1)), X), axis=1)
    calculated_a = [a]
    calculated_z = [0]
    for i, theta in enumerate(params):
        z = calculated_a[-1] * theta.transpose()
        calculated_z.append(z)
        a = self.sigmoid(z)
        if i != len(params) - 1:
            a = np.concatenate((np.ones((a.shape[0], 1)), a), axis=1)
        calculated_a.append(a)

    if phase == 0:
        if self.__num_labels > 1:
            return np.argmax(calculated_a[-1], axis=1)
        return np.round(calculated_a[-1])

    J = np.sum(-np.multiply(self.__y, np.log(calculated_a[-1]))
               - np.multiply(1 - self.__y,
                             np.log(1 - calculated_a[-1]))) / self.__m
    if self.__lambda != 0:
        J += np.sum([np.sum(np.power(theta[:, 1:], 2))
                     for theta in params]) * self.__lambda / (2.0 * self.__m)
    if phase == 1:
        return J

    reversed_d = []
    reversed_theta_grad = []
    for i in range(len(params)):
        if i == 0:
            d = calculated_a[-1] - self.__y
        else:
            d = np.multiply(reversed_d[-1] * params[-i][:, 1:],
                            self.sigmoid_grad(calculated_z[-1 - i]))
        reversed_d.append(d)
        theta_grad = reversed_d[-1].transpose() * calculated_a[-i - 2] / self.__m
        if self.__lambda != 0:
            theta_grad += np.concatenate(
                (np.zeros((params[-1 - i].shape[0], 1)),
                 params[-1 - i][:, 1:]),
                axis=1) * self.__lambda / self.__m
        reversed_theta_grad.append(theta_grad)

    theta_grad = self.__unroll(reversed(reversed_theta_grad))
    return theta_grad


def __roll(self, unrolled):
    rolled = []
    index = 0
    for count in range(len(self.__sizes) - 1):
        in_size = self.__sizes[count]
        out_size = self.__sizes[count + 1]
        theta_unrolled = np.matrix(
            unrolled[index:index + (in_size + 1) * out_size])
        theta_rolled = theta_unrolled.reshape((out_size, in_size + 1))
        rolled.append(theta_rolled)
        index += (in_size + 1) * out_size
    return rolled


def __unroll(self, rolled):
    return np.array(np.concatenate(
        [matrix.flatten() for matrix in rolled], axis=1)).reshape(-1)


def sigmoid_grad(self, z):
    return np.multiply(self.sigmoid(z), 1 - self.sigmoid(z))


def grad(self, params, epsilon=0.0001):
    grad = []
    for x in range(len(params)):
        temp = np.copy(params)
        temp[x] += epsilon
        temp2 = np.copy(params)
        temp2[x] -= epsilon
        grad.append((self.__cost_function(temp) - self.__cost_function(temp2))
                    / (2 * epsilon))
    return np.array(grad)

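# grad() above is a numerical gradient via central differences; a hedged
# usage sketch is to compare it against the analytic gradient on a small
# parameter vector (the instance name, class name used for the name-mangled
# attribute, and tolerance here are assumptions, not part of the class):
#
#   numeric = net.grad(initial_theta)
#   analytic = net._NeuralNet__cost_grad_function(initial_theta)
#   assert np.allclose(numeric, analytic, atol=1e-4)
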
def postWebhook(self, dev_id, external_id, url, event_types):
    path = 'notification/webhook'
    payload = {'device': {'id': dev_id},
               'externalId': external_id,
               'url': url,
               'eventTypes': event_types}
    return self.rachio.post(path, payload)


def putWebhook(self, hook_id, external_id, url, event_types):
    path = 'notification/webhook'
    payload = {'id': hook_id,
               'externalId': external_id,
               'url': url,
               'eventTypes': event_types}
    return self.rachio.put(path, payload)


def deleteWebhook(self, hook_id):
    path = '/'.join(['notification', 'webhook', hook_id])
    return self.rachio.delete(path)


def get(self, hook_id):
    path = '/'.join(['notification', 'webhook', hook_id])
    return self.rachio.get(path)

def connect(self):
    self.con = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    self.con.connect((self.ip, self.port))
    log.debug('Connected with set-top box at %s:%s.', self.ip, self.port)


def disconnect(self):
    if self.con is not None:
        self.con.close()
        log.debug('Closed connection with set-top box at %s:%s.',
                  self.ip, self.port)


def authorize(self):
    version = self.con.makefile().readline()
    self.con.send(version.encode())
    self.con.recv(2)
    self.con.send(struct.pack('>B', 1))
    msg = self.con.recv(4)
    response = struct.unpack(">I", msg)
    if response[0] != 0:
        log.debug("Failed to authorize with set-top box at %s:%s.",
                  self.ip, self.port)
        raise AuthenticationError()
    self.con.send(b'0')
    log.debug('Authorized successfully with set-top box at %s:%s.',
              self.ip, self.port)


def send_key(self, key):
    cmd = struct.pack(">BBBBBBH", 4, 1, 0, 0, 0, 0, key)
    self.con.send(cmd)
    cmd = struct.pack(">BBBBBBH", 4, 0, 0, 0, 0, 0, key)
    self.con.send(cmd)


def is_powered_on(self):
    host = '{0}:62137'.format(self.ip)
    try:
        HTTPConnection(host, timeout=2).request('GET', '/DeviceDescription.xml')
    except (ConnectionRefusedError, socket.timeout):
        log.debug('Set-top box at %s:%s is powered off.', self.ip, self.port)
        return False
    log.debug('Set-top box at %s:%s is powered on.', self.ip, self.port)
    return True


def power_on(self):
    if not self.is_powered_on():
        log.debug('Powering on set-top box at %s:%s.', self.ip, self.port)
        self.send_key(keys.POWER)


def select_channel(self, channel):
    for i in str(channel):
        key = int(i) + 0xe300
        self.send_key(key)

def get_hmac(message):
    key = current_app.config['WEBHOOKS_SECRET_KEY']
    hmac_value = hmac.new(
        key.encode('utf-8') if hasattr(key, 'encode') else key,
        message.encode('utf-8') if hasattr(message, 'encode') else message,
        sha1
    ).hexdigest()
    return hmac_value


def check_x_hub_signature(signature, message):
    hmac_value = get_hmac(message)
    if hmac_value == signature or (
            signature.find('=') > -1
            and hmac_value == signature[signature.find('=') + 1:]):
        return True
    return False

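# check_x_hub_signature accepts either the bare hex digest or the
# GitHub-style 'sha1=<hexdigest>' prefixed form, so for some payload:
#
#   check_x_hub_signature('sha1=' + get_hmac(payload), payload)  # -> True
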
async def list_all_active_projects(self, page_size=1000):
    url = f'{self.BASE_URL}/{self.api_version}/projects'
    params = {'pageSize': page_size}
    responses = await self.list_all(url, params)
    projects = self._parse_rsps_for_projects(responses)
    return [
        project for project in projects
        if project.get('lifecycleState', '').lower() == 'active'
    ]

def install(self, binder, module):
    ModuleAdapter(module, self._injector).configure(binder)


def expose(self, binder, interface, annotation=None):
    private_module = self

    class Provider(object):
        def get(self):
            return private_module.private_injector.get_instance(
                interface, annotation)

    self.original_binder.bind(interface, annotated_with=annotation,
                              to_provider=Provider)


def _call_validators(self):
    msg = []
    msg.extend(self._validate_keyfile())
    msg.extend(self._validate_dns_zone())
    msg.extend(self._validate_retries())
    msg.extend(self._validate_project())
    return msg

def parse_rdf(self):
    try:
        self.metadata = pg_rdf_to_json(self.rdf_path)
    except IOError as e:
        raise NoRDFError(e)
    if not self.authnames():
        self.author = ''
    elif len(self.authnames()) == 1:
        self.author = self.authnames()[0]
    else:
        self.author = "Various"


def download_rdf(self, force=False):
    if self.downloading:
        return True
    if not force and (os.path.exists(RDF_PATH)
                      and (time.time() - os.path.getmtime(RDF_PATH)) < RDF_MAX_AGE):
        return False
    self.downloading = True
    logging.info('Re-downloading RDF library from %s' % RDF_URL)
    try:
        shutil.rmtree(os.path.join(self.rdf_library_dir, 'cache'))
    except OSError as e:
        if e.errno != errno.ENOENT:
            raise
    try:
        # binary mode, since the downloaded tarball is raw bytes
        with open(RDF_PATH, 'wb') as f:
            with requests.get(RDF_URL, stream=True) as r:
                shutil.copyfileobj(r.raw, f)
    except requests.exceptions.RequestException as e:
        logging.error(e)
        return True
    try:
        with tarfile.open(RDF_PATH, 'r') as f:
            f.extractall(self.rdf_library_dir)
    except tarfile.TarError as e:
        logging.error(e)
        try:
            os.unlink(RDF_PATH)
        except OSError:
            pass
        return True
    self.downloading = False
    return False

def run(self, url=DEFAULT_AUTOBAHN_ROUTER, realm=DEFAULT_AUTOBAHN_REALM,
        authmethods=None, authid=None, authrole=None, authextra=None,
        blocking=False, callback=None, **kwargs):
    _init_crochet(in_twisted=False)
    self._bootstrap(blocking, url=url, realm=realm, authmethods=authmethods,
                    authid=authid, authrole=authrole, authextra=authextra,
                    **kwargs)
    if callback:
        callback()
    self._callbacks_runner.start()


def stop(self):
    if not self._started:
        raise NotRunningError("This AutobahnSync instance is not started")
    self._callbacks_runner.stop()
    self._started = False

def process_event(self, event_id):
    with db.session.begin_nested():
        event = Event.query.get(event_id)
        event._celery_task = self
        event.receiver.run(event)
        flag_modified(event, 'response')
        flag_modified(event, 'response_headers')
        db.session.add(event)
    db.session.commit()


def _json_column(**kwargs):
    return db.Column(
        JSONType().with_variant(
            postgresql.JSON(none_as_null=True),
            'postgresql',
        ),
        nullable=True,
        **kwargs
    )


def delete(self, event):
    assert self.receiver_id == event.receiver_id
    event.response = {'status': 410, 'message': 'Gone.'}
    event.response_code = 410


def get_hook_url(self, access_token):
    if (current_app.debug or current_app.testing) and \
            current_app.config.get('WEBHOOKS_DEBUG_RECEIVER_URLS', None):
        url_pattern = current_app.config['WEBHOOKS_DEBUG_RECEIVER_URLS'].get(
            self.receiver_id, None)
        if url_pattern:
            return url_pattern % dict(token=access_token)
    return url_for(
        'invenio_webhooks.event_list',
        receiver_id=self.receiver_id,
        access_token=access_token,
        _external=True
    )


def check_signature(self):
    if not self.signature:
        return True
    signature_value = request.headers.get(self.signature, None)
    if signature_value:
        validator = 'check_' + re.sub(r'[-]', '_', self.signature).lower()
        check_signature = getattr(signatures, validator)
        if check_signature(signature_value, request.data):
            return True
    return False


def extract_payload(self):
    if not self.check_signature():
        raise InvalidSignature('Invalid Signature')
    if request.is_json:
        delete_cached_json_for(request)
        return request.get_json(silent=False, cache=False)
    elif request.content_type == 'application/x-www-form-urlencoded':
        return dict(request.form)
    raise InvalidPayload(request.content_type)


def delete(self, event):
    super(CeleryReceiver, self).delete(event)
    AsyncResult(event.id).revoke(terminate=True)


def validate_receiver(self, key, value):
    if value not in current_webhooks.receivers:
        raise ReceiverDoesNotExist(self.receiver_id)
    return value


def create(cls, receiver_id, user_id=None):
    event = cls(id=uuid.uuid4(), receiver_id=receiver_id, user_id=user_id)
    event.payload = event.receiver.extract_payload()
    return event


def receiver(self):
    try:
        return current_webhooks.receivers[self.receiver_id]
    except KeyError:
        raise ReceiverDoesNotExist(self.receiver_id)


def receiver(self, value):
    assert isinstance(value, Receiver)
    self.receiver_id = value.receiver_id


def process(self):
    try:
        self.receiver(self)
    except Exception as e:
        current_app.logger.exception('Could not process event.')
        self.response_code = 500
        self.response = dict(status=500, message=str(e))
    return self

def register_bootstrap_functions():
    global _registered
    if _registered:
        return
    _registered = True

    from wrapt import discover_post_import_hooks
    for name in os.environ.get('AUTOWRAPT_BOOTSTRAP', '').split(','):
        discover_post_import_hooks(name)


def bootstrap():
    global _patched
    if _patched:
        return
    _patched = True

    site.execsitecustomize = _execsitecustomize_wrapper(site.execsitecustomize)
    site.execusercustomize = _execusercustomize_wrapper(site.execusercustomize)

def get_rules(license):
    can = []
    cannot = []
    must = []
    req = requests.get("{base_url}/licenses/{license}".format(
        base_url=BASE_URL, license=license), headers=_HEADERS)
    if req.status_code == requests.codes.ok:
        data = req.json()
        can = data["permitted"]
        cannot = data["forbidden"]
        must = data["required"]
    return can, cannot, must


def main():
    all_summary = {}
    for license in RESOURCES:
        req = requests.get(RESOURCES[license])
        if req.status_code == requests.codes.ok:
            summary = get_summary(req.text)
            can, cannot, must = get_rules(license)
            all_summary[license] = {
                "summary": summary,
                "source": RESOURCES[license],
                "can": can,
                "cannot": cannot,
                "must": must
            }
    with open('summary.json', 'w+') as f:
        f.write(json.dumps(all_summary, indent=4))

def get_arguments():
    parser = argparse.ArgumentParser(
        description='Handles bumping of the artifact version')
    parser.add_argument('--log-config', '-l',
                        action='store',
                        dest='logger_config',
                        help='The location of the logging config json file',
                        default='')
    parser.add_argument('--log-level', '-L',
                        help='Provide the log level. Defaults to INFO.',
                        dest='log_level',
                        action='store',
                        default='INFO',
                        choices=['DEBUG', 'INFO', 'WARNING',
                                 'ERROR', 'CRITICAL'])
    parser.add_argument('--major',
                        help='Bump the major version',
                        dest='bump_major',
                        action='store_true',
                        default=False)
    parser.add_argument('--minor',
                        help='Bump the minor version',
                        dest='bump_minor',
                        action='store_true',
                        default=False)
    parser.add_argument('--patch',
                        help='Bump the patch version',
                        dest='bump_patch',
                        action='store_true',
                        default=False)
    parser.add_argument('--version',
                        help='Set the version',
                        dest='version',
                        action='store',
                        default=False)
    args = parser.parse_args()
    return args


def setup_logging(args):
    handler = logging.StreamHandler()
    handler.setLevel(args.log_level)
    formatter = logging.Formatter(('%(asctime)s - '
                                   '%(name)s - '
                                   '%(levelname)s - '
                                   '%(message)s'))
    handler.setFormatter(formatter)
    LOGGER.addHandler(handler)

def make_response(event):
    code, message = event.status
    response = jsonify(**event.response)
    response.headers['X-Hub-Event'] = event.receiver_id
    response.headers['X-Hub-Delivery'] = event.id
    if message:
        response.headers['X-Hub-Info'] = message
    add_link_header(response, {'self': url_for(
        '.event_item', receiver_id=event.receiver_id, event_id=event.id,
        _external=True)})
    return response, code


def error_handler(f):
    @wraps(f)
    def inner(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except ReceiverDoesNotExist:
            return jsonify(status=404,
                           description='Receiver does not exist.'), 404
        except InvalidPayload as e:
            return jsonify(status=415,
                           description='Receiver does not support the'
                                       ' content-type "%s".' % e.args[0]), 415
        except WebhooksError:
            return jsonify(status=500,
                           description='Internal server error'), 500

    return inner


def post(self, receiver_id=None):
    try:
        user_id = request.oauth.access_token.user_id
    except AttributeError:
        user_id = current_user.get_id()
    event = Event.create(receiver_id=receiver_id, user_id=user_id)
    db.session.add(event)
    db.session.commit()
    event.process()
    db.session.commit()
    return make_response(event)


def _get_event(receiver_id, event_id):
    event = Event.query.filter_by(
        receiver_id=receiver_id, id=event_id).first_or_404()
    try:
        user_id = request.oauth.access_token.user_id
    except AttributeError:
        user_id = current_user.get_id()
    if event.user_id != int(user_id):
        abort(401)
    return event


def get(self, receiver_id=None, event_id=None):
    event = self._get_event(receiver_id, event_id)
    return make_response(event)


def delete(self, receiver_id=None, event_id=None):
    event = self._get_event(receiver_id, event_id)
    event.delete()
    db.session.commit()
    return make_response(event)

def _stripslashes(s):
    r = re.sub(r"\\(n|r)", "\n", s)
    r = re.sub(r"\\", "", r)
    return r


def _get_config_name():
    p = subprocess.Popen('git config --get user.name', shell=True,
                         stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = p.stdout.readlines()
    return _stripslashes(output[0])


def _get_licences():
    licenses = _LICENSES
    for license in licenses:
        print("{license_name} [{license_code}]".format(
            license_name=licenses[license], license_code=license))