Dataset columns: idx (int64, values 0–63k); question (string, lengths 53–5.28k); target (string, lengths 5–805).
500
def _inplace_sort_by_id(unsorted_list):
    if not isinstance(unsorted_list, list):
        return
    sorted_list = [(i.get('@id'), i) for i in unsorted_list]
    sorted_list.sort()
    del unsorted_list[:]
    unsorted_list.extend([i[1] for i in sorted_list])
Takes a list of dicts, each of which has an '@id' key, and sorts the list in place by that key.
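A minimal usage sketch (hypothetical data) showing that the list object itself is mutated:

records = [{'@id': 'otu2'}, {'@id': 'otu1'}]
_inplace_sort_by_id(records)
# records is now [{'@id': 'otu1'}, {'@id': 'otu2'}]; callers holding a
# reference to the same list see the new order.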
501
def cull_nonmatching_trees(nexson, tree_id, curr_version=None):
    if curr_version is None:
        curr_version = detect_nexson_version(nexson)
    if not _is_by_id_hbf(curr_version):
        nexson = convert_nexson_format(nexson, BY_ID_HONEY_BADGERFISH)
    nexml_el = get_nexml_el(nexson)
    tree_groups = nexml_el['treesById']
    tree_groups_to_del = []
    for tgi, tree_group in tree_groups.items():
        tbi = tree_group['treeById']
        if tree_id in tbi:
            trees_to_del = [i for i in tbi.keys() if i != tree_id]
            for tid in trees_to_del:
                tree_group['^ot:treeElementOrder'].remove(tid)
                del tbi[tid]
        else:
            tree_groups_to_del.append(tgi)
    for tgid in tree_groups_to_del:
        nexml_el['^ot:treesElementOrder'].remove(tgid)
        del tree_groups[tgid]
    return nexson
Modifies nexson and returns it in version 1.2.1 with any tree that does not match the ID removed.
502
def phylesystem_api_url(self, base_url, study_id):
    p = self._phylesystem_api_params()
    e = self._phylesystem_api_ext()
    if self.content == 'study':
        return '{d}/study/{i}{e}'.format(d=base_url, i=study_id, e=e), p
    elif self.content == 'tree':
        if self.content_id is None:
            return '{d}/study/{i}/tree{e}'.format(d=base_url, i=study_id, e=e), p
        return '{d}/study/{i}/tree/{t}{e}'.format(d=base_url, i=study_id, t=self.content_id, e=e), p
    elif self.content == 'subtree':
        assert self.content_id is not None
        t, n = self.content_id
        p['subtree_id'] = n
        return '{d}/study/{i}/subtree/{t}{e}'.format(d=base_url, i=study_id, t=t, e=e), p
    elif self.content == 'meta':
        return '{d}/study/{i}/meta{e}'.format(d=base_url, i=study_id, e=e), p
    elif self.content == 'otus':
        if self.content_id is None:
            return '{d}/study/{i}/otus{e}'.format(d=base_url, i=study_id, e=e), p
        return '{d}/study/{i}/otus/{t}{e}'.format(d=base_url, i=study_id, t=self.content_id, e=e), p
    elif self.content == 'otu':
        if self.content_id is None:
            return '{d}/study/{i}/otu{e}'.format(d=base_url, i=study_id, e=e), p
        return '{d}/study/{i}/otu/{t}{e}'.format(d=base_url, i=study_id, t=self.content_id, e=e), p
    elif self.content == 'otumap':
        return '{d}/otumap/{i}{e}'.format(d=base_url, i=study_id, e=e), p
    else:
        assert False
Returns URL and param dict for a GET call to phylesystem_api.
503
def _is_valid(self, log: Optional[Logger] = None) -> bool:
    return self._validate(self, log)[0]
Determine whether the current contents are valid.
504
def _validate(self, val: list, log: Optional[Logger] = None) -> Tuple[bool, List[str]]:
    errors = []
    if not isinstance(val, list):
        errors.append(f"{self._variable_name}: {repr(val)} is not an array")
    else:
        for i in range(0, len(val)):
            v = val[i]
            if not conforms(v, self._type, self._context.NAMESPACE):
                errors.append(f"{self._variable_name} element {i}: {v} is not a {self._type.__name__}")
        if len(val) < self._min:
            errors.append(
                f"{self._variable_name}: at least {self._min} value{'s' if self._min > 1 else ''} required - "
                f"element has {len(val) if len(val) else 'none'}")
        if self._max is not None and len(val) > self._max:
            errors.append(f"{self._variable_name}: no more than {self._max} values permitted - element has {len(val)}")
    if log:
        for error in errors:
            log.log(error)
    return not bool(errors), errors
Determine whether val is a valid instance of this array.
505
def tree_iter_nexson_proxy(nexson_proxy):
    nexml_el = nexson_proxy._nexml_el
    tg_order = nexml_el['^ot:treesElementOrder']
    tgd = nexml_el['treesById']
    for tg_id in tg_order:
        tg = tgd[tg_id]
        tree_order = tg['^ot:treeElementOrder']
        tbid = tg['treeById']
        otus = tg['@otus']
        for k in tree_order:
            v = tbid[k]
            yield nexson_proxy._create_tree_proxy(tree_id=k, tree=v, otus=otus)
Iterates over NexsonTreeProxy objects in the order determined by the nexson blob.
506
def main():
    p = argparse.ArgumentParser()
    p.add_argument("--host", default="localhost")
    p.add_argument("--port", type=int, default=3551)
    p.add_argument("--strip-units", action="store_true", default=False)
    args = p.parse_args()
    status.print_status(status.get(args.host, args.port), strip_units=args.strip_units)
Get status from APC NIS and print output on stdout.
507
def wsgi_app(self, environ, start_response):
    @_LOCAL_MANAGER.middleware
    def _wrapped_app(environ, start_response):
        request = Request(environ)
        setattr(_local, _CURRENT_REQUEST_KEY, request)
        response = self._dispatch_request(request)
        return response(environ, start_response)
    return _wrapped_app(environ, start_response)
A basic WSGI app.
508
def run(self, host, port, **options):
    self.registry.debug = True
    debugged = DebuggedJsonRpcApplication(self, evalex=True)
    run_simple(host, port, debugged, use_reloader=True, **options)
For debugging purposes, you can run this as a standalone server.
509
def _try_trigger_before_first_request_funcs(self):
    if self._after_first_request_handled:
        return
    else:
        with self._before_first_request_lock:
            if self._after_first_request_handled:
                return
            for func in self._before_first_request_funcs:
                func()
            self._after_first_request_handled = True
Runs each function from self.before_first_request_funcs once and only once.
510
def debug_application(self, environ, start_response):
    adapter = self._debug_map.bind_to_environ(environ)
    if adapter.test():
        _, args = adapter.match()
        return self.handle_debug(environ, start_response, args["traceback_id"])
    else:
        return super(DebuggedJsonRpcApplication, self).debug_application(environ, start_response)
Run the application and preserve the traceback frames.
511
def handle_debug(self, environ, start_response, traceback_id):
    if traceback_id not in self.app.registry.tracebacks:
        abort(404)
    self._copy_over_traceback(traceback_id)
    traceback = self.tracebacks[traceback_id]
    rendered = traceback.render_full(evalex=self.evalex, secret=self.secret)
    response = Response(rendered.encode('utf-8', 'replace'),
                        headers=[('Content-Type', 'text/html; charset=utf-8'),
                                 ('X-XSS-Protection', '0')])
    return response(environ, start_response)
Handles the debug endpoint for inspecting previous errors.
512
def register_signals(self, app):
    before_record_index.connect(inject_provisional_community)
    if app.config['COMMUNITIES_OAI_ENABLED']:
        listen(Community, 'after_insert', create_oaipmh_set)
        listen(Community, 'after_delete', destroy_oaipmh_set)
    inclusion_request_created.connect(new_request)
Register the signals.
513
def genargs() -> ArgumentParser:
    parser = ArgumentParser()
    parser.add_argument("spec", help="JSG specification - can be file name, URI or string")
    parser.add_argument("-o", "--outfile", help="Output python file - if omitted, python is not saved")
    # action="store_true" added so the --print flag does not consume an argument
    parser.add_argument("-p", "--print", help="Print python file to stdout", action="store_true")
    parser.add_argument("-id", "--inputdir", help="Input directory with JSON files")
    parser.add_argument("-i", "--json", help="URL, file name or json text", nargs='*')
    return parser
Create a command line parser.
514
def _to_string(inp: str) -> str:
    if '://' in inp:
        req = requests.get(inp)
        if not req.ok:
            raise ValueError(f"Unable to read {inp}")
        return req.text
    else:
        with open(inp) as infile:
            return infile.read()
Convert a URL or file name to a string.
515
def conforms(self, json: str, name: str = "", verbose: bool = False) -> ValidationResult:
    json = self._to_string(json) if not self.is_json(json) else json
    try:
        self.json_obj = loads(json, self.module)
    except ValueError as v:
        return ValidationResult(False, str(v), name, None)
    logfile = StringIO()
    logger = Logger(cast(TextIO, logfile))
    if not is_valid(self.json_obj, logger):
        return ValidationResult(False, logfile.getvalue().strip('\n'), name, None)
    return ValidationResult(True, "", name, type(self.json_obj).__name__)
Determine whether json conforms with the JSG specification.
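A hedged usage sketch; JSGPython is a hypothetical name for the wrapper class this method belongs to, constructed from a JSG schema string:

js = JSGPython('doc { name : @string }')
result = js.conforms('{"name": "Alice"}')
# Per the constructor calls above, ValidationResult's first positional field
# is the pass/fail flag and the second carries the error text, if any.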
516
async def _sync_revoc(self, rr_id: str, rr_size: int = None) -> None:
    LOGGER.debug('Issuer._sync_revoc >>> rr_id: %s, rr_size: %s', rr_id, rr_size)
    (cd_id, tag) = rev_reg_id2cred_def_id__tag(rr_id)
    try:
        await self.get_cred_def(cd_id)
    except AbsentCredDef:
        LOGGER.debug(
            'Issuer._sync_revoc: <!< tails tree %s may be for another ledger; no cred def found on %s',
            self._dir_tails,
            cd_id)
        raise AbsentCredDef('Tails tree {} may be for another ledger; no cred def found on {}'.format(
            self._dir_tails,
            cd_id))
    with REVO_CACHE.lock:
        revo_cache_entry = REVO_CACHE.get(rr_id, None)
        tails = None if revo_cache_entry is None else revo_cache_entry.tails
        if tails is None:
            try:
                tails = await Tails(self._dir_tails, cd_id, tag).open()
            except AbsentTails:
                await self._create_rev_reg(rr_id, rr_size)
                tails = await Tails(self._dir_tails, cd_id, tag).open()
            if revo_cache_entry is None:
                REVO_CACHE[rr_id] = RevoCacheEntry(None, tails)
            else:
                REVO_CACHE[rr_id].tails = tails
    LOGGER.debug('Issuer._sync_revoc <<<')
Create revoc registry if need be for input revocation registry identifier; open and cache tails file reader.
517
def quote_xml(text):
    text = _coerce_unicode(text)
    if text.startswith(CDATA_START):
        return text
    return saxutils.escape(text)
Format a value for display as an XML text node.
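Illustrative behavior, assuming CDATA_START is the usual '<![CDATA[' marker:

quote_xml('AT&T <tag>')                   # -> 'AT&amp;T &lt;tag&gt;' via saxutils.escape
quote_xml('<![CDATA[raw & unescaped]]>')  # CDATA blocks are returned untouched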
518
def __construct_from_components(self, ns_uri, prefix=None, schema_location=None):
    assert ns_uri
    self.uri = ns_uri
    self.schema_location = schema_location or None
    self.prefixes = OrderedSet()
    if prefix:
        self.prefixes.add(prefix)
    self.preferred_prefix = prefix or None
Initialize this instance from a namespace URI and optional prefix and schema location URI.
519
def namespace_for_prefix(self, prefix):
    try:
        ni = self.__lookup_prefix(prefix)
    except PrefixNotFoundError:
        return None
    else:
        return ni.uri
Get the namespace the given prefix maps to.
520
def set_preferred_prefix_for_namespace(self, ns_uri, prefix, add_if_not_exist=False):
    ni = self.__lookup_uri(ns_uri)
    if not prefix:
        ni.preferred_prefix = None
    elif prefix in ni.prefixes:
        ni.preferred_prefix = prefix
    elif add_if_not_exist:
        self.add_prefix(ns_uri, prefix, set_as_preferred=True)
    else:
        raise PrefixNotFoundError(prefix)
Sets the preferred prefix for ns_uri. If add_if_not_exist is True, the prefix is added if it's not already registered; otherwise, setting an unknown prefix as preferred is an error (the default is False). Setting to None always works and indicates a preference to use the namespace as a default. The given namespace must already be in this set.
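A short usage sketch (ns_set and the URI are hypothetical):

ns_set.add_namespace_uri('http://example.com/ns', prefix='ex')
# 'ex2' is not yet registered, so this would raise PrefixNotFoundError
# without the flag:
ns_set.set_preferred_prefix_for_namespace('http://example.com/ns', 'ex2',
                                          add_if_not_exist=True)
# None: prefer using this namespace as the default (unprefixed) namespace.
ns_set.set_preferred_prefix_for_namespace('http://example.com/ns', None)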
521
def __merge_schema_locations(self, ni, incoming_schemaloc):
    if ni.schema_location == incoming_schemaloc:
        return
    elif not ni.schema_location:
        ni.schema_location = incoming_schemaloc or None
    elif not incoming_schemaloc:
        return
    else:
        raise ConflictingSchemaLocationError(ni.uri, ni.schema_location, incoming_schemaloc)
Merge incoming_schemaloc into the given _NamespaceInfo, ni. If we don't have one yet and the incoming value is non-None, update ours with theirs. This modifies ni.
522
def add_namespace_uri(self, ns_uri, prefix=None, schema_location=None):
    assert ns_uri
    if ns_uri in self.__ns_uri_map:
        ni = self.__lookup_uri(ns_uri)
        new_ni = copy.deepcopy(ni)
        if prefix:
            self.__check_prefix_conflict(ni, prefix)
            new_ni.prefixes.add(prefix)
        self.__merge_schema_locations(new_ni, schema_location)
        for p in new_ni.prefixes:
            self.__prefix_map[p] = new_ni
        self.__ns_uri_map[new_ni.uri] = new_ni
    else:
        if prefix:
            self.__check_prefix_conflict(ns_uri, prefix)
        ni = _NamespaceInfo(ns_uri, prefix, schema_location)
        self.__add_namespaceinfo(ni)
Adds a new namespace to this set, optionally with a prefix and schema location URI.
523
def remove_namespace(self, ns_uri):
    if not self.contains_namespace(ns_uri):
        return
    ni = self.__ns_uri_map.pop(ns_uri)
    for prefix in ni.prefixes:
        del self.__prefix_map[prefix]
Removes the indicated namespace from this set.
524
def add_prefix(self, ns_uri, prefix, set_as_preferred=False):
    assert prefix
    ni = self.__lookup_uri(ns_uri)
    self.__check_prefix_conflict(ni, prefix)
    ni.prefixes.add(prefix)
    self.__prefix_map[prefix] = ni
    if set_as_preferred:
        ni.preferred_prefix = prefix
Adds prefix for the given namespace URI. The namespace must already exist in this set. If set_as_preferred is True, also set this prefix as the preferred one.
525
def prefix_iter(self, ns_uri):
    ni = self.__lookup_uri(ns_uri)
    return iter(ni.prefixes)
Gets an iterator over the prefixes for the given namespace.
526
def remove_prefix(self, prefix):
    if prefix not in self.__prefix_map:
        return
    ni = self.__lookup_prefix(prefix)
    ni.prefixes.discard(prefix)
    del self.__prefix_map[prefix]
    if ni.preferred_prefix == prefix:
        ni.preferred_prefix = next(iter(ni.prefixes), None)
Removes prefix from this set. This is a no-op if the prefix doesn't exist in it.
527
def set_schema_location(self, ns_uri, schema_location, replace=False):
    ni = self.__lookup_uri(ns_uri)
    if ni.schema_location == schema_location:
        return
    elif replace or ni.schema_location is None:
        ni.schema_location = schema_location
    elif schema_location is None:
        ni.schema_location = None
    else:
        raise ConflictingSchemaLocationError(ns_uri, ni.schema_location, schema_location)
Sets the schema location of the given namespace.
528
def get_schemaloc_string(self, ns_uris=None, sort=False, delim="\n"):
    if not ns_uris:
        ns_uris = six.iterkeys(self.__ns_uri_map)
    if sort:
        ns_uris = sorted(ns_uris)
    schemalocs = []
    for ns_uri in ns_uris:
        ni = self.__lookup_uri(ns_uri)
        if ni.schema_location:
            schemalocs.append("{0.uri} {0.schema_location}".format(ni))
    if not schemalocs:
        return ""
    return 'xsi:schemaLocation="{0}"'.format(delim.join(schemalocs))
Constructs and returns a schemaLocation attribute. If no namespaces in this set have any schema locations defined, returns an empty string.
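For example, with a single namespace carrying a schema location (hypothetical URIs), the attribute comes back as one string:

ns_set.set_schema_location('http://example.com/ns', 'http://example.com/ns.xsd')
ns_set.get_schemaloc_string()
# 'xsi:schemaLocation="http://example.com/ns http://example.com/ns.xsd"'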
529
def get_uri_prefix_map(self):
    mapping = {}
    for ni in six.itervalues(self.__ns_uri_map):
        if ni.preferred_prefix:
            mapping[ni.uri] = ni.preferred_prefix
        elif len(ni.prefixes) > 0:
            mapping[ni.uri] = next(iter(ni.prefixes))
        else:
            raise NoPrefixesError(ni.uri)
    return mapping
Constructs and returns a map from namespace URI to prefix, representing all namespaces in this set. The prefix chosen for each namespace is its preferred prefix if it's not None. If the preferred prefix is None, one is chosen from the set of registered prefixes. In the latter situation, if no prefixes are registered, an exception is raised.
530
def get_uri_schemaloc_map(self):
    mapping = {}
    for ni in six.itervalues(self.__ns_uri_map):
        if ni.schema_location:
            mapping[ni.uri] = ni.schema_location
    return mapping
Constructs and returns a map from namespace URI to schema location URI. Namespaces without schema locations are excluded.
531
def subset(self, ns_uris):
    sub_ns = NamespaceSet()
    for ns_uri in ns_uris:
        ni = self.__lookup_uri(ns_uri)
        new_ni = copy.deepcopy(ni)
        sub_ns._NamespaceSet__add_namespaceinfo(new_ni)
    return sub_ns
Return a subset of this NamespaceSet containing only data for the given namespaces.
532
def import_from(self, other_ns, replace=False):
    for other_ns_uri in other_ns.namespace_uris:
        ni = self.__ns_uri_map.get(other_ns_uri)
        if ni is None:
            other_ni = other_ns._NamespaceSet__ns_uri_map[other_ns_uri]
            for other_prefix in other_ni.prefixes:
                self.__check_prefix_conflict(other_ns_uri, other_prefix)
            cloned_ni = copy.deepcopy(other_ni)
            self.__add_namespaceinfo(cloned_ni)
        elif replace:
            other_ni = other_ns._NamespaceSet__ns_uri_map[other_ns_uri]
            for other_prefix in other_ni.prefixes:
                self.__check_prefix_conflict(ni, other_prefix)
            cloned_ni = copy.deepcopy(other_ni)
            self.remove_namespace(other_ns_uri)
            self.__add_namespaceinfo(cloned_ni)
        else:
            continue
Imports namespaces into this set from other_ns.
533
def _get_version(self, root):
    version = self.get_version(root)
    if version:
        return StrictVersion(version)
    raise UnknownVersionError(
        "Unable to determine the version of the input document. No "
        "version information found on the root element.")
Return the version of the root element passed in.
534
def _check_version(self, root):
    version = self._get_version(root)
    supported = [StrictVersion(x) for x in self.supported_versions(root.tag)]
    if version in supported:
        return
    error = "Document version ({0}) not in supported versions ({1})"
    raise UnsupportedVersionError(
        message=error.format(version, supported),
        expected=supported,
        found=version)
Ensure the root element is a supported version.
535
def _check_root_tag(self, root):
    supported = self.supported_tags()
    if root.tag in supported:
        return
    error = "Document root element ({0}) not one of ({1})"
    raise UnsupportedRootElementError(
        message=error.format(root.tag, supported),
        expected=supported,
        found=root.tag,
    )
Check that the XML element tree has a supported root element.
536
def parse_xml_to_obj(self, xml_file, check_version=True, check_root=True, encoding=None):
    root = get_etree_root(xml_file, encoding=encoding)
    if check_root:
        self._check_root_tag(root)
    if check_version:
        self._check_version(root)
    entity_class = self.get_entity_class(root.tag)
    entity_obj = entity_class._binding_class.factory()
    entity_obj.build(root)
    return entity_obj
Creates a STIX binding object from the supplied xml file.
537
def parse_xml(self, xml_file, check_version=True, check_root=True, encoding=None):
    xml_etree = get_etree(xml_file, encoding=encoding)
    entity_obj = self.parse_xml_to_obj(
        xml_file=xml_etree,
        check_version=check_version,
        check_root=check_root)
    xml_root_node = xml_etree.getroot()
    entity = self.get_entity_class(xml_root_node.tag).from_obj(entity_obj)
    entity.__input_namespaces__ = dict(iteritems(xml_root_node.nsmap))
    with ignored(KeyError):
        pairs = get_schemaloc_pairs(xml_root_node)
        entity.__input_schemalocations__ = dict(pairs)
    return entity
Creates a python-stix STIXPackage object from the supplied xml_file.
538
def get_logo_url(self, obj):
    if current_app and obj.logo_url:
        return u'{site_url}{path}'.format(
            site_url=current_app.config.get('THEME_SITEURL'),
            path=obj.logo_url,
        )
Get the community logo URL.
539
def item_links_addition(self, data):
    links_item_factory = self.context.get('links_item_factory', default_links_item_factory)
    data['links'] = links_item_factory(data)
    return data
Add the links for each community.
540
def envelope(self, data, many):
    if not many:
        return data
    result = dict(hits=dict(hits=data, total=self.context.get('total', len(data))))
    page = self.context.get('page')
    if page:
        links_pagination_factory = self.context.get(
            'links_pagination_factory', default_links_pagination_factory)
        urlkwargs = self.context.get('urlkwargs', {})
        result['links'] = links_pagination_factory(page, urlkwargs)
    return result
Wrap result in envelope.
541
def parse_datetime(value):
    if not value:
        return None
    elif isinstance(value, datetime.datetime):
        return value
    return dateutil.parser.parse(value)
Attempts to parse value into an instance of datetime.datetime. If value is None, this function will return None.
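A quick sketch of the accepted inputs (assumes the python-dateutil package that dateutil.parser comes from):

parse_datetime(None)                           # -> None
parse_datetime(datetime.datetime(2020, 1, 2))  # -> passed through unchanged
parse_datetime('2020-01-02T03:04:05')          # -> datetime.datetime(2020, 1, 2, 3, 4, 5)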
542
def parse_date(value):
    if not value:
        return None
    if isinstance(value, datetime.date):
        return value
    return parse_datetime(value).date()
Attempts to parse value into an instance of datetime.date. If value is None, this function will return None.
543
def correct_word(word_string):
    if word_string is None:
        return ""
    elif isinstance(word_string, str):
        return max(find_candidates(word_string), key=find_word_prob)
    else:
        raise InputError("string or none type variable not passed as argument to correct_word")
Finds all valid one and two letter corrections for word_string, returning the word with the highest relative probability as type str.
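A hedged usage sketch; actual outputs depend on the WORD_DISTRIBUTION corpus, so the corrected value shown is illustrative:

correct_word('speling')  # likely -> 'spelling' (the best-scoring one-edit candidate)
correct_word(None)       # -> ''
correct_word(42)         # raises InputError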
544
def find_candidates(word_string):
    if word_string is None:
        return {}
    elif isinstance(word_string, str):
        return (validate_words([word_string])
                or validate_words(list(find_one_letter_edits(word_string)))
                or validate_words(list(find_two_letter_edits(word_string)))
                or set([word_string]))
    else:
        raise InputError("string or none type variable not passed as argument to find_candidates")
Finds all potential words word_string could have been intended to mean. If a word is not incorrectly spelled, it will return this word first; else it will look for one-letter edits that are correct. If there are no valid one-letter edits, it will perform a two-letter edit search.
545
def find_word_prob(word_string, word_total=sum(WORD_DISTRIBUTION.values())):
    if word_string is None:
        return 0
    elif isinstance(word_string, str):
        return WORD_DISTRIBUTION[word_string] / word_total
    else:
        raise InputError("string or none type variable not passed as argument to find_word_prob")
Finds the relative probability of the word appearing given the context of a base corpus. Returns this probability value as a float instance.
546
def validate_words(word_list):
    if word_list is None:
        return {}
    elif isinstance(word_list, list):
        if not word_list:
            return {}
        else:
            return set(word for word in word_list if word in WORD_DISTRIBUTION)
    else:
        raise InputError("list variable not passed as argument to validate_words")
Checks, for each edited word in word_list, whether that word is a valid English word. Returns all validated words as a set instance.
547
def search_star(star):
    base_url = "http://star-api.herokuapp.com/api/v1/stars/"
    if not isinstance(star, str):
        raise ValueError("The star arg you provided is not the type of str")
    else:
        base_url += star
    return dispatch_http_get(base_url)
It is also possible to query the stars by label; here is an example of querying for the star labeled as Sun.
548
def search_exoplanet(exoplanet):
    base_url = "http://star-api.herokuapp.com/api/v1/exo_planets/"
    if not isinstance(exoplanet, str):
        raise ValueError("The exoplanet arg you provided is not the type of str")
    else:
        base_url += exoplanet
    return dispatch_http_get(base_url)
It is also possible to query the exoplanets by label; here is an example of querying for the exoplanet labeled as 11 Com.
549
def search_local_galaxies(galaxy):
    base_url = "http://star-api.herokuapp.com/api/v1/local_groups/"
    if not isinstance(galaxy, str):
        raise ValueError("The galaxy arg you provided is not the type of str")
    else:
        base_url += galaxy
    return dispatch_http_get(base_url)
It is also possible to query the local galaxies by label; here is an example of querying for the local galaxy labeled IC 10.
550
def search_star_cluster(cluster):
    base_url = "http://star-api.herokuapp.com/api/v1/open_cluster/"
    if not isinstance(cluster, str):
        raise ValueError("The cluster arg you provided is not the type of str")
    else:
        base_url += cluster
    return dispatch_http_get(base_url)
It is also possible to query the star clusters by label; here is an example of querying for the star cluster labeled Berkeley 59.
551
def as_python(self, name: str) -> str:
    if self._ruleTokens:
        pattern = "jsg.JSGPattern(r'{}'.format({}))".format(
            self._rulePattern,
            ', '.join(['{v}={v}.pattern'.format(v=v) for v in sorted(self._ruleTokens)]))
    else:
        pattern = "jsg.JSGPattern(r'{}')".format(self._rulePattern)
    base_type = self._jsontype.signature_type() if self._jsontype else "jsg.JSGString"
    return python_template.format(name=name, base_type=base_type, pattern=pattern)
Return the python representation.
552
def increment_slug(s):
    slug_parts = s.split('-')
    try:
        slug_parts[-1] = str(1 + int(slug_parts[-1]))
    except ValueError:
        # last part is not numeric, so start a new series at 2
        slug_parts.append('2')
    return '-'.join(slug_parts)
Generate the next slug for a series.
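Concretely, the behavior on two hypothetical slugs:

increment_slug('my-talk-3')  # -> 'my-talk-4' (numeric tail incremented)
increment_slug('my-talk')    # -> 'my-talk-2' (no numeric tail, so '-2' is appended)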
553
def underscored2camel_case(v):
    vlist = v.split('_')
    c = []
    for n, el in enumerate(vlist):
        if el:
            if n == 0:
                c.append(el)
            else:
                c.extend([el[0].upper(), el[1:]])
    return ''.join(c)
Converts ott_id to ottId.
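A few worked examples of the conversion:

underscored2camel_case('ott_id')              # -> 'ottId'
underscored2camel_case('tree_element_order')  # -> 'treeElementOrder'
underscored2camel_case('__leading')           # empty segments are skipped -> 'Leading'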
554
def unvalidated_parm(self, parm: str) -> bool:
    return (parm.startswith("_") or parm == self.TYPE or parm in self.IGNORE
            or (self.JSON_LD and parm.startswith('@')))
Return true if the pair name should be ignored.
555
def dispatch(self, request):
    def _wrapped():
        messages = self._get_request_messages(request)
        results = [self._dispatch_and_handle_errors(message) for message in messages]
        non_notification_results = [x for x in results if x is not None]
        if len(non_notification_results) == 0:
            return None
        elif len(messages) == 1:
            return non_notification_results[0]
        else:
            return non_notification_results
    result, _ = self._handle_exceptions(_wrapped)
    if result is not None:
        return self._encode_complete_result(result)
Takes a request and dispatches its data to a jsonrpc method.
556
def register(self, name, method, method_signature=None):
    if inspect.ismethod(method):
        raise Exception("typedjsonrpc does not support making class methods into endpoints")
    self._name_to_method_info[name] = MethodInfo(name, method, method_signature)
Registers a method with a given name and signature.
557
def method(self, returns, **parameter_types):
    @wrapt.decorator
    def type_check_wrapper(method, instance, args, kwargs):
        if instance is not None:
            raise Exception("Instance shouldn't be set.")
        parameter_names = inspect.getargspec(method).args
        defaults = inspect.getargspec(method).defaults
        parameters = self._collect_parameters(parameter_names, args, kwargs, defaults)
        parameter_checker.check_types(parameters, parameter_types, self._strict_floats)
        result = method(*args, **kwargs)
        parameter_checker.check_return_type(result, returns, self._strict_floats)
        return result

    def register_method(method):
        parameter_names = inspect.getargspec(method).args
        parameter_checker.check_type_declaration(parameter_names, parameter_types)
        wrapped_method = type_check_wrapper(method, None, None, None)
        fully_qualified_name = "{}.{}".format(method.__module__, method.__name__)
        self.register(fully_qualified_name, wrapped_method,
                      MethodSignature.create(parameter_names, parameter_types, returns))
        return wrapped_method

    return register_method
Syntactic sugar for registering a method.
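Typical decorator usage (registry here stands for an instance of this class; the name is illustrative):

@registry.method(returns=int, x=int, y=int)
def add(x, y):
    return x + y
# The endpoint is registered under its fully qualified name, e.g. 'mymodule.add',
# and both the arguments and the return value are type-checked at call time.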
558
def _collect_parameters(parameter_names, args, kwargs, defaults):
    parameters = {}
    if defaults is not None:
        zipped_defaults = zip(reversed(parameter_names), reversed(defaults))
        for name, default in zipped_defaults:
            parameters[name] = default
    for name, value in zip(parameter_names, args):
        parameters[name] = value
    for name, value in kwargs.items():
        parameters[name] = value
    return parameters
Creates a dictionary mapping parameter names to their values in the method call.
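A worked example of the precedence (defaults first, then positional args, then kwargs):

_collect_parameters(['a', 'b', 'c'], args=(1,), kwargs={'c': 30}, defaults=(2, 3))
# defaults fill from the right:   {'b': 2, 'c': 3}
# positional args overwrite:      {'a': 1, 'b': 2, 'c': 3}
# kwargs overwrite last, giving:  {'a': 1, 'b': 2, 'c': 30}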
559
def _get_request_messages(self, request):
    data = request.get_data(as_text=True)
    try:
        msg = self.json_decoder.decode(data)
    except Exception:
        raise ParseError("Could not parse request data '{}'".format(data))
    if isinstance(msg, list):
        return msg
    else:
        return [msg]
Parses the request as a json message.
560
def _check_request(self, msg):
    if "jsonrpc" not in msg:
        raise InvalidRequestError("'\"jsonrpc\": \"2.0\"' must be included.")
    if msg["jsonrpc"] != "2.0":
        raise InvalidRequestError("'jsonrpc' must be exactly the string '2.0', but it was '{}'."
                                  .format(msg["jsonrpc"]))
    if "method" not in msg:
        raise InvalidRequestError("No method specified.")
    if "id" in msg:
        if msg["id"] is None:
            raise InvalidRequestError("typedjsonrpc does not allow id to be None.")
        if isinstance(msg["id"], float):
            raise InvalidRequestError("typedjsonrpc does not support float ids.")
        if not isinstance(msg["id"], (six.string_types, six.integer_types)):
            raise InvalidRequestError("id must be a string or integer; '{}' is of type {}."
                                      .format(msg["id"], type(msg["id"])))
    if msg["method"] not in self._name_to_method_info:
        raise MethodNotFoundError("Could not find method '{}'.".format(msg["method"]))
Checks that the request json is well-formed.
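For reference, a message that passes every check above (the method name is hypothetical):

msg = {
    "jsonrpc": "2.0",          # must be exactly the string "2.0"
    "method": "mymodule.add",  # must be a registered method name
    "params": [1, 2],
    "id": 1,                   # string or int; None and floats are rejected
}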
561
def render_template_to_string(input, _from_string=False, **context):
    if _from_string:
        template = current_app.jinja_env.from_string(input)
    else:
        template = current_app.jinja_env.get_or_select_template(input)
    return template.render(context)
Render a template from the template folder with the given context.
562
def save_and_validate_logo(logo_stream, logo_filename, community_id):
    cfg = current_app.config
    logos_bucket_id = cfg['COMMUNITIES_BUCKET_UUID']
    logo_max_size = cfg['COMMUNITIES_LOGO_MAX_SIZE']
    logos_bucket = Bucket.query.get(logos_bucket_id)
    ext = os.path.splitext(logo_filename)[1]
    ext = ext[1:] if ext.startswith('.') else ext
    logo_stream.seek(SEEK_SET, SEEK_END)
    logo_size = logo_stream.tell()
    if logo_size > logo_max_size:
        return None
    if ext in cfg['COMMUNITIES_LOGO_EXTENSIONS']:
        key = "{0}/logo.{1}".format(community_id, ext)
        logo_stream.seek(0)
        ObjectVersion.create(logos_bucket, key, stream=logo_stream, size=logo_size)
        return ext
    else:
        return None
Validate that the community's logo is within the size limit, and save it.
563
def initialize_communities_bucket():
    bucket_id = UUID(current_app.config['COMMUNITIES_BUCKET_UUID'])
    if Bucket.query.get(bucket_id):
        raise FilesException("Bucket with UUID {} already exists.".format(bucket_id))
    else:
        storage_class = current_app.config['FILES_REST_DEFAULT_STORAGE_CLASS']
        location = Location.get_default()
        bucket = Bucket(id=bucket_id,
                        location=location,
                        default_storage_class=storage_class)
        db.session.add(bucket)
        db.session.commit()
Initialize the communities file bucket.
564
def format_request_email_templ(increq, template, **ctx):
    curate_link = '{site_url}/communities/{id}/curate/'.format(
        site_url=current_app.config['THEME_SITEURL'],
        id=increq.community.id)
    min_ctx = dict(
        record=Record.get_record(increq.record.id),
        requester=increq.user,
        community=increq.community,
        curate_link=curate_link,
    )
    for k, v in min_ctx.items():
        if k not in ctx:
            ctx[k] = v
    msg_element = render_template_to_string(template, **ctx)
    return msg_element
Format the email message element for inclusion request notification.
565
def format_request_email_title(increq, **ctx):
    # note: no trailing comma here; it would turn the template into a 1-tuple
    template = current_app.config["COMMUNITIES_REQUEST_EMAIL_TITLE_TEMPLATE"]
    return format_request_email_templ(increq, template, **ctx)
Format the email message title for inclusion request notification.
566
def format_request_email_body(increq, **ctx):
    # note: no trailing comma here; it would turn the template into a 1-tuple
    template = current_app.config["COMMUNITIES_REQUEST_EMAIL_BODY_TEMPLATE"]
    return format_request_email_templ(increq, template, **ctx)
Format the email message body for inclusion request notification.
567
def send_community_request_email(increq):
    from flask_mail import Message
    from invenio_mail.tasks import send_email
    msg_body = format_request_email_body(increq)
    msg_title = format_request_email_title(increq)
    sender = current_app.config['COMMUNITIES_REQUEST_EMAIL_SENDER']
    msg = Message(msg_title,
                  sender=sender,
                  recipients=[increq.community.owner.email],
                  body=msg_body)
    send_email.delay(msg.__dict__)
Signal for sending emails after community inclusion request.
568
def modifydocs(a, b, desc=''):
    newdoc = a.func_doc.replace('\t\t', '\t')
    newdoc += "Documentation from " + desc + ":\n" + b.func_doc
    return newdoc
Convenience function for writing documentation.
569
def tab_join(ToMerge, keycols=None, nullvals=None, renamer=None,
             returnrenaming=False, Names=None):
    [Result, Renaming] = spreadsheet.join(ToMerge, keycols=keycols,
                                          nullvals=nullvals, renamer=renamer,
                                          returnrenaming=True, Names=Names)
    if isinstance(ToMerge, dict):
        Names = ToMerge.keys()
    else:
        Names = range(len(ToMerge))
    # each name maps to its coloring dict, or {} when the array has none
    Colorings = dict([(k, ToMerge[k].coloring if 'coloring' in dir(ToMerge[k]) else {})
                      for k in Names])
    for k in Names:
        if k in Renaming.keys():
            l = ToMerge[k]
            Colorings[k] = dict(
                [(g, [n if n not in Renaming[k].keys() else Renaming[k][n]
                      for n in l.coloring[g]]) for g in Colorings[k].keys()])
    Coloring = {}
    for k in Colorings.keys():
        for j in Colorings[k].keys():
            if j in Coloring.keys():
                Coloring[j] = utils.uniqify(Coloring[j] + Colorings[k][j])
            else:
                Coloring[j] = utils.uniqify(Colorings[k][j])
    Result = Result.view(tabarray)
    Result.coloring = Coloring
    if returnrenaming:
        return [Result, Renaming]
    else:
        return Result
Database-join for tabular arrays.
570
def extract(self):
    return np.vstack([self[r] for r in self.dtype.names]).T.squeeze()
Creates a copy of this tabarray in the form of a numpy ndarray.
571
def addrecords(self, new):
    data = spreadsheet.addrecords(self, new)
    data = data.view(tabarray)
    data.coloring = self.coloring
    return data
Append one or more records to the end of the array.
572
def addcols(self, cols, names=None):
    data = spreadsheet.addcols(self, cols, names)
    data = data.view(tabarray)
    data.coloring = self.coloring
    return data
Add one or more new columns.
573
def renamecol(self, old, new):
    spreadsheet.renamecol(self, old, new)
    for x in self.coloring.keys():
        if old in self.coloring[x]:
            ind = self.coloring[x].index(old)
            self.coloring[x][ind] = new
Rename column or color in-place.
574
def colstack(self, new, mode='abort'):
    if isinstance(new, list):
        return tab_colstack([self] + new, mode)
    else:
        return tab_colstack([self, new], mode)
Horizontal stacking for tabarrays.
575
def rowstack(self, new, mode='nulls'):
    if isinstance(new, list):
        return tab_rowstack([self] + new, mode)
    else:
        return tab_rowstack([self, new], mode)
Vertical stacking for tabarrays.
576
def aggregate(self, On=None, AggFuncDict=None, AggFunc=None, AggList=None,
              returnsort=False, KeepOthers=True, keyfuncdict=None):
    if returnsort:
        [data, s] = spreadsheet.aggregate(X=self, On=On, AggFuncDict=AggFuncDict,
                                          AggFunc=AggFunc, AggList=AggList,
                                          returnsort=returnsort,
                                          keyfuncdict=keyfuncdict)
    else:
        data = spreadsheet.aggregate(X=self, On=On, AggFuncDict=AggFuncDict,
                                     AggFunc=AggFunc, AggList=AggList,
                                     returnsort=returnsort, KeepOthers=KeepOthers,
                                     keyfuncdict=keyfuncdict)
    data = data.view(tabarray)
    data.coloring = self.coloring
    if returnsort:
        return [data, s]
    else:
        return data
Aggregate a tabarray on columns for given functions.
577
def aggregate_in(self, On=None, AggFuncDict=None, AggFunc=None, AggList=None,
                 interspersed=True):
    data = spreadsheet.aggregate_in(Data=self, On=On, AggFuncDict=AggFuncDict,
                                    AggFunc=AggFunc, AggList=AggList,
                                    interspersed=interspersed)
    data = data.view(tabarray)
    # assign to .coloring (not .view, which would clobber the ndarray method)
    data.coloring = self.coloring
    return data
Aggregate a tabarray and include original data in the result.
578
def pivot(self, a, b, Keep=None, NullVals=None, order=None, prefix='_'):
    [data, coloring] = spreadsheet.pivot(X=self, a=a, b=b, Keep=Keep,
                                         NullVals=NullVals, order=order,
                                         prefix=prefix)
    data = data.view(tabarray)
    data.coloring = coloring
    return data
Pivot with a as the row axis and b values as the column axis.
579
def join(self, ToMerge, keycols=None, nullvals=None, renamer=None,
         returnrenaming=False, selfname=None, Names=None):
    if isinstance(ToMerge, np.ndarray):
        ToMerge = [ToMerge]
    if isinstance(ToMerge, dict):
        assert selfname not in ToMerge.keys(), \
            ('Can\'t use "' + str(selfname) + '" for name of one of the things to '
             'merge, since it is the same name as the self object.')
        if selfname is None:
            try:
                selfname = self.name
            except AttributeError:
                selfname = 'self'
        ToMerge.update({selfname: self})
    else:
        ToMerge = [self] + ToMerge
    return tab_join(ToMerge, keycols=keycols, nullvals=nullvals, renamer=renamer,
                    returnrenaming=returnrenaming, Names=Names)
Wrapper for spreadsheet.join that also handles coloring attributes.
580
def argsort(self, axis=-1, kind='quicksort', order=None):
    index_array = np.core.fromnumeric._wrapit(self, 'argsort', axis, kind, order)
    index_array = index_array.view(np.ndarray)
    return index_array
Returns the indices that would sort an array.
581
def matches(self, txt: str) -> bool:
    if r'\\u' in self.pattern_re.pattern:
        txt = txt.encode('utf-8').decode('unicode-escape')
    match = self.pattern_re.match(txt)
    return match is not None and match.end() == len(txt)
Determine whether txt matches pattern.
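The match.end() comparison is what anchors the pattern to the whole string; a bare re.match alone would accept a prefix match. A small self-contained illustration with a hypothetical pattern:

import re
pattern_re = re.compile(r'[0-9]+')
m = pattern_re.match('123abc')
m is not None             # True: '123' matches as a prefix
m.end() == len('123abc')  # False: the match did not cover the whole text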
582
def Point2HexColor(a, lfrac, tfrac):
    [H, S, V] = [math.floor(360 * a), lfrac, tfrac]
    RGB = hsvToRGB(H, S, V)
    H = [hex(int(math.floor(255 * x))) for x in RGB]
    HEX = [a[a.find('x') + 1:] for a in H]
    HEX = ['0' + h if len(h) == 1 else h for h in HEX]
    return '#' + ''.join(HEX)
Return web-safe hex triplets.
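Assuming hsvToRGB returns RGB channel floats in [0, 1], two illustrative calls:

Point2HexColor(0.0, 1, 1)    # hue 0, full saturation/value -> '#ff0000' (pure red)
Point2HexColor(1 / 3., 1, 1) # hue 120 -> '#00ff00' (pure green)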
583
def warn_from_util_logger(msg):
    global _LOG
    if _LOG is None and _LOGGING_CONF is None:
        sys.stderr.write('WARNING: (from peyotl before logging is configured) {}\n'.format(msg))
        return
    if _LOG is None:
        _LOG = get_logger("peyotl.utility")
    _LOG.warn(msg)
Only to be used in this file and peyotl.utility.get_config.
584
def state_delta(self, selector='all', power=None, duration=1.0, infrared=None,
                hue=None, saturation=None, brightness=None, kelvin=None):
    argument_tuples = [
        ("power", power),
        ("duration", duration),
        ("infrared", infrared),
        ("hue", hue),
        ("saturation", saturation),
        ("brightness", brightness),
        ("kelvin", kelvin)
    ]
    return self.client.perform_request(
        method='post', endpoint='lights/{}/state/delta',
        endpoint_args=[selector], argument_tuples=argument_tuples)
Given a state delta, apply the modifications to the lights' state over a given period of time.
585
def breathe_lights(self, color, selector='all', from_color=None, period=1.0,
                   cycles=1.0, persist=False, power_on=True, peak=0.5):
    argument_tuples = [
        ("color", color),
        ("from_color", from_color),
        ("period", period),
        ("cycles", cycles),
        ("persist", persist),
        ("power_on", power_on),
        ("peak", peak),
    ]
    return self.client.perform_request(
        method='post', endpoint='lights/{}/effects/breathe',
        endpoint_args=[selector], argument_tuples=argument_tuples)
Perform breathe effect on lights.
586
def cycle_lights(self, states, defaults, direction='forward', selector='all'):
    argument_tuples = [
        ("states", states),
        ("defaults", defaults),
        ("direction", direction)
    ]
    return self.client.perform_request(
        method='post', endpoint='lights/{}/cycle', endpoint_args=[selector],
        argument_tuples=argument_tuples, json_body=True)
Cycle through a list of effects.
587
def activate_scene(self, scene_uuid, duration=1.0):
    argument_tuples = [
        ("duration", duration),
    ]
    return self.client.perform_request(
        method='put', endpoint='scenes/scene_id:{}/activate',
        endpoint_args=[scene_uuid], argument_tuples=argument_tuples)
Activate a scene.
588
def count_num_trees(nexson, nexson_version=None):
    if nexson_version is None:
        nexson_version = detect_nexson_version(nexson)
    nex = get_nexml_el(nexson)
    num_trees_by_group = []
    if _is_by_id_hbf(nexson_version):
        for tree_group in nex.get('treesById', {}).values():
            nt = len(tree_group.get('treeById', {}))
            num_trees_by_group.append(nt)
    else:
        trees_group = nex.get('trees', [])
        if isinstance(trees_group, dict):
            trees_group = [trees_group]
        for tree_group in trees_group:
            t = tree_group.get('tree')
            if isinstance(t, list):
                nt = len(t)
            else:
                nt = 1
            num_trees_by_group.append(nt)
    return sum(num_trees_by_group)
Returns the number of trees summed across all tree groups.
589
def TreeCollectionStore(repos_dict=None,
                        repos_par=None,
                        with_caching=True,
                        assumed_doc_version=None,
                        git_ssh=None,
                        pkey=None,
                        git_action_class=TreeCollectionsGitAction,
                        mirror_info=None,
                        infrastructure_commit_author='OpenTree API <api@opentreeoflife.org>'):
    global _THE_TREE_COLLECTION_STORE
    if _THE_TREE_COLLECTION_STORE is None:
        _THE_TREE_COLLECTION_STORE = _TreeCollectionStore(
            repos_dict=repos_dict,
            repos_par=repos_par,
            with_caching=with_caching,
            assumed_doc_version=assumed_doc_version,
            git_ssh=git_ssh,
            pkey=pkey,
            git_action_class=git_action_class,
            mirror_info=mirror_info,
            infrastructure_commit_author=infrastructure_commit_author)
    return _THE_TREE_COLLECTION_STORE
Factory function for a _TreeCollectionStore object.
590
def _slugify_internal_collection_name(self, json_repr):
    collection = self._coerce_json_to_collection(json_repr)
    if collection is None:
        return None
    internal_name = collection['name']
    return slugify(internal_name)
Parse the JSON, find its name, and return a slug of the name.
591
def discover_roku():
    print("Searching for Roku devices within LAN ...")
    rokus = Roku.discover()
    if not rokus:
        print("Unable to discover Roku devices. "
              + "Try again, or manually specify the IP address with "
              + "\'roku <ipaddr>\' (e.g. roku 192.168.1.130)")
        return None
    print("Found the following Roku devices:")
    for i, r in enumerate(rokus):
        dinfo = ''
        print("[" + str(i + 1) + "] " + str(r.host) + ":" + str(r.port) + ' (' + dinfo + ')')
    print("")
    if len(rokus) == 1:
        print("Selecting Roku 1 by default")
        return rokus[0]
    else:
        print("Multiple Rokus found. Select the index of the Roku to control:")
        while True:
            try:
                query = "Select (1 to " + str(len(rokus)) + ") > "
                sel = int(input(query)) - 1
                if sel >= len(rokus):
                    raise ValueError
                else:
                    break
            except ValueError:
                print("Invalid selection")
        return rokus[sel]
Search LAN for available Roku devices. Returns a Roku object.
592
def ot_tnrs_match_names(name_list,
                        context_name=None,
                        do_approximate_matching=True,
                        include_dubious=False,
                        include_deprecated=True,
                        tnrs_wrapper=None):
    if tnrs_wrapper is None:
        from peyotl.sugar import tnrs
        tnrs_wrapper = tnrs
    match_obj = tnrs_wrapper.match_names(name_list,
                                         context_name=context_name,
                                         do_approximate_matching=do_approximate_matching,
                                         include_deprecated=include_deprecated,
                                         include_dubious=include_dubious,
                                         wrap_response=True)
    return match_obj
Uses a peyotl wrapper around an Open Tree web service to get a list of OTT IDs matching the name_list. The tnrs_wrapper can be None (in which case the default wrapper from peyotl.sugar will be used). All other arguments correspond to the arguments of the web-service call. A ValueError will be raised if the context_name does not match one of the valid names for a taxonomic context. This uses the wrap_response option to create and return a TNRSResponse object around the response.
593
def _objectify(field, value, ns_info):
    if (getattr(field.type_, "_treat_none_as_empty_list", False) and value is None):
        return []
    if value is None:
        return None
    elif field.type_:
        return value.to_obj(ns_info=ns_info)
    return field.binding_value(value)
Make value suitable for a binding object.
594
def _dictify(field, value):
    if value is None:
        return None
    elif field.type_:
        return value.to_dict()
    return field.dict_value(value)
Make value suitable for a dictionary.
595
def from_dict(cls, cls_dict, fallback_xsi_type=None):
    if not cls_dict:
        return None
    if isinstance(cls_dict, six.string_types):
        if not getattr(cls, "_convert_strings", False):
            return cls_dict
    try:
        typekey = cls.dictkey(cls_dict)
    except TypeError:
        typekey = fallback_xsi_type
    klass = cls.entity_class(typekey)
    return klass.from_dict(cls_dict)
Parse the dictionary and return an Entity instance.
596
def from_obj(cls, cls_obj):
    if not cls_obj:
        return None
    typekey = cls.objkey(cls_obj)
    klass = cls.entity_class(typekey)
    return klass.from_obj(cls_obj)
Parse the generateDS object and return an Entity instance.
597
def typed_fields(cls):
    klassdict = cls.__dict__
    try:
        return klassdict["_typed_fields"]
    except KeyError:
        fields = cls.typed_fields_with_attrnames()
        cls._typed_fields = tuple(field for _, field in fields)
    return cls._typed_fields
Return a tuple of this entity's TypedFields.
598
def to_obj(self, ns_info=None):
    if ns_info:
        ns_info.collect(self)
    if not hasattr(self, "_binding_class"):
        return None
    entity_obj = self._binding_class()
    for field, val in six.iteritems(self._fields):
        if isinstance(val, EntityList) and len(val) == 0:
            val = None
        elif field.multiple:
            if val:
                val = [_objectify(field, x, ns_info) for x in val]
            else:
                val = []
        else:
            val = _objectify(field, val, ns_info)
        setattr(entity_obj, field.name, val)
    self._finalize_obj(entity_obj)
    return entity_obj
Convert to a GenerateDS binding object.
599
def to_dict(self):
    entity_dict = {}
    for field, val in six.iteritems(self._fields):
        if field.multiple:
            if val:
                val = [_dictify(field, x) for x in val]
            else:
                val = []
        else:
            val = _dictify(field, val)
        if val is not None and val != []:
            entity_dict[field.key_name] = val
    self._finalize_dict(entity_dict)
    return entity_dict
Convert to a dict.