signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def main ( ) : """Start the poor _ consumer ."""
try : opts , args = getopt . getopt ( sys . argv [ 1 : ] , "h:v" , [ "help" , "nack=" , "servers=" , "queues=" ] ) except getopt . GetoptError as err : print str ( err ) usage ( ) sys . exit ( ) # defaults nack = 0.0 verbose = False servers = "localhost:7712,localhost:7711" queues = "test" for o , a in ...
def eval_string(stri):
    """Evaluate an expression passed as a string via ``plash eval``.

    The string is split with shell-style tokenization, the tokens are
    joined with newlines, fed to the ``plash eval`` subprocess, and the
    decoded output is returned.
    """
    words = shlex.split(stri)
    payload = '\n'.join(words).encode()
    return run_write_read(['plash', 'eval'], payload).decode()
async def get_offers ( connection : Connection ) -> dict : """Retrieves all pending credential offers for a given connection . : param connection : A connection handle : return : A list of dictionary objects representing offers from a given connection . Example : credential = await Credential . create _ wit...
if not hasattr ( Credential . get_offers , "cb" ) : Credential . get_offers . cb = create_cb ( CFUNCTYPE ( None , c_uint32 , c_uint32 , c_char_p ) ) c_connection_handle = c_uint32 ( connection . handle ) data = await do_call ( 'vcx_credential_get_offers' , c_connection_handle , Credential . get_offers . cb ) return...
def dirsplit(path):
    r"""Split ``path`` into its directory components.

    Repeatedly applies ``split`` until either the head or the tail is
    exhausted, drops empty components, and returns the pieces in
    root-to-leaf order (any remaining head, e.g. a drive letter or a
    leading slash, is kept as the first component).

    Args:
        path (str):

    Returns:
        list: components of the path
    """
    pieces = []
    head = path
    tail = True
    while tail != '' and head != '':
        head, tail = split(head)
        pieces.append(tail)
    pieces = [piece for piece in pieces if piece != '']
    if head != '':
        pieces.append(head)
    pieces.reverse()
    return pieces
def write_from_sid_df_pairs ( self , country_code , data , scaling_factors = None ) : """Parameters country _ code : str The ISO 3166 alpha - 2 country code for this country . data : iterable [ tuple [ int , pandas . DataFrame ] ] The data chunks to write . Each chunk should be a tuple of sid and the data...
data = list ( data ) if not data : empty_frame = pd . DataFrame ( data = None , index = np . array ( [ ] , dtype = 'datetime64[ns]' ) , columns = np . array ( [ ] , dtype = 'int64' ) , ) return self . write ( country_code , { f : empty_frame . copy ( ) for f in FIELDS } , scaling_factors , ) sids , frames = zip...
def scores_to_preds ( self , threshold , use_probs = True ) : """use _ probs : boolean , default True if True , use probabilities for predictions , else use scores ."""
self . threshold = threshold if use_probs : if self . probs is None : raise DataError ( "Probabilities are not available to make " "predictions." ) else : word = "probabilities" scores = self . probs else : if self . scores is None : raise DataError ( "Scores are not availabl...
def encode_string ( self , value ) : """Convert ASCII , Latin - 1 or UTF - 8 to pure Unicode"""
if not isinstance ( value , str ) : return value try : return unicode ( value , 'utf-8' ) except : # really , this should throw an exception . # in the interest of not breaking current # systems , however : arr = [ ] for ch in value : arr . append ( unichr ( ord ( ch ) ) ) return u"" . join ...
def _setauto ( self , s , length , offset ) : """Set bitstring from a bitstring , file , bool , integer , array , iterable or string ."""
# As s can be so many different things it ' s important to do the checks # in the correct order , as some types are also other allowed types . # So basestring must be checked before Iterable # and bytes / bytearray before Iterable but after basestring ! if isinstance ( s , Bits ) : if length is None : lengt...
def nvlist2(thelist, names=None):
    '''Like nvlist but applied one more time to each returned value.

    For every (namespace, name, value) triple produced by ``nvlist`` over
    ``thelist`` (filtered by ``names``), yields the triples produced by a
    second ``nvlist`` pass over that value.
    '''
    for _, _, inner_value in nvlist(thelist, names):
        yield from nvlist(inner_value)
def join ( self , other , attrlist = None , auto_create_indexes = True , ** kwargs ) : """Join the objects of one table with the objects of another , based on the given matching attributes in the named arguments . The attrlist specifies the attributes to be copied from the source tables - if omitted , all attri...
if not kwargs : raise TypeError ( "must specify at least one join attribute as a named argument" ) thiscol , othercol = next ( iter ( kwargs . items ( ) ) ) retname = ( "(%s:%s^%s:%s)" % ( self . table_name , thiscol , other . table_name , othercol ) ) # make sure both tables contain records to join - if not , just...
def cylindrical_window ( self , radius , lat0 , long0 ) : '''Cylindrical projection of a window centered at ( lat0 , long0 ) with a given radius ( km ) . Args : radius ( float ) : Radius of the window ( km ) . lat0 ( float ) : Latitude at the center ( degree ) . long0 ( float ) : Longitude at the center (...
# Passage en radian radi = radius * 2 * np . pi / ( 2 * 1734.4 * np . pi ) lamb0 = long0 * np . pi / 180.0 phi0 = lat0 * np . pi / 180.0 # Long / lat min ( voir wikipedia ) longll = - radi / np . cos ( phi0 ) + lamb0 latll = np . arcsin ( ( - radi + np . sin ( phi0 ) / np . cos ( phi0 ) ) * np . cos ( phi0 ) ) if np . ...
def get(self, objectType, *args, **coolArgs):
    """Raba Magic inside. This is the function that you use for querying
    pyGeno's DB.

    Usage examples:
      * myGenome.get("Gene", name='TPST2')
      * myGene.get(Protein, id='ENSID...')

    Returns a list of results; wrapper types are re-wrapped with this
    object's bag key, raw results are returned as-is.
    """
    results = []
    query = self._makeLoadQuery(objectType, *args, **coolArgs)
    needs_wrapping = issubclass(objectType, pyGenoRabaObjectWrapper)
    for raw in query.iterRun():
        if needs_wrapping:
            results.append(objectType(wrapped_object_and_bag=(raw, self.bagKey)))
        else:
            results.append(raw)
    return results
def attachPurrlog ( self , purrlog , watchdirs = [ ] ) : """Attaches Purr to the given purrlog directory . Arguments are passed to Purrer object as is ."""
# check purrer stack for a Purrer already watching this directory dprint ( 1 , "attaching to purrlog" , purrlog ) for i , purrer in enumerate ( self . purrer_stack ) : if os . path . samefile ( purrer . logdir , purrlog ) : dprint ( 1 , "Purrer object found on stack (#%d),reusing\n" , i ) # found ? ...
def _search_url(search_term: str, size: str = '>400*300', format: str = 'jpg') -> str:
    "Return a Google Images Search URL for a given search term."
    # Fixed query-string pieces are kept byte-for-byte; only the search
    # term and the size/format parameters vary.
    prefix = 'https://www.google.com/search?q=' + quote(search_term)
    middle = '&espv=2&biw=1366&bih=667&site=webhp&source=lnms&tbm=isch'
    suffix = '&sa=X&ei=XosDVaCXD8TasATItgE&ved=0CAcQ_AUoAg'
    return prefix + middle + _url_params(size, format) + suffix
def load_package_template(license, header=False):
    """Load a license template distributed with the package.

    Returns an in-memory text buffer containing the template decoded as
    UTF-8. When ``header`` is true, the header variant of the template is
    loaded instead of the full license text.
    """
    template_name = 'template-%s-header.txt' if header else 'template-%s.txt'
    buffer = StringIO()
    with resource_stream(__name__, template_name % license) as licfile:
        for raw_line in licfile:
            # resource_stream yields bytes; decode before buffering.
            buffer.write(raw_line.decode("utf-8"))
    return buffer
def get_template_for_path(path, use_cache=True):
    '''Convenience method that retrieves a template given a direct path to it.'''
    dmp = apps.get_app_config('django_mako_plus')
    app_path, template_name = os.path.split(path)
    loader = dmp.engine.get_template_loader_for_path(app_path, use_cache=use_cache)
    return loader.get_template(template_name)
def create_list_stories ( list_id_stories , number_of_stories , shuffle , max_threads ) : """Show in a formatted way the stories for each item of the list ."""
list_stories = [ ] with ThreadPoolExecutor ( max_workers = max_threads ) as executor : futures = { executor . submit ( get_story , new ) for new in list_id_stories [ : number_of_stories ] } for future in tqdm ( as_completed ( futures ) , desc = 'Getting results' , unit = ' news' , ) : list_stories . app...
def render(msgpack_data, saltenv='base', sls='', **kws):
    '''Accepts a message pack string or a file object, renders said data
    back to a python dict.

    .. note:: This renderer is NOT intended for use in creating sls files
        by hand, but exists to allow for data backends to serialize the
        highdata.
    '''
    # A file-like object must be read into a string first.
    if not isinstance(msgpack_data, six.string_types):
        msgpack_data = msgpack_data.read()
    # Strip a leading shebang line (e.g. "#!msgpack") if present.
    if msgpack_data.startswith('#!'):
        newline_at = msgpack_data.find('\n')
        msgpack_data = msgpack_data[newline_at + 1:]
    if not msgpack_data.strip():
        return {}
    return salt.utils.msgpack.loads(msgpack_data)
def fill_empty ( self , fixed_values , input ) : """Fill in random values for all empty - valued ItemData elements in an ODM document"""
odm_elements = etree . fromstring ( input ) for v in odm_elements . iter ( E_ODM . ITEM_DATA . value ) : if v . get ( A_ODM . VALUE . value ) == "" : oid = v . get ( A_ODM . ITEM_OID . value ) if fixed_values is not None and oid in fixed_values : d = fixed_values [ oid ] else : ...
def set_trig_start(self, time, pass_to_command_line=True):
    """Set the trig start time of the analysis node.

    @param time: trig start time of job.
    @param pass_to_command_line: when true, also add a
        ``--trig-start-time`` variable option to the node so it is passed
        on the command line when the node is executed.
    """
    self.__trig_start = time
    if pass_to_command_line:
        self.add_var_opt('trig-start-time', time)
def get_mean_and_stddevs ( self , sites , rup , dists , imt , stddev_types ) : """See : meth : ` superclass method < . base . GroundShakingIntensityModel . get _ mean _ and _ stddevs > ` for spec of input and result values ."""
assert all ( stddev_type in self . DEFINED_FOR_STANDARD_DEVIATION_TYPES for stddev_type in stddev_types ) # GMPE differentiates strike - slip , reverse and normal ruptures , # but combines normal and strike - slip into one category . See page 180. is_reverse = ( 45 <= rup . rake <= 135 ) stddevs = [ numpy . zeros_like ...
def default_run_conf(self):
    '''Default run configuration (namedtuple)'''
    # Build a namedtuple type whose fields mirror the stored mapping,
    # then instantiate it from that same mapping.
    conf_type = namedtuple('default_run_conf',
                           field_names=self._default_run_conf.keys())
    return conf_type(**self._default_run_conf)
def bind(self, **fields):
    """Return a new L{Message} with this message's contents plus the
    additional given bindings; the original message is left unchanged."""
    bound = self._contents.copy()
    bound.update(fields)
    return Message(bound, self._serializer)
def get_mean_and_stddevs ( self , sites , rup , dists , imt , stddev_types ) : """See : meth : ` superclass method < . base . GroundShakingIntensityModel . get _ mean _ and _ stddevs > ` for spec of input and result values ."""
# extracting dictionary of coefficients specific to required # intensity measure type . C = self . COEFFS [ imt ] mean = ( self . _get_magnitude_scaling_term ( C , rup . mag ) + self . _get_distance_scaling_term ( C , rup . mag , dists . rrup ) + self . _get_style_of_faulting_term ( C , rup . rake ) + self . _get_site_...
def number_peaks(self, x, n=None):
    """As in tsfresh `number_peaks <https://github.com/blue-yonder/tsfresh/blob/master/tsfresh/feature_extraction/feature_calculators.py#L1003>`_

    Calculates the number of peaks of at least support n in the time
    series x.

    :param x: the time series to calculate the feature of
    :param n: the support of the peak; defaults to 5 when not given
    :return: the number of peaks
    """
    if n is None:
        n = 5
    peaks = feature_calculators.number_peaks(x, n)
    # Fixed copy-paste bug: the message previously said "agg linear
    # trend", which belongs to a different tsfresh feature.
    logging.debug("number peaks by tsfresh calculated")
    return peaks
def get_version ( ) : """Loads the current module version from version . py and returns it . : returns : module version identifier . : rtype : str"""
local_results = { } version_file_path = os . path . join ( 'pytextql' , 'version.py' ) # This is compatible with py3k which removed execfile . with open ( version_file_path , 'rb' ) as fin : # Compiling instead of passing the text straight to exec # associates any errors with the correct file name . code = compile ...
def get_template_loaders ( ) : """Compatibility method to fetch the template loaders . Source : https : / / github . com / django - debug - toolbar / django - debug - toolbar / blob / ece1c2775af108a92a0ef59636266b49e286e916 / debug _ toolbar / compat . py"""
try : from django . template . engine import Engine except ImportError : # Django < 1.8 Engine = None if Engine : try : engine = Engine . get_default ( ) except ImproperlyConfigured : loaders = [ ] else : loaders = engine . template_loaders else : # Django < 1.8 from djan...
def _key_period ( self , text ) : """Action for ' . '"""
self . insert_text ( text ) if self . codecompletion_auto : # Enable auto - completion only if last token isn ' t a float last_obj = self . get_last_obj ( ) if last_obj and not last_obj . isdigit ( ) : self . show_code_completion ( )
def list_clusters(self):
    """List the clusters in this instance.

    :rtype: tuple
    :returns: (clusters, failed_locations), where ``clusters`` is the
        list of Cluster objects built from the admin API response and
        ``failed_locations`` is passed through from that response.
    """
    response = self._client.instance_admin_client.list_clusters(self.name)
    clusters = []
    for cluster_pb in response.clusters:
        clusters.append(Cluster.from_pb(cluster_pb, self))
    return clusters, response.failed_locations
def service_start(name):
    '''Start a "service" on the ssh server.

    .. versionadded:: 2015.8.2
    '''
    # Send the start command to the remote server, then "scrape" the
    # output and return the right fields as a dict.
    out, err = DETAILS['server'].sendline('start ' + name)
    return parse(out)
def scroll_down(lines=1, file=sys.stdout):
    """Scroll the whole page down a number of lines, new lines are added
    to the top.

    Esc[<lines>T
    """
    command = scroll.down(lines)
    command.write(file=file)
def bounds(self):
    """The bounds of the random variable.

    Set `self.i = 0.95` to return the 95% interval if this is used for
    setting bounds on optimizers/etc. where infinite bounds may not be
    useful.
    """
    intervals = []
    for sigma, mu in zip(self.sigma, self.mu):
        intervals.append(scipy.stats.norm.interval(self.i, loc=mu, scale=sigma))
    return intervals
def aloha_to_etree(html_source):
    """Convert HTML5 from Aloha editor output to a lxml etree.

    The raw markup is first tidied to XHTML5, then passed through every
    transform in ALOHA2HTML_TRANSFORM_PIPELINE, in order.
    """
    xml = _tidy2xhtml5(html_source)
    # The loop index produced by the original enumerate() was never used.
    for transform in ALOHA2HTML_TRANSFORM_PIPELINE:
        xml = transform(xml)
    return xml
def fingerprint_from_var ( var ) : """Extract a fingerprint from a GPG public key"""
vsn = gpg_version ( ) cmd = flatten ( [ gnupg_bin ( ) , gnupg_home ( ) ] ) if vsn [ 0 ] >= 2 and vsn [ 1 ] < 1 : cmd . append ( "--with-fingerprint" ) output = polite_string ( stderr_with_input ( cmd , var ) ) . split ( '\n' ) if not output [ 0 ] . startswith ( 'pub' ) : raise CryptoritoError ( 'probably an inv...
async def iter_lines(self, chunk_size=1024):
    """Return an iterator to yield lines from the raw stream.

    This is achieved by reading chunk of bytes (of size chunk_size) at a
    time from the raw stream, and then yielding lines from there.
    """
    carry = b''
    async for chunk in self.iter_chunks(chunk_size):
        # splitlines(True) keeps terminators, so the last piece is a
        # (possibly incomplete) line to carry into the next chunk.
        pieces = (carry + chunk).splitlines(True)
        for complete in pieces[:-1]:
            await yield_(complete.splitlines()[0])
        carry = pieces[-1]
    if carry:
        await yield_(carry.splitlines()[0])
def VerifyMessageSignature ( self , unused_response_comms , packed_message_list , cipher , cipher_verified , api_version , remote_public_key ) : """Verify the message list signature . This is the way the messages are verified in the client . In the client we also check that the nonce returned by the server is c...
# This is not used atm since we only support a single api version ( 3 ) . _ = api_version result = rdf_flows . GrrMessage . AuthorizationState . UNAUTHENTICATED if cipher_verified or cipher . VerifyCipherSignature ( remote_public_key ) : stats_collector_instance . Get ( ) . IncrementCounter ( "grr_authenticated_mes...
def reconstructImage(self):
    '''Do the inverse Fourier transform and return the result.'''
    # Undo the earlier fftshift, invert the 2-D FFT, and keep only the
    # real part of the (numerically complex) output.
    unshifted = np.fft.ifftshift(self.fshift)
    return np.real(np.fft.ifft2(unshifted))
def check ( self , request , secret ) : """Verifies whether or not the request bears an authorization appropriate and valid for this version of the signature . This verifies every element of the signature , including the timestamp ' s value . Does not alter the request . Keyword arguments : request - - A re...
if request . get_header ( "Authorization" ) == "" : return False ah = self . parse_auth_headers ( request . get_header ( "Authorization" ) ) if "signature" not in ah : return False if request . get_header ( 'x-authorization-timestamp' ) == '' : raise KeyError ( "X-Authorization-Timestamp is required." ) tim...
def energy_error(NAV_CONTROLLER_OUTPUT, VFR_HUD):
    '''Return energy error matching APM internals.

    This is positive when we are too low or going too slow.'''
    speed_term = airspeed_energy_error(NAV_CONTROLLER_OUTPUT, VFR_HUD)
    # alt_error converted to cm, then weighted by 0.098 (presumably
    # related to g/100 in the APM firmware scaling -- TODO confirm).
    alt_error_cm = NAV_CONTROLLER_OUTPUT.alt_error * 100
    return speed_term + alt_error_cm * 0.098
def _build_named_object_ids(parameters):
    """Build a list of NamedObjectId.

    Accepts either a single parameter name (str) or an iterable of names;
    a single name is treated as a one-element list.
    """
    if isinstance(parameters, str):
        parameters = [parameters]
    return [_build_named_object_id(parameter) for parameter in parameters]
def profile ( request ) : '''Get or set user profile .'''
serializer_class = registration_settings . PROFILE_SERIALIZER_CLASS if request . method in [ 'POST' , 'PUT' , 'PATCH' ] : partial = request . method == 'PATCH' serializer = serializer_class ( instance = request . user , data = request . data , partial = partial , ) serializer . is_valid ( raise_exception = ...
def edit(self, description='', files=None):
    """Edit this gist.

    :param str description: (optional), description of the gist
    :param dict files: (optional), files that make up this gist; the
        key(s) should be the file name(s) and the values should be
        another (optional) dictionary with file content
    :returns: bool -- True when the PATCH succeeded and the gist was
        updated, False otherwise (including when nothing was given to
        change)
    """
    # ``files`` previously defaulted to a mutable ``{}``; ``None`` avoids
    # the shared-mutable-default pitfall and behaves identically here
    # because the value is only tested for truthiness.
    data = {}
    json = None
    if description:
        data['description'] = description
    if files:
        data['files'] = files
    if data:
        json = self._json(self._patch(self._api, data=dumps(data)), 200)
    if json:
        self._update_(json)
        return True
    return False
def velocity_embedding ( data , basis = None , vkey = 'velocity' , scale = 10 , self_transitions = True , use_negative_cosines = True , direct_projection = None , pca_transform = None , retain_scale = False , autoscale = True , all_comps = True , T = None , copy = False ) : """Computes the single cell velocities in...
adata = data . copy ( ) if copy else data if basis is None : keys = [ key for key in [ 'pca' , 'tsne' , 'umap' ] if 'X_' + key in adata . obsm . keys ( ) ] if len ( keys ) > 0 : basis = keys [ - 1 ] else : raise ValueError ( 'No basis specified' ) if 'X_' + basis not in adata . obsm_keys ( )...
def to_alu_hlu_map ( input_str ) : """Converter for alu hlu map Convert following input into a alu - > hlu map : Sample input : HLU Number ALU Number 0 12 1 23 ALU stands for array LUN number hlu stands for host LUN number : param input _ str : raw input from naviseccli : return : alu - > hlu map"...
ret = { } if input_str is not None : pattern = re . compile ( r'(\d+)\s*(\d+)' ) for line in input_str . split ( '\n' ) : line = line . strip ( ) if len ( line ) == 0 : continue matched = re . search ( pattern , line ) if matched is None or len ( matched . groups ( ) ...
def series_in_dir ( self ) : """input is dcmdir , not dirpath"""
# none _ count = 0 countsd = { } # dcmdirseries = [ ] for line in self . files_with_info : if "SeriesNumber" in line : sn = line [ 'SeriesNumber' ] else : sn = None if sn in countsd : countsd [ sn ] += 1 else : countsd [ sn ] = 1 bins = list ( countsd ) counts = list ( co...
def load(self, spec):
    """Find and return the template associated to a TemplateSpec instance.

    Returns the template as a unicode string.

    Arguments:
      spec: a TemplateSpec instance.
    """
    if spec.template is None:
        # No inline template: locate the template file and read it.
        return self.loader.read(self._find(spec), spec.template_encoding)
    return self.loader.unicode(spec.template, spec.template_encoding)
def zip2bytes(compressed):
    """Decompress gzip data given either a file-like object or raw bytes."""
    if hasattr(compressed, "read"):
        # Already file-like: wrap it for streaming decompression.
        return gzip.GzipFile(fileobj=compressed, mode='r')
    archive = gzip.GzipFile(fileobj=BytesIO(compressed), mode='r')
    from pyLibrary.env.big_data import safe_size
    return safe_size(archive)
def delete_poll ( args ) : """Deletes a poll ."""
if not args . isadmin : return "Nope, not gonna do it." if not args . msg : return "Syntax: !poll delete <pollnum>" if not args . msg . isdigit ( ) : return "Not A Valid Positive Integer." poll = args . session . query ( Polls ) . filter ( Polls . accepted == 1 , Polls . id == int ( args . msg ) ) . first (...
def _pick_exit ( self , block_address , stmt_idx , target_ips ) : """Include an exit in the final slice . : param block _ address : Address of the basic block . : param stmt _ idx : ID of the exit statement . : param target _ ips : The target address of this exit statement ."""
# TODO : Support context - sensitivity tpl = ( stmt_idx , target_ips ) if tpl not in self . chosen_exits [ block_address ] : self . chosen_exits [ block_address ] . append ( tpl )
def get_defaults ( path ) : '''Reads file for configuration defaults . Arguments : - path ( str ) Absolute filepath ( usually ~ / . licenser ) Returns : - ( dict ) Defaults for name , email , license , . txt extension'''
defaults = { } if os . path . isfile ( path ) : with open ( path ) as f : for line in f : line = line . strip ( ) if '=' not in line or line . startswith ( '#' ) : continue k , v = line . split ( '=' , 1 ) v = v . strip ( '"' ) . strip ( "'" ) ...
def maybe_download ( url , filename ) : """Download the data from Yann ' s website , unless it ' s already here ."""
if not os . path . exists ( WORK_DIRECTORY ) : os . mkdir ( WORK_DIRECTORY ) filepath = os . path . join ( WORK_DIRECTORY , filename ) if not os . path . exists ( filepath ) : filepath , _ = request . urlretrieve ( url + filename , filepath ) statinfo = os . stat ( filepath ) print ( 'Successfully downl...
def dumpBlock ( self , block_name ) : """This method is used at source server and gets the information on a single block that is being migrated . Try to return in a format to be ready for insert calls"""
if '%' in block_name or '*' in block_name : msg = "No wildcard is allowed in block_name for dumpBlock API" dbsExceptionHandler ( 'dbsException-invalid-input' , msg , self . logger . exception ) conn = self . dbi . connection ( ) try : # block name is unique block1 = self . blocklist . execute ( conn , block...
def uninit ( self ) : """! @ brief Uninitialize the flash algo . Before further operations are executed , the algo must be reinited . The target is left in a state where algo does not have to be reloaded when init ( ) is called . @ exception FlashFailure"""
if self . _active_operation is None : return if self . _is_api_valid ( 'pc_unInit' ) : # update core register to execute the uninit subroutine result = self . _call_function_and_wait ( self . flash_algo [ 'pc_unInit' ] , r0 = self . _active_operation . value ) # check the return code if result != 0 : ...
def from_wif_file ( path : str ) -> SigningKeyType : """Return SigningKey instance from Duniter WIF file : param path : Path to WIF file"""
with open ( path , 'r' ) as fh : wif_content = fh . read ( ) # check data field regex = compile ( 'Data: ([1-9A-HJ-NP-Za-km-z]+)' , MULTILINE ) match = search ( regex , wif_content ) if not match : raise Exception ( 'Error: Bad format WIF v1 file' ) # capture hexa wif key wif_hex = match . groups ( ) [ 0 ] retu...
def put_blob ( storage_conn = None , ** kwargs ) : '''. . versionadded : : 2015.8.0 Upload a blob'''
if not storage_conn : storage_conn = get_storage_conn ( opts = kwargs ) if 'container' not in kwargs : raise SaltSystemExit ( code = 42 , msg = 'The blob container name must be specified as "container"' ) if 'name' not in kwargs : raise SaltSystemExit ( code = 42 , msg = 'The blob name must be specified as ...
def create_dataset_synchronous(self, file_url, dataset_type='image', token=None, url=API_CREATE_DATASET):
    """Create a dataset so you can train models from it.

    :param file_url: string, url to an accessible zip file containing the
        necessary image files and folder structure indicating the labels
    :param dataset_type: dataset kind sent in the multipart body
    :param token: bearer token; checked/refreshed via check_for_token
    :param url: API endpoint to POST to
    :return: the raw ``requests`` response object
    """
    bearer = 'Bearer ' + self.check_for_token(token)
    encoder = MultipartEncoder(fields={'type': dataset_type, 'path': file_url})
    request_headers = {
        'Authorization': bearer,
        'Cache-Control': 'no-cache',
        'Content-Type': encoder.content_type,
    }
    return requests.post(url, headers=request_headers, data=encoder)
def desc_for ( self , obj : Element , doing_descs : bool ) -> str : """Return a description for object if it is unique ( different than its parent ) @ param obj : object to be described @ param doing _ descs : If false , always return an empty string @ return : text or empty string"""
if obj . description and doing_descs : if isinstance ( obj , SlotDefinition ) and obj . is_a : parent = self . schema . slots [ obj . is_a ] elif isinstance ( obj , ClassDefinition ) and obj . is_a : parent = self . schema . classes [ obj . is_a ] else : parent = None return '' i...
def params ( self ) : """: return : A dictionary of SSOS query parameters . : rtype : dict"""
params = dict ( format = RESPONSE_FORMAT , verbose = self . verbose , epoch1 = str ( self . search_start_date ) , epoch2 = str ( self . search_end_date ) , search = self . orbit_method , eunits = self . error_units , eellipse = self . error_ellipse , extres = self . resolve_extension , xyres = self . resolve_position ,...
def get_scheme ( self ) : """When Splunk starts , it looks for all the modular inputs defined by its configuration , and tries to run them with the argument - - scheme . Splunkd expects the modular inputs to print a description of the input in XML on stdout . The modular input framework takes care of all th...
# Splunk will display " Github Repository Forks " to users for this input scheme = Scheme ( "Github Repository Forks" ) scheme . description = "Streams events giving the number of forks of a GitHub repository." # If you set external validation to True , without overriding validate _ input , # the script will accept any...
def createPerson ( self , nickname , vip = _NO_VIP ) : """Create a new L { Person } with the given name in this organizer . @ type nickname : C { unicode } @ param nickname : The value for the new person ' s C { name } attribute . @ type vip : C { bool } @ param vip : Value to set the created person ' s C {...
for person in ( self . store . query ( Person , attributes . AND ( Person . name == nickname , Person . organizer == self ) ) ) : raise ValueError ( "Person with name %r exists already." % ( nickname , ) ) person = Person ( store = self . store , created = extime . Time ( ) , organizer = self , name = nickname ) if...
def _load_modules_from_entry_points(self, entry_point_group):
    """Load modules from the entry_points (slower).

    Entry points can be used to add new commands to the CLI. Usage:

        entry_points={'softlayer.cli': ['new-cmd = mymodule.new_cmd.cli']}
    """
    entry_points = pkg_resources.iter_entry_points(group=entry_point_group,
                                                   name=None)
    for entry_point in entry_points:
        self.commands[entry_point.name] = entry_point
def cancel ( self , timeperiods , hosts , services ) : """Remove ref in scheduled downtime and raise downtime log entry ( cancel ) : param hosts : hosts objects to get item ref : type hosts : alignak . objects . host . Hosts : param services : services objects to get item ref : type services : alignak . obj...
if self . ref in hosts : item = hosts [ self . ref ] else : item = services [ self . ref ] broks = [ ] self . is_in_effect = False item . scheduled_downtime_depth -= 1 if item . scheduled_downtime_depth == 0 : item . raise_cancel_downtime_log_entry ( ) item . in_scheduled_downtime = False if self . ...
def lemmatize ( ambiguous_word : str , pos : str = None , neverstem = False , lemmatizer = wnl , stemmer = porter ) -> str : """Tries to convert a surface word into lemma , and if lemmatize word is not in wordnet then try and convert surface word into its stem . This is to handle the case where users input a su...
# Try to be a little smarter and use most frequent POS . pos = pos if pos else penn2morphy ( pos_tag ( [ ambiguous_word ] ) [ 0 ] [ 1 ] , default_to_noun = True ) lemma = lemmatizer . lemmatize ( ambiguous_word , pos = pos ) stem = stemmer . stem ( ambiguous_word ) # Ensure that ambiguous word is a lemma . if not wn . ...
def get_asset_mdata ( ) : """Return default mdata map for Asset"""
return { 'copyright_registration' : { 'element_label' : { 'text' : 'copyright registration' , 'languageTypeId' : str ( DEFAULT_LANGUAGE_TYPE ) , 'scriptTypeId' : str ( DEFAULT_SCRIPT_TYPE ) , 'formatTypeId' : str ( DEFAULT_FORMAT_TYPE ) , } , 'instructions' : { 'text' : 'enter no more than 256 characters.' , 'languageT...
def new_param ( name , type , value , start = None , scale = None , unit = None , dataunit = None , comment = None ) : """Construct a LIGO Light Weight XML Param document subtree . FIXME : document keyword arguments ."""
elem = Param ( ) elem . Name = name elem . Type = type elem . pcdata = value # FIXME : I have no idea how most of the attributes should be # encoded , I don ' t even know what they ' re supposed to be . if dataunit is not None : elem . DataUnit = dataunit if scale is not None : elem . Scale = scale if start is ...
def clear(self, name):
    """Clear (reset) a counter specified by its name.

    :param name: a counter name to clear.
    :raises KeyError: if no counter with that name exists.
    """
    # ``with`` replaces the manual acquire/try/finally/release pattern
    # while guaranteeing the same release-on-error behavior.
    with self._lock:
        del self._cache[name]
def scale_timeseries_unit(tsunit, scaling='density'):
    """Scale the unit of a `TimeSeries` to match that of a
    `FrequencySeries`.

    Parameters
    ----------
    tsunit : `~astropy.units.UnitBase`
        input unit from `TimeSeries`
    scaling : `str`
        type of frequency series, either 'density' for a PSD, or
        'spectrum' for a power spectrum.
    """
    # Validate the scaling mode first so a bad value always raises.
    if scaling == 'density':
        base_unit = units.Hertz
    elif scaling == 'spectrum':
        base_unit = units.dimensionless_unscaled
    else:
        raise ValueError("Unknown scaling: %r" % scaling)
    return tsunit ** 2 / base_unit if tsunit else base_unit ** -1
def iter_valid_fields ( meta ) : """walk through the available valid fields . ."""
# fetch field configuration and always add the id _ field as exclude meta_fields = getattr ( meta , 'fields' , ( ) ) meta_exclude = getattr ( meta , 'exclude' , ( ) ) meta_exclude += ( meta . document . _meta . get ( 'id_field' ) , ) # walk through meta _ fields or through the document fields to keep # meta _ fields or...
def H_donor_count(mol):
    """Hydrogen bond donor count."""
    mol.require("Valence")
    donors = 0
    for _, atom in mol.atoms_iter():
        if atom.H_donor:
            donors += 1
    return donors
def create_ec2_role ( self , role , bound_ami_id = None , bound_account_id = None , bound_iam_role_arn = None , bound_iam_instance_profile_arn = None , bound_ec2_instance_id = None , bound_region = None , bound_vpc_id = None , bound_subnet_id = None , role_tag = None , ttl = None , max_ttl = None , period = None , poli...
params = { 'role' : role , 'auth_type' : 'ec2' , 'disallow_reauthentication' : disallow_reauthentication , 'allow_instance_migration' : allow_instance_migration } if bound_ami_id is not None : params [ 'bound_ami_id' ] = bound_ami_id if bound_account_id is not None : params [ 'bound_account_id' ] = bound_accoun...
def _default_node_visitor ( self , node ) : """Generates a dictionary representation of the given : class : ` CTENode ` ` node ` , which consists of the node itself under the key ` ` node ` ` , as well as structural information under the keys ` ` depth ` ` , ` ` path ` ` , ` ` ordering ` ` , ` ` leaf ` ` , an...
return { "depth" : getattr ( node , node . _cte_node_depth ) , "path" : [ str ( c ) for c in getattr ( node , node . _cte_node_path ) ] , "ordering" : getattr ( node , node . _cte_node_ordering ) , "leaf" : node . is_leaf ( ) , "branch" : node . is_branch ( ) , "node" : node , }
def create_int(help_string=NO_HELP, default=NO_DEFAULT):
    # type: (str, Union[int, NO_DEFAULT_TYPE]) -> int
    """Create an int parameter.

    :param help_string: help text shown for this parameter.
    :param default: default value, or NO_DEFAULT if the parameter is required.
    :return: a parameter descriptor that converts strings to/from int.
    """
    # Conversion hooks specific to the int type, gathered in one place.
    int_conversions = dict(
        type_name="int",
        function_s2t=convert_string_to_int,
        function_t2s=convert_int_to_string,
        function_s2t_generate_from_default=convert_string_to_int_default,
    )
    # noinspection PyTypeChecker
    return ParamFunctions(help_string=help_string, default=default, **int_conversions)
def _make_scaled_srcmap ( self ) : """Make an exposure cube with the same binning as the counts map ."""
self . logger . info ( 'Computing scaled source map.' ) bexp0 = fits . open ( self . files [ 'bexpmap_roi' ] ) bexp1 = fits . open ( self . config [ 'gtlike' ] [ 'bexpmap' ] ) srcmap = fits . open ( self . config [ 'gtlike' ] [ 'srcmap' ] ) if bexp0 [ 0 ] . data . shape != bexp1 [ 0 ] . data . shape : raise Excepti...
def aux_dict(self):
    """Get dictionary representation of auxiliary states arrays.

    Returns
    -------
    aux_dict : dict of str to NDArray
        The dictionary that maps name of auxiliary states to NDArrays.

    Raises
    ------
    ValueError : if there are duplicated names in the auxiliary states.
    """
    # Build lazily on first access, then serve the cached mapping.
    cached = self._aux_dict
    if cached is None:
        cached = Executor._get_dict(
            self._symbol.list_auxiliary_states(), self.aux_arrays)
        self._aux_dict = cached
    return cached
def template_global(self, name: Optional[str] = None) -> Callable:
    """Add a template global.

    This is designed to be used as a decorator. An example usage,

    .. code-block:: python

        @app.template_global('name')
        def five():
            return 5

    Arguments:
        name: The global name (defaults to the function name if omitted).
    """
    def register(func: Callable) -> Callable:
        # Register the function, then hand it back unchanged so the
        # decorated name still refers to the original callable.
        self.add_template_global(func, name=name)
        return func
    return register
def send_invoice ( self , chat_id , title , description , payload , provider_token , start_parameter , currency , prices , provider_data = None , photo_url = None , photo_size = None , photo_width = None , photo_height = None , need_name = None , need_phone_number = None , need_email = None , need_shipping_address = No...
from pytgbot . api_types . sendable . payments import LabeledPrice from pytgbot . api_types . sendable . reply_markup import InlineKeyboardMarkup assert_type_or_raise ( chat_id , int , parameter_name = "chat_id" ) assert_type_or_raise ( title , unicode_type , parameter_name = "title" ) assert_type_or_raise ( descriptio...
def store_layout ( self , name , * args ) : """Stores given layout . : param name : Layout name . : type name : unicode : param \ * args : Arguments . : type \ * args : \ * : return : Method success . : rtype : bool"""
layout = self . __layouts . get ( name ) if not layout : raise umbra . exceptions . LayoutExistError ( "{0} | '{1}' layout isn't registered!" . format ( self . __class__ . __name__ , name ) ) LOGGER . debug ( "> Storing layout '{0}'." . format ( name ) ) self . __current_layout = name self . __settings . set_key ( ...
def load ( self , filename , set_current = True , add_where = 'end' ) : """Load filename , create an editor instance and return it * Warning * This is loading file , creating editor but not executing the source code analysis - - the analysis must be done by the editor plugin ( in case multiple editorstack ins...
filename = osp . abspath ( to_text_string ( filename ) ) self . starting_long_process . emit ( _ ( "Loading %s..." ) % filename ) text , enc = encoding . read ( filename ) finfo = self . create_new_editor ( filename , enc , text , set_current , add_where = add_where ) index = self . data . index ( finfo ) self . _refre...
def create_role ( self , name , bound_service_account_names , bound_service_account_namespaces , ttl = "" , max_ttl = "" , period = "" , policies = None , mount_point = DEFAULT_MOUNT_POINT ) : """Create a role in the method . Registers a role in the auth method . Role types have specific entities that can perform...
list_of_strings_params = { 'bound_service_account_names' : bound_service_account_names , 'bound_service_account_namespaces' : bound_service_account_namespaces , 'policies' : policies } for param_name , param_argument in list_of_strings_params . items ( ) : validate_list_of_strings_param ( param_name = param_name , ...
def apply_grad_zmat_tensor ( grad_C , construction_table , cart_dist ) : """Apply the gradient for transformation to Zmatrix space onto cart _ dist . Args : grad _ C ( : class : ` numpy . ndarray ` ) : A ` ` ( 3 , n , n , 3 ) ` ` array . The mathematical details of the index layout is explained in : meth : ...
if ( construction_table . index != cart_dist . index ) . any ( ) : message = "construction_table and cart_dist must use the same index" raise ValueError ( message ) X_dist = cart_dist . loc [ : , [ 'x' , 'y' , 'z' ] ] . values . T C_dist = np . tensordot ( grad_C , X_dist , axes = ( [ 3 , 2 ] , [ 0 , 1 ] ) ) . ...
def clean_all_trash_pages_from_all_spaces ( confluence ) : """Main function for retrieve space keys and provide space for cleaner : param confluence : : return :"""
limit = 50 flag = True i = 0 while flag : space_lists = confluence . get_all_spaces ( start = i * limit , limit = limit ) if space_lists and len ( space_lists ) != 0 : i += 1 for space_list in space_lists : print ( "Start review the space with key = " + space_list [ 'key' ] ) ...
def _update ( self , commit = False ) : """Forces an update of this rating ( useful for when Vote objects are removed ) ."""
votes = Vote . objects . filter ( content_type = self . get_content_type ( ) , object_id = self . instance . pk , key = self . field . key , ) obj_score = sum ( [ v . score for v in votes ] ) obj_votes = len ( votes ) score , created = Score . objects . get_or_create ( content_type = self . get_content_type ( ) , objec...
def is_color_supported ( ) : "Find out if your terminal environment supports color ."
# shinx . util . console if not hasattr ( sys . stdout , 'isatty' ) : return False if not sys . stdout . isatty ( ) and 'TERMINAL-COLOR' not in os . environ : return False if sys . platform == 'win32' : # pragma : no cover try : import colorama colorama . init ( ) return True exc...
def add_contents ( self , dest , contents ) : """Add file contents to the archive under ` ` dest ` ` . If ` ` dest ` ` is a path , it will be added compressed and world - readable ( user - writeable ) . You may also pass a : py : class : ` ~ zipfile . ZipInfo ` for custom behavior ."""
assert not self . _closed , "Archive closed" if not isinstance ( dest , zipfile . ZipInfo ) : dest = zinfo ( dest ) # see for some caveats # Ensure we apply the compression dest . compress_type = self . zip_compression # Mark host OS as Linux for all archives dest . create_system = 3 self . _zip_file . writestr...
def to_dict ( self ) : """to _ dict : puts data in format CC expects Args : None Returns : dict of channel data"""
return { "title" : self . title , "language" : self . language , "description" : self . description , "node_id" : self . get_node_id ( ) . hex , "content_id" : self . get_content_id ( ) . hex , "source_domain" : self . domain_ns . hex , "source_id" : self . source_id , "author" : self . author , "aggregator" : self . a...
def _insert_additionals ( self , fmtos , seen = None ) : """Insert additional formatoptions into ` fmtos ` . This method inserts those formatoptions into ` fmtos ` that are required because one of the following criteria is fullfilled : 1 . The : attr : ` replot ` attribute is True 2 . Any formatoption with ...
def get_dependencies ( fmto ) : if fmto is None : return [ ] return fmto . dependencies + list ( chain ( * map ( lambda key : get_dependencies ( getattr ( self , key , None ) ) , fmto . dependencies ) ) ) seen = seen or { fmto . key for fmto in fmtos } keys = { fmto . key for fmto in fmtos } self . repl...
def s ( self , data , errors = 'strict' ) : """Decode value using correct Python 2/3 method . This method is intended to replace the : py : meth : ` ~ tcex . tcex . TcEx . to _ string ` method with better logic to handle poorly encoded unicode data in Python2 and still work in Python3. Args : data ( any ) :...
try : if data is None or isinstance ( data , ( int , list , dict ) ) : pass # Do nothing with these types elif isinstance ( data , unicode ) : try : data . decode ( 'utf-8' ) except UnicodeEncodeError : # 2to3 converts unicode to str # 2to3 converts unicode to...
def decode_dict_keys_to_str(src):
    """Convert top level keys from bytes to strings if possible.

    This is necessary because Python 3 makes a distinction between these
    types.

    :param src: any object; only dicts (on Python 3) are transformed.
    :return: a new dict with decodable bytes keys turned into str, or the
        original object untouched.
    """
    # Only rewrite dict keys on Python 3; anything else passes through.
    if not (six.PY3 and isinstance(src, dict)):
        return src

    def _key_to_str(key):
        # Keys that are not bytes, or fail to decode, are kept as-is.
        if not isinstance(key, bytes):
            return key
        try:
            return key.decode()
        except UnicodeError:
            return key

    return {_key_to_str(key): val for key, val in six.iteritems(src)}
def search ( self , Queue = None , order = None , raw_query = None , Format = 'l' , ** kwargs ) : """Search arbitrary needles in given fields and queue . Example : : > > > tracker = Rt ( ' http : / / tracker . example . com / REST / 1.0 / ' , ' rt - username ' , ' top - secret ' ) > > > tracker . login ( ) ...
get_params = { } query = [ ] url = 'search/ticket' if Queue is not ALL_QUEUES : query . append ( "Queue=\'{}\'" . format ( Queue or self . default_queue ) ) if not raw_query : operators_map = { 'gt' : '>' , 'lt' : '<' , 'exact' : '=' , 'notexact' : '!=' , 'like' : ' LIKE ' , 'notlike' : ' NOT LIKE ' } for k...
def ekf1_pos ( EKF1 ) : '''calculate EKF position when EKF disabled'''
global ekf_home from . import mavutil self = mavutil . mavfile_global if ekf_home is None : if not 'GPS' in self . messages or self . messages [ 'GPS' ] . Status != 3 : return None ekf_home = self . messages [ 'GPS' ] ( ekf_home . Lat , ekf_home . Lng ) = gps_offset ( ekf_home . Lat , ekf_home . Lng...
def init ( * , output_dir = FS_DEFAULT_OUTPUT_DIR , dry_run = False , ** kwargs ) : """Set up output directory : param output _ dir ( str , optional ) : Output dir for holding temporary files : param \ * \ * kwargs : arbitrary keyword arguments"""
# Output directory global _output_dir _output_dir = output_dir # Dry run mode global _dry_run _dry_run = dry_run # Type checks utils . chkstr ( _output_dir , 'output_dir' ) # log the thing log . msg ( "Output directory will be: {output_dir}" . format ( output_dir = _output_dir ) ) # Create output directory if it does n...
def _set_optimizer_param ( optimizer , param_group , param_name , value ) : """Set a parameter on an all or a specific parameter group of an optimizer instance . To select all param groups , use ` ` param _ group = ' all ' ` ` ."""
if param_group == 'all' : groups = optimizer . param_groups else : groups = [ optimizer . param_groups [ int ( param_group ) ] ] for group in groups : group [ param_name ] = value
def filter_create(self, phrase, context, irreversible=False, whole_word=True, expires_in=None):
    """Creates a new keyword filter.

    ``phrase`` is the phrase that should be filtered out, ``context``
    specifies from where to filter the keywords. Valid contexts are
    'home', 'notifications', 'public' and 'thread'.

    :param phrase: the keyword or phrase to filter.
    :param context: iterable of contexts in which the filter applies.
    :param irreversible: whether matching statuses are dropped irreversibly.
    :param whole_word: whether the phrase must match whole words only.
    :param expires_in: seconds until the filter expires, or None.
    :return: the API response for the created filter.
    :raises MastodonIllegalArgumentError: if any context value is invalid.
    """
    # NOTE: locals() must be captured before any extra local is bound,
    # so that only the call arguments end up in the request parameters.
    params = self.__generate_params(locals())
    # Validate every requested context before hitting the API.
    valid_contexts = {'home', 'notifications', 'public', 'thread'}
    for context_val in context:
        if context_val not in valid_contexts:
            raise MastodonIllegalArgumentError('Invalid filter context.')
    return self.__api_request('POST', '/api/v1/filters', params)
def execOnSubArrays ( arrs , fn , splitX , splitY ) : """execute a function ( on or multiple arrays ) only on sub sections works only on 2d arrays at the moment > > > a1 = np . ones ( ( 1000,1000 ) ) > > > a2 = np . ones ( ( 1000,1000 ) ) > > > out = execOnSubArrays ( ( a1 , a2 ) , lambda sa1 , sa2 : sa1 ...
if type ( arrs ) not in ( tuple , list ) : arrs = ( arrs , ) s0 , s1 = arrs [ 0 ] . shape ss0 = s0 // splitX ss1 = s1 // splitY px , py = 0 , 0 out = None for ix in range ( splitX ) : if ix == splitX - 1 : ss0 = s0 - px for iy in range ( splitY ) : if iy == splitY - 1 : ss1 = s1 ...
def import_stringified_func(funcstring):
    """Import a string that represents a module and function, e.g.
    {module}.{funcname}.

    Given a function f, import_stringified_func(stringify_func(f)) will
    return the same function.

    :param funcstring: String to try to import
    :return: callable
    """
    assert isinstance(funcstring, str)
    # Split on the last dot: everything before it is the module path,
    # everything after it is the attribute name.
    module_path, attr_name = funcstring.rsplit('.', 1)
    return getattr(importlib.import_module(module_path), attr_name)
def delete ( self , num_iid , properties , session , item_price = None , item_num = None , lang = None ) : '''taobao . item . sku . delete 删除SKU 删除一个sku的数据 需要删除的sku通过属性properties进行匹配查找'''
request = TOPRequest ( 'taobao.item.sku.delete' ) request [ 'num_iid' ] = num_iid request [ 'properties' ] = properties if item_num != None : request [ 'item_num' ] = item_num if item_price != None : request [ 'item_price' ] = item_price if lang != None : request [ 'lang' ] = lang self . create ( self . exe...
def extract_odd_numbers(numbers):
    """Return the odd numbers from a list of integers, preserving order.

    Examples:
        >>> extract_odd_numbers([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
        [1, 3, 5, 7, 9]

    :param numbers: list of integers to filter.
    :return: list containing only the odd values.
    """
    return [value for value in numbers if value % 2 != 0]
def _get_mass_by_index ( self , index ) : """where index can either by an integer or a list of integers ( returns some of masses )"""
if hasattr ( index , '__iter__' ) : return sum ( [ self . masses [ i ] for i in index ] ) else : return self . masses [ index ]
def k_depth ( d , depth , _counter = 1 ) : """Iterate keys on specific depth . depth has to be greater equal than 0. Usage reference see : meth : ` DictTree . kv _ depth ( ) < DictTree . kv _ depth > `"""
if depth == 0 : yield d [ _meta ] [ "_rootname" ] else : if _counter == depth : for key in DictTree . k ( d ) : yield key else : _counter += 1 for node in DictTree . v ( d ) : for key in DictTree . k_depth ( node , depth , _counter ) : yield ke...
def get_users_info(self, usernames):
    """Look up real names and emails for the given usernames.

    :param usernames: a list of usernames
    :return: a dict, in the form {username: val}, where val is either None
        if the user cannot be found, or a tuple (realname, email)
    """
    # Start with every requested user mapped to None, then fill in matches.
    result = dict.fromkeys(usernames, None)
    cursor = self._database.users.find({"username": {"$in": usernames}})
    for record in cursor:
        result[record["username"]] = (record["realname"], record["email"])
    return result