signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def send_raw_email(self, raw_message, source=None, destinations=None):
    """Send an email whose headers and content are fully specified by the caller.

    Wraps the SES ``SendRawEmail`` action, which is useful for sending
    multipart MIME emails with attachments or inline content.

    :param raw_message: raw text of the message; base64-encoded before sending.
    :param source: optional sender address, included only when given.
    :param destinations: optional list of recipients, included only when given.
    :return: the response of the ``SendRawEmail`` request.
    """
    request_params = {'RawMessage.Data': base64.b64encode(raw_message)}
    if source:
        request_params['Source'] = source
    if destinations:
        self._build_list_params(request_params, destinations,
                                'Destinations.member')
    return self._make_request('SendRawEmail', request_params)
def witness_tx ( tx_ins , tx_outs , tx_witnesses , ** kwargs ) : '''Construct a fully - signed segwit transaction Args : tx _ ins list ( TxIn instances ) : list of transaction inputs tx _ outs list ( TxOut instances ) : list of transaction outputs tx _ witnesses list ( TxWitness instances ) : list of transa...
# Parse legacy scripts AND witness scripts for OP _ CLTV deser = [ script_ser . deserialize ( tx_in . redeem_script ) for tx_in in tx_ins if tx_in is not None ] for w in tx_witnesses : try : deser . append ( script_ser . deserialize ( w . stack [ - 1 ] . item ) ) except ( NotImplementedError , ValueErro...
def t_octalValue ( t ) : r'[ + - ] ? 0[0-9 ] +'
# We must match [ 0-9 ] , and then check the validity of the octal number . # If we match [ 0-7 ] , the invalid octal number " 08 " would match # ' decimalValue ' 0 and ' decimalValue ' 8. if re . search ( r'[8-9]' , t . value ) is not None : msg = _format ( "Invalid octal number {0!A}" , t . value ) t . lexer ...
def subdomains_init(blockstack_opts, working_dir, atlas_state):
    """Set up subdomain state.

    Returns a SubdomainIndex object that has been successfully connected
    to Atlas, or None when subdomain support is disabled.
    """
    if not is_subdomains_enabled(blockstack_opts):
        return None

    index = SubdomainIndex(blockstack_opts['subdomaindb_path'],
                           blockstack_opts=blockstack_opts)
    # Feed every stored zonefile through the subdomain indexer.
    atlas_node_add_callback(atlas_state, 'store_zonefile',
                            index.enqueue_zonefile)
    return index
def get_under_hollow(self):
    """Return 'HCP' if an atom is present below the adsorbate in the
    subsurface layer, and 'FCC' if not."""
    repeated_slab = self.B[-1:] * (3, 3, 1)
    adsorbate_pos = repeated_slab.positions[4]
    subsurface = self.get_subsurface_layer() * (3, 3, 1)

    # HCP when any subsurface atom lies laterally within half of its
    # covalent radius of the adsorbate position; FCC otherwise.
    below = [np.linalg.norm(adsorbate_pos[:2] - atom.position[:2])
             < 0.5 * cradii[atom.number]
             for atom in subsurface]
    return 'HCP' if np.any(below) else 'FCC'
def get_error(self, error):
    """Helper that extracts standard information from *error*."""
    err_cls = type(error)
    side = 'Client' if error.error_type == ET_CLIENT else 'Server'
    return {
        'type': side,
        'name': err_cls.__name__,
        'prefix': getattr(err_cls, '__module__', ''),
        'message': unicode(error),
        'params': error.args,
    }
def dumps(self):
    """Return the path command representation as a string."""
    representation = self.path_type
    if self.options is not None:
        representation += self.options.dumps()
    return representation
def on_connect(self, connection):
    """Called when the stream connects.

    Caches the connection's reader stream and wraps it in a SocketBuffer
    for buffered reads; adopts the connection's encoding when responses
    should be decoded.
    """
    self._stream = connection._reader
    self._buffer = SocketBuffer(self._stream, self._read_size)
    # Only pick up an encoding when the connection wants decoded responses.
    if connection.decode_responses:
        self.encoding = connection.encoding
def cat_colors ( N : int = 1 , * , hue : str = None , luminosity : str = None , bgvalue : int = None , loop : bool = False , seed : str = "cat" ) -> Union [ List [ Any ] , colors . LinearSegmentedColormap ] : """Return a colormap suitable for N categorical values , optimized to be both aesthetically pleasing and pe...
c : List [ str ] = [ ] if N <= 25 and hue is None and luminosity is None : c = _color_alphabet [ : N ] elif not loop : c = RandomColor ( seed = seed ) . generate ( count = N , hue = hue , luminosity = luminosity , format_ = "hex" ) else : n = N while n > 0 : c += _color_alphabet [ : n ] ...
def ParseContactRow ( self , parser_mediator , query , row , ** unused_kwargs ) : """Parses a contact row from the database . Args : parser _ mediator ( ParserMediator ) : mediates interactions between parsers and other components , such as storage and dfvfs . query ( str ) : query that created the row . ...
query_hash = hash ( query ) event_data = TangoAndroidContactEventData ( ) first_name = self . _GetRowValue ( query_hash , row , 'first_name' ) try : decoded_text = base64_decode ( first_name ) event_data . first_name = codecs . decode ( decoded_text , 'utf-8' ) except ValueError : event_data . first_name = ...
def aes_ecb_decrypt(self, key_handle, ciphertext):
    """AES ECB decrypt using a key handle.

    @warning: Please be aware of the known limitations of AES ECB mode
        before using it!

    @param key_handle: Key handle to use for AES ECB decryption
    @param ciphertext: Data to decrypt
    @return: Result of executing the YHSM AES-ECB decrypt command on the stick.
    """
    return pyhsm.aes_ecb_cmd.YHSM_Cmd_AES_ECB_Decrypt(
        self.stick, key_handle, ciphertext).execute()
def absent ( name , protocol = None , service_address = None ) : '''Ensure the LVS service is absent . name The name of the LVS service protocol The service protocol service _ address The LVS service address'''
ret = { 'name' : name , 'changes' : { } , 'result' : True , 'comment' : '' } # check if service exists and remove it service_check = __salt__ [ 'lvs.check_service' ] ( protocol = protocol , service_address = service_address ) if service_check is True : if __opts__ [ 'test' ] : ret [ 'result' ] = None ...
def summary_engine ( ** kwargs ) : """engine to extract summary data"""
logger . debug ( "summary_engine" ) # farms = kwargs [ " farms " ] farms = [ ] experiments = kwargs [ "experiments" ] for experiment in experiments : if experiment . selected_summaries is None : selected_summaries = [ "discharge_capacity" , "charge_capacity" , "coulombic_efficiency" , "cumulated_coulombic_e...
def _GetPropertyValue ( self , parser_mediator , properties , property_name ) : """Retrieves a property value . Args : parser _ mediator ( ParserMediator ) : mediates interactions between parsers and other components , such as storage and dfvfs . properties ( dict [ str , object ] ) : properties . propert...
property_value = properties . get ( property_name , None ) if isinstance ( property_value , py2to3 . BYTES_TYPE ) : try : # TODO : get encoding form XML metadata . property_value = property_value . decode ( 'utf-8' ) except UnicodeDecodeError : parser_mediator . ProduceExtractionWarning ( 'unabl...
def _ancestors ( collection ) : """Get the ancestors of the collection ."""
for index , c in enumerate ( collection . path_to_root ( ) ) : if index > 0 and c . dbquery is not None : raise StopIteration yield c . name raise StopIteration
def is_nested_subset(superset_list, subset_list):
    """Check whether every element of *subset_list* occurs in *superset_list*.

    Elements may themselves be lists, so this can be used to test whether
    a collection of nested lists is contained in another list.
    """
    for element in subset_list:
        if element not in superset_list:
            return False
    return True
def _repack_archive ( archive1 , archive2 , verbosity = 0 , interactive = True ) : """Repackage an archive to a different format ."""
format1 , compression1 = get_archive_format ( archive1 ) format2 , compression2 = get_archive_format ( archive2 ) if format1 == format2 and compression1 == compression2 : # same format and compression allows to copy the file util . link_or_copy ( archive1 , archive2 , verbosity = verbosity ) return tmpdir = uti...
def update_product(AcceptLanguage=None, Id=None, Name=None, Owner=None,
                   Description=None, Distributor=None,
                   SupportDescription=None, SupportEmail=None,
                   SupportUrl=None, AddTags=None, RemoveTags=None):
    """Update an existing product.

    Auto-generated stub mirroring the AWS Service Catalog ``UpdateProduct``
    API; see the AWS API documentation for parameter semantics.  The body
    is intentionally empty (documentation-only placeholder).
    """
    pass
def _set_scroll_area ( self , force = False ) : """Args : force ( bool ) : Set the scroll area even if no change in height and position is detected Sets the scroll window based on the counter positions"""
# Save scroll offset for resizing oldOffset = self . scroll_offset self . scroll_offset = newOffset = max ( self . counters . values ( ) ) + 1 if not self . enabled : return # Set exit handling only once if not self . process_exit : atexit . register ( self . _at_exit ) if not self . no_resize and RESIZE_SU...
def get_pushes(self, project, **params):
    """Get pushes from *project*, filtered by keyword parameters.

    By default this returns just the latest 10 pushes (if they exist).

    :param project: project (repository name) to query data for.
    :param params: keyword arguments to filter results.
    """
    endpoint = self.PUSH_ENDPOINT
    return self._get_json_list(endpoint, project, **params)
def render_thread ( self ) : """A render loop that pulls observations off the queue to render ."""
obs = True while obs : # Send something falsy through the queue to shut down . obs = self . _obs_queue . get ( ) if obs : for alert in obs . observation . alerts : self . _alerts [ sc_pb . Alert . Name ( alert ) ] = time . time ( ) for err in obs . action_errors : if err ...
def get_host_system_failfast(self, name, verbose=False, host_system_term='HS'):
    """Get a HostSystem object; fail fast if it isn't a valid reference.

    :param name: name of the HostSystem to look up.
    :param verbose: when True, print progress information.
    :param host_system_term: label used in the error message.
    :return: the HostSystem object (exits the process when not found).
    """
    if verbose:
        print("Finding HostSystem named %s..." % name)
    hs = self.get_host_system(name)
    if hs is None:
        print("Error: %s '%s' does not exist" % (host_system_term, name))
        sys.exit(1)
    if verbose:
        # Bug fix: the original applied the % operator to a str.format-style
        # template ("{0} ... {1}" % (...)), which raises TypeError at runtime.
        print("Found HostSystem: {0} Name: {1}".format(hs, hs.name))
    return hs
def backfill_unk_emb(self, E, filled_words):
    """Backfill an embedding matrix with the unknown-token embedding.

    :param E: original embedding matrix of dimensions
        ``(vocab_size, emb_dim)``; modified in place.
    :param filled_words: words that will NOT be backfilled with unk.
    """
    unk_vector = E[self[self._unk]]
    for index, token in enumerate(self):
        if token in filled_words:
            continue
        E[index] = unk_vector
def factor_hatch(field_name, patterns, factors, start=0, end=None):
    """Create a ``DataSpec`` dict that applies a client-side
    ``CategoricalPatternMapper`` transformation to a ``ColumnDataSource``
    column.

    Args:
        field_name (str): field name to configure the ``DataSpec`` with.
        patterns: hatch patterns to map the factors onto.
        factors: categorical factors to map.
        start (int): start index into each factor (default: 0).
        end: optional end index into each factor.
    """
    mapper = CategoricalPatternMapper(patterns=patterns,
                                      factors=factors,
                                      start=start,
                                      end=end)
    return field(field_name, mapper)
def get_instance(self, payload):
    """Build an instance of YesterdayInstance.

    :param dict payload: payload response from the API.
    :returns: a YesterdayInstance built from *payload*.
    """
    account_sid = self._solution['account_sid']
    return YesterdayInstance(self._version, payload, account_sid=account_sid)
def atlasdb_format_query(query, values):
    """Turn a query into a string for printing.  Useful for debugging."""
    rendered = []
    # Pair each query fragment (split on '?') with its bound value; the
    # trailing ("",) pads values so the final fragment is also emitted.
    for fragment, value in zip(query.split("?"), values + ("",)):
        if type(value) in [str, unicode]:
            shown = "'%s'" % value
        else:
            shown = value
        rendered.append("%s %s" % (fragment, shown))
    return "".join(rendered)
def _metaclass_lookup_attribute ( self , name , context ) : """Search the given name in the implicit and the explicit metaclass ."""
attrs = set ( ) implicit_meta = self . implicit_metaclass ( ) metaclass = self . metaclass ( ) for cls in { implicit_meta , metaclass } : if cls and cls != self and isinstance ( cls , ClassDef ) : cls_attributes = self . _get_attribute_from_metaclass ( cls , name , context ) attrs . update ( set ( c...
def file_name(self, file_name):
    """Update the file_name.

    Args:
        file_name: new file name value.
    """
    if not self.can_update():
        self._tcex.handle_error(910, [self.type])
    self._data['fileName'] = file_name
    payload = {'fileName': file_name}
    return self.tc_requests.update(
        self.api_type, self.api_sub_type, self.unique_id, payload)
def assign_complex_to_samples ( items ) : """Assign complex inputs like variants and align outputs to samples . Handles list inputs to record conversion where we have inputs from multiple locations and need to ensure they are properly assigned to samples in many environments . The unpleasant approach here i...
extract_fns = { ( "variants" , "samples" ) : _get_vcf_samples , ( "align_bam" , ) : _get_bam_samples } complex = { k : { } for k in extract_fns . keys ( ) } for data in items : for k in complex : v = tz . get_in ( k , data ) if v is not None : for s in extract_fns [ k ] ( v , items ) : ...
def sg_ctc ( tensor , opt ) : r"""Computes the CTC ( Connectionist Temporal Classification ) Loss between ` tensor ` and ` target ` . Args : tensor : A 3 - D ` float Tensor ` . opt : target : A ` Tensor ` with the same length in the first dimension as the ` tensor ` . Labels . ( Dense tensor ) name : A ` ...
assert opt . target is not None , 'target is mandatory.' # default sequence length shape = tf . shape ( tensor ) opt += tf . sg_opt ( seq_len = tf . ones ( ( shape [ 0 ] , ) , dtype = tf . sg_intx ) * shape [ 1 ] , merge = True ) # ctc loss out = tf . nn . ctc_loss ( opt . target . sg_to_sparse ( ) , tensor , opt . seq...
def generate_VD_junction_transfer_matrices ( self ) : """Compute the transfer matrices for the VD junction . Sets the attributes Tvd , Svd , Dvd , lTvd , and lDvd ."""
nt2num = { 'A' : 0 , 'C' : 1 , 'G' : 2 , 'T' : 3 } # Compute Tvd Tvd = { } for aa in self . codons_dict . keys ( ) : current_Tvd = np . zeros ( ( 4 , 4 ) ) for init_nt in 'ACGT' : for codon in self . codons_dict [ aa ] : current_Tvd [ nt2num [ codon [ 2 ] ] , nt2num [ init_nt ] ] += self . R...
def increase(self, infile):
    """Increase: swap the byte sequence at an arbitrary position with a
    larger byte sequence taken from elsewhere in the file.

    The first 31 entries (header) are left untouched.
    NOTE: raises IndexError when no strictly larger element exists
    (behaviour preserved from the original).
    """
    body = infile[31:]
    # Pick a random element, then a random element with a strictly
    # larger length, and swap the two in place.
    pos = body.index(random.choice(body))
    chosen_len = len(body[pos])
    larger_positions = [body.index(item) for item in body
                        if len(item) > chosen_len]
    other = random.choice(larger_positions)
    body[pos], body[other] = body[other], body[pos]
    return infile[:31] + body
def first_n_items ( array , n_desired ) : """Returns the first n _ desired items of an array"""
# Unfortunately , we can ' t just do array . flat [ : n _ desired ] here because it # might not be a numpy . ndarray . Moreover , access to elements of the array # could be very expensive ( e . g . if it ' s only available over DAP ) , so go out # of our way to get them in a single call to _ _ getitem _ _ using only sl...
def read ( self ) : """Reads the cache file as pickle file ."""
def warn ( msg , elapsed_time , current_time ) : desc = self . _cache_id_desc ( ) self . _warnings ( "{0} {1}: {2}s < {3}s" , msg , desc , elapsed_time , current_time ) file_time = get_time ( ) out = self . _out if out is None : if self . verbose : self . _warnings ( "reading {0} from disk" , self ....
def register_extension(self, group, name, extension):
    """Register an extension.

    Args:
        group (str): the type of the extension.
        name (str): a name for the extension.
        extension (str or class): a string is interpreted as a path to
            import and load (which also supplies the name); anything else
            is registered directly.
    """
    if isinstance(extension, str):
        # A string is a module path: load it and adopt the loaded name.
        name, extension = self.load_extension(extension)[0]
    registered = self._registered_extensions.setdefault(group, [])
    registered.append((name, extension))
def spline ( x , y , n , yp1 , ypn , y2 ) : '''/ * CALCULATE 2ND DERIVATIVES OF CUBIC SPLINE INTERP FUNCTION * ADAPTED FROM NUMERICAL RECIPES BY PRESS ET AL * X , Y : ARRAYS OF TABULATED FUNCTION IN ASCENDING ORDER BY X * N : SIZE OF ARRAYS X , Y * YP1 , YPN : SPECIFIED DERIVATIVES AT X [ 0 ] AND X [ N - 1 ...
u = [ 0.0 ] * n # I think this is the same as malloc # no need for the out of memory if ( yp1 > 0.99E30 ) : # pragma : no cover y2 [ 0 ] = 0 u [ 0 ] = 0 else : y2 [ 0 ] = - 0.5 u [ 0 ] = ( 3.0 / ( x [ 1 ] - x [ 0 ] ) ) * ( ( y [ 1 ] - y [ 0 ] ) / ( x [ 1 ] - x [ 0 ] ) - yp1 ) for i in range ( 1 , n - 1 ...
def setonce ( decorator ) : """A descriptor modifier which allows _ _ set _ _ to be called at most once ."""
def decorate ( fn , * args , ** kwargs ) : parent = decorator ( fn , * args , ** kwargs ) # doc = _ add _ msg ( getattr ( parent , ' _ _ doc _ _ ' , None ) , ' * @ setonce * ' ) doc = getattr ( parent , '__doc__' , None ) assert hasattr ( parent , "__set__" ) # don ' t use for non - data descriptors...
def add_hostname_cn_ip(self, addresses):
    """Add addresses to the SAN list for the hostname request.

    :param addresses: list of addresses to be added; duplicates of
        already-present entries are skipped.
    """
    known = self.hostname_entry['addresses']
    for address in addresses:
        if address not in known:
            known.append(address)
def plot_ic_hist ( ic , ax = None ) : """Plots Spearman Rank Information Coefficient histogram for a given factor . Parameters ic : pd . DataFrame DataFrame indexed by date , with IC for each forward return . ax : matplotlib . Axes , optional Axes upon which to plot . Returns ax : matplotlib . Axes ...
ic = ic . copy ( ) num_plots = len ( ic . columns ) v_spaces = ( ( num_plots - 1 ) // 3 ) + 1 if ax is None : f , ax = plt . subplots ( v_spaces , 3 , figsize = ( 18 , v_spaces * 6 ) ) ax = ax . flatten ( ) for a , ( period_num , ic ) in zip ( ax , ic . iteritems ( ) ) : sns . distplot ( ic . replace ( np ....
def submit(self, q, context=None, task_name="casjobs", estimate=30):
    """Submit a job to CasJobs.

    ## Arguments
    * `q` (str): The SQL query.

    ## Keyword Arguments
    * `context` (str): CasJobs context used for this query; defaults to
      the client's context.
    * `task_name` (str): The task name.
    * `estimate` (int): Estimated run time.

    ## Returns
    * the submitted job id (int).
    """
    request_params = {
        "qry": q,
        "context": context or self.context,
        "taskname": task_name,
        "estimate": estimate,
    }
    response = self._send_request("SubmitJob", params=request_params)
    return int(self._parse_single(response.text, "long"))
def _scale_mesh ( self , scale ) : """TODO : add documentation"""
pos_ks = [ 'vertices' , 'centers' ] # TODO : scale velocities ? ? ? # handle scale self . update_columns_dict ( { k : self [ k ] * scale for k in pos_ks } ) self . update_columns ( areas = self . areas * ( scale ** 2 ) ) self . _volume *= scale ** 3 if self . _area is not None : # self . _ area is None for wd meshes ...
def decorate ( cls , app , * args , run_middleware = False , with_context = False , ** kwargs ) : """This is a decorator that can be used to apply this plugin to a specific route / view on your app , rather than the whole app . : param app : : type app : Sanic | Blueprint : param args : : type args : tupl...
from spf . framework import SanicPluginsFramework spf = SanicPluginsFramework ( app ) # get the singleton from the app try : assoc = spf . register_plugin ( cls , skip_reg = True ) except ValueError as e : # this is normal , if this plugin has been registered previously assert e . args and len ( e . args ) > 1 ...
def password_attributes_character_restriction_upper ( self , ** kwargs ) : """Auto Generated Code"""
config = ET . Element ( "config" ) password_attributes = ET . SubElement ( config , "password-attributes" , xmlns = "urn:brocade.com:mgmt:brocade-aaa" ) character_restriction = ET . SubElement ( password_attributes , "character-restriction" ) upper = ET . SubElement ( character_restriction , "upper" ) upper . text = kw...
def load_parcellation_coords(parcellation_name):
    """Load coordinates of included parcellations.

    Parameters
    ----------
    parcellation_name : str
        options: 'gordon2014_333', 'power2012_264', 'shen2013_278'.

    Returns
    -------
    parc : array
        parcellation coordinates
    """
    csv_path = tenetopath[0] + '/data/parcellation/' + parcellation_name + '.csv'
    # Columns 1-3 hold the x, y, z coordinates; row 0 is the header.
    return np.loadtxt(csv_path, skiprows=1, delimiter=',', usecols=[1, 2, 3])
def reconstruct_url ( self , path_info = None , query_string = None , relative = False ) : """Reconstructs the request URL using the algorithm provided by PEP3333"""
environ = self . environ if relative : url = '' else : url = environ [ 'wsgi.url_scheme' ] + '://' if environ . get ( 'HTTP_HOST' ) : url += environ [ 'HTTP_HOST' ] else : url += environ [ 'SERVER_NAME' ] if environ [ 'wsgi.url_scheme' ] == 'https' : if environ [ 'SER...
def from_urlpath(cls, path, app=None):
    """Alternative constructor accepting a path as taken from a URL.

    Uses the given app (or the current app) config to resolve the real
    filesystem path.  If the class attribute ``generic`` is True,
    ``directory_class`` or ``file_class`` is used as the type depending
    on what the resolved path points at.

    :param path: path as taken from the URL.
    :param app: optional app instance; defaults to ``current_app``.
    """
    active_app = app or current_app
    real_path = urlpath_to_abspath(path, active_app.config['directory_base'])
    if not cls.generic:
        target_cls = cls
    elif os.path.isdir(real_path):
        target_cls = cls.directory_class
    else:
        target_cls = cls.file_class
    return target_cls(path=real_path, app=active_app)
def find_logs ( self , user_name , first_date , start_time , last_date , end_time , action , functionality , parameter , pagination ) : """Search all logs , filtering by the given parameters . : param user _ name : Filter by user _ name : param first _ date : Sets initial date for begin of the filter : param ...
if not isinstance ( pagination , Pagination ) : raise InvalidParameterError ( u"Invalid parameter: pagination must be a class of type 'Pagination'." ) eventlog_map = dict ( ) eventlog_map [ "start_record" ] = pagination . start_record eventlog_map [ "end_record" ] = pagination . end_record eventlog_map [ "asorting_...
def open_file ( loc ) : """Handle . gz , . tar . gz or unzipped files"""
loc = ensure_path ( loc ) if tarfile . is_tarfile ( str ( loc ) ) : return tarfile . open ( str ( loc ) , "r:gz" ) elif loc . parts [ - 1 ] . endswith ( "gz" ) : return ( line . decode ( "utf8" ) for line in gzip . open ( str ( loc ) , "r" ) ) elif loc . parts [ - 1 ] . endswith ( "zip" ) : zip_file = zipfi...
def get_subdomain_DID_info ( fqn , db_path = None , zonefiles_dir = None ) : """Get a subdomain ' s DID info . Return None if not found"""
opts = get_blockstack_opts ( ) if not is_subdomains_enabled ( opts ) : log . warn ( "Subdomain support is disabled" ) return None if db_path is None : db_path = opts [ 'subdomaindb_path' ] if zonefiles_dir is None : zonefiles_dir = opts [ 'zonefiles' ] db = SubdomainDB ( db_path , zonefiles_dir ) try : ...
def setWidth ( self , personID , width ) : """setWidth ( string , double ) - > None Sets the width in m for this person ."""
self . _connection . _sendDoubleCmd ( tc . CMD_SET_PERSON_VARIABLE , tc . VAR_WIDTH , personID , width )
def add_arrow(self, x1, y1, x2, y2, **kws):
    """Add an arrow to the plot (delegates to the underlying panel)."""
    panel = self.panel
    panel.add_arrow(x1, y1, x2, y2, **kws)
def register(cls):
    """Register variable handling in YAML (idempotent)."""
    if cls.IS_LOADED:
        return
    cls.IS_LOADED = True
    yaml.add_constructor('!param', Parameter.parameter_constructor,
                         Loader=yaml.SafeLoader)
    yaml.add_constructor('!env', EnvironmentVariable.parameter_constructor,
                         Loader=yaml.SafeLoader)
def configure_sbi ( ) : """Configure an SBI using POSTed configuration ."""
# Need an ID for the subarray - guessing I just get # the list of inactive subarrays and use the first inactive_list = SubarrayList ( ) . inactive request_data = request . data LOG . debug ( 'request is of type %s' , type ( request_data ) ) try : sbi = Subarray ( inactive_list [ 0 ] ) sbi . activate ( ) sbi...
def refresh_items ( self ) : """Refresh the items of the pattern . This method destroys the old items and creates and initializes the new items . It is overridden to NOT insert the children to the parent . The Fragment adapter handles this ."""
items = [ ] if self . condition : for nodes , key , f_locals in self . pattern_nodes : with new_scope ( key , f_locals ) : for node in nodes : child = node ( None ) if isinstance ( child , list ) : items . extend ( child ) else ...
def setEditorData(self, editor, index):
    """Update the editor with the model data.

    :param editor: <QtGui.QWidget>
    :param index: <QtGui.QModelIndex>
    """
    text = unwrapVariant(index.data())
    editor.setCurrentIndex(editor.findText(text))
def for_attempt(self, attempt):
    """Return the backoff duration (in seconds) for a specific attempt.

    Useful when tracking a large number of independent backoffs without
    storing per-backoff state.  The first attempt should be 0.
    """
    duration = float(self.min_ms * pow(self.factor, attempt))
    if self.jitter:
        # Randomize within [min_ms, duration).
        duration = random.random() * (duration - self.min_ms) + self.min_ms
    # Clamp to the configured maximum.
    if duration > self.max_ms:
        return to_seconds(self.max_ms)
    return to_seconds(duration)
def calcAspectRatioFromCorners ( corners , in_plane = False ) : '''simple and better alg . than below in _ plane - > whether object has no tilt , but only rotation and translation'''
q = corners l0 = [ q [ 0 , 0 ] , q [ 0 , 1 ] , q [ 1 , 0 ] , q [ 1 , 1 ] ] l1 = [ q [ 0 , 0 ] , q [ 0 , 1 ] , q [ - 1 , 0 ] , q [ - 1 , 1 ] ] l2 = [ q [ 2 , 0 ] , q [ 2 , 1 ] , q [ 3 , 0 ] , q [ 3 , 1 ] ] l3 = [ q [ 2 , 0 ] , q [ 2 , 1 ] , q [ 1 , 0 ] , q [ 1 , 1 ] ] a1 = line . length ( l0 ) / line . length ( l1 ) a2 ...
def get_nearest_edges ( G , X , Y , method = None , dist = 0.0001 ) : """Return the graph edges nearest to a list of points . Pass in points as separate vectors of X and Y coordinates . The ' kdtree ' method is by far the fastest with large data sets , but only finds approximate nearest edges if working in un...
start_time = time . time ( ) if method is None : # calculate nearest edge one at a time for each point ne = [ get_nearest_edge ( G , ( x , y ) ) for x , y in zip ( X , Y ) ] ne = [ ( u , v ) for _ , u , v in ne ] elif method == 'kdtree' : # check if we were able to import scipy . spatial . cKDTree successfully ...
def list_nodes_full ( mask = 'mask[id, hostname, primaryIpAddress, \ primaryBackendIpAddress, processorPhysicalCoreAmount, memoryCount]' , call = None ) : '''Return a list of the VMs that are on the provider'''
if call == 'action' : raise SaltCloudSystemExit ( 'The list_nodes_full function must be called with -f or --function.' ) ret = { } conn = get_conn ( service = 'SoftLayer_Account' ) response = conn . getHardware ( mask = mask ) for node in response : ret [ node [ 'hostname' ] ] = node __utils__ [ 'cloud.cache_no...
def _get_indexes_in_altered_table ( self , diff ) : """: param diff : The table diff : type diff : orator . dbal . table _ diff . TableDiff : rtype : dict"""
indexes = diff . from_table . get_indexes ( ) column_names = self . _get_column_names_in_altered_table ( diff ) for key , index in OrderedDict ( [ ( k , v ) for k , v in indexes . items ( ) ] ) . items ( ) : for old_index_name , renamed_index in diff . renamed_indexes . items ( ) : if key . lower ( ) == old...
def has_child(cls, child_type, query):
    """Build a has_child query (Elasticsearch DSL).

    Accepts a query and the child type to run it against, and matches
    parent documents whose child docs match *query*.
    """
    body = {'type': child_type, 'query': query}
    return cls(has_child=body)
def stream_directory(directory, recursive=False, patterns='**',
                     chunk_size=default_chunk_size):
    """Get a buffered generator for streaming directories.

    Returns a buffered generator which encodes a directory as
    multipart/form-data, together with the corresponding headers.
    """
    ds = DirectoryStream(directory, recursive=recursive,
                         patterns=patterns, chunk_size=chunk_size)
    return ds.body(), ds.headers
def data_filler_company ( self , number_of_rows , cursor , conn ) : '''creates and fills the table with company data'''
companies_data = [ ] try : for i in range ( 0 , number_of_rows ) : companies_data . append ( ( rnd_id_generator ( self ) , self . faker . company ( ) , self . faker . date ( pattern = "%Y-%m-%d" ) , self . faker . company_email ( ) , self . faker . safe_email ( ) , self . faker . city ( ) ) ) companies_...
def print_at ( self , text , x , y , colour = 7 , attr = 0 , bg = 0 , transparent = False ) : """Print the text at the specified location using the specified colour and attributes . : param text : The ( single line ) text to be printed . : param x : The column ( x coord ) for the start of the text . : param y...
# Convert to the logically visible window that our double - buffer provides y -= self . _start_line # Trim text to the buffer vertically . Don ' t trim horizontally as we don ' t know whether any # of these characters are dual - width yet . Handle it on the fly below . . . if y < 0 or y >= self . _buffer_height or x > ...
def request_sensor_sampling_clear(self, req):
    """Set all sampling strategies for this client to none.

    Returns
    -------
    success : {'ok', 'fail'}
        Whether sending the list of devices succeeded.

    Examples
    --------
    ?sensor-sampling-clear
    !sensor-sampling-clear ok
    """
    reply_future = Future()

    @gen.coroutine
    def _clear_strategies():
        self.clear_strategies(req.client_connection)
        raise gen.Return(('ok',))

    # Run the clear on the ioloop and chain its result to the reply.
    self.ioloop.add_callback(
        lambda: chain_future(_clear_strategies(), reply_future))
    return reply_future
def train_position_scales ( self , layout , layers ) : """Compute ranges for the x and y scales"""
_layout = layout . layout panel_scales_x = layout . panel_scales_x panel_scales_y = layout . panel_scales_y # loop over each layer , training x and y scales in turn for layer in layers : data = layer . data match_id = match ( data [ 'PANEL' ] , _layout [ 'PANEL' ] ) if panel_scales_x : x_vars = list...
def run(self):
    """Start the consumer, optionally under the profiler."""
    if self.profile_file:
        LOGGER.info('Profiling to %s', self.profile_file)
        profile.runctx('self._run()', globals(), locals(),
                       self.profile_file)
    else:
        self._run()
    LOGGER.debug('Exiting %s (%i, %i)',
                 self.name, os.getpid(), os.getppid())
def handleError(self, test, err):
    """Baseclass override; called when a test raises an exception.

    If the test isn't going to be rerun again, report the error to the
    nose test result.

    :param test: the test that has raised an error.
    :param err: the error information.
    :return: truthy when handled here, None otherwise.
    """
    # pylint: disable=invalid-name
    want_error = self._handle_test_error_or_failure(test, err)
    # Only hand the error to nose when this plugin declines it AND the
    # test has already been rerun.
    if not want_error and id(test) in self._tests_that_reran:
        self._nose_result.addError(test, err)
    return want_error or None
def maybe_start_recording(tokens, index):
    """Return a new _InlineRSTRecorder when it's time to record.

    Returns None for any other token type.
    """
    if tokens[index].type == TokenType.BeginInlineRST:
        return _InlineRSTRecorder(index)
    return None
def get_filters_values ( self ) : """Get different filters values as dicts ."""
# DATASETS - - # badges self . _DST_BADGES = requests . get ( self . base_url + "datasets/badges/" ) . json ( ) # licences self . _DST_LICENSES = { l . get ( "id" ) : l . get ( "title" ) for l in requests . get ( self . base_url + "datasets/licenses" ) . json ( ) } # frequencies self . _DST_FREQUENCIES = { f . get ( "i...
def post_collection ( self , session , data , api_type ) : """Create a new Resource . : param session : SQLAlchemy session : param data : Request JSON Data : param params : Keyword arguments"""
model = self . _fetch_model ( api_type ) self . _check_json_data ( data ) orm_desc_keys = model . __mapper__ . all_orm_descriptors . keys ( ) if 'type' not in data [ 'data' ] . keys ( ) : raise MissingTypeError ( ) if data [ 'data' ] [ 'type' ] != model . __jsonapi_type__ : raise InvalidTypeForEndpointError ( m...
def create_environment ( home_dir , site_packages = False , clear = False , unzip_setuptools = False , prompt = None , search_dirs = None , download = False , no_setuptools = False , no_pip = False , no_wheel = False , symlink = True ) : """Creates a new environment in ` ` home _ dir ` ` . If ` ` site _ packages ...
home_dir , lib_dir , inc_dir , bin_dir = path_locations ( home_dir ) py_executable = os . path . abspath ( install_python ( home_dir , lib_dir , inc_dir , bin_dir , site_packages = site_packages , clear = clear , symlink = symlink ) ) install_distutils ( home_dir ) to_install = [ ] if not no_setuptools : to_install...
def plots_html_page ( query_module ) : """Generate analysis output as html page Args : query _ module ( module ) : module to use for querying data for the desired model / pipeline variant , e . g . leonardo . standard . queries"""
# page template template = jenv . get_template ( "analysis.html" ) # container for template context context = dict ( extended = config . EXTENDED ) # a database client / session to run queries in cl = client . get_client ( ) session = cl . create_session ( ) # general styling seaborn . set_style ( 'whitegrid' ) # plot ...
def backend_from_fobj ( f ) : """Determine backend module object from a file object ."""
if magic is None : warn ( "magic lib is not installed; assuming mime type %r" % ( DEFAULT_MIME ) ) return backend_from_mime ( DEFAULT_MIME ) else : offset = f . tell ( ) try : f . seek ( 0 ) chunk = f . read ( MAGIC_BUFFER_SIZE ) mime = magic . from_buffer ( chunk , mime = True )...
def send_many(self, outputs_array, fee=None, change_addr=None, id=None, endpoint=None):
    """Send assets to multiple addresses in a single call.

    Args:
        outputs_array (list of dict): each element is of the form
            ``{"asset": <asset>, "value": <value>, "address": <address>}``.
        fee: optional network fee to attach.
        change_addr: optional change address.  The RPC arguments are
            positional, so when a change address is supplied without a
            fee a placeholder fee of ``0`` is inserted to keep the
            argument slots aligned.
        id: optional request id.
        endpoint: optional endpoint override.

    Returns:
        The result of the RPC call.
    """
    params = [outputs_array]
    if fee and change_addr:
        params.extend([fee, change_addr])
    elif fee:
        params.append(fee)
    elif change_addr:
        # No fee given: pad the fee slot with 0 before the change address.
        params.extend([0, change_addr])
    return self._call_endpoint(SEND_MANY, params=params, id=id, endpoint=endpoint)
def console_get_char_foreground(con: tcod.console.Console, x: int, y: int) -> Color:
    """Return the foreground color at the x,y of this console.

    .. deprecated:: 8.4
        Array access performs significantly faster than using this
        function.  See :any:`Console.fg`.
    """
    raw_color = lib.TCOD_console_get_char_foreground(_console(con), x, y)
    return Color._new_from_cdata(raw_color)
def _already_resized_on_flickr ( self , fn , pid , _megapixels ) : """Checks if image file ( fn ) with photo _ id ( pid ) has already been resized on flickr . If so , returns True"""
logger . debug ( "%s - resize requested" % ( fn ) ) # Get width / height from flickr width_flickr , height_flickr = self . _getphoto_originalsize ( pid ) # Now compute what image will be if we resize it new_width , new_height = pusher_utils . resize_compute_width_height ( fn , _megapixels ) if width_flickr == new_width...
def _loglr ( self ) : r"""Computes the log likelihood ratio Returns float The value of the log likelihood ratio ."""
# calculate < d - h | d - h > = < h | h > - 2 < h | d > + < d | d > up to a constant p = self . current_params . copy ( ) p . update ( self . static_params ) if self . time is None : self . time = p [ 'tc' ] shloglr = hhloglr = 0 for ifo in self . sh : fp , fc = self . det [ ifo ] . antenna_pattern ( p [ 'ra' ]...
def inform_hook_client_factory(self, host, port, *args, **kwargs):
    """Return an instance of :class:`_InformHookDeviceClient` or similar.

    Provided to ease testing.  Dynamically overriding this method after
    instantiation but before start() is called allows for deep brain
    surgery.  See :class:`_InformHookDeviceClient`.
    """
    client = _InformHookDeviceClient(host, port, *args, **kwargs)
    return client
def make_path(phase) -> str:
    """Create the path to the folder at which the metadata and optimizer
    pickle should be saved.

    The result is the configured output path followed by the phase's
    path, name and tag concatenated with no separator between them.
    """
    return "%s/%s%s%s" % (
        conf.instance.output_path,
        phase.phase_path,
        phase.phase_name,
        phase.phase_tag,
    )
def schedule_function ( queue_name , function_name , * args , ** kwargs ) : """Schedule a function named ` function _ name ` to be run by workers on the queue ` queue _ name ` with * args and * * kwargs as specified by that function ."""
body = create_request_body ( function_name , * args , ** kwargs ) if getattr ( settings , 'BEANSTALK_DISPATCH_EXECUTE_SYNCHRONOUSLY' , False ) : execute_function ( json . loads ( body ) ) else : connection = boto . connect_sqs ( settings . BEANSTALK_DISPATCH_SQS_KEY , settings . BEANSTALK_DISPATCH_SQS_SECRET ) ...
def tags(self):
    """Iterate over all tags, yielding ``(name, function)`` pairs.

    Walks every bucket in this container and flattens each bucket's
    mapping of tag name to tag function into a single stream.
    """
    for bucket_key in self:
        bucket = self[bucket_key]
        for name, function in bucket.items():
            yield name, function
def update ( self ) : """Monolithic update method . This method calls the following methods with the dynamic loss scaling . 1 . solver . zerograd 2 . feed data 3 . loss . forward 4 . loss . backward 5 . comm . all _ reduce ( if it is specified ) 6 . solver . update"""
# Initialize gradients . self . solver . zero_grad ( ) # Forward and backward for _ in range ( self . accum_grad ) : # feed data self . data_feeder ( ) # forward self . loss . forward ( clear_no_need_grad = self . clear_buffer ) # backward with scale self . loss . backward ( self . scale , clear_buf...
def processStoredSms(self, unreadOnly=False):
    """Process all SMS messages currently stored on the device/SIM card.

    Reads all (or just unread) received SMS messages currently stored on
    the device/SIM card, fires the "SMS received" callback for each one,
    and removes them from storage.

    :param unreadOnly: when True, only unread messages are processed.
    """
    if unreadOnly:
        states = [Sms.STATUS_RECEIVED_UNREAD]
    else:
        # Already-read messages are handled first, then unread ones.
        states = [Sms.STATUS_RECEIVED_READ, Sms.STATUS_RECEIVED_UNREAD]
    for state in states:
        # delete=True removes the messages from the SIM as they are read.
        for sms in self.listStoredSms(status=state, delete=True):
            self.smsReceivedCallback(sms)
def validate ( self , pA , pB ) : """Validate that the two properties may be linked . @ param pA : Endpoint ( A ) to link . @ type pA : L { Endpoint } @ param pB : Endpoint ( B ) to link . @ type pB : L { Endpoint } @ return : self @ rtype : L { Link }"""
if pA in pB . links or pB in pA . links : raise Exception ( 'Already linked' ) dA = pA . domains ( ) dB = pB . domains ( ) for d in dA : if d in dB : raise Exception ( 'Duplicate domain "%s" found' % d ) for d in dB : if d in dA : raise Exception ( 'Duplicate domain "%s" found' % d ) kA = pA...
def kdeconf(kde, conf=0.683, xmin=None, xmax=None, npts=500, shortest=True, conftol=0.001, return_max=False):
    """Return the desired confidence interval for the provided KDE object.

    The KDE is evaluated on an evenly spaced grid of ``npts`` points over
    ``[xmin, xmax]`` (defaulting to the extent of the KDE's dataset) and
    the interval computation is delegated to ``conf_interval``.
    """
    lo = kde.dataset.min() if xmin is None else xmin
    hi = kde.dataset.max() if xmax is None else xmax
    grid = np.linspace(lo, hi, npts)
    density = kde(grid)
    return conf_interval(grid, density, shortest=shortest, conf=conf,
                         conftol=conftol, return_max=return_max)
def _set_traffic_state ( self , v , load = False ) : """Setter method for traffic _ state , mapped from YANG variable / traffic _ state ( container ) If this variable is read - only ( config : false ) in the source YANG file , then _ set _ traffic _ state is considered as a private method . Backends looking t...
if hasattr ( v , "_utype" ) : v = v . _utype ( v ) try : t = YANGDynClass ( v , base = traffic_state . traffic_state , is_container = 'container' , presence = False , yang_name = "traffic-state" , rest_name = "traffic-state" , parent = self , path_helper = self . _path_helper , extmethods = self . _extmethods ,...
def interp_like ( self , other , method = 'linear' , assume_sorted = False , kwargs = { } ) : """Interpolate this object onto the coordinates of another object , filling the out of range values with NaN . Parameters other : Dataset or DataArray Object with an ' indexes ' attribute giving a mapping from dime...
coords = alignment . reindex_like_indexers ( self , other ) numeric_coords = OrderedDict ( ) object_coords = OrderedDict ( ) for k , v in coords . items ( ) : if v . dtype . kind in 'uifcMm' : numeric_coords [ k ] = v else : object_coords [ k ] = v ds = self if object_coords : # We do not suppor...
def add ( self , path ) : """Add the given path to the decided place in sys . path"""
# sys . path always has absolute paths . path = os . path . abspath ( path ) # It must exist . if not os . path . exists ( path ) : return # It must not already be in sys . path . if path in sys . path : return if self . index is not None : sys . path . insert ( self . index , path ) self . index += 1 e...
def reset(self, data, size):
    """Set new contents for the frame.

    Delegates to the underlying C library's ``zframe_reset`` using this
    object's handle (``_as_parameter_``).
    """
    handle = self._as_parameter_
    return lib.zframe_reset(handle, data, size)
def PushSection ( self , name , pre_formatters ) : """Given a section name , push it on the top of the stack . Returns : The new section , or None if there is no such section ."""
if name == '@' : value = self . stack [ - 1 ] . context else : value = self . stack [ - 1 ] . context . get ( name ) # Apply pre - formatters for i , ( f , args , formatter_type ) in enumerate ( pre_formatters ) : if formatter_type == ENHANCED_FUNC : value = f ( value , self , args ) elif format...
def get_role_secret_id(self, role_name, secret_id, mount_point='approle'):
    """POST /auth/<mount_point>/role/<role name>/secret-id/lookup

    Look up the given secret id under the named role.

    :param role_name: name of the role the secret id belongs to
    :param secret_id: the secret id to look up
    :param mount_point: auth backend mount point (default ``approle``)
    :return: the parsed JSON response
    """
    path = '/v1/auth/{0}/role/{1}/secret-id/lookup'.format(mount_point, role_name)
    payload = {'secret_id': secret_id}
    response = self._adapter.post(path, json=payload)
    return response.json()
def __fade_in(self):
    """Starts the Widget fade in.

    Restarts the internal timer with the fade vector set to the fade
    speed.
    """
    # Stop any animation currently in flight before changing direction.
    self.__timer.stop()
    # NOTE(review): presumably a positive vector raises the opacity on
    # each timer tick -- confirm against the slot that consumes __vector.
    self.__vector = self.__fade_speed
    self.__timer.start()
def proxy_set(self, value):
    """A helper to easily call the proxy_setter of the field.

    ``value`` is spread into the setter call: a list/tuple/set becomes
    positional arguments, a dict becomes keyword arguments, and any
    other value is passed through as a single positional argument.
    """
    setter = getattr(self, self.proxy_setter)
    if isinstance(value, dict):
        return setter(**value)
    if isinstance(value, (list, tuple, set)):
        return setter(*value)
    return setter(value)
def query_item(name, query_string, order='Rank'):
    '''Query a type of record for one or more items.

    Requires a valid query string.  See
    https://rally1.rallydev.com/slm/doc/webservice/introduction.jsp
    for information on query syntax.
    '''
    args = {'query': query_string, 'order': order}
    _status, result = _query(action=name, args=args)
    return result
def update_campaign_list(self, **kwargs):  # noqa: E501
    """List all campaigns.

    Get update campaigns for devices specified by a filter.  Delegates to
    ``update_campaign_list_with_http_info``; only the response data is
    requested (``_return_http_data_only``).  Pass ``asynchronous=True``
    to have the underlying call made asynchronously.
    """
    kwargs['_return_http_data_only'] = True
    # Both the synchronous and asynchronous paths return the delegate's
    # result unchanged.
    return self.update_campaign_list_with_http_info(**kwargs)  # noqa: E501
def fmt_margin ( text , margin = None , margin_left = None , margin_right = None , margin_char = ' ' ) : """Surround given text with given margin characters ."""
if margin_left is None : margin_left = margin if margin_right is None : margin_right = margin if margin_left is not None : text = '{}{}' . format ( str ( margin_char ) [ 0 ] * int ( margin_left ) , text ) if margin_right is not None : text = '{}{}' . format ( text , str ( margin_char ) [ 0 ] * int ( mar...
def check_script(vouts):
    """Look into the vouts of a transaction and return its ``op_return``.

    Scans the outputs in reverse order for scripts beginning with the
    OP_RETURN opcode (``6a``) and returns the first decoded verb whose
    action is supported.

    Args:
        vouts (list): list of outputs of a transaction.

    Returns:
        str: string representation of the ``op_return``.

    Raises:
        Exception: if no vout carrying a supported verb is found.
    """
    for vout in reversed(vouts):
        script_hex = vout['hex']
        if not script_hex.startswith('6a'):
            continue
        verb = BlockchainSpider.decode_op_return(script_hex)
        if Spoolverb.from_verb(verb).action in Spoolverb.supported_actions:
            return verb
    raise Exception("Invalid ascribe transaction")
def bcrypt_set_password(self, raw_password):
    """Sets the user's password to *raw_password*, hashed with bcrypt.

    When bcrypt support is disabled or *raw_password* is ``None``,
    delegates to the wrapped ``_set_password`` instead of hashing.
    """
    use_bcrypt = is_enabled() and raw_password is not None
    if use_bcrypt:
        salt = bcrypt.gensalt(get_rounds())
        hashed = bcrypt.hashpw(smart_str(raw_password), salt)
        # The 'bc$' prefix marks the stored hash as a bcrypt hash.
        self.password = 'bc$' + hashed
    else:
        _set_password(self, raw_password)
def distinct(self, fieldname, key=None):
    """Return the unique values seen at ``fieldname``.

    Order of first appearance is preserved; ``key`` optionally maps each
    value before uniqueness is tested.
    """
    values = self[fieldname]
    return tuple(unique_everseen(values, key=key))