signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def navigate(self, name, *args):
    """Dispatch to a named route handler.

    :param name: Route name to invoke.
    :param args: Positional arguments forwarded to the route handler.
    :returns: Whatever the route handler returns.
    :raises Exception: If the route name is unknown, or the registered
        route is not callable.
    """
    if name not in self.routes:
        raise Exception("invalid route name '%s'" % name)
    handler = self.routes[name]
    if callable(handler):
        return handler(self, *args)
    # BUG FIX: the original passed `name` as a second Exception argument
    # instead of %-interpolating it into the message.
    raise Exception('route %s not callable' % name)
def up_ec2 ( connection , region , instance_id , wait_for_ssh_available = True , log = False , timeout = 600 ) : """boots an existing ec2 _ instance"""
# boot the ec2 instance instance = connection . start_instances ( instance_ids = instance_id ) [ 0 ] instance . update ( ) while instance . state != "running" and timeout > 1 : log_yellow ( "Instance state: %s" % instance . state ) if log : log_yellow ( "Instance state: %s" % instance . state ) slee...
def main ( ) : """This is a Toil pipeline used to perform alignment of fastqs ."""
# Define Parser object and add to Toil if mock_mode ( ) : usage_msg = 'You have the TOIL_SCRIPTS_MOCK_MODE environment variable set, so this pipeline ' 'will run in mock mode. To disable mock mode, set TOIL_SCRIPTS_MOCK_MODE=0' else : usage_msg = None parser = argparse . ArgumentParser ( usage = usage_msg ) sub...
def get_cardinality(self, node=None):
    """Return the cardinality of `node`, or of every node.

    :param node: optional hashable node; when omitted (None), returns a
        defaultdict mapping each CPD variable to its cardinality.
    :raises: whatever ``self.get_cpds`` raises when the CPD for the
        queried node has not been added to the network.
    """
    # BUG FIX: the original tested truthiness (`if node:`), so falsy but
    # valid hashable nodes such as 0 or "" fell into the "all nodes" branch.
    if node is not None:
        return self.get_cpds(node).cardinality[0]
    cardinalities = defaultdict(int)
    for cpd in self.cpds:
        cardinalities[cpd.variable] = cpd.cardinality[0]
    return cardinalities
def getLinkedAnalyses ( self ) : """Lookup linked Analyses : returns : sorted list of ANs , where the latest AN comes first"""
# Fetch the linked Analyses UIDs refs = get_backreferences ( self , "AnalysisAttachment" ) # fetch the objects by UID and handle nonexisting UIDs gracefully ans = map ( lambda uid : api . get_object_by_uid ( uid , None ) , refs ) # filter out None values ( nonexisting UIDs ) ans = filter ( None , ans ) # sort by physic...
def center_land ( world ) : """Translate the map horizontally and vertically to put as much ocean as possible at the borders . It operates on elevation and plates map"""
y_sums = world . layers [ 'elevation' ] . data . sum ( 1 ) # 1 = = sum along x - axis y_with_min_sum = y_sums . argmin ( ) if get_verbose ( ) : print ( "geo.center_land: height complete" ) x_sums = world . layers [ 'elevation' ] . data . sum ( 0 ) # 0 = = sum along y - axis x_with_min_sum = x_sums . argmin ( ) if g...
def _get_token ( ) : '''Get an auth token'''
username = __opts__ . get ( 'rallydev' , { } ) . get ( 'username' , None ) password = __opts__ . get ( 'rallydev' , { } ) . get ( 'password' , None ) path = 'https://rally1.rallydev.com/slm/webservice/v2.0/security/authorize' result = salt . utils . http . query ( path , decode = True , decode_type = 'json' , text = Tr...
def get_docargs(self, args=None, prt=None):
    """Pare down docopt output to a minimal runtime-argument dictionary.

    Delegates parsing to ``self.objdoc`` and validates the result with
    ``self._chk_docopts`` before returning it.
    """
    parsed = self.objdoc.get_docargs(args, prt)
    self._chk_docopts(parsed)
    return parsed
def clean_query_Dict(cls, query_Dict):
    """Return a copy of `query_Dict` without falsy values.

    NOTE(review): the original docstring claimed only NoneTypes are
    removed, but the code drops *every* falsy value (None, 0, '', False,
    empty containers). Callers appear to rely on that, so the behavior
    is preserved and only the documentation is corrected.
    """
    return {key: val for key, val in query_Dict.items() if val}
def set_data(self, index, data):
    """Set the complete data for a single line strip.

    Parameters
    ----------
    index : int
        The index of the line strip to be replaced.
    data : array-like
        The data to assign to the selected line strip.
        Must match the strip's row shape in ``_pos_tex`` —
        TODO confirm expected shape against the texture layout.
    """
    # Overwrite this strip's whole row in the position texture, then
    # schedule a redraw so the change becomes visible.
    self._pos_tex[index, :] = data
    self.update()
def remove_user_from_group(self, username, groupname):
    """Remove a user from a group via the REST API.

    :param username: The user to remove from the group.
    :param groupname: The group that the user will be removed from.
    :returns: True (the HTTP response is not inspected).
    """
    params = {'groupname': groupname, 'username': username}
    endpoint = self._options['server'] + '/rest/api/latest/group/user'
    self._session.delete(endpoint, params=params)
    return True
def assign_issue ( issue_key , assignee , server = None , username = None , password = None ) : '''Assign the issue to an existing user . Return ` ` True ` ` when the issue has been properly assigned . issue _ key The JIRA ID of the ticket to manipulate . assignee The name of the user to assign the ticket...
jira_ = _get_jira ( server = server , username = username , password = password ) assigned = jira_ . assign_issue ( issue_key , assignee ) return assigned
def _compute_bgid ( self , bg = None ) : """Return a unique identifier for the background data"""
if bg is None : bg = self . _bgdata if isinstance ( bg , qpimage . QPImage ) : # Single QPImage if "identifier" in bg : return bg [ "identifier" ] else : data = [ bg . amp , bg . pha ] for key in sorted ( list ( bg . meta . keys ( ) ) ) : val = bg . meta [ key ] ...
def get_combined_size(tiles):
    """Calculate the combined pixel size of a grid of tiles.

    Assumes all tiles share the size of the first tile.
    # TODO: Refactor calculating layout to avoid repetition.
    """
    columns, rows = calc_columns_rows(len(tiles))
    width, height = tiles[0].image.size
    return (width * columns, height * rows)
def calculate_mrcas(self, c1: ClassId, c2: ClassId) -> Set[ClassId]:
    """Calculate the most recent common ancestors of a class pair.

    A common ancestor is redundant when it is an ancestor of another
    common ancestor; redundant nodes are removed from the result.
    """
    graph = self.G
    # Reflexive ancestor sets: each class counts as its own ancestor.
    common = (self._ancestors(c1) | {c1}) & (self._ancestors(c2) | {c2})
    redundant = set()
    for anc in common:
        redundant |= nx.ancestors(graph, anc)
    return common - redundant
def get_mean_and_stddevs ( self , sites , rup , dists , imt , stddev_types ) : """See : meth : ` superclass method < . base . GroundShakingIntensityModel . get _ mean _ and _ stddevs > ` for spec of input and result values ."""
# extracting dictionary of coefficients specific to required # intensity measure type . C = self . COEFFS [ imt ] imean = self . _get_mean ( C , rup , dists , sites ) if imt . name in "SA PGA" : # Convert units to g , # but only for PGA and SA ( not PGV ) : mean = np . log ( ( 10.0 ** ( imean - 2.0 ) ) / g ) else :...
def get_metric(self, timestamp):
    """Build a Metric snapshot containing all current time series.

    Returns None when no points have been recorded yet; otherwise takes
    the points lock while converting points into time series.
    """
    if not self.points:
        return None
    with self._points_lock:
        series = get_timeseries_list(self.points, timestamp)
        return metric.Metric(self.descriptor, series)
def r_get_numbers(matchgroup, num):
    """Extract `num` consecutive float matches from a regex iterator.

    Works like ``fscanf(fid, '%f', num)``: advances `matchgroup` `num`
    times and converts each match's text to float.

    :param matchgroup: iterator of ``re.Match`` objects (e.g. from
        ``re.finditer``).
    :param num: number of matches to consume.
    :returns: numpy array of the parsed floats.
    """
    # BUG FIX: the original called `matchgroup.next()` — the Python 2
    # iterator protocol — which raises AttributeError on Python 3.
    return np.array([float(next(matchgroup).group()) for _ in range(num)])
def _initialize_context ( self , trace_header ) : """Create a facade segment based on environment variables set by AWS Lambda and initialize storage for subsegments ."""
sampled = None if not global_sdk_config . sdk_enabled ( ) : # Force subsequent subsegments to be disabled and turned into DummySegments . sampled = False elif trace_header . sampled == 0 : sampled = False elif trace_header . sampled == 1 : sampled = True segment = FacadeSegment ( name = 'facade' , traceid =...
def multiply_adjacent_elements(input_list):
    """Multiply each pair of neighbouring elements of `input_list`.

    Args:
        input_list: The list whose consecutive elements are multiplied.

    Returns:
        A list one element shorter than the input (empty for inputs of
        length 0 or 1).

    >>> multiply_adjacent_elements([1, 2, 3])
    [2, 6]
    """
    return [left * right for left, right in zip(input_list, input_list[1:])]
def parse_link ( self , node ) : """Parses < Link > @ param node : Node containing the < Link > element @ type node : xml . etree . Element"""
if 'name' in node . lattrib : name = node . lattrib [ 'name' ] else : self . raise_error ( '<Link> must specify a name' ) if 'type' in node . lattrib : type_ = node . lattrib [ 'type' ] else : self . raise_error ( "Link '{0}' must specify a type" , name ) description = node . lattrib . get ( 'descriptio...
def get_wallet_height(self, id=None, endpoint=None):
    """Get the current wallet index height.

    Args:
        id: (int, optional) id to use for response tracking
        endpoint: (RPCEndpoint, optional) endpoint to specify to use

    Returns:
        json object of the result or the error encountered in the RPC call
    """
    # Pure delegation: GET_WALLET_HEIGHT is the module-level RPC method name.
    return self . _call_endpoint ( GET_WALLET_HEIGHT , id = id , endpoint = endpoint )
def voronoi_partition ( G , outline ) : """For 2D - embedded graph ` G ` , within the boundary given by the shapely polygon ` outline ` , returns ` G ` with the Voronoi cell region as an additional node attribute ."""
# following line from vresutils . graph caused a bug # G = polygon _ subgraph ( G , outline , copy = False ) points = list ( vresutils . graph . get_node_attributes ( G , 'pos' ) . values ( ) ) regions = vresutils . graph . voronoi_partition_pts ( points , outline , no_multipolygons = True ) nx . set_node_attributes ( ...
def add_to_manifest ( self , manifest ) : """Add useful details to the manifest about this service so that it can be used in an application . : param manifest : An predix . admin . app . Manifest object instance that manages reading / writing manifest config for a cloud foundry app ."""
# Add this service to list of services manifest . add_service ( self . service . name ) # Add environment variables manifest . add_env_var ( self . __module__ + '.uri' , self . service . settings . data [ 'url' ] ) manifest . add_env_var ( self . __module__ + '.zone_id' , self . get_predix_zone_id ( ) ) manifest . writ...
def set_address(self, host, port):
    """Record the target host and port on this instance."""
    self.host, self.port = host, port
def _req_fix ( self , line ) : """Fix slacky and salix requirements because many dependencies splitting with " , " and others with " | " """
deps = [ ] for dep in line [ 18 : ] . strip ( ) . split ( "," ) : dep = dep . split ( "|" ) if self . repo == "slacky" : if len ( dep ) > 1 : for d in dep : deps . append ( d . split ( ) [ 0 ] ) dep = "" . join ( dep ) deps . append ( dep . split ( ) [ 0 ] ) ...
def fetch_file ( dataset_name , url , dataset_dir , dataset_prefix = None , default_paths = None , filetype = None , resume = True , overwrite = False , md5sum = None , username = None , password = None , retry = 0 , verbose = 1 , temp_downloads = None ) : """Load requested file , downloading it if needed or reques...
final_path , cached = _get_dataset ( dataset_name , dataset_prefix = dataset_prefix , data_dir = dataset_dir , default_paths = default_paths , verbose = verbose ) if cached and not overwrite : return final_path data_dir = final_path . parent if temp_downloads is None : temp_downloads = NIWORKFLOWS_CACHE_DIR / '...
def reindex(self, indexers=None, method=None, tolerance=None, copy=True,
            **indexers_kwargs):
    """Conform this object onto a new set of indexes, filling in missing
    values with NaN.

    Indexers may be supplied either as a dict or as keyword arguments
    (not both); the work is delegated via the temporary-dataset round
    trip.
    """
    combined = either_dict_or_kwargs(indexers, indexers_kwargs, 'reindex')
    reindexed = self._to_temp_dataset().reindex(
        indexers=combined, method=method, tolerance=tolerance, copy=copy)
    return self._from_temp_dataset(reindexed)
def getHostsFromFile ( filename ) : """Parse a file to return a list of hosts ."""
valid_hostname = r"^[^ /\t=\n]+" workers = r"\d+" hostname_re = re . compile ( valid_hostname ) worker_re = re . compile ( workers ) hosts = [ ] with open ( filename ) as f : for line in f : # check to see if it is a SLURM grouping instead of a # regular list of hosts if re . search ( '[\[\]]' , line ) ...
def connect_edges ( graph ) : """Given a Graph element containing abstract edges compute edge segments directly connecting the source and target nodes . This operation just uses internal HoloViews operations and will be a lot slower than the pandas equivalent ."""
paths = [ ] for start , end in graph . array ( graph . kdims ) : start_ds = graph . nodes [ : , : , start ] end_ds = graph . nodes [ : , : , end ] if not len ( start_ds ) or not len ( end_ds ) : raise ValueError ( 'Could not find node positions for all edges' ) start = start_ds . array ( start_d...
def worker_task(work_item, config):
    """Celery task performing a single mutation and test-suite run.

    Args:
        work_item: A dict-like WorkItem describing the mutation.
        config: The configuration, forwarded to the worker.

    Returns:
        A (job_id, result) pair.
    """
    global _workspace  # module-level workspace shared across task invocations
    _ensure_workspace(config)
    outcome = worker(work_item.module_path,
                     config.python_version,
                     work_item.operator_name,
                     work_item.occurrence,
                     config.test_command,
                     config.timeout)
    return work_item.job_id, outcome
def cover(ctx, html=False):
    '''Run tests suite with coverage.

    :param ctx: invoke context used to execute the command from ROOT.
    :param html: also produce an HTML report when True.
    '''
    header('Run tests suite with coverage')
    cmd = 'pytest --cov udata --cov-report term'
    if html:
        cmd += ' --cov-report html:reports/python/cover'
    with ctx.cd(ROOT):
        ctx.run(cmd, pty=True)
def extract_file_from_zip(bytes_io, expected_file):
    """Extract a single member from an in-memory zip archive.

    :param bytes_io: file-like object containing zip data.
    :param expected_file: name of the archive member to read.
    :returns: the member's contents as bytes.
    :raises KeyError: if `expected_file` is not in the archive.
    """
    # Use a context manager so the ZipFile is closed deterministically
    # (the original left it open until garbage collection). Closing a
    # ZipFile built from a file object does not close that object.
    with zipfile.ZipFile(bytes_io) as archive:
        return archive.read(expected_file)
def cap ( self ) : """" Caps " the construction of the pipeline , signifying that no more inputs and outputs are expected to be added and therefore the input and output nodes can be created along with the provenance ."""
to_cap = ( self . _inputnodes , self . _outputnodes , self . _prov ) if to_cap == ( None , None , None ) : self . _inputnodes = { f : self . _make_inputnode ( f ) for f in self . input_frequencies } self . _outputnodes = { f : self . _make_outputnode ( f ) for f in self . output_frequencies } self . _prov =...
def vqa_recurrent_self_attention_base ( ) : """VQA attention baseline hparams ."""
hparams = universal_transformer . universal_transformer_base ( ) hparams . batch_size = 1024 hparams . use_fixed_batch_size = True hparams . weight_decay = 0. hparams . clip_grad_norm = 0. # use default initializer # hparams . initializer = " xavier " hparams . learning_rate_schedule = ( "constant*linear_warmup*rsqrt_n...
def write_languages ( f , l ) : """Write language information ."""
f . write ( "Languages = {%s" % os . linesep ) for lang in sorted ( l ) : f . write ( " %r: %r,%s" % ( lang , l [ lang ] , os . linesep ) ) f . write ( "}%s" % os . linesep )
def _parse ( jsonOutput ) : '''Parses JSON response from Tika REST API server : param jsonOutput : JSON output from Tika Server : return : a dictionary having ' metadata ' and ' content ' values'''
parsed = { } if not jsonOutput : return parsed parsed [ "status" ] = jsonOutput [ 0 ] if jsonOutput [ 1 ] == None or jsonOutput [ 1 ] == "" : return parsed realJson = json . loads ( jsonOutput [ 1 ] ) content = "" for js in realJson : if "X-TIKA:content" in js : content += js [ "X-TIKA:content" ] if...
def list_nodes_select ( call = None ) : '''Return a list of the VMs that are on the provider , with select fields'''
if call == 'action' : raise SaltCloudSystemExit ( 'The list_nodes_select function must be called ' 'with -f or --function.' ) selection = __opts__ . get ( 'query.selection' ) if not selection : raise SaltCloudSystemExit ( 'query.selection not found in /etc/salt/cloud' ) # TODO : somewhat doubt the implementatio...
def full_value(self):
    """Return name, value and path combined (``name = value (path)``).

    :returns: String
    """
    return self.name_value() + self.path_value() + "\n\n"
def store_array_elements ( self , array , start_idx , data ) : """Stores either a single element or a range of elements in the array . : param array : Reference to the array . : param start _ idx : Starting index for the store . : param data : Either a single value or a list of values ."""
# we process data as a list of elements # = > if there is only a single element , wrap it in a list data = data if isinstance ( data , list ) else [ data ] # concretize start index concrete_start_idxes = self . concretize_store_idx ( start_idx ) if len ( concrete_start_idxes ) == 1 : # only one start index # = > concre...
def get_editor_buffer_for_location(self, location):
    """Return the `EditorBuffer` for this location.

    When the file has not been loaded yet, return None.
    """
    return next(
        (buf for buf in self.editor_buffers if buf.location == location),
        None)
def parse_resource_extended(self, session, resource_name):
    """Parse a resource string to get extended interface information.

    Corresponds to the viParseRsrcEx function of the VISA library.

    :returns: (ResourceInfo, StatusCode.success) on success, or
        (0, StatusCode.error_invalid_resource_name) when the name
        cannot be parsed.
    """
    try:
        parsed = rname.parse_resource_name(resource_name)
        info = ResourceInfo(parsed.interface_type_const, parsed.board,
                            parsed.resource_class, str(parsed), None)
        return info, constants.StatusCode.success
    except ValueError:
        return 0, constants.StatusCode.error_invalid_resource_name
def select_gist ( self , allow_none = False ) : """Given the requested filename , it selects the proper gist ; if more than one gist is found with the given filename , user is asked to choose . : allow _ none : ( bool ) for ` getgist ` it should raise error if no gist is found , but setting this argument to T...
# pick up all macthing gists matches = list ( ) for gist in self . get_gists ( ) : for gist_file in gist . get ( "files" ) : if self . filename == gist_file . get ( "filename" ) : matches . append ( gist ) # abort if no match is found if not matches : if allow_none : return None ...
def calculate_column_sum(matrix, column_index):
    """Sum column `column_index` across all rows of a 2D list.

    >>> calculate_column_sum([[1, 2], [4, 5], [7, 8]], 0)
    12
    """
    total = 0
    for row in matrix:
        total += row[column_index]
    return total
def revise_sql ( query , id_column , output_table , max_date_column , min_date_column , date_column , date , source_id_column = None ) : """Given an expensive query that aggregates temporal data , Revise the results to censor before a particular date"""
if source_id_column is None : source_id_column = id_column if hasattr ( id_column , '__iter__' ) : id_column = str . join ( ', ' , id_column ) if hasattr ( source_id_column , '__iter__' ) : source_id_column = str . join ( ', ' , source_id_column ) sql_vars = dict ( query = query , id_column = id_column , ou...
def snmp_server_group_group_auth_mode ( self , ** kwargs ) : """Auto Generated Code"""
config = ET . Element ( "config" ) snmp_server = ET . SubElement ( config , "snmp-server" , xmlns = "urn:brocade.com:mgmt:brocade-snmp" ) group = ET . SubElement ( snmp_server , "group" ) group_name_key = ET . SubElement ( group , "group-name" ) group_name_key . text = kwargs . pop ( 'group_name' ) group_version_key = ...
def com_google_fonts_check_metadata_valid_name_values ( style , font_metadata , font_familynames , typographic_familynames ) : """METADATA . pb font . name field contains font name in right format ?"""
from fontbakery . constants import RIBBI_STYLE_NAMES if style in RIBBI_STYLE_NAMES : familynames = font_familynames else : familynames = typographic_familynames failed = False for font_familyname in familynames : if font_familyname not in font_metadata . name : failed = True yield FAIL , ( "...
def convertShape ( shapeString ) : """Convert xml shape string into float tuples . This method converts the 2d or 3d shape string from SUMO ' s xml file into a list containing 3d float - tuples . Non existant z coordinates default to zero . If shapeString is empty , an empty list will be returned ."""
cshape = [ ] for pointString in shapeString . split ( ) : p = [ float ( e ) for e in pointString . split ( "," ) ] if len ( p ) == 2 : cshape . append ( ( p [ 0 ] , p [ 1 ] , 0. ) ) elif len ( p ) == 3 : cshape . append ( tuple ( p ) ) else : raise ValueError ( 'Invalid shape poi...
def create_node ( ctx , path ) : """Create node for given relative path . : param ctx : BuildContext object . : param path : Relative path relative to top directory . : return : Created Node ."""
# Ensure given context object is BuildContext object _ensure_build_context ( ctx ) # Get top directory ' s relative path relative to ` wscript ` directory top_dir_relpath = os . path . relpath ( # Top directory ' s absolute path ctx . top_dir , # ` wscript ` directory ' s absolute path ctx . run_dir , ) # Convert given...
def raises(self, expected_exception):
    """Ensures preceding predicates (specifically :meth:`called_with()`)
    result in *expected_exception* being raised.
    """
    # Delegate to unittest's assertRaises, replaying the stored subject
    # with the previously captured positional and keyword arguments.
    return unittest_case . assertRaises ( expected_exception , self . _orig_subject , * self . _args , ** self . _kwargs )
def validate ( self , sig = None ) : '''Check if file matches its signature'''
if sig is not None : sig_mtime , sig_size , sig_md5 = sig else : try : with open ( self . sig_file ( ) ) as sig : sig_mtime , sig_size , sig_md5 = sig . read ( ) . strip ( ) . split ( ) except : return False if not self . exists ( ) : if ( self + '.zapped' ) . is_file ( ) : ...
def _convert_rename ( self , fc ) : """Convert a FileRenameCommand into a new FileCommand . : return : None if the rename is being ignored , otherwise a new FileCommand based on the whether the old and new paths are inside or outside of the interesting locations ."""
old = fc . old_path new = fc . new_path keep_old = self . _path_to_be_kept ( old ) keep_new = self . _path_to_be_kept ( new ) if keep_old and keep_new : fc . old_path = self . _adjust_for_new_root ( old ) fc . new_path = self . _adjust_for_new_root ( new ) return fc elif keep_old : # The file has been renam...
def create_mssql_pymssql ( username , password , host , port , database , ** kwargs ) : # pragma : no cover
    """Create an engine connected to a mssql database using pymssql.

    Extra keyword arguments are passed straight to SQLAlchemy's
    ``create_engine``; the connection URL is built by the private
    ``_create_mssql_pymssql`` helper.
    """
    return create_engine ( _create_mssql_pymssql ( username , password , host , port , database ) , ** kwargs )
def set_next_week_day(val, week_day, iso=False):
    """Move `val` forward to the given week day (never backward).

    :param val: datetime or date
    :type val: datetime.datetime | datetime.date
    :param week_day: week day to set
    :type week_day: int
    :param iso: interpret `week_day` in ISO numbering when True
    """
    current = val.isoweekday() if iso else val.weekday()
    return _set_week_day(val, week_day, current, sign=1)
def execute_streaming_sql ( self , session , sql , transaction = None , params = None , param_types = None , resume_token = None , query_mode = None , partition_token = None , seqno = None , retry = google . api_core . gapic_v1 . method . DEFAULT , timeout = google . api_core . gapic_v1 . method . DEFAULT , metadata = ...
# Wrap the transport method to add retry and timeout logic . if "execute_streaming_sql" not in self . _inner_api_calls : self . _inner_api_calls [ "execute_streaming_sql" ] = google . api_core . gapic_v1 . method . wrap_method ( self . transport . execute_streaming_sql , default_retry = self . _method_configs [ "Ex...
def _thread_init ( cls ) : """Ensure thread local is initialized ."""
if not hasattr ( cls . _local , '_in_order_futures' ) : cls . _local . _in_order_futures = set ( ) cls . _local . _activated = False
def segmentlistdict_from_short_string(s, boundtype=int):
    """Parse the short-string encoding of named segmentlists.

    Inverse of segmentlistdict_to_short_string(); `boundtype` is passed
    through to from_range_strings().
    """
    result = segments.segmentlistdict()
    for token in s.strip().split("/"):
        key, ranges = token.strip().split("=")
        result[key.strip()] = from_range_strings(
            ranges.strip().split(","), boundtype=boundtype)
    return result
def fit(self, X):
    """Apply KMeans clustering to `X` (dataset of feature vectors).

    Stores cluster centers, labels, per-trial SSE and the iteration
    count as fitted attributes.
    """
    (self.centers_,
     self.labels_,
     self.sse_arr_,
     self.n_iter_) = _kmeans(X, self.n_clusters, self.max_iter,
                             self.n_trials, self.tol)
def get_scope_by_name ( self , scope_name ) : """GetScopeByName . [ Preview API ] : param str scope _ name : : rtype : : class : ` < IdentityScope > < azure . devops . v5_0 . identity . models . IdentityScope > `"""
query_parameters = { } if scope_name is not None : query_parameters [ 'scopeName' ] = self . _serialize . query ( 'scope_name' , scope_name , 'str' ) response = self . _send ( http_method = 'GET' , location_id = '4e11e2bf-1e79-4eb5-8f34-a6337bd0de38' , version = '5.0-preview.2' , query_parameters = query_parameters...
def getChain(self):
    "Return the keys of the documents in this chain, head first."
    keys = []
    node = self.head
    while node:
        keys.append(node._key)
        node = node.nextDoc
    return keys
def asDictionary ( self ) : """returns the object as a dictionary"""
template = { "type" : "esriPMS" , "url" : self . _url , "imageData" : self . _imageDate , "contentType" : self . _contentType , "width" : self . _width , "height" : self . _height , "angle" : self . _angle , "xoffset" : self . _xoffset , "yoffset" : self . _yoffset , "xscale" : self . _xscale , "yscale" : self . _yscal...
def annotate_proto ( self , text , annotators = None ) : """Return a Document protocol buffer from the CoreNLP server , containing annotations of the text . : param ( str ) text : text to be annotated : param ( list [ str ] ) annotators : a list of annotator names : return ( CoreNLP _ pb2 . Document ) : a Doc...
properties = { 'annotators' : ',' . join ( annotators or self . default_annotators ) , 'outputFormat' : 'serialized' , 'serializer' : 'edu.stanford.nlp.pipeline.ProtobufAnnotationSerializer' } r = self . _request ( text , properties ) buffer = r . content # bytes size , pos = _DecodeVarint ( buffer , 0 ) buffer = buffe...
def new_connection ( self , remote_ip , remote_port ) : """This method is called when a new SMTP session is opened . [ PUBLIC API ]"""
self . state . set_state ( 'new' ) self . _message = Message ( Peer ( remote_ip , remote_port ) ) decision , response_sent = self . is_allowed ( 'accept_new_connection' , self . _message . peer ) if decision : if not response_sent : self . handle_input ( 'greet' ) self . _set_size_restrictions ( ) else ...
def wait_until_exit(self):
    """Block until the worker thread exits, then stop.

    Used for testing purposes only; requires the thread to have been
    started with a timeout, otherwise it would block forever.
    """
    if self._timeout is None:
        raise Exception("Thread will never exit. Use stop or specify timeout when starting it!")
    self._thread.join()
    self.stop()
def mark_dmag_rec ( s , ind , data ) : """Edits demagnetization data to mark " bad " points with measurement _ flag"""
datablock = [ ] for rec in data : if rec [ 'er_specimen_name' ] == s : meths = rec [ 'magic_method_codes' ] . split ( ':' ) if 'LT-NO' in meths or 'LT-AF-Z' in meths or 'LT-T-Z' in meths : datablock . append ( rec ) dmagrec = datablock [ ind ] for k in range ( len ( data ) ) : meths ...
def get_command(self, ctx, cmd_name):
    """Return the subcommand registered under `cmd_name`, or None.

    Parameters
    ----------
    ctx : Context
        The context object passed into the method (unused here).
    cmd_name : str
        The service name to look up.
    """
    commands = self.all_cmds
    if cmd_name not in commands:
        return None
    return EventTypeSubCommand(self.events_lib, cmd_name, commands[cmd_name])
def on_lstSubcategories_itemSelectionChanged ( self ) : """Update subcategory description label . . . note : : This is an automatic Qt slot executed when the subcategory selection changes ."""
self . clear_further_steps ( ) # Set widgets subcategory = self . selected_subcategory ( ) # Exit if no selection if not subcategory : return # Set description label self . lblDescribeSubcategory . setText ( subcategory [ 'description' ] ) icon_path = get_image_path ( subcategory ) self . lblIconSubcategory . setPi...
def indexes(self, recurse=True):
    """Return the indexes associated with this schema.

    :param recurse: also include indexes inherited from ancestor
        schemas.
    :return: {name: <orb.Index>, ..}
    :raises orb.errors.ModelNotFound: when an inherited schema cannot
        be resolved.
    """
    output = self.__indexes.copy()
    if recurse and self.inherits():
        base = orb.system.schema(self.inherits())
        if not base:
            raise orb.errors.ModelNotFound(schema=self.inherits())
        output.update(base.indexes(recurse=recurse))
    return output
def fem ( ab , off , angle , zsrc , zrec , lsrc , lrec , depth , freq , etaH , etaV , zetaH , zetaV , xdirect , isfullspace , ht , htarg , use_ne_eval , msrc , mrec , loop_freq , loop_off , conv = True ) : r"""Return the electromagnetic frequency - domain response . This function is called from one of the above m...
# Preallocate array fEM = np . zeros ( ( freq . size , off . size ) , dtype = complex ) # Initialize kernel count # ( how many times the wavenumber - domain kernel was calld ) kcount = 0 # If < ab > = 36 ( or 63 ) , fEM - field is zero if ab in [ 36 , ] : return fEM , kcount , conv # Get full - space - solution if ...
def subtract ( self , other = None , ** kwargs ) : """Elements are subtracted from an * iterable * or from another * mapping * ( or counter ) . Like : func : ` dict . update ` but subtracts counts instead of replacing them ."""
if other is not None : if self . _same_redis ( other , RedisCollection ) : self . _update_helper ( other , operator . sub , use_redis = True ) elif hasattr ( other , 'keys' ) : self . _update_helper ( other , operator . sub ) else : self . _update_helper ( collections . Counter ( oth...
def update_dataset ( self , dataStr , flatten = False ) : '''update class with a data structure . : keyword flatten : use this to automatically flatten variables ( squeeze dimensions )'''
# Load keys and dimensions dataDim = dataStr . pop ( '_dimensions' , { } ) attrStr = dataStr . pop ( '_attributes' , { } ) ndims = dataDim . pop ( '_ndims' , 0 ) dimensions = [ dataDim . keys ( ) , dataDim . values ( ) ] keys = dataStr . keys ( ) if len ( keys ) == 0 : self . warning ( 2 , 'No data loaded' ) re...
def parse_bowtie2_logs ( self , f ) : """Warning : This function may make you want to stab yourself . Parse logs from bowtie2 . These miss several key bits of information such as input files , so we try to look for logs from other wrapper tools that may have logged this info . If not found , we default to usi...
# Regexes regexes = { 'unpaired' : { 'unpaired_aligned_none' : r"(\d+) \([\d\.]+%\) aligned 0 times" , 'unpaired_aligned_one' : r"(\d+) \([\d\.]+%\) aligned exactly 1 time" , 'unpaired_aligned_multi' : r"(\d+) \([\d\.]+%\) aligned >1 times" } , 'paired' : { 'paired_aligned_none' : r"(\d+) \([\d\.]+%\) aligned concordan...
def _report_self ( self ) : """Reports the crawler uuid to redis"""
if self . redis_connected : self . logger . debug ( "Reporting self to redis" ) try : key = "stats:rest:self:{m}:{u}" . format ( m = socket . gethostname ( ) , u = self . my_uuid ) self . redis_conn . set ( key , self . get_time ( ) ) self . redis_conn . expire ( key , self . settings [ ...
def accumulate_from_superclasses ( cls , propname ) : '''Traverse the class hierarchy and accumulate the special sets of names ` ` MetaHasProps ` ` stores on classes : Args : name ( str ) : name of the special attribute to collect . Typically meaningful values are : ` ` _ _ container _ props _ _ ` ` , ` `...
cachename = "__cached_all" + propname # we MUST use cls . _ _ dict _ _ NOT hasattr ( ) . hasattr ( ) would also look at base # classes , and the cache must be separate for each class if cachename not in cls . __dict__ : s = set ( ) for c in inspect . getmro ( cls ) : if issubclass ( c , HasProps ) and h...
def replaceelement(oldelem, newelem):
    '''Given a parent element, replace oldelem with newelem in place.

    Looks up oldelem's parent and swaps oldelem for newelem at the same
    child position. Does nothing when oldelem has no parent.
    '''
    parent = oldelem.getparent()
    if parent is None:
        return
    # Snapshot the child list once instead of re-calling getchildren()
    # on every iteration (the original was O(n^2)).
    for index, child in enumerate(parent.getchildren()):
        if child == oldelem:
            parent.remove(oldelem)
            parent.insert(index, newelem)
            # Stop after the first match: oldelem is gone, and a second
            # remove() on it would raise.
            break
def token_name(tokens, expected):
    """Match a token name (type).

    Takes the first token from ``tokens``; when its name equals
    ``expected``, wraps it in a TokenMatch, otherwise returns None.
    """
    iterator = iter(tokens)
    try:
        token = next(iterator)
    except StopIteration:
        # Empty stream: nothing to match.
        return None
    if not token or token.name != expected:
        return None
    return TokenMatch(None, token.value, (token,))
def run(self, allow_interactive=True):
    """Enter the native GUI event loop.

    Parameters
    ----------
    allow_interactive : bool
        Is the application allowed to handle interactive mode for console
        terminals? By default, typing ``python -i main.py`` results in an
        interactive shell that also regularly calls the event loop.
    """
    interactive = allow_interactive and self.is_interactive()
    if not interactive:
        # Non-interactive: hand control to the backend's blocking loop.
        return self._backend._vispy_run()
    # Interactive console session: integrate with the input hook
    # instead of blocking in the native loop.
    inputhook.set_interactive(enabled=True, app=self)
def logout_callback():
    """Route called by the OpenID provider when user logs out.

    Clears the auth cookies here by overwriting them with expired values.
    """
    response = make_response('Logging Out')
    # Expire both cookies immediately.
    for cookie_name in ('sub', 'session_id'):
        response.set_cookie(cookie_name, 'null', expires=0)
    return response
def fix_docs ( cls ) : """copies docstrings of derived attributes ( methods , properties , attrs ) from parent classes ."""
import inspect public_undocumented_members = { name : func for name , func in inspect . getmembers ( cls ) if not name . startswith ( '_' ) and func . __doc__ is None } for name , func in public_undocumented_members . items ( ) : for parent in cls . __mro__ [ 1 : ] : parfunc = getattr ( parent , name , None...
def withNamedBits(cls, **values):
    """Creates a subclass with discreet named bits constraint.

    Reduce fully duplicate enumerations along the way.
    """
    # A set of (name, value) pairs drops entries that appear both on the
    # parent class and in the keyword arguments.
    merged = set(cls.namedValues.items())
    merged.update(values.items())

    class X(cls):
        namedValues = namedval.NamedValues(*merged)

    # Keep the subclass transparent to callers.
    X.__name__ = cls.__name__
    return X
def set_level(self, position, channel=None):
    """Seek a specific value by specifying a float() from 0.0 to 1.0.

    :param position: target level; anything accepted by float().
    :param channel: optional channel forwarded to writeNodeData.
    :return: False when position cannot be converted to float,
        otherwise None.
    """
    try:
        position = float(position)
    except (TypeError, ValueError) as err:
        # Only conversion failures are expected here; anything else
        # should propagate instead of being silently swallowed by a
        # blanket `except Exception`.
        LOG.debug("HelperLevel.set_level: Exception %s" % (err,))
        return False
    self.writeNodeData("LEVEL", position, channel)
def expose_finish ( self , * args ) : """Finish drawing process"""
# Obtain a reference to the OpenGL drawable # and rendering context . gldrawable = self . get_gl_drawable ( ) # glcontext = self . get _ gl _ context ( ) if not gldrawable : return # Put the buffer on the screen ! if gldrawable . is_double_buffered ( ) : gldrawable . swap_buffers ( ) else : glFlush ( ) # Op...
def LDRSH ( self , params ) : """LDRSH Ra , [ Rb , Rc ] Load a half word from memory , sign extend , and put into Ra Ra , Rb , and Rc must be low registers"""
# TODO LDRSH cant use immediates Ra , Rb , Rc = self . get_three_parameters ( self . THREE_PARAMETER_WITH_BRACKETS , params ) self . check_arguments ( low_registers = ( Ra , Rb , Rc ) ) def LDRSH_func ( ) : # TODO does memory read up ? if ( self . register [ Rb ] + self . register [ Rc ] ) % 2 != 0 : raise ...
def remove_config_to_machine_group(self, project_name, config_name, group_name):
    """Remove a logtail config from a machine group.
    Unsuccessful operation will cause an LogException.

    :type project_name: string
    :param project_name: the Project name

    :type config_name: string
    :param config_name: the logtail config name

    :type group_name: string
    :param group_name: the machine group name
    """
    resource = "/machinegroups/%s/configs/%s" % (group_name, config_name)
    # No query params or extra headers are needed for this DELETE.
    resp, header = self._send("DELETE", project_name, None, resource, {}, {})
    return RemoveConfigToMachineGroupResponse(header, resp)
def register_model_resource ( self , resource : ModelResource ) : """Method to manually register a : class : ` ModelResource ` with APISpec . : param resource :"""
model_name = resource . Meta . model . __name__ self . spec . add_tag ( { 'name' : model_name , 'description' : resource . Meta . model . __doc__ , } ) for method in resource . methods ( ) : key = f'{resource.__name__}.{method}' if key not in unchained . controller_bundle . controller_endpoints : contin...
def _cache_index ( self , dbname , collection , index , cache_for ) : """Add an index to the index cache for ensure _ index operations ."""
now = datetime . datetime . utcnow ( ) expire = datetime . timedelta ( seconds = cache_for ) + now with self . __index_cache_lock : if dbname not in self . __index_cache : self . __index_cache [ dbname ] = { } self . __index_cache [ dbname ] [ collection ] = { } self . __index_cache [ dbname...
def _get_description ( self , args : Tuple , kwargs : Dict [ str , Any ] ) -> Dict [ str , Any ] : """Return the dictionary to be sent to the queue ."""
return { 'id' : uuid1 ( ) . hex , 'args' : args , 'kwargs' : kwargs , 'module' : self . _module_name , 'function' : self . f . __name__ , 'sender_hostname' : socket . gethostname ( ) , 'sender_pid' : os . getpid ( ) , 'sender_cmd' : ' ' . join ( sys . argv ) , 'sender_timestamp' : datetime . utcnow ( ) . isoformat ( ) ...
def clearCache():
    """Clears any cached data we have stored about specific engine versions."""
    # Compute the cache directory once (the original called _cacheDir()
    # twice) and rely on the bool result directly instead of `== True`.
    cache_dir = CachedDataManager._cacheDir()
    if os.path.exists(cache_dir):
        shutil.rmtree(cache_dir)
def software_fibonacci(n):
    """A normal old python function to return the Nth fibonacci number."""
    prev, curr = 0, 1
    for _ in range(n):
        prev, curr = curr, prev + curr
    return prev
def phi_inv(p):
    """phi_inv: inverse of gaussian (normal) CDF.

    Source: Handbook of Mathematical Functions,
    Milton Abramowitz and Irene A. Stegun (Editors), formula 26.2.23.
    Absolute error of the approximation is below 4.5e-4.

    :param p: probability in (0, 1); values outside raise a math
        domain error from log().
    """
    def _rational(t):
        # The 26.2.23 rational polynomial c0..c2 / d1..d3.
        numerator = (0.010328 * t + 0.802853) * t + 2.515517
        denominator = ((0.001308 * t + 0.189269) * t + 1.432788) * t + 1.0
        return numerator / denominator

    if p < 0.5:
        # Lower tail: mirror the upper-tail formula.
        t = math.sqrt(-2.0 * math.log(p))
        return _rational(t) - t
    t = math.sqrt(-2.0 * math.log(1.0 - p))
    return t - _rational(t)
def set_weekly(self, interval, *, days_of_week, first_day_of_week, **kwargs):
    """Set to repeat every week on specified days for every x no. of days.

    :param int interval: no. of days to repeat at
    :param str first_day_of_week: starting day for a week
    :param list[str] days_of_week: days of the week to repeat on
    """
    # Weekly recurrence reuses the daily configuration as a base.
    self.set_daily(interval, **kwargs)
    self.__first_day_of_week = first_day_of_week
    self.__days_of_week = set(days_of_week)
def set_session_token(self, session_token):
    """Sets session token and new login time.

    :param str session_token: Session token from request.
    """
    # Record when this token became active, then store it.
    self._login_time = datetime.datetime.now()
    self.session_token = session_token
def end_timing(self):
    """Ends timing of an execution block, calculates elapsed time
    and updates the associated counter.
    """
    # Identity check -- `!= None` invokes __ne__ and is non-idiomatic.
    if self._callback is not None:
        # _start presumably holds perf_counter()*1000 captured when
        # timing began, so elapsed is in milliseconds -- TODO confirm.
        elapsed = time.perf_counter() * 1000 - self._start
        self._callback.end_timing(self._counter, elapsed)
def get_path(self):
    '''Return the selected path; fall back to the root directory
    when nothing is selected.'''
    model, tree_iter = self.selection.get_selected()
    if tree_iter:
        return model[tree_iter][PATH_COL]
    return '/'
def summary_reporter ( self ) : """Parse individual MOB Recon reports into a summary report"""
logging . info ( 'Creating MOB-recon summary report' ) with open ( os . path . join ( self . reportpath , 'mob_recon_summary.csv' ) , 'w' ) as summary : data = 'Strain,Location,Contig,Incompatibility,IncompatibilityAccession,RelaxaseType,' 'MashNearestNeighbor,MashNeighborDistance\n' for sample in self . metada...
def create(window, root):
    """Create a notification object.

    Args:
        window (:py:class:`BrowserWindow`): Window object this region
            appears in.
        root (:py:class:`~selenium.webdriver.remote.webelement.WebElement`):
            WebDriver element object that serves as the root for the
            notification.

    Returns:
        The most specific notification class registered for the root
        element's id, falling back to BaseNotification.
    """
    _id = root.get_property("id")
    # Imported lazily, matching the original (likely to avoid an
    # import cycle -- TODO confirm).
    from foxpuppet.windows.browser.notifications import addons

    registry = {}
    registry.update(addons.NOTIFICATIONS)
    notification_class = registry.get(_id, BaseNotification)
    return notification_class(window, root)
def _warn(self, problem, kind=BrotherQLRasterError):
    """Logs the warning message `problem` or raises a
    `BrotherQLRasterError` exception (changeable via `kind`) if
    `self.exception_on_warning` is set to True.

    :raises BrotherQLRasterError: Or other exception set via the
        `kind` keyword argument.
    """
    if not self.exception_on_warning:
        logger.warning(problem)
        return
    raise kind(problem)
def presence_handler ( type_ , from_ ) : """Register the decorated function as presence stanza handler . : param type _ : Presence type to listen for : type type _ : : class : ` ~ . PresenceType ` : param from _ : Sender JIDs to listen for : type from _ : : class : ` aioxmpp . JID ` or : data : ` None ` :...
def decorator ( f ) : if asyncio . iscoroutinefunction ( f ) : raise TypeError ( "presence_handler must not be a coroutine function" ) aioxmpp . service . add_handler_spec ( f , aioxmpp . service . HandlerSpec ( ( _apply_presence_handler , ( type_ , from_ ) ) , require_deps = ( SimplePresenceDispatcher ...
def _validate_arguments ( self ) : """method to sanitize model parameters Parameters None Returns None"""
if self . _has_terms ( ) : [ term . _validate_arguments ( ) for term in self . _terms ] else : super ( TensorTerm , self ) . _validate_arguments ( ) return self
def p_InDecrement(p):
    '''InDecrement : INDECREMENT Expression | Expression INDECREMENT'''
    # NOTE(review): the docstring above is a PLY/yacc grammar rule that is
    # consumed at parser-build time -- it must not be reworded.
    from .helper import isString
    if isString(p[1]):
        # First symbol is the operator token string, i.e. the prefix form
        # (INDECREMENT Expression); the third argument presumably flags
        # postfix-ness -- TODO confirm against the InDecrement constructor.
        p[0] = InDecrement(p[1], p[2], False)
    else:
        # Expression INDECREMENT: the postfix form, flag set to True.
        p[0] = InDecrement(p[2], p[1], True)