signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def run ( self ) : """Listen for pings until stopped ."""
self . _live = True self . _sock . settimeout ( self . LISTEN_TIMEOUT_S ) # Passing in INADDR _ ANY means the kernel will choose the default interface . # The localhost address is used to receive messages sent in " local _ only " # mode and the default address is used to receive all other messages . for interface_ip in...
def configure_truth(self, **kwargs):  # pragma: no cover
    """Set the keyword arguments forwarded to ``axvline``/``axhline`` when
    plotting truth values.

    Invoked automatically by ``plot`` if not called explicitly.  When no
    line style is supplied a dashed black line is used by default.

    :return: ``self`` so that calls can be chained
    """
    linestyle_given = kwargs.get("ls") is not None or kwargs.get("linestyle") is not None
    if not linestyle_given:
        kwargs["ls"] = "--"
        kwargs["dashes"] = (3, 3)
    if kwargs.get("color") is None:
        kwargs["color"] = "#000000"
    self.config_truth = kwargs
    self._configured_truth = True
    return self
def direct_to_template(request, template, extra_context=None, **kwargs):
    """Replacement for Django's ``direct_to_template`` that uses
    ``TemplateResponse`` via ``yacms.utils.views.render``.
    """
    context = extra_context or {}
    context["params"] = kwargs
    # Resolve lazy values: replace any callable in the context by its result.
    for key in list(context):
        value = context[key]
        if callable(value):
            context[key] = value()
    return TemplateResponse(request, template, context)
def insert_one(self, validate=True):
    """Insert this document.

    The ``validate`` argument maps to the inverse of PyMongo's
    ``bypass_document_validation`` option on ``Collection.insert_one``.
    """
    options = {"bypass_document_validation": not validate}
    # NOTE(review): 'source' is never placed into ``options`` above, so this
    # pop always yields None -- presumably kept for interface symmetry with
    # sibling methods; confirm against the rest of the class.
    collection = self.get_collection(options.pop("source", None))
    return collection.insert_one(self, **options)
def xml_marshal_bucket_constraint ( region ) : """Marshal ' s bucket constraint based on * region * . : param region : Region name of a given bucket . : return : Marshalled XML data ."""
root = s3_xml . Element ( 'CreateBucketConfiguration' , { 'xmlns' : _S3_NAMESPACE } ) location_constraint = s3_xml . SubElement ( root , 'LocationConstraint' ) location_constraint . text = region data = io . BytesIO ( ) s3_xml . ElementTree ( root ) . write ( data , encoding = None , xml_declaration = False ) return da...
def get_target_transcript ( self , min_intron = 1 ) : """Get the mapping of to the target strand : returns : Transcript mapped to target : rtype : Transcript"""
if min_intron < 1 : sys . stderr . write ( "ERROR minimum intron should be 1 base or longer\n" ) sys . exit ( ) # tx = Transcript ( ) rngs = [ self . alignment_ranges [ 0 ] [ 0 ] . copy ( ) ] # rngs [ 0 ] . set _ direction ( None ) for i in range ( len ( self . alignment_ranges ) - 1 ) : dist = self . align...
def render_string(self, source: str, **vars) -> str:
    """Render the template contained in *source*.

    The current context is exposed to the template as the ``ctx`` variable
    unless the caller supplies their own ``ctx``.

    :param source: content of the template to render
    :param vars: extra variables made available to the template
    :return: the rendered template text
    """
    if "ctx" not in vars:
        vars["ctx"] = self._ctx
    return self._renderer.render_string(source, **vars)
def get_top_artists(self, period=PERIOD_OVERALL, limit=None):
    """Return the top artists played by a user.

    * period: one of PERIOD_OVERALL, PERIOD_7DAYS, PERIOD_1MONTH,
      PERIOD_3MONTHS, PERIOD_6MONTHS, PERIOD_12MONTHS
    * limit: optional maximum number of artists to return
    """
    request_args = self._get_params()
    request_args["period"] = period
    if limit:
        request_args["limit"] = limit
    response_doc = self._request(self.ws_prefix + ".getTopArtists", True, request_args)
    return _extract_top_artists(response_doc, self.network)
def update_flags(self, idlist, flags):
    """Thin backwards-compatibility wrapper around ``build_update(flags=X)``."""
    update = self.build_update(flags=flags)
    return self.update_bugs(idlist, update)
def get_time_graded ( self ) : """Gets the time the gradeable object was graded . return : ( osid . calendaring . DateTime ) - the timestamp of the grading entry raise : IllegalState - ` ` is _ graded ( ) ` ` is ` ` false ` ` or ` ` is _ derived ( ) ` ` is ` ` true ` ` * compliance : mandatory - - This me...
if not self . is_graded ( ) or self . is_derived ( ) : raise errors . IllegalState ( ) time_graded = self . _my_map [ 'timeGraded' ] return DateTime ( year = time_graded . year , month = time_graded . month , day = time_graded . day , hour = time_graded . hour , minute = time_graded . minute , second = time_graded ...
def log_handler(self, handler):
    """Setter for the log handler function.

    The handler is only installed while the DLL connection has not yet been
    opened; ``util.noop`` is substituted when no handler is given.

    Args:
      self (JLink): the ``JLink`` instance
      handler: callable invoked for log messages, or ``None``

    Returns:
      ``None``
    """
    if self.opened():
        return
    self._log_handler = enums.JLinkFunctions.LOG_PROTOTYPE(handler or util.noop)
    self._dll.JLINKARM_EnableLog(self._log_handler)
def restore_state ( self , state ) : """Restore the current state of this emulated object . Args : state ( dict ) : A previously dumped state produced by dump _ state ."""
super ( ReferenceController , self ) . restore_state ( state ) state_name = state . get ( 'state_name' ) state_version = state . get ( 'state_version' ) if state_name != self . STATE_NAME or state_version != self . STATE_VERSION : raise ArgumentError ( "Invalid emulated device state name or version" , found = ( sta...
def parse ( cls , version_string , partial = False , coerce = False ) : """Parse a version string into a Version ( ) object . Args : version _ string ( str ) , the version string to parse partial ( bool ) , whether to accept incomplete input coerce ( bool ) , whether to try to map the passed in string into ...
if not version_string : raise ValueError ( 'Invalid empty version string: %r' % version_string ) if partial : version_re = cls . partial_version_re else : version_re = cls . version_re match = version_re . match ( version_string ) if not match : raise ValueError ( 'Invalid version string: %r' % version_...
def fix_config ( self , options ) : """Fixes the options , if necessary . I . e . , it adds all required elements to the dictionary . : param options : the options to fix : type options : dict : return : the ( potentially ) fixed options : rtype : dict"""
options = super ( ForLoop , self ) . fix_config ( options ) opt = "min" if opt not in options : options [ opt ] = 1 if opt not in self . help : self . help [ opt ] = "The minimum for the loop (included, int)." opt = "max" if opt not in options : options [ opt ] = 10 if opt not in self . help : self . he...
def write_and_convert(self, text):
    """Write *text* to the wrapped stream, stripping ANSI escape sequences
    and dispatching each sequence to ``convert_ansi`` (optionally turning
    them into win32 calls).
    """
    pos = 0
    for match in self.ANSI_RE.finditer(text):
        begin, finish = match.span()
        # Flush the plain text preceding this escape sequence, then hand the
        # sequence's captured groups to the converter.
        self.write_plain_text(text, pos, begin)
        self.convert_ansi(*match.groups())
        pos = finish
    # Trailing plain text after the final escape sequence.
    self.write_plain_text(text, pos, len(text))
def parse_actor_and_date(line):
    """Parse the actor (author or committer) info from a line like::

        author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700

    :return: [Actor, int_seconds_since_epoch, int_timezone_offset]
    """
    actor_text, epoch, offset = "", 0, 0
    match = _re_actor_epoch.search(line)
    if match:
        actor_text, epoch, offset = match.groups()
    else:
        # No epoch/offset present: fall back to extracting just the actor,
        # or use the raw line when nothing matches at all.
        fallback = _re_only_actor.search(line)
        actor_text = fallback.group(1) if fallback else line or ""
    return (Actor._from_string(actor_text), int(epoch), utctz_to_altz(offset))
def detect ( self , color_im , depth_im , cfg , camera_intr , T_camera_world , vis_foreground = False , vis_segmentation = False , segmask = None ) : """Detects all relevant objects in an rgbd image pair using foreground masking . Parameters color _ im : : obj : ` ColorImage ` color image for detection dept...
# read params min_pt_box = np . array ( cfg [ 'min_pt' ] ) max_pt_box = np . array ( cfg [ 'max_pt' ] ) min_contour_area = cfg [ 'min_contour_area' ] max_contour_area = cfg [ 'max_contour_area' ] min_box_area = cfg [ 'min_box_area' ] max_box_area = cfg [ 'max_box_area' ] box_padding_px = cfg [ 'box_padding_px' ] crop_h...
def translate_connect_args ( self , names = [ ] , ** kw ) : """Translate url attributes into a dictionary of connection arguments . Returns attributes of this url ( ` host ` , ` database ` , ` username ` , ` password ` , ` port ` ) as a plain dictionary . The attribute names are used as the keys by default . ...
translated = { } attribute_names = [ "host" , "database" , "username" , "password" , "port" ] for sname in attribute_names : if names : name = names . pop ( 0 ) elif sname in kw : name = kw [ sname ] else : name = sname if name is not None and getattr ( self , sname , False ) : ...
def get(self, action, version=None):
    """Get the method class handling the given action and version.

    Falls back to the ``None`` (unversioned) entry when the requested
    version is not registered for the action.
    """
    handlers = self._by_action[action]
    if version not in handlers:
        return handlers[None]
    return handlers[version]
def drag_and_drop(self, droppable):
    """Drag this element and drop it onto *droppable*.

    Currently works only with the Chrome driver.
    """
    self.scroll_to()
    chain = ActionChains(self.parent.driver)
    chain.drag_and_drop(self._element, droppable._element).perform()
def add_arguments ( parser ) : """Parse arguments Args : parser ( argparse . ArgumentParser )"""
parser . description = 'Examples:\n' 'python -m etk regex_extractor pattern /tmp/date.txt\n' 'cat /tmp/date.txt | python -m etk regex_extractor pattern' parser . add_argument ( 'pattern' , nargs = '?' , type = str , default = sys . stdin ) parser . add_argument ( 'input_file' , nargs = '?' , type = argparse . FileType ...
def signed_add(a, b):
    """Return a wirevector for the result of signed addition.

    Given inputs of length n and m, the signed sum is max(n, m) + 1 bits
    wide.

    :param a: wirevector serving as the first addend
    :param b: wirevector serving as the second addend
    """
    a, b = match_bitwidth(as_wires(a), as_wires(b), signed=True)
    out_width = len(a) + 1
    # Sign-extend both operands to the output width, add, then truncate the
    # (possibly one-bit-wider) raw sum back down to out_width bits.
    lhs = a.sign_extended(out_width)
    rhs = b.sign_extended(out_width)
    return (lhs + rhs)[0:out_width]
def get_clients(self):
    """Return a merge of the public and private client channels.

    On duplicate keys the private channel wins, matching the original
    ``dict(public.items() + private.items())`` ordering.

    :return: dict containing the union of both channels' entries
    """
    # ``dict_items`` objects cannot be concatenated with ``+`` on Python 3,
    # so build the merge with a copy followed by an update instead.
    merged = dict(self.channels["public"])
    merged.update(self.channels["private"])
    return merged
def parse_aws_include_transform ( data ) : """If the input data is an AWS : : Include data , then parse and return the location of the included file . AWS : : Include transform data usually has the following format : " Fn : : Transform " : { " Name " : " AWS : : Include " , " Parameters " : { " Location "...
if not data : return if _FN_TRANSFORM not in data : return transform_data = data [ _FN_TRANSFORM ] name = transform_data . get ( "Name" ) location = transform_data . get ( "Parameters" , { } ) . get ( "Location" ) if name == "AWS::Include" : LOG . debug ( "Successfully parsed location from AWS::Include tran...
def _read_mplain ( self , lines ) : """Read text fragments from a multilevel format text file . : param list lines : the lines of the subtitles text file"""
self . log ( u"Parsing fragments from subtitles text format" ) word_separator = self . _mplain_word_separator ( ) self . log ( [ u"Word separator is: '%s'" , word_separator ] ) lines = [ line . strip ( ) for line in lines ] pairs = [ ] i = 1 current = 0 tree = Tree ( ) while current < len ( lines ) : line_text = li...
def trace_walker ( module ) : """Defines a generator used to walk into modules . : param module : Module to walk . : type module : ModuleType : return : Class / Function / Method . : rtype : object or object"""
for name , function in inspect . getmembers ( module , inspect . isfunction ) : yield None , function for name , cls in inspect . getmembers ( module , inspect . isclass ) : yield cls , None for name , method in inspect . getmembers ( cls , inspect . ismethod ) : yield cls , method for name , fu...
def create_background ( bg_type , fafile , outfile , genome = "hg18" , width = 200 , nr_times = 10 , custom_background = None ) : """Create background of a specific type . Parameters bg _ type : str Name of background type . fafile : str Name of input FASTA file . outfile : str Name of output FASTA fi...
width = int ( width ) config = MotifConfig ( ) fg = Fasta ( fafile ) if bg_type in [ "genomic" , "gc" ] : if not genome : logger . error ( "Need a genome to create background" ) sys . exit ( 1 ) if bg_type == "random" : f = MarkovFasta ( fg , k = 1 , n = nr_times * len ( fg ) ) logger . debu...
def __try_read_record ( self ) : """Try reading a record . Returns : ( data , record _ type ) tuple . Raises : EOFError : when end of file was reached . InvalidRecordError : when valid record could not be read ."""
block_remaining = _BLOCK_SIZE - self . __reader . tell ( ) % _BLOCK_SIZE if block_remaining < _HEADER_LENGTH : return ( '' , _RECORD_TYPE_NONE ) header = self . __reader . read ( _HEADER_LENGTH ) if len ( header ) != _HEADER_LENGTH : raise EOFError ( 'Read %s bytes instead of %s' % ( len ( header ) , _HEADER_LE...
def traverse_one ( self , attribute , source , target , visitor ) : """: param source : source data proxy : type source : instance of ` DataTraversalProxy ` or None : param target : target data proxy : type target : instance of ` DataTraversalProxy ` or None"""
if __debug__ : self . __log_traverse_one ( self . __trv_path , attribute , source , target ) prx = source or target rel_op = RELATION_OPERATIONS . check ( source , target ) if prx . do_traverse ( ) and ( rel_op == prx . relation_operation or attribute is None ) : for attr in prx . get_relationship_attributes ( ...
def validate(self, obj):
    """Base validation method.

    Inspects class attributes to determine what must be present in *obj*.
    Raises ``aomi_excep.Validation`` when ``tags`` is given but not a list.
    """
    has_tags = "tags" in obj
    if has_tags and not isinstance(obj["tags"], list):
        raise aomi_excep.Validation("tags must be a list")
    if self.present:
        check_obj(self.required_fields, self.name(), obj)
def _postprocess_output ( self , output ) : '''Performs the last modifications before the output is returned .'''
# Replace long vowels with circumflex characters . if self . vowel_style == CIRCUMFLEX_STYLE : try : output = output . translate ( vowels_to_circumflexes ) except TypeError : # Python 2 will error out here if there are no # macron characters in the string to begin with . pass # Output the de...
def _FindPartition(self, key):
    """Find the partition for the byte-array representation of a partition key."""
    digest = self.hash_generator.ComputeHash(key)
    return self._LowerBoundSearch(self.partitions, digest)
def get_post ( self , slug ) : """This method returns a single post by slug"""
cache_key = self . get_cache_key ( post_slug = slug ) content = cache . get ( cache_key ) if not content : post = Post . objects . get ( slug = slug ) content = self . _format ( post ) cache_duration = conf . GOSCALE_CACHE_DURATION if post else 1 cache . set ( cache_key , content , cache_duration ) retu...
def _filter(self, data):
    """Apply a median filter to reduce noisy data.

    Each incoming sample is appended to its per-channel queue and the
    median of that queue is emitted for the channel.
    """
    # Avoid shadowing the ``data`` parameter inside the loop.
    medians = []
    for queue, sample in zip(self._raw_data_queues, data):
        queue.append(sample)
        medians.append(numpy.median(queue))
    return medians
def csetLog_maintenance ( self , please_stop = None ) : '''Handles deleting old csetLog entries and timestamping revisions once they pass the length for permanent storage for deletion later . : param please _ stop : : return :'''
while not please_stop : try : # Wait until something signals the maintenance cycle # to begin ( or end ) . ( self . maintenance_signal | please_stop ) . wait ( ) if please_stop : break if self . disable_maintenance : continue # Reset signal so we don ' t r...
def _dequeue_batch ( self ) -> Optional [ Batch ] : """Return a single batch from queue or ` ` None ` ` signaling epoch end . : raise ChildProcessError : if the enqueueing thread ended unexpectedly"""
if self . _enqueueing_thread is None : raise ValueError ( 'StreamWrapper `{}` with buffer of size `{}` was used outside with-resource environment.' . format ( self . _name , self . _buffer_size ) ) if not self . _enqueueing_thread . is_alive ( ) and self . _queue . empty ( ) : self . _start_thread ( ) while Tru...
def create_operator ( operator , auth , url , headers = HEADERS ) : """Function takes input of dictionary operator with the following keys operator = { " fullName " : " " , " sessionTimeout " : " " , " password " : " " , " operatorGroupId " : " " , " name " : " " , " desc " : " " , " defaultAcl " : " ...
create_operator_url = '/imcrs/plat/operator' f_url = url + create_operator_url payload = json . dumps ( operator , indent = 4 ) # creates the URL using the payload variable as the contents r = requests . post ( f_url , data = payload , auth = auth , headers = headers ) try : if r . status_code == 409 : # print ( " ...
def rebuild ( self ) : """Rebuilds the grid lines based on the current settings and scene width . This method is triggered automatically , and shouldn ' t need to be manually called ."""
rect = self . sceneRect ( ) x = rect . left ( ) y = rect . top ( ) w = rect . width ( ) h = rect . height ( ) # calculate background gridlines cx = x + ( w / 2 ) cy = y + ( h / 2 ) self . _centerLines = [ QLine ( cx , rect . top ( ) , cx , rect . bottom ( ) ) , QLine ( rect . left ( ) , cy , rect . right ( ) , cy ) ] #...
def valid_index ( index , shape ) -> tuple : """Get a valid index for a broadcastable shape . Parameters index : tuple Given index . shape : tuple of int Shape . Returns tuple Valid index ."""
# append slices to index index = list ( index ) while len ( index ) < len ( shape ) : index . append ( slice ( None ) ) # fill out , in reverse out = [ ] for i , s in zip ( index [ : : - 1 ] , shape [ : : - 1 ] ) : if s == 1 : if isinstance ( i , slice ) : out . append ( slice ( None ) ) ...
def round(self, ndigits=0):
    """Round the amount using the current ``Decimal`` rounding algorithm.

    ``ndigits=None`` is treated as 0.  Returns a new instance of the same
    class with the quantized amount and an unchanged currency.
    """
    digits = 0 if ndigits is None else ndigits
    exponent = Decimal("1e" + str(-digits))
    return self.__class__(
        amount=self.amount.quantize(exponent),
        currency=self.currency,
    )
def get_rates_from_response_headers ( headers ) : """Returns a namedtuple with values for short - and long usage and limit rates found in provided HTTP response headers : param headers : HTTP response headers : type headers : dict : return : namedtuple with request rates or None if no rate - limit headers pre...
try : usage_rates = [ int ( v ) for v in headers [ 'X-RateLimit-Usage' ] . split ( ',' ) ] limit_rates = [ int ( v ) for v in headers [ 'X-RateLimit-Limit' ] . split ( ',' ) ] return RequestRate ( short_usage = usage_rates [ 0 ] , long_usage = usage_rates [ 1 ] , short_limit = limit_rates [ 0 ] , long_limit...
def _get_timethresh_heuristics(self):
    """Reasonably decent heuristics for how long to wait between progress updates."""
    length = self.length
    # Longer jobs report less often.
    if length > 1e5:
        return 2.5
    if length > 1e4:
        return 2.0
    if length > 1e3:
        return 1.0
    return 0.5
def prepare_mac_header ( token , uri , key , http_method , nonce = None , headers = None , body = None , ext = '' , hash_algorithm = 'hmac-sha-1' , issue_time = None , draft = 0 ) : """Add an ` MAC Access Authentication ` _ signature to headers . Unlike OAuth 1 , this HMAC signature does not require inclusion of ...
http_method = http_method . upper ( ) host , port = utils . host_from_uri ( uri ) if hash_algorithm . lower ( ) == 'hmac-sha-1' : h = hashlib . sha1 elif hash_algorithm . lower ( ) == 'hmac-sha-256' : h = hashlib . sha256 else : raise ValueError ( 'unknown hash algorithm' ) if draft == 0 : nonce = nonce...
def check_ffprobe ( cls ) : """Check whether ` ` ffprobe ` ` can be called . Return ` ` True ` ` on failure and ` ` False ` ` on success . : rtype : bool"""
try : from aeneas . ffprobewrapper import FFPROBEWrapper file_path = gf . absolute_path ( u"tools/res/audio.mp3" , __file__ ) prober = FFPROBEWrapper ( ) properties = prober . read_properties ( file_path ) gf . print_success ( u"ffprobe OK" ) return False except : pass gf . print_erro...
def _next_move_direction(self):
    """pick a move at random from the list of moves"""
    nmoves = len(self.moves)
    # np.random.randint's upper bound is exclusive, so this draws from [1, nmoves].
    move = np.random.randint(1, nmoves + 1)
    # Re-draw while the candidate is the reverse of the previous move; the
    # (move + 3) % nmoves test presumes a 4-direction move set -- TODO confirm.
    while self.prev_move == (move + 3) % nmoves:
        move = np.random.randint(1, nmoves + 1)
    self.prev_move = move
    # NOTE(review): with move drawn from [1, nmoves], self.moves[move] indexes
    # past the end unless self.moves holds nmoves + 1 entries -- verify callers.
    return np.array(self.moves[move])
def _model_for_CLASS(self, name, definition):
    """Model a Swagger definition that behaves like a Python class.

    :param unicode name: the name of the definition from the specification
    :param pyrsistent.PMap definition: the Swagger definition to model
    """
    return _ClassModel.from_swagger(self.pclass_for_definition, name, definition)
def proxy_for(widget):
    """Create a proxy for a widget.

    :param widget: a gtk.Widget to proxy
    :raises KeyError: if no proxy type is registered for the widget's class
    """
    factory = widget_proxies.get(widget.__class__)
    if factory is None:
        raise KeyError("There is no proxy type registered for %r" % widget)
    return factory(widget)
def auth_aliases(d):
    """Interpret user/password aliases, rewriting them in *d* to their real keys."""
    alias_map = ((USER_KEY, "readonly_user"), (PASS_KEY, "readonly_password"))
    for alias, real in alias_map:
        if alias in d:
            # pop() moves the value and removes the alias in one step.
            d[real] = d.pop(alias)
def write(self, message, flush=True):
    """Write *message* to the wrapped stream.

    :param message: str-like content to send on the stream
    :param flush: when True (default), flush the stream after writing
    """
    stream = self.stream
    stream.write(message)
    if flush:
        stream.flush()
def cc(self, args=None, ret_val=None, sp_delta=None, func_ty=None):
    """Return a SimCC (calling convention) parametrized for this project
    and, optionally, a given function.

    :param args: list of argument storage locations (SimFunctionArguments)
    :param ret_val: the return value storage location
    :param sp_delta: stack pointer change after the call
    :param func_ty: optional function type
    """
    kwargs = dict(
        arch=self.project.arch,
        args=args,
        ret_val=ret_val,
        sp_delta=sp_delta,
        func_ty=func_ty,
    )
    return self._default_cc(**kwargs)
def str2midi ( note_string ) : """Given a note string name ( e . g . " Bb4 " ) , returns its MIDI pitch number ."""
if note_string == "?" : return nan data = note_string . strip ( ) . lower ( ) name2delta = { "c" : - 9 , "d" : - 7 , "e" : - 5 , "f" : - 4 , "g" : - 2 , "a" : 0 , "b" : 2 } accident2delta = { "b" : - 1 , "#" : 1 , "x" : 2 } accidents = list ( it . takewhile ( lambda el : el in accident2delta , data [ 1 : ] ) ) octa...
def known_dists():
    """Return a generator over all distributions exporting udata.* entrypoints."""
    def exports_entrypoints(dist):
        return any(key in ENTRYPOINTS for key in dist.get_entry_map().keys())

    return (dist for dist in pkg_resources.working_set if exports_entrypoints(dist))
def _adjust_probability_vec_best(population, fitnesses, probability_vec, adjust_rate):
    """Shift probabilities towards the best solution."""
    # Pair each solution with its fitness and keep the fittest.
    # NOTE(review): ties on fitness fall back to comparing the solutions
    # themselves -- assumes solutions support ordering; confirm if fitness
    # ties are possible in practice.
    _, best_solution = max(zip(fitnesses, population))
    return _adjust(probability_vec, best_solution, adjust_rate)
def run_xenon ( workflow , * , machine , worker_config , n_processes , deref = False , verbose = False ) : """Run the workflow using a number of online Xenon workers . : param workflow : | Workflow | or | PromisedObject | to evaluate . : param machine : The | Machine | instance . : param worker _ config : Con...
dynamic_pool = DynamicPool ( machine ) for i in range ( n_processes ) : cfg = copy ( worker_config ) cfg . name = 'xenon-{0:02}' . format ( i ) dynamic_pool . add_xenon_worker ( cfg ) job_keeper = JobKeeper ( ) S = Scheduler ( job_keeper = job_keeper , verbose = verbose ) result = S . run ( dynamic_pool , g...
def _calc_font_size ( self , win_wd ) : """Heuristic to calculate an appropriate font size based on the width of the viewer window . Parameters win _ wd : int The width of the viewer window . Returns font _ size : int Approximately appropriate font size in points"""
font_size = 4 if win_wd >= 1600 : font_size = 24 elif win_wd >= 1000 : font_size = 18 elif win_wd >= 800 : font_size = 16 elif win_wd >= 600 : font_size = 14 elif win_wd >= 500 : font_size = 12 elif win_wd >= 400 : font_size = 11 elif win_wd >= 300 : font_size = 10 elif win_wd >= 250 : f...
def cached_request ( self , request ) : """Return a cached response if it exists in the cache , otherwise return False ."""
cache_url = self . cache_url ( request . url ) logger . debug ( 'Looking up "%s" in the cache' , cache_url ) cc = self . parse_cache_control ( request . headers ) # Bail out if the request insists on fresh data if "no-cache" in cc : logger . debug ( 'Request header has "no-cache", cache bypassed' ) return False...
def construct_multi_parameter_validators ( parameters , context ) : """Given an iterable of parameters , returns a dictionary of validator functions for each parameter . Note that this expects the parameters to be unique in their name value , and throws an error if this is not the case ."""
validators = ValidationDict ( ) for parameter in parameters : key = parameter [ 'name' ] if key in validators : raise ValueError ( "Duplicate parameter name {0}" . format ( key ) ) parameter_validators = construct_parameter_validators ( parameter , context = context ) validators . add_validator ...
def _set_key_table ( self , v , load = False ) : """Setter method for key _ table , mapped from YANG variable / interface / fortygigabitethernet / ip / interface _ fo _ ospf _ conf / ospf _ interface _ config / md5 _ authentication / key _ table ( container ) If this variable is read - only ( config : false ) in ...
if hasattr ( v , "_utype" ) : v = v . _utype ( v ) try : t = YANGDynClass ( v , base = key_table . key_table , is_container = 'container' , presence = False , yang_name = "key-table" , rest_name = "key-id" , parent = self , path_helper = self . _path_helper , extmethods = self . _extmethods , register_paths = T...
def convert_type_list_elements(list_object=None, element_type=str):
    """Recursively convert all elements (and all elements of nested
    sublists) of a list to *element_type* and return the new list.

    :param list_object: the (possibly nested) list to convert
    :param element_type: target type callable, e.g. ``str`` or ``int``
    :return: a new list with every non-list element converted
    """
    # The original only handled ``element_type is str`` and silently
    # returned None for any other type; apply the conversion generically.
    return [
        convert_type_list_elements(list_object=element, element_type=element_type)
        if isinstance(element, list)
        else element_type(element)
        for element in list_object
    ]
def clear():
    """Clears the console."""
    # Windows uses ``cls``; everything else uses ``clear``.
    command = "cls" if sys.platform.startswith("win") else "clear"
    call(command, shell=True)
def atq ( tag = None ) : '''List all queued and running jobs or only those with an optional ' tag ' . CLI Example : . . code - block : : bash salt ' * ' at . atq salt ' * ' at . atq [ tag ] salt ' * ' at . atq [ job number ]'''
jobs = [ ] # Shim to produce output similar to what _ _ virtual _ _ ( ) should do # but _ _ salt _ _ isn ' t available in _ _ virtual _ _ ( ) # Tested on CentOS 5.8 if __grains__ [ 'os_family' ] == 'RedHat' : output = _cmd ( 'at' , '-l' ) else : output = _cmd ( 'atq' ) if output is None : return '\'at.atq\'...
def validate_implementation_for_auto_decode_and_soupify(func):
    """Validate that :func:`auto_decode_and_soupify` is applicable to *func*.

    The function must accept ``response``, ``html`` and ``soup`` keyword
    arguments; otherwise a ``NotImplementedError`` is raised.
    """
    # ``inspect.getargspec`` was removed in Python 3.11; ``getfullargspec``
    # is the drop-in replacement and also sees keyword-only arguments.
    arg_spec = inspect.getfullargspec(func)
    for arg in ["response", "html", "soup"]:
        if arg not in arg_spec.args and arg not in arg_spec.kwonlyargs:
            raise NotImplementedError(
                (
                    "{func} method has to take the keyword syntax input: "
                    "{arg}"
                ).format(func=func, arg=arg)
            )
def generateNodeDocuments ( self ) : '''Creates all of the reStructuredText documents related to types parsed by Doxygen . This includes all leaf - like documents ( ` ` class ` ` , ` ` struct ` ` , ` ` enum ` ` , ` ` typedef ` ` , ` ` union ` ` , ` ` variable ` ` , and ` ` define ` ` ) , as well as namespace ...
# initialize all of the nodes first for node in self . all_nodes : self . initializeNodeFilenameAndLink ( node ) self . adjustFunctionTitles ( ) # now that all potential ` ` node . link _ name ` ` members are initialized , generate # the leaf - like documents for node in self . all_nodes : if node . kind in uti...
def RandomUniformInt(shape, minval, maxval, seed):
    """Random uniform int op.

    Returns a 1-tuple holding an integer array of *shape* drawn uniformly
    from [minval, maxval).  When *seed* is truthy, the global NumPy RNG is
    seeded first, making the draw reproducible.
    """
    if seed:
        np.random.seed(seed)
    sample = np.random.randint(minval, maxval, size=shape)
    return (sample,)
def _set_bd_add ( self , v , load = False ) : """Setter method for bd _ add , mapped from YANG variable / routing _ system / evpn _ config / evpn / evpn _ instance / bridge _ domain / bd _ add ( container ) If this variable is read - only ( config : false ) in the source YANG file , then _ set _ bd _ add is con...
if hasattr ( v , "_utype" ) : v = v . _utype ( v ) try : t = YANGDynClass ( v , base = bd_add . bd_add , is_container = 'container' , presence = False , yang_name = "bd-add" , rest_name = "" , parent = self , path_helper = self . _path_helper , extmethods = self . _extmethods , register_paths = True , extension...
def evaluate ( self , pipeline_key , purpose = None , attempt = 0 ) : """Evaluates the given Pipeline and enqueues sub - stages for execution . Args : pipeline _ key : The db . Key or stringified key of the _ PipelineRecord to run . purpose : Why evaluate was called ( ' start ' , ' finalize ' , or ' abort ' )...
After . _thread_init ( ) InOrder . _thread_init ( ) InOrder . _local . _activated = False if not isinstance ( pipeline_key , db . Key ) : pipeline_key = db . Key ( pipeline_key ) pipeline_record = db . get ( pipeline_key ) if pipeline_record is None : logging . error ( 'Pipeline ID "%s" does not exist.' , pipel...
def prepare_editable_requirement ( self , req , # type : InstallRequirement require_hashes , # type : bool use_user_site , # type : bool finder # type : PackageFinder ) : # type : ( . . . ) - > DistAbstraction """Prepare an editable requirement"""
assert req . editable , "cannot prepare a non-editable req as editable" logger . info ( 'Obtaining %s' , req ) with indent_log ( ) : if require_hashes : raise InstallationError ( 'The editable requirement %s cannot be installed when ' 'requiring hashes, because there is no single file to ' 'hash.' % req ) ...
def SetGaugeCallback(self, metric_name, callback, fields=None):
    """See base class."""
    gauge = self._gauge_metrics[metric_name]
    gauge.SetCallback(callback, fields)
def check_column_id ( problems : List , table : str , df : DataFrame , column : str , * , column_required : bool = True , ) -> List : """A specialization of : func : ` check _ column ` . Parameters problems : list A four - tuple containing 1 . A problem type ( string ) equal to ` ` ' error ' ` ` or ` ` ' wa...
f = df . copy ( ) if not column_required : if column not in f . columns : f [ column ] = np . nan f = f . dropna ( subset = [ column ] ) cond = ~ f [ column ] . map ( valid_str ) problems = check_table ( problems , table , f , cond , f"Invalid {column}; maybe has extra space characters" , ) cond = f [ c...
def get_rprof ( step , var ) : """Extract or compute and rescale requested radial profile . Args : step ( : class : ` ~ stagpy . stagyydata . _ Step ` ) : a step of a StagyyData instance . var ( str ) : radial profile name , a key of : data : ` stagpy . phyvars . RPROF ` or : data : ` stagpy . phyvars . R...
if var in step . rprof . columns : rprof = step . rprof [ var ] rad = None if var in phyvars . RPROF : meta = phyvars . RPROF [ var ] else : meta = phyvars . Varr ( var , None , '1' ) elif var in phyvars . RPROF_EXTRA : meta = phyvars . RPROF_EXTRA [ var ] rprof , rad = meta . de...
def interpolate2dStructuredFastIDW(grid, mask, kernel=15, power=2, minnvals=5):
    """Faster variant of ``interpolate2dStructuredIDW``.

    Replace every value in ``grid`` flagged by ``mask`` with the inverse
    distance weighted (IDW) interpolation of the values within ``kernel``
    pixels, weighted by ``distance**power``.

    NOTE(review): assumes ``growPositions`` returns (neighbour offsets,
    distances) and ``_calc`` does the masked accumulation — confirm against
    their definitions.
    """
    offsets, distances = growPositions(kernel)
    # Closer neighbours get larger weights: w = 1 / d**(power/2).
    idw_weights = 1 / distances ** (0.5 * power)
    return _calc(grid, mask, offsets, idw_weights, minnvals - 1)
def sample_u ( self , q ) : r"""Extract a sample from random variates uniform on : math : ` [ 0 , 1 ] ` . For a univariate distribution , this is simply evaluating the inverse CDF . To facilitate efficient sampling , this function returns a * vector * of PPF values , one value for each variable . Basically , ...
q = scipy . atleast_1d ( q ) if len ( q ) != len ( self . univariate_priors ) : raise ValueError ( "length of q must equal the number of parameters!" ) if q . ndim != 1 : raise ValueError ( "q must be one-dimensional!" ) if ( q < 0 ) . any ( ) or ( q > 1 ) . any ( ) : raise ValueError ( "q must be within [0...
def alter_partition_with_environment_context(self, db_name, tbl_name, new_part, environment_context):
    """Alter a table partition, forwarding an environment context.

    Parameters:
     - db_name
     - tbl_name
     - new_part
     - environment_context
    """
    # Fire the request first, then block on (and validate) the reply.
    self.send_alter_partition_with_environment_context(
        db_name, tbl_name, new_part, environment_context)
    self.recv_alter_partition_with_environment_context()
def option_parser ( ) : """Option Parser to give various options ."""
usage = ''' $ ./crawler -d5 <url> Here in this case it goes till depth of 5 and url is target URL to start crawling. ''' version = "2.0.0" parser = optparse . OptionParser ( usage = usage , version = version ) parser . add_option ( "-l" , "--links" , action = "st...
def convert_audio ( self , file_path , new_mimetype , overwrite = False ) : '''a method to convert an audio file into a different codec : param file _ path : string with path to file on localhost : param new _ mimetype : string with mimetype for new file : param overwrite : [ optional ] boolean to overwrite e...
title = '%s.convert_audio' % self . __class__ . __name__ # validate inputs input_fields = { 'file_path' : file_path , 'new_mimetype' : new_mimetype } for key , value in input_fields . items ( ) : object_title = '%s(%s=%s)' % ( title , key , str ( value ) ) self . fields . validate ( value , '.%s' % key , object...
def target_gene_indices ( gene_names , target_genes ) : """: param gene _ names : list of gene names . : param target _ genes : either int ( the top n ) , ' all ' , or a collection ( subset of gene _ names ) . : return : the ( column ) indices of the target genes in the expression _ matrix ."""
if isinstance ( target_genes , list ) and len ( target_genes ) == 0 : return [ ] if isinstance ( target_genes , str ) and target_genes . upper ( ) == 'ALL' : return list ( range ( len ( gene_names ) ) ) elif isinstance ( target_genes , int ) : top_n = target_genes assert top_n > 0 return list ( rang...
async def _grab_connection ( self , url ) : '''The connection pool handler . Returns a connection to the caller . If there are no connections ready , and as many connections checked out as there are available total , we yield control to the event loop . If there is a connection ready or space to create a ne...
scheme , host , _ , _ , _ , _ = urlparse ( url ) host_loc = urlunparse ( ( scheme , host , '' , '' , '' , '' ) ) sock = self . _checkout_connection ( host_loc ) if sock is None : sock = await self . _make_connection ( host_loc ) return sock
def _sqla_postgresql ( self , uri , version = None , isolation_level = "READ COMMITTED" ) : '''expected uri form : postgresql + psycopg2 : / / % s : % s @ % s : % s / % s ' % ( username , password , host , port , db )'''
isolation_level = isolation_level or "READ COMMITTED" kwargs = dict ( isolation_level = isolation_level ) # FIXME : version of postgresql < 9.2 don ' t have pg . JSON ! # check and use JSONTypedLite instead # override default dict and list column types types = { list : pg . ARRAY , tuple : pg . ARRAY , set : pg . ARRAY...
def _flush ( self ) : """Flushes all registered consumer streams ."""
for consumer in self . consumers : if not getattr ( consumer , "closed" , False ) : consumer . flush ( )
def file_list ( load ) : '''Return a list of all files on the file server in a specified environment'''
if 'env' in load : # " env " is not supported ; Use " saltenv " . load . pop ( 'env' ) if load [ 'saltenv' ] not in envs ( ) : return [ ] mountpoint = salt . utils . url . strip_proto ( __opts__ [ 'minionfs_mountpoint' ] ) prefix = load . get ( 'prefix' , '' ) . strip ( '/' ) if mountpoint and prefix . startswi...
def fancy_error_template_middleware ( app ) : """WGSI middleware for catching errors and rendering the error page ."""
def application ( environ , start_response ) : try : return app ( environ , start_response ) except Exception as exc : sio = StringIO ( ) traceback . print_exc ( file = sio ) sio . seek ( 0 ) response = Response ( status = 500 , body = render_error_page ( 500 , exc , trac...
def main ( input_filename , format ) : """Calculate the fingerprint hashses of the referenced audio file and save to disk as a pickle file"""
# open the file & convert to wav song_data = AudioSegment . from_file ( input_filename , format = format ) song_data = song_data . set_channels ( 1 ) # convert to mono wav_tmp = song_data . export ( format = "wav" ) # write to a tmp file buffer wav_tmp . seek ( 0 ) rate , wav_data = wavfile . read ( wav_tmp ) rows_per_...
def cl_picard(self, command, options, memscale=None):
    """Build a Picard command line.

    ``options`` is an iterable of (key, value) pairs rendered as ``KEY=VALUE``
    arguments; ``VALIDATION_STRINGENCY=SILENT`` is always appended.
    """
    rendered = [f"{key}={value}" for key, value in options]
    rendered.append("VALIDATION_STRINGENCY=SILENT")
    base_cmd = self._get_picard_cmd(command, memscale=memscale)
    return base_cmd + rendered
def __type2python ( cls , value ) : """: Description : Convert javascript value to python value by type . : param value : Value to transform . : type value : None , bool , int , float , string : return : None , bool , int , float , string"""
if isinstance ( value , string_types ) : if value is 'null' : return None elif value in ( 'true' , 'false' ) : return False if value == 'false' else True elif value . replace ( '.' , '' , 1 ) . isdigit ( ) : return eval ( value ) elif value . startswith ( '{' ) and value . endswi...
def name(self):
    """Cluster name used in requests.

    .. note::

        This property will not change if ``_instance`` and ``cluster_id``
        do not, but the return value is not cached.
    """
    client = self._instance._client
    return client.instance_admin_client.cluster_path(
        client.project, self._instance.instance_id, self.cluster_id)
def action_run_command ( self ) : """Run a shortcut , if exists"""
name = self . args [ '<name>' ] # get entry from DB self . db_query ( ''' SELECT path,command FROM shortcuts WHERE name=? ''' , ( name , ) ) row = self . db_fetch_one ( ) if row is None : print_err ( 'Shortcut "%s" does not exist.' % name ) return path = row [ 0 ] cmd = row [ 1 ] # show ...
def _move_files_to_compute ( compute , project_id , directory , files_path ) : """Move the files to a remote compute"""
location = os . path . join ( directory , files_path ) if os . path . exists ( location ) : for ( dirpath , dirnames , filenames ) in os . walk ( location ) : for filename in filenames : path = os . path . join ( dirpath , filename ) dst = os . path . relpath ( path , directory ) ...
def get_remote_etag(storage, prefixed_path):
    """Get the ETag of *prefixed_path* from S3 using boto (v2) or boto3.

    Tries the boto2 interface (``bucket.get_key(...).etag``) first, then the
    boto3 interface (``bucket.Object(...).e_tag``).  Returns ``None`` when
    neither interface yields an ETag.
    """
    normalized_path = safe_join(storage.location, prefixed_path).replace('\\', '/')
    # boto (v2): AttributeError covers both "no get_key method" (boto3
    # bucket) and "get_key returned None" (missing key).
    try:
        return storage.bucket.get_key(normalized_path).etag
    except AttributeError:
        pass
    # boto3: narrowed from a bare ``except:`` — a bare except also swallows
    # SystemExit/KeyboardInterrupt.  Exception still covers botocore's
    # ClientError for missing keys without importing botocore here.
    try:
        return storage.bucket.Object(normalized_path).e_tag
    except Exception:
        pass
    return None
def dump_br_version ( project ) : """Dump an enhanced version json including - The version from package . json - The current branch ( if it can be found ) - The current sha"""
normalized = get_version ( project ) sha = subprocess . check_output ( [ 'git' , 'rev-parse' , 'HEAD' ] , cwd = HERE ) . strip ( ) branch = subprocess . check_output ( [ 'git' , 'rev-parse' , '--abbrev-ref' , 'HEAD' ] , cwd = HERE ) . strip ( ) pref = br_version_prefixes [ project ] return json . dumps ( { pref + '_ver...
def run ( name , cmd , container_type = None , exec_driver = None , output = None , no_start = False , stdin = None , python_shell = True , output_loglevel = 'debug' , ignore_retcode = False , path = None , use_vt = False , keep_env = None ) : '''Common logic for running shell commands in containers path path t...
valid_output = ( 'stdout' , 'stderr' , 'retcode' , 'all' ) if output is None : cmd_func = 'cmd.run' elif output not in valid_output : raise SaltInvocationError ( '\'output\' param must be one of the following: {0}' . format ( ', ' . join ( valid_output ) ) ) else : cmd_func = 'cmd.run_all' if keep_env is No...
def reset(self):
    """Stop the timer and reset its counters to zero."""
    zero = datetime.timedelta()
    self._elapsed = zero
    self._delta = zero
    # Restart the reference point from "now" so future deltas are measured
    # against the reset moment.
    self._starttime = datetime.datetime.now()
    self.refresh()
def load ( self , txt_fst_filename ) : """Save the transducer in the text file format of OpenFST . The format is specified as follows : arc format : src dest ilabel olabel [ weight ] final state format : state [ weight ] lines may occur in any order except initial state must be first line Args : txt _ f...
with open ( txt_fst_filename , 'r' ) as txt_fst : for line in txt_fst : line = line . strip ( ) splitted_line = line . split ( ) if len ( splitted_line ) == 1 : self [ int ( splitted_line [ 0 ] ) ] . final = True else : self . add_arc ( int ( splitted_line [ 0...
def pauli_basis(nq=1):
    """Return a TomographyBasis for the Pauli basis on ``nq`` qubits.

    :param int nq: Number of qubits on which the returned basis is defined.
    """
    # Reorder the 2-dimensional Gell-Mann basis into (1, sigma_x, sigma_y,
    # sigma_z) order to obtain the single-qubit Pauli basis.
    single_qubit = TomographyBasis(
        gell_mann_basis(2).data[[0, 2, 3, 1]],
        [2],
        [u'𝟙', r'\sigma_x', r'\sigma_y', r'\sigma_z'],
    )
    basis = tensor_product_basis(*([single_qubit] * nq))
    basis._name = 'pauli_basis'
    return basis
def _soln2str ( self , soln , fancy = False ) : """Convert a Sudoku solution point to a string ."""
chars = list ( ) for r in range ( 1 , 10 ) : for c in range ( 1 , 10 ) : if fancy and c in ( 4 , 7 ) : chars . append ( "|" ) chars . append ( self . _get_val ( soln , r , c ) ) if fancy and r != 9 : chars . append ( "\n" ) if r in ( 3 , 6 ) : chars . appe...
def mkdir ( self , astr_dirSpec ) : """Given an < astr _ dirSpec > in form ' / a / b / c / d / . . . / f ' , create that path in the internal stree , creating all intermediate nodes as necessary : param astr _ dirSpec : : return :"""
if astr_dirSpec != '/' and astr_dirSpec != "//" : str_currentPath = self . cwd ( ) l_pathSpec = astr_dirSpec . split ( '/' ) if not len ( l_pathSpec [ 0 ] ) : self . cd ( '/' ) l_nodesDepth = l_pathSpec [ 1 : ] else : l_nodesDepth = l_pathSpec for d in l_nodesDepth : ...
def get_workspace_config(namespace, workspace, cnamespace, config):
    """Get a method configuration from a workspace.

    Args:
        namespace (str): project to which workspace belongs
        workspace (str): Workspace name
        cnamespace (str): Config namespace
        config (str): Config name
    """
    uri = f"workspaces/{namespace}/{workspace}/method_configs/{cnamespace}/{config}"
    return __get(uri)
def remove_response_property ( xml_root ) : """Removes response properties if exist ."""
if xml_root . tag == "testsuites" : properties = xml_root . find ( "properties" ) resp_properties = [ ] for prop in properties : prop_name = prop . get ( "name" , "" ) if "polarion-response-" in prop_name : resp_properties . append ( prop ) for resp_property in resp_propertie...
def _write_cache ( self , lines , append = False ) : """Write virtualenv metadata to cache ."""
mode = 'at' if append else 'wt' with open ( self . filepath , mode , encoding = 'utf8' ) as fh : fh . writelines ( line + '\n' for line in lines )
def get_position(self, row, col):
    """Translate a (row, col) pair into an absolute character offset.

    Counts every full line before *row* (plus one per line for the newline)
    and adds *col* within the target row.
    """
    self.log.debug('%s %s', row, col)
    preceding = self.editor.getlines()[:row - 1]
    offset = col + sum(len(line) + 1 for line in preceding)
    self.log.debug(offset)
    return offset
def max_substring ( words , position = 0 , _last_letter = '' ) : """Finds max substring shared by all strings starting at position Args : words ( list ) : list of unicode of all words to compare position ( int ) : starting position in each word to begin analyzing for substring _ last _ letter ( unicode ) ...
# If end of word is reached , begin reconstructing the substring try : letter = [ word [ position ] for word in words ] except IndexError : return _last_letter # Recurse if position matches , else begin reconstructing the substring if all ( l == letter [ 0 ] for l in letter ) is True : _last_letter += max_s...