signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def version ( self , value ) : """Version setter ."""
self . bytearray [ self . _get_slicers ( 1 ) ] = bytearray ( c_uint8 ( value or 0 ) )
def _set_dict_value_by_path ( d , val , path ) : """Given a dict ( ` ` d ` ` ) , a value ( ` ` val ` ` ) , and a list specifying the hierarchical path to a key in that dict ( ` ` path ` ` ) , set the value in ` ` d ` ` at ` ` path ` ` to ` ` val ` ` . : param d : the dict to search in : type d : dict : pa...
tmp_path = deepcopy ( path ) tmp_d = deepcopy ( d ) result = tmp_d while len ( tmp_path ) > 0 : if len ( tmp_path ) == 1 : result [ tmp_path [ 0 ] ] = val break k = tmp_path . pop ( 0 ) result = result [ k ] return tmp_d
def get_port_profile_for_vlan(self, vlan_id, device_id):
    """Return the port profile name associated with ``vlan_id``.

    (The original docstring had this backwards: the method looks up a
    profile *by* vlan id, it does not return a vlan id.)

    :param vlan_id: VLAN id the profile was created for
    :param device_id: id of the device the profile belongs to
    :return: the matching row's ``profile_id``, or ``None`` if no
        ``PortProfile`` row matches both filters
    """
    entry = self.session.query(ucsm_model.PortProfile).filter_by(
        vlan_id=vlan_id, device_id=device_id).first()
    return entry.profile_id if entry else None
def upgrade(options=None, fatal=False, dist=False):
    """Upgrade all packages via yum.

    :param options: extra command-line options handed to yum, or None
    :param fatal: forwarded to the yum runner; when True a failure raises
    :param dist: accepted for interface compatibility; unused by this backend
    """
    command = ['yum', '--assumeyes']
    if options is not None:
        command += list(options)
    command += ['upgrade']
    log("Upgrading with options: {}".format(options))
    _run_yum_command(command, fatal)
def get(self, name, *df):
    """Get the value of a property by I{name}.

    @param name: The property name.
    @type name: str
    @param df: An optional value to be returned when the value is not set.
    @type df: [1].
    @return: The stored value, or I{df[0]} if not set.
    @rtype: any
    """
    # NOTE: the double-underscore attribute is name-mangled by Python, so
    # this resolves to the private __get defined on the provider's class
    # hierarchy relative to this class — do not "fix" the spelling.
    return self.provider(name).__get(name, *df)
def list_folders(kwargs=None, call=None):
    '''
    List all the folders for this VMware environment

    CLI Example:

    .. code-block:: bash

        salt-cloud -f list_folders my-vmware-config
    '''
    # Salt cloud requires function-style invocation for listing calls.
    if call != 'function':
        raise SaltCloudSystemExit(
            'The list_folders function must be called with '
            '-f or --function.'
        )

    folders = salt.utils.vmware.list_folders(_get_si())
    return {'Folders': folders}
def get_std_start_date(self):
    """Return the start datetime formatted as ``%Y-%m-%d %H:%M:%S``.

    The sentinel values ``datetime.min`` and ``datetime.max`` mean the
    range is not custom; for those the empty string is returned.
    """
    start, _end = self._val
    if start in (datetime.min, datetime.max):
        return ""
    return start.strftime("%Y-%m-%d %H:%M:%S")
def do_transform ( self ) : """Apply the transformation ( if it exists ) to the latest _ value"""
if not self . transform : return try : self . latest_value = utils . Transform ( expr = self . transform , value = self . latest_value , timedelta = self . time_between_updates ( ) . total_seconds ( ) ) . result ( ) except ( TypeError , ValueError ) : logger . warn ( "Invalid transformation '%s' for metric ...
def load_hha(self, idx):
    """Load HHA features from Gupta et al. ECCV14.

    See https://github.com/s-gupta/rcnn-depth/blob/master/rcnn/saveHHA.m

    Reads ``{nyud_dir}/data/hha/img_{idx}.png``, subtracts the dataset's
    HHA mean, and reorders to channel-first layout.

    :param idx: image index used to build the file name
    :return: float32 ndarray in (C, H, W) order
    """
    im = Image.open('{}/data/hha/img_{}.png'.format(self.nyud_dir, idx))
    hha = np.array(im, dtype=np.float32)
    # assumes self.mean_hha broadcasts against the loaded (H, W, C) array —
    # set elsewhere on the instance; TODO confirm its shape
    hha -= self.mean_hha
    hha = hha.transpose((2, 0, 1))  # HWC -> CHW
    return hha
def funname(file):
    """Return variable names from file names.

    Strips directory components and the extension from each path.  A single
    string argument yields a single name; any other iterable of paths
    yields a list of names.
    """
    single = isinstance(file, str)
    paths = [file] if single else file
    names = [os.path.splitext(os.path.basename(p))[0] for p in paths]
    return names[0] if single else names
def inference ( self , dataRDD , feed_timeout = 600 , qname = 'input' ) : """* For InputMode . SPARK only * : Feeds Spark RDD partitions into the TensorFlow worker nodes and returns an RDD of results It is the responsibility of the TensorFlow " main " function to interpret the rows of the RDD and provide valid da...
logging . info ( "Feeding inference data" ) assert self . input_mode == InputMode . SPARK , "TFCluster.inference() requires InputMode.SPARK" assert qname in self . queues , "Unknown queue: {}" . format ( qname ) return dataRDD . mapPartitions ( TFSparkNode . inference ( self . cluster_info , feed_timeout = feed_timeout...
def from_json ( cls , data , result = None ) : """Create new Relation element from JSON data : param data : Element data from JSON : type data : Dict : param result : The result this element belongs to : type result : overpy . Result : return : New instance of Relation : rtype : overpy . Relation : ra...
if data . get ( "type" ) != cls . _type_value : raise exception . ElementDataWrongType ( type_expected = cls . _type_value , type_provided = data . get ( "type" ) ) tags = data . get ( "tags" , { } ) rel_id = data . get ( "id" ) ( center_lat , center_lon ) = cls . get_center_from_json ( data = data ) members = [ ] ...
def _decode_sensor_data ( properties ) : """Decode , decompress , and parse the data from the history API"""
b64_input = "" for s in properties . get ( 'payload' ) : # pylint : disable = consider - using - join b64_input += s decoded = base64 . b64decode ( b64_input ) data = zlib . decompress ( decoded ) points = [ ] i = 0 while i < len ( data ) : points . append ( { 'timestamp' : int ( 1e3 * ArloBaseStation . _parse_...
def cigar(self, x, rot=0, cond=1e6, noise=0):
    """Cigar test objective function.

    Computes ``x[0]**2 + cond * sum(x[1:]**2)`` per point, optionally after
    rotation, scaled by a multiplicative noise term.  Accepts a single point
    or a list of points; a single point yields a scalar result.
    """
    if rot:
        x = rotate(x)
    # Promote a lone point to a one-element population.
    population = [x] if isscalar(x[0]) else x
    # NOTE: the noise divisor is the *dimension* of each point, matching the
    # original comprehension where the loop variable shadowed the input.
    values = [
        (point[0] ** 2 + cond * sum(point[1:] ** 2))
        * np.exp(noise * np.random.randn(1)[0] / len(point))
        for point in population
    ]
    return values if len(values) > 1 else values[0]
def pvwatts_ac ( pdc , pdc0 , eta_inv_nom = 0.96 , eta_inv_ref = 0.9637 ) : r"""Implements NREL ' s PVWatts inverter model [ 1 ] _ . . . math : : \ eta = \ frac { \ eta _ { nom } } { \ eta _ { ref } } ( - 0.0162 \ zeta - \ frac { 0.0059 } { \ zeta } + 0.9858) . . math : : P _ { ac } = \ min ( \ eta P _ { dc...
pac0 = eta_inv_nom * pdc0 zeta = pdc / pdc0 # arrays to help avoid divide by 0 for scalar and array eta = np . zeros_like ( pdc , dtype = float ) pdc_neq_0 = ~ np . equal ( pdc , 0 ) # eta < 0 if zeta < 0.006 . pac is forced to be > = 0 below . GH 541 eta = eta_inv_nom / eta_inv_ref * ( - 0.0162 * zeta - np . divide ( ...
def _make_variant_locus_id ( gene_id , allele_id ) : """A convenience method to uniformly create variant loci . If we want to materialize these in the monarch space , then we wrap with the self . make _ id function . : param gene _ id : : param allele _ id : : return :"""
varloci = '-' . join ( ( gene_id , allele_id ) ) varloci = '_:' + re . sub ( r'(ZFIN)?:' , '' , varloci ) return varloci
def get_row_at_index(self, index):
    """Returns a table row by its index.

    :param int index: the zero-indexed position of the row in the table
    :return: the constructed row object, or ``None`` when ``index`` is
        ``None`` or the request fails
    """
    if index is None:
        return None

    endpoint = self._endpoints.get('get_row_index')
    response = self.session.post(self.build_url(endpoint), data={'index': index})
    if not response:
        return None

    payload = {self._cloud_data_key: response.json()}
    return self.row_constructor(parent=self, **payload)
async def delete ( self , force = False ) : '''Delete a node from the cortex . The following tear - down operations occur in order : * validate that you have permissions to delete the node * validate that you have permissions to delete all tags * validate that there are no remaining references to the node ....
formname , formvalu = self . ndef if self . isrunt : raise s_exc . IsRuntForm ( mesg = 'Cannot delete runt nodes' , form = formname , valu = formvalu ) tags = [ ( len ( t ) , t ) for t in self . tags . keys ( ) ] # check for tag permissions # TODO # check for any nodes which reference us . . . if not force : # refu...
def run_mesh(self, mesh=100.0, shift=None, is_time_reversal=True,
             is_mesh_symmetry=True, with_eigenvectors=False,
             with_group_velocities=False, is_gamma_center=False):
    """Run mesh sampling phonon calculation.

    Thin wrapper: initializes the mesh via ``init_mesh`` with the same
    arguments, then runs the calculation on the resulting mesh object.
    See the parameter details in Phonopy.init_mesh().
    """
    self.init_mesh(mesh=mesh,
                   shift=shift,
                   is_time_reversal=is_time_reversal,
                   is_mesh_symmetry=is_mesh_symmetry,
                   with_eigenvectors=with_eigenvectors,
                   with_group_velocities=with_group_velocities,
                   is_gamma_center=is_gamma_center)
    self._mesh.run()
def is_active(cache, token):
    """Check whether the profile behind ``token`` is currently logged in.

    :param cache: mapping-like store supporting ``get``
    :param token: the ID token to look up
    :return: the cached profile for ``token``
    :raises exceptions.NotAuthenticatedException: when no profile is cached
        for the token
    """
    profile = cache.get(token)
    if profile:
        return profile
    raise exceptions.NotAuthenticatedException(
        'The token is good, but you are not logged in. Please '
        'try logging in again.'
    )
def create_glir_message ( commands , array_serialization = None ) : """Create a JSON - serializable message of GLIR commands . NumPy arrays are serialized according to the specified method . Arguments commands : list List of GLIR commands . array _ serialization : string or None Serialization method for...
# Default serialization method for NumPy arrays . if array_serialization is None : array_serialization = 'binary' # Extract the buffers . commands_modified , buffers = _extract_buffers ( commands ) # Serialize the modified commands ( with buffer pointers ) and the buffers . commands_serialized = [ _serialize_comman...
def show_warnings(self):
    """SHOW WARNINGS

    Tornado-style (``yield``-based) coroutine: issues a ``SHOW WARNINGS``
    query, reads the full result set, and delivers the rows to the caller
    via ``gen.Return``.
    """
    yield self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
    result = MySQLResult(self)
    yield result.read()
    raise gen.Return(result.rows)
def update_topic_counter ( sender , topic , user , request , response , ** kwargs ) : """Handles the update of the views counter associated with topics ."""
topic . __class__ . _default_manager . filter ( id = topic . id ) . update ( views_count = F ( 'views_count' ) + 1 )
def decline_noun ( self , noun , gender , mimation = True ) : """Return a list of all possible declined forms given any form of a noun and its gender ."""
stem = self . stemmer . get_stem ( noun , gender ) declension = [ ] for case in self . endings [ gender ] [ 'singular' ] : if gender == 'm' : form = stem + self . endings [ gender ] [ 'singular' ] [ case ] else : form = stem + self . endings [ gender ] [ 'singular' ] [ case ] [ 1 : ] declens...
def weighted_sum(groupe, var):
    '''Compute the weighted sum of column ``var`` within a group.

    Weights are taken from the group's ``pondmen`` column.
    NOTE(review): the historical (French) docstring said "weighted mean",
    but the code computes a weighted *sum* — confirm intent with callers.
    '''
    values = groupe[var]
    weights = groupe['pondmen']
    return (values * weights).sum()
def kldiv_cs_model ( prediction , fm ) : """Computes Chao - Shen corrected KL - divergence between prediction and fdm made from fixations in fm . Parameters : prediction : np . ndarray a fixation density map fm : FixMat object"""
# compute histogram of fixations needed for ChaoShen corrected kl - div # image category must exist ( > - 1 ) and image _ size must be non - empty assert ( len ( fm . image_size ) == 2 and ( fm . image_size [ 0 ] > 0 ) and ( fm . image_size [ 1 ] > 0 ) ) assert ( - 1 not in fm . category ) # check whether fixmat contai...
def reset_scan_stats(self):
    """Clears the scan event statistics and updates the last reset time."""
    self._scan_event_count = 0
    self._v1_scan_count = 0
    self._v1_scan_response_count = 0
    self._v2_scan_count = 0
    # per-device counters, keyed by device
    self._device_scan_counts = {}
    # wall-clock timestamp of this reset
    self._last_reset_time = time.time()
def _verify_dict_list ( self , values , keys , name ) : '''Validate a list of ` dict ` , ensuring it has specific keys and no others . : param values : A list of ` dict ` to validate . : param keys : A list of keys to validate each ` dict ` against . : param name : Name describing the values , to show in er...
keys = set ( keys ) name = name . title ( ) for value in values : if not isinstance ( value , Mapping ) : raise MessageError ( 'Invalid {0} value' . format ( name ) ) for key in keys : if key not in value : err = '{0} must contain "{1}"' raise MessageError ( err . format ...
def segmentlistdict_to_short_string(seglists):
    """Return a string representation of a segmentlistdict object.

    Each segmentlist in the dictionary is encoded using to_range_strings()
    with "," used to delimit segments.  Each key is converted to a string,
    paired with its encoded segmentlist using "=", and the pairs are joined
    with "/".
    """
    parts = []
    for key, value in seglists.items():
        encoded = ",".join(to_range_strings(value))
        parts.append("%s=%s" % (str(key), encoded))
    return "/".join(parts)
def get_dns_zone_ids ( env = 'dev' , facing = 'internal' ) : """Get Route 53 Hosted Zone IDs for _ env _ . Args : env ( str ) : Deployment environment . facing ( str ) : Type of ELB , external or internal . Returns : list : Hosted Zone IDs for _ env _ . Only * PrivateZone * when _ facing _ is internal ....
client = boto3 . Session ( profile_name = env ) . client ( 'route53' ) zones = client . list_hosted_zones_by_name ( DNSName = '.' . join ( [ env , DOMAIN ] ) ) zone_ids = [ ] for zone in zones [ 'HostedZones' ] : LOG . debug ( 'Found Hosted Zone: %s' , zone ) if facing == 'external' or zone [ 'Config' ] [ 'Priv...
def write(self, s):
    """Write wrapper.

    Parameters
    ----------
    s : bytes
        Bytes to write

    Returns
    -------
    The value returned by the underlying handle's ``write`` (typically the
    number of bytes written).

    Raises
    ------
    IOError
        If the underlying handle raises OSError (device disconnected).
    """
    try:
        return self.handle.write(s)
    except OSError as exc:
        print()
        print("Piksi disconnected")
        print()
        # BUG FIX: the bare `raise IOError` discarded the original failure;
        # chain it so callers can inspect the underlying OSError.
        raise IOError from exc
def bass(self, bass):
    """Set the speaker's bass.

    The requested value is clamped to the renderer's valid range [-10, 10]
    before being sent to the device.
    """
    level = max(-10, min(int(bass), 10))
    self.renderingControl.SetBass([
        ('InstanceID', 0),
        ('DesiredBass', level),
    ])
def get_path(self, path, query=None):
    """Make a GET request, optionally including a query, to a relative path.

    The path of the request includes a path on top of the base URL
    assigned to the endpoint.

    Parameters
    ----------
    path : str
        The path to request, relative to the endpoint
    query : DataQuery, optional
        Query parameters forwarded unchanged to ``get``

    Returns
    -------
    Whatever ``get`` returns for the resolved URL.
    """
    return self.get(self.url_path(path), query)
def MGMT_COMM_GET ( self , Addr = 'ff02::1' , TLVs = [ ] ) : """send MGMT _ COMM _ GET command Returns : True : successful to send MGMT _ COMM _ GET False : fail to send MGMT _ COMM _ GET"""
print '%s call MGMT_COMM_GET' % self . port try : cmd = 'commissioner mgmtget' if len ( TLVs ) != 0 : tlvs = "" . join ( hex ( tlv ) . lstrip ( "0x" ) . zfill ( 2 ) for tlv in TLVs ) cmd += ' binary ' cmd += tlvs print cmd return self . __sendCommand ( cmd ) [ 0 ] == 'Done' excep...
def delete(self, id=None):
    """Delete a record from the database.

    :param id: The id of the row to delete
    :type id: mixed

    :return: The number of rows deleted
    :rtype: int
    """
    # When an id is given, constrain the delete to that single row first.
    if id is not None:
        self.where('id', '=', id)

    return self._connection.delete(
        self._grammar.compile_delete(self), self.get_bindings())
def compose_tree_path(tree, issn=False):
    """Compose absolute path for given `tree`.

    Args:
        tree (obj): :class:`.Tree` instance.
        issn (bool, default False): Compose URL using ISSN.

    Returns:
        str: Absolute path of the tree, without server's address and protocol.
    """
    if issn:
        return join("/", ISSN_DOWNLOAD_KEY, basename(tree.issn))

    # URL-encode the tree path but keep literal slashes as separators.
    encoded_path = quote_plus(tree.path).replace("%2F", "/")
    return join("/", PATH_DOWNLOAD_KEY, encoded_path)
def _deserialize ( self , value , attr , obj ) : """Deserializes a string into a Pendulum object ."""
if not self . context . get ( 'convert_dates' , True ) or not value : return value value = super ( PendulumField , self ) . _deserialize ( value , attr , value ) timezone = self . get_field_value ( 'timezone' ) target = pendulum . instance ( value ) if ( timezone and ( text_type ( target ) != text_type ( target . i...
def server ( self , default_not_found = True , base_url = None ) : """Returns a WSGI compatible API server for the given Hug API module"""
falcon_api = falcon . API ( middleware = self . middleware ) default_not_found = self . documentation_404 ( ) if default_not_found is True else None base_url = self . base_url if base_url is None else base_url not_found_handler = default_not_found self . api . _ensure_started ( ) if self . not_found_handlers : if l...
def num_buttons(self):
    """The number of buttons on a device with the
    :attr:`~libinput.constant.DeviceCapability.TABLET_PAD` capability.

    Buttons on a pad device are numbered sequentially, see
    `Tablet pad button numbers`_ for details.

    Returns:
        int: The number of buttons supported by the device.

    Raises:
        AttributeError: If this device is not a tablet pad device (the
            underlying C call reports a negative count).
    """
    num = self._libinput.libinput_device_tablet_pad_get_num_buttons(
        self._handle)
    if num < 0:
        raise AttributeError('This device is not a tablet pad device')
    return num
def _create_diff_action ( diff , diff_key , key , value ) : '''DRY to build diff parts ( added , removed , updated ) .'''
if diff_key not in diff . keys ( ) : diff [ diff_key ] = { } diff [ diff_key ] [ key ] = value
def maybe_coroutine ( decide ) : """Either be a coroutine or not . Use as a decorator : @ maybe _ coroutine ( lambda maybeAPromise : return isinstance ( maybeAPromise , Promise ) ) def foo ( maybeAPromise ) : result = yield maybeAPromise print ( " hello " ) return result The function passed should be ...
def _maybe_coroutine ( f ) : @ functools . wraps ( f ) def __maybe_coroutine ( * args , ** kwargs ) : if decide ( * args , ** kwargs ) : return coroutine ( f ) ( * args , ** kwargs ) else : return no_coroutine ( f ) ( * args , ** kwargs ) return __maybe_coroutine retu...
def with_query(self, *args, **kwargs):
    """Return a new URL with query part replaced.

    Accepts any Mapping (e.g. dict, multidict.MultiDict instances) or str,
    autoencode the argument if needed.  A sequence of (key, value) pairs is
    supported as well.  It also can take an arbitrary number of keyword
    arguments.

    Clear query if None is passed.
    """
    # N.B. doesn't cleanup query/fragment
    new_query = self._get_str_query(*args, **kwargs)
    # `encoded=True` because _get_str_query already percent-encoded the query.
    return URL(
        self._val._replace(path=self._val.path, query=new_query),
        encoded=True)
def get_id(brain_or_object):
    """Get the Plone ID for this object.

    :param brain_or_object: A single catalog brain or content object
    :type brain_or_object: ATContentType/DexterityContentType/CatalogBrain
    :returns: Plone ID
    :rtype: string
    """
    if is_brain(brain_or_object) and base_hasattr(brain_or_object, "getId"):
        # NOTE(review): no call here — on catalog brains ``getId`` appears to
        # be a metadata attribute, not a method; confirm against the catalog
        # schema before changing.
        return brain_or_object.getId
    # Content objects expose getId as a real method; waking the object up
    # via get_object is required when a brain lacks the metadata column.
    return get_object(brain_or_object).getId()
def _expected_condition_value_in_element_attribute(self, element_attribute_value):
    """Tries to find the element and checks that it contains the requested
    attribute with the expected value, but does not throw an exception if
    the element is not found.

    :param element_attribute_value: tuple of (element, attribute name,
        expected attribute value)
    :returns: the web element when the attribute matches, False otherwise
    """
    element, attribute, expected = element_attribute_value
    web_element = self._expected_condition_find_element(element)
    if not web_element:
        return False
    try:
        if web_element.get_attribute(attribute) == expected:
            return web_element
        return False
    except StaleElementReferenceException:
        # The element went stale between lookup and attribute read.
        return False
def from_value(cls, value: str) -> T:
    """Create instance from symbol.

    :param value: unique symbol
    :return: This instance

    Usage:

        >>> from owlmixin.samples import Animal
        >>> Animal.from_value('cat').crow()
        mewing
    """
    # Keep list-then-index so a missing symbol still raises IndexError,
    # exactly as before.
    matches = [member for member in cls.__members__.values()
               if member.value[0] == value]
    return matches[0]
def setNreps ( self , nreps ) : """Sets the number of reps before the raster plot resets"""
for plot in self . responsePlots . values ( ) : plot . setNreps ( nreps )
def dump ( props , output ) : """Dumps a dict of properties to the specified open stream or file path . : API : public"""
def escape ( token ) : return re . sub ( r'([=:\s])' , r'\\\1' , token ) def write ( out ) : for k , v in props . items ( ) : out . write ( '%s=%s\n' % ( escape ( str ( k ) ) , escape ( str ( v ) ) ) ) if hasattr ( output , 'write' ) and callable ( output . write ) : write ( output ) elif isinstance...
def set_interrupt ( self , method = None , ** kwargs ) : """Decorator that turns a function or controller method into an action interrupt ."""
def action_wrap ( f ) : action_id = kwargs . get ( "action_id" , f . __name__ ) name = kwargs . get ( "name" , action_id ) if inspect . ismethod ( f ) : # not " . " in f . _ _ qualname _ _ : self . _interrupt = _ActionInterrupt ( f ) self . _ui_parameters [ "interrupt_enabled" ] = True ...
def info(cls, name, message, *args):
    """Convenience function to log a message at the INFO level.

    :param name: The name of the logger instance in the VSG namespace
        (VSG.<name>)
    :param message: A message format string.
    :param args: The arguments that are merged into msg using the string
        formatting operator.
    """
    # Lazy %-style args: formatting only happens if INFO is enabled.
    cls.getLogger(name).info(message, *args)
def generate_block_from_parent_header_and_coinbase(cls,
                                                   parent_header: BlockHeader,
                                                   coinbase: Address) -> BaseBlock:
    """Generate block from parent header and coinbase.

    Builds a child header (timestamp = parent timestamp + 1) via
    ``generate_header_from_parent_header``, then wraps it in an empty
    block with no transactions and no uncles.
    """
    block_header = generate_header_from_parent_header(
        cls.compute_difficulty,
        parent_header,
        coinbase,
        timestamp=parent_header.timestamp + 1,
    )
    block = cls.get_block_class()(
        block_header,
        transactions=[],
        uncles=[],
    )
    return block
def columns ( self ) : """Return the columns of the result set ."""
result = self . query . result ( ) return [ field . name for field in result . schema ]
def get_host_template ( resource_root , name , cluster_name ) : """Lookup a host template by name in the specified cluster . @ param resource _ root : The root Resource object . @ param name : Host template name . @ param cluster _ name : Cluster name . @ return : An ApiHostTemplate object . @ since : API...
return call ( resource_root . get , HOST_TEMPLATE_PATH % ( cluster_name , name ) , ApiHostTemplate , api_version = 3 )
def openstack_undercloud_install ( self ) : """Deploy an undercloud on the host ."""
instack_undercloud_ver , _ = self . run ( 'repoquery --whatprovides /usr/share/instack-undercloud/puppet-stack-config/puppet-stack-config.pp' ) if instack_undercloud_ver . rstrip ( '\n' ) == 'instack-undercloud-0:2.2.0-1.el7ost.noarch' : LOG . warn ( 'Workaround for BZ1298189' ) self . run ( "sed -i \"s/.*Keyst...
def format_code(source, preferred_quote="'"):
    """Return source code with quotes unified.

    Falls back to returning ``source`` unchanged when the text cannot be
    tokenized (tokenizer or indentation errors).
    """
    try:
        formatted = _format_code(source, preferred_quote)
    except (tokenize.TokenError, IndentationError):
        return source
    return formatted
def password(self, length: int = 8, hashed: bool = False) -> str:
    """Generate a password or hash of password.

    NOTE(review): this uses the instance's seeded ``random`` and MD5 — fine
    for generating fake test data, unsuitable for real credentials.

    :param length: Length of password.
    :param hashed: MD5 hash.
    :return: Password or hash of password.

    :Example:
        k6dv2odff9#4h
    """
    alphabet = ascii_letters + digits + punctuation
    password = ''.join(self.random.choice(alphabet) for _ in range(length))
    if not hashed:
        return password
    digest = hashlib.md5()
    digest.update(password.encode())
    return digest.hexdigest()
def take_branch(self, example):
    """Returns a `DecisionTreeNode` instance that can better classify
    `example` based on the selectors value.

    If there are no more branches (ie, this node is a leaf) or the
    attribute gives a value for an unexistent branch then this method
    returns None.
    """
    if self.attribute is None:  # leaf node: nowhere to descend
        return None
    return self.branches.get(self.attribute(example))
def produce_scattertext_html ( term_doc_matrix , category , category_name , not_category_name , protocol = 'https' , minimum_term_frequency = DEFAULT_MINIMUM_TERM_FREQUENCY , pmi_threshold_coefficient = DEFAULT_PMI_THRESHOLD_COEFFICIENT , max_terms = None , filter_unigrams = False , height_in_pixels = None , width_in_p...
scatter_chart_data = ScatterChart ( term_doc_matrix = term_doc_matrix , minimum_term_frequency = minimum_term_frequency , pmi_threshold_coefficient = pmi_threshold_coefficient , filter_unigrams = filter_unigrams , max_terms = max_terms , term_ranker = term_ranker ) . to_dict ( category = category , category_name = cate...
def _client ( self , id , secret ) : """Performs client login with the provided credentials"""
url = self . api_url + self . auth_token_url auth_string = '%s:%s' % ( id , secret ) authorization = base64 . b64encode ( auth_string . encode ( ) ) . decode ( ) headers = { 'Authorization' : "Basic " + authorization , 'Content-Type' : "application/x-www-form-urlencoded" } params = { 'grant_type' : 'client_credentials'...
def get_standings(date):
    """Return the standings file for current standings (given current date).

    :param date: date/datetime whose year and Y/m/d formatting select the file
    :return: the opened URL (file-like object) with the standings data
    :raises ValueError: when mlb.com does not provide the standings file
    """
    try:
        return urlopen(STANDINGS_URL.format(date.year,
                                            date.strftime('%Y/%m/%d')))
    except HTTPError:
        # BUG FIX: the ValueError was previously constructed but never
        # raised, so HTTP failures silently returned None.
        raise ValueError('Could not find the standings file. '
                         'mlb.com does not provide the file that '
                         'mlbgame needs to perform this operation.')
def weighted_n(self):
    """float count of returned rows adjusted for weighting."""
    if self.is_weighted:
        counts = self._cube_dict["result"]["measures"]["count"]["data"]
        return float(sum(counts))
    return float(self.unweighted_n)
def open_in_browser ( file_location ) : """Attempt to open file located at file _ location in the default web browser ."""
# If just the name of the file was given , check if it ' s in the Current # Working Directory . if not os . path . isfile ( file_location ) : file_location = os . path . join ( os . getcwd ( ) , file_location ) if not os . path . isfile ( file_location ) : raise IOError ( "\n\nFile not found." ) # For some reas...
def parse_compound(compound_def, context=None):
    """Parse a structured compound definition as obtained from a YAML file.

    Validates the compound's id, then wraps the raw definition together
    with a file mark for the given context.

    Returns a CompoundEntry.
    """
    _check_id(compound_def.get('id'), 'Compound')
    mark = FileMark(context, None, None)
    return CompoundEntry(compound_def, mark)
def eval_jacobian ( self , ordered_parameters = [ ] , ** parameters ) : """Jacobian of : math : ` S ` in the : class : ` ~ symfit . core . argument . Parameter ` ' s ( : math : ` \\ nabla _ \\ vec { p } S ` ) . : param parameters : values of the : class : ` ~ symfit . core . argument . Parameter ` ' s to eval...
evaluated_func = super ( LeastSquares , self ) . __call__ ( ordered_parameters , ** parameters ) evaluated_jac = super ( LeastSquares , self ) . eval_jacobian ( ordered_parameters , ** parameters ) result = 0 for var , f , jac_comp in zip ( self . model . dependent_vars , evaluated_func , evaluated_jac ) : y = self...
def reader(fname):
    '''Helper function to open the results file (coords file) and create
    alignment objects with the values in it.'''
    f = pyfastaq.utils.open_file_read(fname)
    for line in f:
        # Skip header lines (starting with '[') and non tab-delimited rows.
        if line.startswith('[') or '\t' not in line:
            continue
        yield alignment.Alignment(line)
    pyfastaq.utils.close(f)
def keyReleaseEvent ( self , event ) : """Reimplement Qt method . Handle " most recent used " tab behavior , When ctrl is released and tab _ switcher is visible , tab will be changed ."""
if self . isVisible ( ) : qsc = get_shortcut ( context = 'Editor' , name = 'Go to next file' ) for key in qsc . split ( '+' ) : key = key . lower ( ) if ( ( key == 'ctrl' and event . key ( ) == Qt . Key_Control ) or ( key == 'alt' and event . key ( ) == Qt . Key_Alt ) ) : self . item...
def _get_selected_cipher_suite ( server_connectivity : ServerConnectivityInfo , ssl_version : OpenSslVersionEnum , openssl_cipher_str : str , should_use_legacy_openssl : Optional [ bool ] ) -> 'AcceptedCipherSuite' : """Given an OpenSSL cipher string ( which may specify multiple cipher suites ) , return the cipher ...
ssl_connection = server_connectivity . get_preconfigured_ssl_connection ( override_ssl_version = ssl_version , should_use_legacy_openssl = should_use_legacy_openssl ) ssl_connection . ssl_client . set_cipher_list ( openssl_cipher_str ) # Perform the SSL handshake try : ssl_connection . connect ( ) selected_ciph...
def apply(self, x):
    """Apply Householder transformation to vector x.

    Applies the Householder transformation efficiently to the given
    (N, *) array of column vectors: ``x - beta * v * (v^H x)``.
    """
    # Require a 2-dimensional array: one or more column vectors of length N.
    if len(x.shape) != 2:
        raise ArgumentError('x is not a matrix of shape (N,*)')
    if self.beta == 0:
        # beta == 0 means the transformation is the identity.
        return x
    projection = numpy.dot(self.v.T.conj(), x)
    return x - self.beta * self.v * projection
def get_message ( self , method , args , kwargs , options = None ) : """Get the soap message for the specified method , args and soapheaders . This is the entry point for creating the outbound soap message . @ param method : The method being invoked . @ type method : I { service . Method } @ param args : A ...
content = self . headercontent ( method , options = options ) header = self . header ( content ) content = self . bodycontent ( method , args , kwargs ) body = self . body ( content ) env = self . envelope ( header , body ) if self . options ( ) . prefixes : body . normalizePrefixes ( ) env . promotePrefixes ( ...
def position(self):
    """Read/write :ref:`XlDataLabelPosition` member specifying the position
    of this data label with respect to its data point, or |None| if no
    position is specified.

    Assigning |None| causes PowerPoint to choose the default position,
    which varies by chart type.
    """
    dLbl = self._dLbl
    if dLbl is None:
        return None
    pos = dLbl.dLblPos
    return None if pos is None else pos.val
def list_current_orders(self, bet_ids=None, market_ids=None,
                        order_projection=None, customer_order_refs=None,
                        customer_strategy_refs=None, date_range=time_range(),
                        order_by=None, sort_dir=None, from_record=None,
                        record_count=None, session=None, lightweight=None):
    """Returns current orders matching the given filters.

    Issues a ``listCurrentOrders`` request against this endpoint's URI and
    wraps the response in ``resources.CurrentOrders``.

    NOTE(review): ``date_range=time_range()`` is evaluated once at import
    time and shared between calls — confirm ``time_range()`` returns an
    immutable/fresh-per-use value.

    :param bet_ids: optionally restrict to these bet ids
    :param market_ids: optionally restrict to these market ids
    :param order_projection: optionally restrict by order status
    :param customer_order_refs: optionally restrict by customer order refs
    :param customer_strategy_refs: optionally restrict by strategy refs
    :param date_range: optionally restrict by placed date
    :param order_by: result ordering
    :param sort_dir: sort direction
    :param from_record: paging: first record to return
    :param record_count: paging: number of records to return
    :param session: requests session to reuse
    :param lightweight: return raw json response instead of resources
    """
    # Must be the first statement: clean_locals(locals()) captures exactly
    # the call's keyword arguments — do not introduce locals before it.
    params = clean_locals(locals())
    method = '%s%s' % (self.URI, 'listCurrentOrders')
    (response, elapsed_time) = self.request(method, params, session)
    return self.process_response(
        response, resources.CurrentOrders, elapsed_time, lightweight)
def __get_verb ( counts ) : """Let ' s fetch a VERB : param counts :"""
cursor = CONN . cursor ( ) check_query = "select verb_id from surverbs" cursor . execute ( check_query ) check_result = cursor . fetchall ( ) id_list = [ ] for row in check_result : id_list . append ( row [ 0 ] ) rand = random . randint ( 1 , counts [ 'max_verb' ] ) while rand not in id_list : rand = random . r...
def make_alf_dirs_(self):
    """DEPRECATED

    Creates one working directory per class (``class1`` .. ``classN``)
    under ``tmpdir`` and stores the mapping on ``self.alf_dirs``, keyed by
    1-based class number.
    """
    self.alf_dirs = {
        class_num: errors.directorymake(
            fileIO.join_path(self.tmpdir, 'class{0:0>1}'.format(class_num)))
        for class_num in range(1, self.num_classes + 1)
    }
def MAX(values, *others):
    """DECISIVE MAX

    :param values: iterable of comparable values; nulls are skipped
    :param others: legacy calling convention MAX(a, b, ...); warns and
        re-dispatches
    :return: the maximum non-null value, or ``Null`` when empty/all-null
    """
    if others:
        # Legacy call style — warn, then treat all arguments as one list.
        from mo_logs import Log
        Log.warning("Calling wrong")
        return MAX([values] + list(others))

    best = Null
    for candidate in values:
        # NOTE: `== None` (not `is None`) is deliberate — mo-dots Null
        # compares equal to None; do not "fix" this to an identity check.
        if candidate == None:
            continue
        if best == None or candidate > best:
            best = candidate
    return best
def refresh_authorization ( self ) : """Refreshes the authorization tokens . : return : Dictionary containing auth tokens , expiration info , and response status . : rtype : ` ` dict ` `"""
response = self . request_handler . post ( endpoint = 'oauth/token' , omit_api_version = True , data = { 'grant_type' : 'refresh_token' , 'client_id' : self . auth . client_id , 'client_secret' : self . auth . client_secret , 'refresh_token' : self . auth . refresh_token , } ) data = response . json ( ) token_expiratio...
def GET_name_info ( self , path_info , name ) : """Look up a name ' s zonefile , address , and last TXID Reply status , zonefile , zonefile hash , address , and last TXID . ' status ' can be ' available ' , ' registered ' , ' revoked ' , or ' pending '"""
if not check_name ( name ) and not check_subdomain ( name ) : return self . _reply_json ( { 'error' : 'Invalid name or subdomain' } , status_code = 400 ) blockstackd_url = get_blockstackd_url ( ) name_rec = None try : name_rec = blockstackd_client . get_name_record ( name , include_history = False , hostport = ...
def gdate_to_jdn ( date ) : """Compute Julian day from Gregorian day , month and year . Algorithm from wikipedia ' s julian _ day article . Return : The julian day number"""
not_jan_or_feb = ( 14 - date . month ) // 12 year_since_4800bc = date . year + 4800 - not_jan_or_feb month_since_4800bc = date . month + 12 * not_jan_or_feb - 3 jdn = date . day + ( 153 * month_since_4800bc + 2 ) // 5 + 365 * year_since_4800bc + ( year_since_4800bc // 4 - year_since_4800bc // 100 + year_since_4800bc //...
def from_dict ( cls , d ) : """As in : Class : ` pymatgen . core . Molecule ` except restoring graphs using ` from _ dict _ of _ dicts ` from NetworkX to restore graph information ."""
m = Molecule . from_dict ( d [ 'molecule' ] ) return cls ( m , d [ 'graphs' ] )
def _compile_new_relic_stats ( self , stats_this_second , stats_next_second ) : """from instance ' stats _ this _ second ' and instance ' stats _ next _ second ' , compute some per second stats metrics and other aggregated metrics : param dict stats _ this _ second : : param dict stats _ next _ second : : r...
server_statistics_per_second = { } opcounters_per_node_per_second = [ ] for subdoc in [ "opcounters" , "network" ] : first_doc = stats_this_second [ 'aggregate_server_statistics' ] [ subdoc ] second_doc = stats_next_second [ 'aggregate_server_statistics' ] [ subdoc ] keys = set ( first_doc . keys ( ) ) | se...
def imap_unordered ( self , jobs , timeout = 0.5 ) : """A iterator over a set of jobs . : param jobs : the items to pass through our function : param timeout : timeout between polling queues Results are yielded as soon as they are available in the output queue ( up to the discretisation provided by timeout ...
timeout = max ( timeout , 0.5 ) jobs_iter = iter ( jobs ) out_jobs = 0 job = None while True : if not self . closed and job is None : # Get a job try : job = jobs_iter . next ( ) except StopIteration : job = None self . close ( ) if job is not None : # Put any...
def generate_fpn_proposals ( multilevel_pred_boxes , multilevel_label_logits , image_shape2d ) : """Args : multilevel _ pred _ boxes : # lvl HxWxAx4 boxes multilevel _ label _ logits : # lvl tensors of shape HxWxA Returns : boxes : kx4 float scores : k logits"""
num_lvl = len ( cfg . FPN . ANCHOR_STRIDES ) assert len ( multilevel_pred_boxes ) == num_lvl assert len ( multilevel_label_logits ) == num_lvl training = get_current_tower_context ( ) . is_training all_boxes = [ ] all_scores = [ ] if cfg . FPN . PROPOSAL_MODE == 'Level' : fpn_nms_topk = cfg . RPN . TRAIN_PER_LEVEL_...
def get_environment_from_batch_command ( env_cmd , initial = None ) : """Take a command ( either a single command or list of arguments ) and return the environment created after running that command . Note that if the command must be a batch file or . cmd file , or the changes to the environment will not be c...
if not isinstance ( env_cmd , ( list , tuple ) ) : env_cmd = [ env_cmd ] if not os . path . exists ( env_cmd [ 0 ] ) : raise RuntimeError ( 'Error: %s does not exist' % ( env_cmd [ 0 ] , ) ) # construct the command that will alter the environment env_cmd = subprocess . list2cmdline ( env_cmd ) # create a tag so...
def middle_end ( self , index ) : """Set the index ( + 1 ) where MIDDLE ends . : param int index : the new index for MIDDLE end"""
if ( index < 0 ) or ( index > self . all_length ) : raise ValueError ( u"The given index is not valid" ) self . __middle_end = index
def main(search, query):
    """Run a search for *query*, print the result URL and open the page.

    :param search: object providing ``search(query) -> url`` and
        ``open_page(url)``
    :param query: the search terms to look up
    """
    result_url = search.search(query)
    print(result_url)
    search.open_page(result_url)
def match_field ( self , field , value , required = True , new_group = False ) : """Add a ` ` field : value ` ` term to the query . Matches will have the ` ` value ` ` in the ` ` field ` ` . Arguments : field ( str ) : The field to check for the value . The field must be namespaced according to Elasticsearc...
# If not the start of the query string , add an AND or OR if self . initialized : if required : self . _and_join ( new_group ) else : self . _or_join ( new_group ) self . _field ( field , value ) return self
def probabilities ( self ) : '''Trains a model and predicts recommendations . If the query feature collection could not be found or if there is insufficient training data , an empty list is returned . Otherwise , a list of content objects ( tuples of content id and feature collection ) and probabilities is ...
self . query_fc = self . store . get ( self . query_content_id ) if self . query_fc is None : logger . warning ( 'Could not find FC for %s' , self . query_content_id ) return [ ] # Try the canopy query before training , because if the canopy query # gives us nothing , then there ' s no point in the additional w...
def find(*args, **kwargs):
    """Find the first matching element in a list and return it.

    Usage::

        find(element, list_)
        find(of=element, in_=list_)
        find(where=predicate, in_=list_)

    :param element, of: element to search for (by equality comparison)
    :param where: predicate selecting the element
    :return: the first matching element
    :raises IndexError: if no matching element exists
    """
    sequence, position = _index(*args, start=0, step=1, **kwargs)
    if position < 0:
        raise IndexError("element not found")
    return sequence[position]
def fixture_to_tables ( fixture ) : """convert fixture into * behave * examples : param fixture : a dictionary in the following form : : " test1name " : " test1property1 " : . . . , " test1property2 " : . . . , " test2name " : " test2property1 " : . . . , " test2property2 " : . . . , : return : a li...
tables = [ ] for ( title , content ) in fixture . iteritems ( ) : rows = [ ] # header ( keyword ) row keys = sorted ( content . keys ( ) ) rows . append ( tuple ( keys ) ) # item ( value ) row row1 = [ ] for col in rows [ 0 ] : row1 . append ( content [ col ] ) rows . append ( tu...
def ParseFileObject ( self , parser_mediator , file_object ) : """Parses a NTFS $ MFT metadata file - like object . Args : parser _ mediator ( ParserMediator ) : mediates interactions between parsers and other components , such as storage and dfvfs . file _ object ( dfvfs . FileIO ) : file - like object .""...
mft_metadata_file = pyfsntfs . mft_metadata_file ( ) try : mft_metadata_file . open_file_object ( file_object ) except IOError as exception : parser_mediator . ProduceExtractionWarning ( 'unable to open file with error: {0!s}' . format ( exception ) ) for entry_index in range ( 0 , mft_metadata_file . number_of...
def export ( self , storage_client , overwrite = True ) : '''a method to export all the records in collection to another platform : param storage _ client : class object with storage client methods : return : string with exit message'''
title = '%s.export' % self . __class__ . __name__ # validate storage client method_list = [ 'save' , 'load' , 'list' , 'export' , 'delete' , 'remove' , '_import' , 'collection_name' ] for method in method_list : if not getattr ( storage_client , method , None ) : from labpack . parsing . grammar import join...
def upload_file ( self , fax_file , ** kwargs ) : # noqa : E501 """upload a file # noqa : E501 Before sending a fax you need to upload your files using this API . In order to upload your fax file , you have to send a ` multipart / form - data ` request with your file . If the upload was successful you would recei...
kwargs [ '_return_http_data_only' ] = True if kwargs . get ( 'async' ) : return self . upload_file_with_http_info ( fax_file , ** kwargs ) # noqa : E501 else : ( data ) = self . upload_file_with_http_info ( fax_file , ** kwargs ) # noqa : E501 return data
def command(self, *args, **kwargs):
    """A shortcut decorator for declaring and attaching a command to
    the group.

    Takes the same arguments as the module-level :func:`command`
    decorator, but immediately registers the created command on this
    instance via :meth:`add_command`.
    """
    def decorator(f):
        new_cmd = command(*args, **kwargs)(f)
        self.add_command(new_cmd)
        return new_cmd
    return decorator
def __embed_branch ( dfs_data ) : """Builds the combinatorial embedding of the graph . Returns whether the graph is planar ."""
u = dfs_data [ 'ordering' ] [ 0 ] dfs_data [ 'LF' ] = [ ] dfs_data [ 'RF' ] = [ ] dfs_data [ 'FG' ] = { } n = dfs_data [ 'graph' ] . num_nodes ( ) f0 = ( 0 , n ) g0 = ( 0 , n ) L0 = { 'u' : 0 , 'v' : n } R0 = { 'x' : 0 , 'y' : n } dfs_data [ 'LF' ] . append ( f0 ) dfs_data [ 'RF' ] . append ( g0 ) dfs_data [ 'FG' ] [ 0...
def execute ( node ) : """Uses ohai to get virtualization information which is then saved to then node file"""
with hide ( 'everything' ) : virt = json . loads ( sudo ( 'ohai virtualization' ) ) if not len ( virt ) or virt [ 0 ] [ 1 ] != "host" : # It may work for virtualization solutions other than Xen print ( "This node is not a Xen host, doing nothing" ) return node [ 'virtualization' ] = { 'role' : 'host' , 'sys...
def lmom(self, *args, nmom=5, **kwds):
    """Compute the distribution's L-moments, e.g. l1, l2, l3, l4, ...

    The first two values from :meth:`lmom_ratios` already *are* l1 and
    l2; the remaining ratios (t3, t4, ...) are scaled by l2 to obtain
    the higher moments.

    :param args: distribution parameters in order of shape(s), loc, scale
    :param int nmom: number of moments to calculate
    :param kwds: distribution parameters as named arguments
    :return: list of L-moments
    """
    ratios = self.lmom_ratios(*args, nmom=nmom, **kwds)
    l1, l2 = ratios[0], ratios[1]
    return [l1, l2] + [ratio * l2 for ratio in ratios[2:]]
def write ( grp , out_path ) : """Write a GRP to a text file . Args : grp ( list ) : GRP object to write to new - line delimited text file out _ path ( string ) : output path Returns : None"""
with open ( out_path , "w" ) as f : for x in grp : f . write ( str ( x ) + "\n" )
def deploy_project ( ) : """Deploy to the project directory in the virtualenv"""
project_root = '/' . join ( [ deployment_root ( ) , 'env' , env . project_fullname , 'project' ] ) local_dir = os . getcwd ( ) if env . verbosity : print env . host , "DEPLOYING project" , env . project_fullname # Exclude a few things that we don ' t want deployed as part of the project folder rsync_exclude = [ 'lo...
def PyplotLineStyles ( ) : """Linestyles This examples showcases different linestyles copying those of Tikz / PGF ."""
import numpy as np import matplotlib . pyplot as plt from collections import OrderedDict from matplotlib . transforms import blended_transform_factory linestyles = OrderedDict ( [ ( 'solid' , ( 0 , ( ) ) ) , ( 'loosely dotted' , ( 0 , ( 1 , 10 ) ) ) , ( 'dotted' , ( 0 , ( 1 , 5 ) ) ) , ( 'densely dotted' , ( 0 , ( 1 , ...
def create ( cls , name , cluster_virtual , network_value , macaddress , interface_id , nodes , vlan_id = None , cluster_mode = 'balancing' , backup_mgt = None , primary_heartbeat = None , log_server_ref = None , domain_server_address = None , location_ref = None , zone_ref = None , default_nat = False , enable_antivir...
interfaces = kw . pop ( 'interfaces' , [ ] ) # Add the primary interface to the interface list interface = { 'cluster_virtual' : cluster_virtual , 'network_value' : network_value , 'nodes' : nodes } if vlan_id : interface . update ( vlan_id = vlan_id ) interfaces . append ( dict ( interface_id = interface_id , maca...
def list ( request , content_type , id ) : """Wrapper exposing comment ' s render _ comment _ list tag as a view ."""
# get object app_label , model = content_type . split ( '-' ) ctype = ContentType . objects . get ( app_label = app_label , model = model ) obj = ctype . get_object_for_this_type ( id = id ) # setup template and return result t = Template ( "{% load comments %}{% render_comment_list for object %}" ) context = RequestCo...
def request(self, path, api='public', method='GET', params=None, headers=None, body=None):
    """Entry point for all generated exchange methods; delegates to fetch2.

    :param path: endpoint path appended to the API base URL
    :param api: API section to use ('public' by default)
    :param method: HTTP verb (default 'GET')
    :param params: query/body parameters; a fresh empty dict when omitted
    :param headers: extra HTTP headers, or None
    :param body: raw request body, or None
    :return: whatever ``self.fetch2`` returns for this request
    """
    # Fix the mutable-default-argument pitfall of the original
    # ``params={}``: a single shared dict across every call could be
    # mutated downstream and leak state between requests. Using a None
    # sentinel gives each call its own dict while keeping the effective
    # default value ({}) identical for callers.
    if params is None:
        params = {}
    return self.fetch2(path, api, method, params, headers, body)