signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def register_iq_request_handler ( self , type_ , payload_cls , cb , * , with_send_reply = False ) : """Register a coroutine function or a function returning an awaitable to run when an IQ request is received . : param type _ : IQ type to react to ( must be a request type ) . : type type _ : : class : ` ~ aiox...
type_ = self . _coerce_enum ( type_ , structs . IQType ) if not type_ . is_request : raise ValueError ( "{!r} is not a request IQType" . format ( type_ ) ) key = type_ , payload_cls if key in self . _iq_request_map : raise ValueError ( "only one listener is allowed per tag" ) self . _iq_request_map [ key ] = cb...
def write_file ( infile : str , outfile : str , progress_callback : Callable [ [ float ] , None ] , chunk_size : int = 1024 , file_size : int = None ) : """Write a file to another file with progress callbacks . : param infile : The input filepath : param outfile : The output filepath : param progress _ callba...
total_written = 0 with open ( infile , 'rb' ) as img , open ( outfile , 'wb' ) as part : if None is file_size : file_size = img . seek ( 0 , 2 ) img . seek ( 0 ) LOG . info ( f'write_file: file size calculated as {file_size}B' ) LOG . info ( f'write_file: writing {infile} ({file_size}B)'...
def set_year(self, year):
    """Set a user's year on magma; required just before login.

    Called by default by ``login``.

    :param year: year label, translated through ``YEARS`` when known,
        otherwise sent as-is.
    :return: True when the post-login horizontal menu is present
        (i.e. the request succeeded).
    """
    self.year = YEARS.get(year, year)
    payload = {'idCursus': self.year}
    page = self.post_soup('/~etudiant/login.php', data=payload)
    # A non-empty menu selection signals a logged-in page.
    return bool(page.select('ul.rMenu-hor'))
def findTheLost ( config_file , configspec_file , skipHidden = True ) : """Find any lost / missing parameters in this cfg file , compared to what the . cfgspc says should be there . This method is recommended by the ConfigObj docs . Return a stringified list of item errors ."""
# do some sanity checking , but don ' t ( yet ) make this a serious error if not os . path . exists ( config_file ) : print ( "ERROR: Config file not found: " + config_file ) return [ ] if not os . path . exists ( configspec_file ) : print ( "ERROR: Configspec file not found: " + configspec_file ) retur...
def cql_encode_all_types(self, val, as_text_type=False):
    """Convert any value into a CQL string.

    Falls back to ``cql_encode_object`` when :attr:`~Encoder.mapping`
    has no encoder registered for ``type(val)``.
    """
    encoder = self.mapping.get(type(val), self.cql_encode_object)
    result = encoder(val)
    if as_text_type and not isinstance(result, six.text_type):
        result = result.decode('utf-8')
    return result
def save(self):
    """Save the current draft state on the server.

    Stores the server's response as the new local state and records
    the assigned message id.

    :return: self, allowing call chaining.
    """
    saved = self.session.request("save:Message", [self.data])
    self.data = saved
    self.message_id = saved["id"]
    return self
def raise_failure_exception(self, expect_or_allow='Allowed'):
    """Raise a ``MockExpectationError`` with a useful message.

    :param expect_or_allow: verb used at the start of the message.
    :raise: ``MockExpectationError``
    """
    message = (
        "{} '{}' to be called {}on {!r} with {}, but was not. ({}:{})"
    ).format(
        expect_or_allow,
        self._method_name,
        self._call_counter.error_string(),
        self._target.obj,
        self._expected_argument_string(),
        self._caller.filename,
        self._caller.lineno,
    )
    raise MockExpectationError(message)
def get_rank ( self , entity , criteria , condition = True ) : """Get the rank of a person within an entity according to a criteria . The person with rank 0 has the minimum value of criteria . If condition is specified , then the persons who don ' t respect it are not taken into account and their rank is - 1. ...
# If entity is for instance ' person . household ' , we get the reference entity ' household ' behind the projector entity = entity if not isinstance ( entity , Projector ) else entity . reference_entity positions = entity . members_position biggest_entity_size = np . max ( positions ) + 1 filtered_criteria = np . wher...
def format_page ( self , page , link_resolver , output ) : """Called by ` project . Project . format _ page ` , to leave full control to extensions over the formatting of the pages they are responsible of . Args : page : tree . Page , the page to format . link _ resolver : links . LinkResolver , object re...
debug ( 'Formatting page %s' % page . link . ref , 'formatting' ) if output : actual_output = os . path . join ( output , 'html' ) if not os . path . exists ( actual_output ) : os . makedirs ( actual_output ) else : actual_output = None page . format ( self . formatter , link_resolver , actual_outpu...
def does_not_mutate(func):
    """Decorator preventing a method from mutating its receiver.

    The wrapped method runs against a copy of ``self`` (obtained via
    ``self.copy()``), so the original object is left untouched.

    :param func: method to wrap; its first argument must support ``copy()``.
    :return: the wrapping function, with ``func``'s metadata attached.
    """
    import functools

    # functools.wraps copies __name__ and __doc__ like the original
    # hand-rolled version did, but also __module__, __qualname__,
    # __dict__ and sets __wrapped__ for introspection.
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        return func(self.copy(), *args, **kwargs)

    return wrapper
def threshold(self, data_1, data_2, recall_weight=1.5):  # pragma: no cover
    """Return the threshold maximizing the expected F-score.

    The F-score is a weighted average of precision and recall over a
    sample of record pairs drawn from the two data sets.

    :param data_1: records of the first dataset, keyed by record id.
    :param data_2: records of the second dataset, keyed by record id.
    :param recall_weight: how much more recall is valued than precision.
    """
    pairs = self._blockData(data_1, data_2)
    return self.thresholdBlocks(pairs, recall_weight)
def context(name):
    '''A decorator registering a theme context processor under *name*.'''
    def register(func):
        # Record the processor on the current theme; hand the function
        # back unchanged so it remains directly callable.
        g.theme.context_processors[name] = func
        return func
    return register
def connect_full_direct(self, config):
    """Create a fully-connected genome, including direct
    input-output connections."""
    # compute_full_connections(config, True) yields every eligible
    # (input, output) node pair, direct connections included.
    for in_id, out_id in self.compute_full_connections(config, True):
        conn = self.create_connection(config, in_id, out_id)
        self.connections[conn.key] = conn
def call(cls, iterable, *a, **kw):
    """Call every item in *iterable* with the given arguments and
    collect the results into a new *cls* instance."""
    results = (func(*a, **kw) for func in iterable)
    return cls(results)
def exptime(self):
    '''Next scheduled datetime.

    :returns: ``self.nextday`` shifted by the configured hour and
        minutes.  NOTE(review): the ``- 8`` offset presumably converts
        a UTC+8 local hour to UTC -- confirm against how ``nextday``
        and ``__hour`` are produced.
    '''
    return self.nextday + timedelta(hours=self.__hour - 8, minutes=self.__minutes)
def save ( self , record_key , record_data , overwrite = True , secret_key = '' ) : '''a method to create a record in the collection folder : param record _ key : string with name to assign to record ( see NOTES below ) : param record _ data : byte data for record body : param overwrite : [ optional ] boolean...
title = '%s.save' % self . __class__ . __name__ # validate inputs input_fields = { 'record_key' : record_key , 'secret_key' : secret_key } for key , value in input_fields . items ( ) : if value : object_title = '%s(%s=%s)' % ( title , key , str ( value ) ) self . fields . validate ( value , '.%s' % ...
def from_ewif_hex ( cls : Type [ SigningKeyType ] , ewif_hex : str , password : str ) -> SigningKeyType : """Return SigningKey instance from Duniter EWIF in hexadecimal format : param ewif _ hex : EWIF string in hexadecimal format : param password : Password of the encrypted seed"""
ewif_bytes = Base58Encoder . decode ( ewif_hex ) if len ( ewif_bytes ) != 39 : raise Exception ( "Error: the size of EWIF is invalid" ) # extract data fi = ewif_bytes [ 0 : 1 ] checksum_from_ewif = ewif_bytes [ - 2 : ] ewif_no_checksum = ewif_bytes [ 0 : - 2 ] salt = ewif_bytes [ 1 : 5 ] encryptedhalf1 = ewif_bytes...
def request ( self , verb , subpath , data = '' ) : """Generic Vingd - backend authenticated request ( currently HTTP Basic Auth over HTTPS , but OAuth1 in the future ) . : returns : Data ` ` dict ` ` , or raises exception ."""
if not self . api_key or not self . api_secret : raise Exception ( "Vingd authentication credentials undefined." ) endpoint = urlparse ( self . api_endpoint ) if endpoint . scheme != 'https' : raise Exception ( "Invalid Vingd endpoint URL (non-https)." ) host = endpoint . netloc . split ( ':' ) [ 0 ] port = 443...
def select_directory(self):
    """Prompt the user for a directory and return its absolute path.

    Stdio redirection is disabled around the (blocking) native dialog
    and re-enabled afterwards.  Returns an empty/falsy value when the
    dialog is cancelled.
    """
    self.__redirect_stdio_emit(False)
    directory = getexistingdirectory(self, _("Select directory"), self.path)
    if directory:
        # Normalize to an absolute unicode filesystem path.
        directory = to_unicode_from_fs(osp.abspath(directory))
    self.__redirect_stdio_emit(True)
    return directory
def random_tournament_graph ( n , random_state = None ) : """Return a random tournament graph [ 1 ] _ with n nodes . Parameters n : scalar ( int ) Number of nodes . random _ state : int or np . random . RandomState , optional Random seed ( integer ) or np . random . RandomState instance to set the initi...
random_state = check_random_state ( random_state ) num_edges = n * ( n - 1 ) // 2 r = random_state . random_sample ( num_edges ) row = np . empty ( num_edges , dtype = int ) col = np . empty ( num_edges , dtype = int ) _populate_random_tournament_row_col ( n , r , row , col ) data = np . ones ( num_edges , dtype = bool...
def _init_env(self):
    '''Initialize some Salt environment.

    Loads a minion configuration and injects it as the grains core
    module's ``__opts__`` (grains modules read their config from that
    module-level name), then keeps a reference to the module on the
    instance for later grain computation.
    '''
    from salt.config import minion_config
    from salt.grains import core as g_core
    g_core.__opts__ = minion_config(self.DEFAULT_MINION_CONFIG_PATH)
    self.grains_core = g_core
def unicode_left(s, width):
    """Cut a unicode string from the left so it fits in *width* columns.

    Column widths come from ``__unicode_width_mapping`` keyed by each
    character's East-Asian-width class (wide chars count double).
    """
    taken = 0
    columns = 0
    for ch in s:
        columns += __unicode_width_mapping[east_asian_width(ch)]
        if width < columns:
            break
        taken += 1
    return s[:taken]
def recv_exactly(self, n, timeout='default'):
    """Receive exactly ``n`` bytes.

    Aliases: read_exactly, readexactly, recvexactly
    """
    self._print_recv_header(
        '======== Receiving until exactly {0}B{timeout_text} ========',
        timeout, n)
    # The predicate reports success (n) only once the buffer holds at
    # least n bytes; 0 means "keep receiving".
    predicate = lambda buf: n if len(buf) >= n else 0
    return self._recv_predicate(predicate, timeout)
def _guess_name_of ( self , expr ) : """Tries to guess what variable name ' expr ' ends in . This is a heuristic that roughly emulates what most SQL databases name columns , based on selected variable names or applied functions ."""
if isinstance ( expr , ast . Var ) : return expr . value if isinstance ( expr , ast . Resolve ) : # We know the RHS of resolve is a Literal because that ' s what # Parser . dot _ rhs does . return expr . rhs . value if isinstance ( expr , ast . Select ) and isinstance ( expr . rhs , ast . Literal ) : name =...
def estimate(self):
    """Exhaustively search all DAGs and return the best-scoring one.

    Only the network structure is estimated, not its parametrization.

    Returns
    -------
    model : `DAG` instance
        The structure maximizing the scoring method supplied in the
        constructor.
    """
    score = self.scoring_method.score
    winner = max(self.all_dags(), key=score)
    model = DAG()
    model.add_nodes_from(sorted(winner.nodes()))
    model.add_edges_from(sorted(winner.edges()))
    return model
def first(series, order_by=None):
    """Return the first value of a series.

    Args:
        series (pandas.Series): column to summarize.

    Kwargs:
        order_by: a pandas.Series or list of series (can be symbolic)
            used to order ``series`` before summarization.
    """
    ordered = series if order_by is None else order_series_by(series, order_by)
    return ordered.iloc[0]
def next_cursor_location(self):
    """Move cursor to the next trace frame."""
    self._verify_entrypoint_selected()
    last_index = len(self.trace_tuples) - 1
    # Clamp at the final frame instead of walking off the end.
    self.current_trace_frame_index = min(
        self.current_trace_frame_index + 1, last_index)
    self.trace()
def addRnaQuantificationSet ( self ) : """Adds an rnaQuantificationSet into this repo"""
self . _openRepo ( ) dataset = self . _repo . getDatasetByName ( self . _args . datasetName ) if self . _args . name is None : name = getNameFromPath ( self . _args . filePath ) else : name = self . _args . name rnaQuantificationSet = rna_quantification . SqliteRnaQuantificationSet ( dataset , name ) referenceS...
def export_project ( self ) : """Processes groups and misc options specific for eclipse , and run generator"""
output = copy . deepcopy ( self . generated_project ) data_for_make = self . workspace . copy ( ) self . exporter . process_data_for_makefile ( data_for_make ) output [ 'path' ] , output [ 'files' ] [ 'makefile' ] = self . gen_file_jinja ( 'makefile_gcc.tmpl' , data_for_make , 'Makefile' , data_for_make [ 'output_dir' ...
def probe_wdl ( self , board : chess . Board ) -> int : """Probes WDL tables for win / draw / loss - information . Probing is thread - safe when done with different * board * objects and if * board * objects are not modified during probing . Returns ` ` 2 ` ` if the side to move is winning , ` ` 0 ` ` if the ...
# Positions with castling rights are not in the tablebase . if board . castling_rights : raise KeyError ( "syzygy tables do not contain positions with castling rights: {}" . format ( board . fen ( ) ) ) # Validate piece count . if chess . popcount ( board . occupied ) > 7 : raise KeyError ( "syzygy tables suppo...
def get_vmpolicy_macaddr_input_last_rcvd_instance ( self , ** kwargs ) : """Auto Generated Code"""
config = ET . Element ( "config" ) get_vmpolicy_macaddr = ET . Element ( "get_vmpolicy_macaddr" ) config = get_vmpolicy_macaddr input = ET . SubElement ( get_vmpolicy_macaddr , "input" ) last_rcvd_instance = ET . SubElement ( input , "last-rcvd-instance" ) last_rcvd_instance . text = kwargs . pop ( 'last_rcvd_instance'...
def sed ( self , name , ** kwargs ) : """Generate a spectral energy distribution ( SED ) for a source . This function will fit the normalization of the source in each energy bin . By default the SED will be generated with the analysis energy bins but a custom binning can be defined with the ` ` loge _ bins ...
timer = Timer . create ( start = True ) name = self . roi . get_source_by_name ( name ) . name # Create schema for method configuration schema = ConfigSchema ( self . defaults [ 'sed' ] , optimizer = self . defaults [ 'optimizer' ] ) schema . add_option ( 'prefix' , '' ) schema . add_option ( 'outfile' , None , '' , st...
def grantham_score ( ref_aa , mut_aa ) : """https : / / github . com / ashutoshkpandey / Annotation / blob / master / Grantham _ score _ calculator . py"""
grantham = { 'S' : { 'R' : 110 , 'L' : 145 , 'P' : 74 , 'T' : 58 , 'A' : 99 , 'V' : 124 , 'G' : 56 , 'I' : 142 , 'F' : 155 , 'Y' : 144 , 'C' : 112 , 'H' : 89 , 'Q' : 68 , 'N' : 46 , 'K' : 121 , 'D' : 65 , 'E' : 80 , 'M' : 135 , 'W' : 177 } , 'R' : { 'R' : 0 , 'L' : 102 , 'P' : 103 , 'T' : 71 , 'A' : 112 , 'V' : 96 , 'G...
def get_user(self, userPk):
    """Return the user identified by *userPk* (Pk or UUID), or None
    when the request yields nothing."""
    response = self._request('user/' + str(userPk))
    if not response:
        return None
    user = User()
    user.pk = user.id = userPk
    # Merge every field returned by the API straight into the object.
    user.__dict__.update(response.json())
    return user
def to_json_dict ( self , filter_fcn = None ) : """Create a dict with Entity properties for json encoding . It can be overridden by subclasses for each standard serialization doesn ' t work . By default it call _ to _ json _ dict on OneToOne fields and build a list calling the same method on each OneToMany ob...
fields , values = self . get_fields ( ) , self . get_values ( ) filtered_fields = fields . items ( ) if filter_fcn is not None : filtered_fields = ( tpl for tpl in filtered_fields if filter_fcn ( tpl [ 0 ] , tpl [ 1 ] ) ) json_dct = { } for field_name , field in filtered_fields : if field_name in values : ...
def get_dpi():
    """Return the screen resolution in dots per inch, one value per axis.

    Computed from the display size in pixels and in millimetres,
    using 25.4 mm per inch.

    :return: list [dpi_x, dpi_y]
    """
    def pxmm_2_dpi(pixels, length_mm):
        # dpi = pixels / inches, with inches = mm / 25.4.
        # (Fixes the original's 25.6 constant and its Python-2-only
        # tuple-parameter signature, a syntax error on Python 3.)
        return pixels * 25.4 / length_mm

    return [pxmm_2_dpi(px, mm)
            for px, mm in zip(wx.GetDisplaySize(), wx.GetDisplaySizeMM())]
def change_password(self, old_password, new_password):
    """Update the user's password to a new one."""
    form = {
        "old_password": old_password,
        "password": new_password,
    }
    body = self._formdata(form, ["old_password", "password"])
    content = self._fetch("/current_user/password", method="POST", body=body)
    return FastlyUser(self, content)
def add(self, *l):
    '''Add inner elements to this outer container.

    Args:
        *l: elements passed to the Inner constructor; nested
            iterables are flattened first.
    '''
    for item in flatten(l):
        self._add([self.Inner(item)], self.l)
def read_http_status_codes ( filename = 'HTTP_1.1 Status Code Definitions.html' ) : r"""Parse the HTTP documentation HTML page in filename Return : code _ dict : { 200 : " OK " , . . . } > > > fn = ' HTTP _ 1.1 Status Code Definitions . html ' > > > code _ dict = read _ http _ status _ codes ( fn ) > > >...
lines = read_text ( filename ) level_lines = get_markdown_levels ( lines , 3 ) code_dict = { } for level , line in level_lines : code , name = ( re . findall ( r'\s(\d\d\d)[\W]+([-\w\s]*)' , line ) or [ [ 0 , '' ] ] ) [ 0 ] if 1000 > int ( code ) >= 100 : code_dict [ code ] = name code_dict [ in...
def close(self):
    """Close the plot and release its memory."""
    from matplotlib.pyplot import close
    # Reset scales to linear before clearing, working back-to-front,
    # to avoid matplotlib/matplotlib#9970.
    for axis in reversed(self.axes):
        axis.set_xscale('linear')
        axis.set_yscale('linear')
        axis.cla()
    close(self)
def assign_default_storage_policy_to_datastore ( profile_manager , policy , datastore ) : '''Assigns a storage policy as the default policy to a datastore . profile _ manager Reference to the profile manager . policy Reference to the policy to assigned . datastore Reference to the datastore .'''
placement_hub = pbm . placement . PlacementHub ( hubId = datastore . _moId , hubType = 'Datastore' ) log . trace ( 'placement_hub = %s' , placement_hub ) try : profile_manager . AssignDefaultRequirementProfile ( policy . profileId , [ placement_hub ] ) except vim . fault . NoPermission as exc : log . exception ...
def example_list ( a , args ) : """list topics and cluster metadata"""
if len ( args ) == 0 : what = "all" else : what = args [ 0 ] md = a . list_topics ( timeout = 10 ) print ( "Cluster {} metadata (response from broker {}):" . format ( md . cluster_id , md . orig_broker_name ) ) if what in ( "all" , "brokers" ) : print ( " {} brokers:" . format ( len ( md . brokers ) ) ) ...
def get_relative_breadcrumbs(self):
    """Return the breadcrumbs with paths made relative to the basedir."""
    base = self.basedir
    return [(relpath(path, base), entry)
            for path, entry in self.breadcrumbs]
def find_complement ( am , sites = None , bonds = None , asmask = False ) : r"""Finds the complementary sites ( or bonds ) to a given set of inputs Parameters am : scipy . sparse matrix The adjacency matrix of the network . sites : array _ like ( optional ) The set of sites for which the complement is sou...
if ( sites is not None ) and ( bonds is None ) : inds = sp . unique ( sites ) N = am . shape [ 0 ] elif ( bonds is not None ) and ( sites is None ) : inds = sp . unique ( bonds ) N = int ( am . nnz / 2 ) elif ( bonds is not None ) and ( sites is not None ) : raise Exception ( 'Only one of sites or b...
def _init_idxs_strpat(self, usr_hdrs):
    """Collect the indexes of columns whose values will be strings."""
    wanted = self.strpat_hdrs.keys()
    # Keep only headers the user asked for that also have a string
    # pattern registered.
    self.idxs_strpat = [
        idx for hdr, idx in self.hdr2idx.items()
        if hdr in usr_hdrs and hdr in wanted
    ]
def hash(self, val):
    """Calculate the hash of *val* (must be bytes).

    :param val: input bytes to hash.
    :return: the digest produced by ``self.compute``.
    :raise _TypeError: if *val* or the computed digest is not bytes.
    """
    if not isinstance(val, bytes):
        raise _TypeError("val", "bytes", val)
    rv = self.compute(val)
    # Bug fix: the original re-checked ``val`` here (always true at this
    # point), so the digest's type was never actually validated.
    if not isinstance(rv, bytes):
        raise _TypeError("rv", "bytes", rv)
    return rv
def symbol(self, id, bp=0):
    """Return the symbol class registered for *id*.

    Adds the symbol to ``symbol_table`` if it does not exist already;
    otherwise merely raises its binding power to *bp* when higher.
    """
    if id in self.symbol_table:
        s = self.symbol_table[id]
        s.lbp = max(bp, s.lbp)
        return s
    # Unknown symbol: mint a fresh subclass and register it.
    class s(self.symbol_base):
        pass
    s.id = id
    s.lbp = bp
    self.symbol_table[id] = s
    return s
def parse_union_type_definition ( lexer : Lexer ) -> UnionTypeDefinitionNode : """UnionTypeDefinition"""
start = lexer . token description = parse_description ( lexer ) expect_keyword ( lexer , "union" ) name = parse_name ( lexer ) directives = parse_directives ( lexer , True ) types = parse_union_member_types ( lexer ) return UnionTypeDefinitionNode ( description = description , name = name , directives = directives , ty...
def multiplexer ( f = None , nruns = 1 , nprocs = 1 , seeding = None , ** args ) : """Evaluate a function for different parameters , optionally in parallel . Parameters f : function function f to evaluate , must take only kw arguments as inputs nruns : int number of evaluations of f for each set of argume...
if not callable ( f ) : raise ValueError ( 'multiplexer: function f missing, or not callable' ) if seeding is None : seeding = ( nruns > 1 ) # extra arguments ( meant to be arguments for f ) fixedargs , listargs , dictargs = { } , { } , { } listargs [ 'run' ] = list ( range ( nruns ) ) for k , v in args . items...
def remove ( self , handler_id = None ) : """Remove a previously added handler and stop sending logs to its sink . Parameters handler _ id : | int | or ` ` None ` ` The id of the sink to remove , as it was returned by the | add | method . If ` ` None ` ` , all handlers are removed . The pre - configured han...
with self . _lock : handlers = self . _handlers . copy ( ) if handler_id is None : for handler in handlers . values ( ) : handler . stop ( ) handlers . clear ( ) else : try : handler = handlers . pop ( handler_id ) except KeyError : raise V...
def week_to_datetime(iso_year, iso_week):
    "datetime instance for the start of the given ISO year and week"
    # iso_to_gregorian(year, week, 0) presumably yields the date the
    # week starts on -- day index 0 per that helper's convention.
    start_date = iso_to_gregorian(iso_year, iso_week, 0)
    return datetime.datetime.combine(start_date, datetime.time(0))
def _verify_views ( ) : '''Verify that you have the views you need . This can be disabled by adding couchbase . skip _ verify _ views : True in config'''
global VERIFIED_VIEWS if VERIFIED_VIEWS or __opts__ . get ( 'couchbase.skip_verify_views' , False ) : return cb_ = _get_connection ( ) ddoc = { 'views' : { 'jids' : { 'map' : "function (doc, meta) { if (meta.id.indexOf('/') === -1 && doc.load){ emit(meta.id, null) } }" } , 'jid_returns' : { 'map' : "function (doc, ...
def to_pandas ( self , wrap = False , ** kwargs ) : """Convert to pandas DataFrame . Execute at once . : param wrap : if True , wrap the pandas DataFrame into a PyODPS DataFrame : return : pandas DataFrame"""
try : import pandas as pd except ImportError : raise DependencyNotInstalledError ( 'to_pandas requires `pandas` library' ) def wrapper ( result ) : res = result . values if wrap : from . . import DataFrame return DataFrame ( res , schema = self . schema ) return res return self . exe...
def _chunk_write ( chunk , local_file , progress ) : """Write a chunk to file and update the progress bar ."""
local_file . write ( chunk ) if progress is not None : progress . update ( len ( chunk ) )
def remove ( path , force = False ) : '''Remove the named file or directory Args : path ( str ) : The path to the file or directory to remove . force ( bool ) : Remove even if marked Read - Only . Default is False Returns : bool : True if successful , False if unsuccessful CLI Example : . . code - blo...
# This must be a recursive function in windows to properly deal with # Symlinks . The shutil . rmtree function will remove the contents of # the Symlink source in windows . path = os . path . expanduser ( path ) if not os . path . isabs ( path ) : raise SaltInvocationError ( 'File path must be absolute: {0}' . form...
def phase_parents_by_transmission ( g , window_size ) : """Phase parent genotypes from a trio or cross , given progeny genotypes already phased by Mendelian transmission . Parameters g : GenotypeArray Genotype array , with parents as first two columns and progeny as remaining columns , where progeny genot...
# setup check_type ( g , GenotypeArray ) check_dtype ( g . values , 'i1' ) check_ploidy ( g . ploidy , 2 ) if g . is_phased is None : raise ValueError ( 'genotype array must first have progeny phased by transmission' ) check_min_samples ( g . n_samples , 3 ) # run the phasing g . _values = memoryview_safe ( g . val...
def get_netconf_client_capabilities_output_session_session_id ( self , ** kwargs ) : """Auto Generated Code"""
config = ET . Element ( "config" ) get_netconf_client_capabilities = ET . Element ( "get_netconf_client_capabilities" ) config = get_netconf_client_capabilities output = ET . SubElement ( get_netconf_client_capabilities , "output" ) session = ET . SubElement ( output , "session" ) session_id = ET . SubElement ( session...
def vector_cross ( vector1 , vector2 ) : """Computes the cross - product of the input vectors . : param vector1 : input vector 1 : type vector1 : list , tuple : param vector2 : input vector 2 : type vector2 : list , tuple : return : result of the cross product : rtype : tuple"""
try : if vector1 is None or len ( vector1 ) == 0 or vector2 is None or len ( vector2 ) == 0 : raise ValueError ( "Input vectors cannot be empty" ) except TypeError as e : print ( "An error occurred: {}" . format ( e . args [ - 1 ] ) ) raise TypeError ( "Input must be a list or tuple" ) except Except...
def _add_chrome_arguments ( self , options ) : """Add Chrome arguments from properties file : param options : chrome options object"""
try : for pref , pref_value in dict ( self . config . items ( 'ChromeArguments' ) ) . items ( ) : pref_value = '={}' . format ( pref_value ) if pref_value else '' self . logger . debug ( "Added chrome argument: %s%s" , pref , pref_value ) options . add_argument ( '{}{}' . format ( pref , sel...
def getGeometry ( self , ra = None , dec = None ) : """Return an array of rectangles that represent the ' ra , dec ' corners of the FOV"""
import math , ephem ccds = [ ] if ra is None : ra = self . ra if dec is None : dec = self . dec self . ra = ephem . hours ( ra ) self . dec = ephem . degrees ( dec ) for geo in self . geometry [ self . camera ] : ycen = math . radians ( geo [ "dec" ] ) + dec xcen = math . radians ( geo [ "ra" ] ) / math...
def create_empty(self, name=None, renderers=None, RootNetworkList=None,
                 verbose=False):
    """Create a new, empty network.

    The network may join an existing collection or start a new one.

    :param name (string, optional): name of the new network; a default
        is used when omitted.
    :param renderers (optional): renderers for the new network.
    :param RootNetworkList (optional): collection the network joins.
    :param verbose: print request details when True.
    :return: the API response.
    """
    params = set_param(
        ["name", "renderers", "RootNetworkList"],
        [name, renderers, RootNetworkList])
    return api(url=self.__url + "/create empty", PARAMS=params,
               method="POST", verbose=verbose)
def choose_raw_dataset ( currently = "" ) : """Let the user choose a raw dataset . Return the absolute path ."""
folder = os . path . join ( get_project_root ( ) , "raw-datasets" ) files = [ os . path . join ( folder , name ) for name in os . listdir ( folder ) if name . endswith ( ".pickle" ) ] default = - 1 for i , filename in enumerate ( files ) : if os . path . basename ( currently ) == os . path . basename ( filename ) :...
def heirarchical_matched_filter_and_cluster ( self , segnum , template_norm , window ) : """Returns the complex snr timeseries , normalization of the complex snr , the correlation vector frequency series , the list of indices of the triggers , and the snr values at the trigger locations . Returns empty lists ...
from pycbc . fft . fftw_pruned import pruned_c2cifft , fft_transpose htilde = self . htilde stilde = self . segments [ segnum ] norm = ( 4.0 * stilde . delta_f ) / sqrt ( template_norm ) correlate ( htilde [ self . kmin_red : self . kmax_red ] , stilde [ self . kmin_red : self . kmax_red ] , self . corr_mem [ self . km...
def _create_connection ( self ) : """Create a connection . : return :"""
attempts = 0 while True : attempts += 1 if self . _stopped . is_set ( ) : break try : self . _connection = Connection ( self . hostname , self . username , self . password ) break except amqpstorm . AMQPError as why : LOGGER . warning ( why ) if self . max_retries...
def _mom ( self , key , left , right , cache ) : """Statistical moments . Example : > > > print ( numpy . around ( chaospy . Uniform ( ) . mom ( [ 0 , 1 , 2 , 3 ] ) , 4 ) ) [1 . 0.5 0.3333 0.25 ] > > > print ( numpy . around ( Mul ( chaospy . Uniform ( ) , 2 ) . mom ( [ 0 , 1 , 2 , 3 ] ) , 4 ) ) [1 . 1 . ...
if evaluation . get_dependencies ( left , right ) : raise evaluation . DependencyError ( "sum of dependent distributions not feasible: " "{} and {}" . format ( left , right ) ) if isinstance ( left , Dist ) : left = evaluation . evaluate_moment ( left , key , cache = cache ) else : left = ( numpy . array ( ...
def extend(self, iterable):
    """Extend the right side of this GeventDeque by appending the
    elements of *iterable*."""
    self._deque.extend(iterable)
    # The iterable may have been empty; only signal waiters when the
    # deque actually holds items.
    if self._deque:
        self.notEmpty.set()
def volumes(self):
    """Gets the Volumes API client.

    Returns:
        Volumes: lazily created and cached on first access.
    """
    if self.__volumes:
        return self.__volumes
    self.__volumes = Volumes(self.__connection)
    return self.__volumes
def from_tuples(cls, tuples):
    """Create a graph from an iterable of (source, target, sign) tuples
    describing edges.

    Parameters
    ----------
    tuples : iterable[(str, str, int)]
        Tuples describing signed and directed edges

    Returns
    -------
    caspo.core.graph.Graph
        Created object instance
    """
    # Python 3 fix: the original used ``it.imap`` with a
    # tuple-unpacking lambda, both Python-2-only constructs; a
    # generator expression is equivalent and lazy on both versions.
    return cls((source, target, {'sign': sign})
               for source, target, sign in tuples)
def get_additions_status(self, level):
    """Return whether the given Guest Additions run level is active.

    :param level: status level to check, an
        :class:`AdditionsRunLevelType` instance.
    :return: bool flag, True when the level has been reached.
    :raises TypeError: if *level* has the wrong type.
    """
    if not isinstance(level, AdditionsRunLevelType):
        raise TypeError("level can only be an instance of type AdditionsRunLevelType")
    return self._call("getAdditionsStatus", in_p=[level])
def csv(self, output):
    """Write *output* rows to ``self.outfile`` as Excel-compatible CSV."""
    import csv
    writer = csv.writer(self.outfile)
    writer.writerows(output)
def _local_map ( match , loc : str = 'lr' ) -> list : """: param match : : param loc : str " l " or " r " or " lr " turns on / off left / right local area calculation : return : list list of the same size as the string + 2 it ' s the local map that counted { and } list can contain : None or int > = 0 ...
s = match . string map_ = [ None ] * ( len ( s ) + 2 ) if loc == 'l' or loc == 'lr' : balance = 0 for i in reversed ( range ( 0 , match . start ( ) ) ) : map_ [ i ] = balance c , prev = s [ i ] , ( s [ i - 1 ] if i > 0 else '' ) if ( c == '}' or c == '˲' ) and prev != '\\' : ...
def disconnect(self, abandon_session=False):
    """Disconnect from the Responsys soap service.

    Calls the service logout method and destroys the client's session
    information.  Logout failures are logged and absorbed so the local
    session is torn down regardless.

    :param abandon_session: force logout/teardown even if the session
        has not expired.
    :return: True.  NOTE(review): the original docstring promised
        "False otherwise", but no code path returns False.
    """
    self.connected = False
    if (self.session and self.session.is_expired) or abandon_session:
        try:
            self.logout()
        except Exception:
            # Bug fix: the bare ``except:`` also swallowed SystemExit
            # and KeyboardInterrupt; a best-effort logout should only
            # absorb ordinary errors.
            log.warning('Logout call to responsys failed, session may have not been terminated', exc_info=True)
        del self.session
    return True
def libvlc_audio_set_mute(p_mi, status):
    '''Set mute status.
    @param p_mi: media player.
    @param status: If status is true then mute, otherwise unmute.
    @warning: This function does not always work. If there is no active
    audio playback stream, the mute status might not be available.
    '''
    # Lazily resolve and cache the ctypes binding for the native
    # libvlc_audio_set_mute(MediaPlayer, int) entry point; the ((1,), (1,))
    # tuple marks both parameters as plain inputs.
    f = _Cfunctions.get('libvlc_audio_set_mute', None) or \
        _Cfunction('libvlc_audio_set_mute', ((1,), (1,),), None,
                   None, MediaPlayer, ctypes.c_int)
    return f(p_mi, status)
def backup():
    """Zip databases into db_backups_dir and upload to
    bucket_name/s3_folder.

    fab -f ./fabfile.py backup_dbs
    """
    opts = parser.parse_args()
    s3_backup_dir(opts.datadir,
                  opts.aws_access_key_id,
                  opts.aws_secret_access_key,
                  opts.bucket_name,
                  opts.zip_backups_dir,
                  opts.backup_aging_time,
                  opts.s3_folder,
                  opts.project)
def filter_dict(self, query, **kwargs):
    '''Filter for :func:`~ommongo.fields.mapping.DictField`.

    **Examples**: ``query.filter_dict({"User.Fullname": "Oji"})``
    '''
    # Validate that the root segment of every dotted key is a real field
    # on the mapped type before passing the raw query through.
    for key in query:
        root_field = key.split(".")[0]
        if not hasattr(self.type, root_field):
            raise FieldNotFoundException("Field not found %s" % (root_field))
    self.query_bypass(query, raw_output=False, **kwargs)
    return self
def _get_on_reboot(dom):
    '''Return the `on_reboot` setting from the named vm.

    CLI Example:

    .. code-block:: bash

        salt '*' virt.get_on_reboot <domain>
    '''
    root = ElementTree.fromstring(get_xml(dom))
    elem = root.find('on_reboot')
    # Missing element means no setting; report an empty string.
    if elem is None:
        return ''
    return elem.text
def read_image ( img_path , image_dims = None , mean = None ) : """Reads an image from file path or URL , optionally resizing to given image dimensions and subtracting mean . : param img _ path : path to file , or url to download : param image _ dims : image dimensions to resize to , or None : param mean : ...
import urllib filename = img_path . split ( "/" ) [ - 1 ] if img_path . startswith ( 'http' ) : urllib . urlretrieve ( img_path , filename ) img = cv2 . imread ( filename ) else : img = cv2 . imread ( img_path ) img = cv2 . cvtColor ( img , cv2 . COLOR_BGR2RGB ) if image_dims is not None : img = cv2 . r...
def summary(self):
    """Gets summary (e.g. residuals, deviance, pValues) of model on
    training set. An exception is thrown if `trainingSummary is None`.
    """
    # Guard clause: fail fast when no summary was produced during training.
    if not self.hasSummary:
        raise RuntimeError("No training summary available for this %s"
                           % self.__class__.__name__)
    java_summary = super(GeneralizedLinearRegressionModel, self).summary
    return GeneralizedLinearRegressionTrainingSummary(java_summary)
def count(self, Class, set=None, recursive=True, ignore=True):
    """See :meth:`AbstractElement.count`

    NOTE(review): the ``set`` and ``ignore`` parameters are accepted but
    not forwarded to ``select()`` (a literal ``True`` is passed instead);
    preserved as-is — confirm against AbstractElement.count.
    Implicitly returns None when ``self.mode`` is not ``Mode.MEMORY``.
    """
    if self.mode == Mode.MEMORY:
        # Total matches of Class across every element in memory.
        return sum(
            sum(1 for e in t.select(Class, recursive, True))
            for t in self.data
        )
def userKicked(self, kickee, channel, kicker, message):
    """Called when I see another user get kicked."""
    # Forward the kick event verbatim to the 'population' channel.
    event_args = (kickee, channel, kicker, message)
    self.dispatch('population', 'userKicked', *event_args)
def is_jar(path):
    """Check whether the given file is a JAR file.

    JARs are ZIP files which usually include a manifest at the canonical
    location 'META-INF/MANIFEST.MF'.
    """
    # Must be an existing regular file with a valid ZIP header.
    if not os.path.isfile(path) or not zipfile.is_zipfile(path):
        return False
    try:
        with zipfile.ZipFile(path) as archive:
            return "META-INF/MANIFEST.MF" in archive.namelist()
    except (IOError, zipfile.BadZipfile):
        # Unreadable or corrupt archive: treat as "not a JAR".
        return False
def status ( name , location = '\\' ) : r'''Determine the status of a task . Is it Running , Queued , Ready , etc . : param str name : The name of the task for which to return the status : param str location : A string value representing the location of the task . Default is ' \ \ ' which is the root for the ...
# Check for existing folder if name not in list_tasks ( location ) : return '{0} not found in {1}' . format ( name , location ) # connect to the task scheduler with salt . utils . winapi . Com ( ) : task_service = win32com . client . Dispatch ( "Schedule.Service" ) task_service . Connect ( ) # get the folder wh...
def read_http(self, length):
    """Read a Hypertext Transfer Protocol version 2 frame header.

    Structure of an HTTP/2 frame (the docstring originally cited
    RFC 7230; the frame layout shown matches HTTP/2, RFC 7540):

        | Length (24) |
        | Type (8) | Flags (8) |
        | R | Stream Identifier (31) |
        | Frame Payload (0...) ...

    NOTE(review): the parsed fields are bound to locals but nothing is
    returned in this excerpt — presumably parsing continues beyond what
    is visible here; confirm against the full source.
    """
    # 24-bit frame payload length.
    _plen = self._read_binary(3)
    # 8-bit frame type, read as an unpacked integer.
    _type = self._read_unpack(1)
    # 8-bit flags field.
    _flag = self._read_binary(1)
    # 32 bits: reserved bit + 31-bit stream identifier.
    _stid = self._read_binary(4)
def K(self):
    """Kernel matrix.

    Returns
    -------
    K : array-like, shape = [n_samples, n_samples]
        Kernel matrix defined as the adjacency matrix with ones down
        the diagonal.
    """
    # Lazily build and memoize the kernel on first access.
    if not hasattr(self, '_kernel'):
        self._kernel = self._build_kernel()
    return self._kernel
def response_data_to_model_instance(self, response_data):
    """Convert response data to a task type model.

    Args:
        response_data (dict): The data from the request's response.

    Returns:
        :class:`saltant.models.base_task_type.BaseTaskType`:
            A model instance representing the task type.
    """
    # Coerce the creation timestamp string into a datetime object.
    raw_created = response_data["datetime_created"]
    response_data["datetime_created"] = dateutil.parser.parse(raw_created)
    # Delegate the actual model construction to the parent manager.
    parent = super(BaseTaskTypeManager, self)
    return parent.response_data_to_model_instance(response_data)
def _stump ( f , * args , ** kwargs ) : """Worker for the common actions of all stump methods , aka the secret sauce . * Keyword parameters * - log : : integer - Specifies a custom level of logging to pass to the active logger . - Default : INFO - print _ time : : bool - Include timestamp in message ...
global LOGGER def aux ( * xs , ** kws ) : f_kws = kws . copy ( ) f_kws . update ( dict ( zip ( inspect . getfullargspec ( f ) . args , xs ) ) ) level = kwargs . get ( 'log' , logging . INFO ) post = kwargs . get ( 'postfix_only' , False ) pre = kwargs . get ( 'prefix_only' , False ) print_return...
def density(self, R, Rs, rho0):
    """Three-dimensional NFW profile.

    :param R: radius of interest
    :type R: float/numpy array
    :param Rs: scale radius
    :type Rs: float
    :param rho0: density normalization (characteristic density)
    :type rho0: float
    :return: rho(R) density
    """
    # Work in the dimensionless radius x = R / Rs.
    x = R / Rs
    return rho0 / (x * (1 + x) ** 2)
def _construct_state_machines ( self ) : """: return : dict in format < state _ machine _ common _ name : instance _ of _ the _ state _ machine >"""
state_machines = dict ( ) for state_machine in [ StateMachineRecomputing ( self . logger , self ) , StateMachineContinuous ( self . logger , self ) , StateMachineDiscrete ( self . logger , self ) , StateMachineFreerun ( self . logger ) ] : state_machines [ state_machine . name ] = state_machine return state_machine...
def _get_best_word_indices_for_kth_hypotheses ( ks : np . ndarray , all_hyp_indices : np . ndarray ) -> np . ndarray : """Traverses the matrix of best hypotheses indices collected during beam search in reversed order by using the kth hypotheses index as a backpointer . Returns an array containing the indices in...
batch_size = ks . shape [ 0 ] num_steps = all_hyp_indices . shape [ 1 ] result = np . zeros ( ( batch_size , num_steps - 1 ) , dtype = all_hyp_indices . dtype ) # first index into the history of the desired hypotheses . pointer = all_hyp_indices [ ks , - 1 ] # for each column / step follow the pointer , starting from t...
def get_leads ( self , * guids , ** options ) : """Supports all the search parameters in the API as well as python underscored variants"""
original_options = options options = self . camelcase_search_options ( options . copy ( ) ) params = { } for i in xrange ( len ( guids ) ) : params [ 'guids[%s]' % i ] = guids [ i ] for k in options . keys ( ) : if k in SEARCH_OPTIONS : params [ k ] = options [ k ] del options [ k ] leads = self...
def _meanvalueattr ( self , v ) : """find new position of vertex v according to adjacency in prevlayer . position is given by the mean value of adjacent positions . experiments show that meanvalue heuristic performs better than median ."""
sug = self . layout if not self . prevlayer ( ) : return sug . grx [ v ] . bar bars = [ sug . grx [ x ] . bar for x in self . _neighbors ( v ) ] return sug . grx [ v ] . bar if len ( bars ) == 0 else float ( sum ( bars ) ) / len ( bars )
def leastsqbound ( func , x0 , bounds , args = ( ) , ** kw ) : """Constrained multivariant Levenberg - Marquard optimization Minimize the sum of squares of a given function using the Levenberg - Marquard algorithm . Contraints on parameters are inforced using variable transformations as described in the MINUI...
# check for full output if "full_output" in kw and kw [ "full_output" ] : full = True else : full = False # convert x0 to internal variables i0 = external2internal ( x0 , bounds ) # perfrom unconstrained optimization using internal variables r = leastsq ( err , i0 , args = ( bounds , func , args ) , ** kw ) # u...
def run ( self ) : "sets up the desired services and runs the requested action"
self . addServices ( ) self . catalogServers ( self . hendrix ) action = self . action fd = self . options [ 'fd' ] if action . startswith ( 'start' ) : chalk . blue ( self . _listening_message ( ) ) getattr ( self , action ) ( fd ) # annnnd run the reactor ! # try : self . reactor . run ( ) ...
def get_books_by_comment ( self , * args , ** kwargs ) : """Pass through to provider CommentBookSession . get _ books _ by _ comment"""
# Implemented from kitosid template for - # osid . resource . ResourceBinSession . get _ bins _ by _ resource catalogs = self . _get_provider_session ( 'comment_book_session' ) . get_books_by_comment ( * args , ** kwargs ) cat_list = [ ] for cat in catalogs : cat_list . append ( Book ( self . _provider_manager , ca...
def calc_gamma_components(Data_ref, Data):
    """Calculate the components of Gamma (Gamma0 and delta_Gamma),
    assuming Data_ref is uncooled data (ideally at 3 mbar for best
    fitting).

    Uses the fact that A_prime = A / Gamma0 should be constant for a
    particular particle under changes in conditions.

    :return: tuple (Gamma0, delta_Gamma)
    """
    # A' = A / Gamma, constant per particle, estimated from the
    # reference (uncooled) dataset.
    normalized_amplitude = Data_ref.A / Data_ref.Gamma
    Gamma0 = Data.A / normalized_amplitude
    delta_Gamma = Data.Gamma - Gamma0
    return Gamma0, delta_Gamma
def configure_update ( self , ns , definition ) : """Register an update endpoint . The definition ' s func should be an update function , which must : - accept kwargs for the request and path data - return an updated item : param ns : the namespace : param definition : the endpoint definition"""
@ self . add_route ( ns . instance_path , Operation . Update , ns ) @ request ( definition . request_schema ) @ response ( definition . response_schema ) @ wraps ( definition . func ) def update ( ** path_data ) : headers = dict ( ) # NB : using partial here means that marshmallow will not validate required fie...
def delete_store_credit_payment_by_id(cls, store_credit_payment_id, **kwargs):
    """Delete StoreCreditPayment.

    Delete an instance of StoreCreditPayment by its ID.  Synchronous by
    default; pass ``async=True`` (via kwargs) for an asynchronous
    request, in which case the helper returns a thread object instead
    of the response data.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths invoked the identical helper with
    # identical arguments, so the branch collapses to a single call:
    # the helper itself returns a thread when kwargs['async'] is truthy
    # and the response data otherwise.
    return cls._delete_store_credit_payment_by_id_with_http_info(
        store_credit_payment_id, **kwargs)
def _make_info ( identifier , verbose ) : """Generates the script for displaying compile - time info ."""
module , method = identifier . split ( "." ) redirect = "| tee -a" if verbose else " >>" return """ info: echo -e "\\nCompile time:" > $(LOG) date >> $(LOG) echo "------------------------------------------------------"{2} $(LOG) echo " FORTPY" {2} $(LOG) echo " ...
def QueryValueEx ( key , value_name ) : """This calls the Windows QueryValueEx function in a Unicode safe way ."""
regqueryvalueex = advapi32 [ "RegQueryValueExW" ] regqueryvalueex . restype = ctypes . c_long regqueryvalueex . argtypes = [ ctypes . c_void_p , ctypes . c_wchar_p , LPDWORD , LPDWORD , LPBYTE , LPDWORD ] size = 256 data_type = ctypes . wintypes . DWORD ( ) while True : tmp_size = ctypes . wintypes . DWORD ( size )...
def pyc2py(filename):
    """Find the corresponding .py name given a .pyc or .pyo path.

    Names not ending in .pyc/.pyo are returned unchanged.
    """
    # Leave anything that is not compiled bytecode alone.
    if not re.match(".*py[co]$", filename):
        return filename
    if PYTHON3:
        # Map  pkg/__pycache__/mod.cpython-XY.pyc  ->  pkg/mod.py
        pattern = r'(.*)__pycache__/(.+)\.cpython-%s.py[co]$' % PYVER
        return re.sub(pattern, '\\1\\2.py', filename)
    # Python 2: just strip the trailing 'c'/'o'.
    return filename[:-1]