signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def pickFilepath ( self ) : """Prompts the user to select a filepath from the system based on the current filepath mode ."""
mode = self . filepathMode ( ) filepath = '' filepaths = [ ] curr_dir = nativestring ( self . _filepathEdit . text ( ) ) if ( not curr_dir ) : curr_dir = QDir . currentPath ( ) if mode == XFilepathEdit . Mode . SaveFile : filepath = QFileDialog . getSaveFileName ( self , self . windowTitle ( ) , curr_dir , self...
def job_stats_enhanced ( job_id ) : """Get full job and step stats for job _ id"""
stats_dict = { } with os . popen ( 'bjobs -o "jobid run_time cpu_used queue slots stat exit_code start_time estimated_start_time finish_time delimiter=\'|\'" -noheader ' + str ( job_id ) ) as f : try : line = f . readline ( ) cols = line . split ( '|' ) stats_dict [ 'job_id' ] = cols [ 0 ]...
def decompressBWT ( inputDir , outputDir , numProcs , logger ) : '''This is called for taking a BWT and decompressing it back out to it ' s original form . While unusual to do , it ' s included in this package for completion purposes . @ param inputDir - the directory of the compressed BWT we plan on decompress...
# load it , force it to be a compressed bwt also msbwt = MultiStringBWT . CompressedMSBWT ( ) msbwt . loadMsbwt ( inputDir , logger ) # make the output file outputFile = np . lib . format . open_memmap ( outputDir + '/msbwt.npy' , 'w+' , '<u1' , ( msbwt . getTotalSize ( ) , ) ) del outputFile worksize = 1000000 tups = ...
def print_tree ( self , ast_obj = None ) : """Convert AST object to tree view of BEL AST Returns : prints tree of BEL AST to STDOUT"""
if not ast_obj : ast_obj = self if hasattr ( self , "bel_subject" ) : print ( "Subject:" ) self . bel_subject . print_tree ( self . bel_subject , indent = 0 ) if hasattr ( self , "bel_relation" ) : print ( "Relation:" , self . bel_relation ) if hasattr ( self , "bel_object" ) : if self . bel_object ...
def send_message ( message : str , subject : str , recip : list , recip_email : list , html_message : str = None ) : """Sends message to specified value . Source : Himanshu Shankar ( https : / / github . com / iamhssingh ) Parameters message : str Message that is to be sent to user . subject : str Subje...
import smtplib from django . conf import settings from django . core . mail import send_mail from sendsms import api sent = { 'success' : False , 'message' : None } if not getattr ( settings , 'EMAIL_HOST' , None ) : raise ValueError ( 'EMAIL_HOST must be defined in django ' 'setting for sending mail.' ) if not get...
def add_items_to_tree_iter ( self , input_dict , treeiter , parent_dict_path = None ) : """Adds all values of the input dict to self . tree _ store : param input _ dict : The input dictionary holds all values , which are going to be added . : param treeiter : The pointer inside the tree store to add the input d...
if parent_dict_path is None : parent_dict_path = [ ] self . get_view_selection ( ) for key , value in sorted ( input_dict . items ( ) ) : element_dict_path = copy . copy ( parent_dict_path ) + [ key ] if isinstance ( value , dict ) : new_iter = self . tree_store . append ( treeiter , [ key , "" , Tr...
def identifier_list_cmp(a, b):
    """Compare two identifier lists (pre-release / build components).

    Identifiers are paired positionally and compared from left to
    right; when every paired identifier matches, the longer list is
    the greater one (e.g. ``alpha1.3 < alpha1.3.1``).
    """
    for left, right in zip(a, b):
        result = identifier_cmp(left, right)
        if result:
            return result
    # All shared identifiers are equal: fall back to comparing lengths.
    return base_cmp(len(a), len(b))
def slice ( self , start , stop = None , axis = 0 ) : """Restrict histogram to bins whose data values ( not bin numbers ) along axis are between start and stop ( both inclusive ) . Returns d dimensional histogram ."""
if stop is None : # Make a 1 = bin slice stop = start axis = self . get_axis_number ( axis ) start_bin = max ( 0 , self . get_axis_bin_index ( start , axis ) ) stop_bin = min ( len ( self . bin_centers ( axis ) ) - 1 , # TODO : test off by one ! self . get_axis_bin_index ( stop , axis ) ) new_bin_edges = self . bin...
def get_packet_type(cls, type_):
    """Map the IEEE 802.3 Length/Type field to a parser class.

    Per IEEE 802.3, field values less than or equal to 1500 decimal
    (0x05DC) mean *Length*, so they are normalized to
    ``ether.ETH_TYPE_IEEE802_3`` before looking up the parser.
    """
    if type_ > ether.ETH_TYPE_IEEE802_3:
        key = type_
    else:
        key = ether.ETH_TYPE_IEEE802_3
    return cls._TYPES.get(key)
def render_pulp_tag ( self ) : """Configure the pulp _ tag plugin ."""
if not self . dj . dock_json_has_plugin_conf ( 'postbuild_plugins' , 'pulp_tag' ) : return pulp_registry = self . spec . pulp_registry . value if pulp_registry : self . dj . dock_json_set_arg ( 'postbuild_plugins' , 'pulp_tag' , 'pulp_registry_name' , pulp_registry ) # Verify we have either a secret or user...
def get_config ( config_spec ) : """Like get _ json _ config but does not parse result as JSON"""
config_file = None if config_spec . startswith ( "http" ) : # URL : fetch it config_file = urllib . urlopen ( config_spec ) else : # string : open file with that name config_file = open ( config_spec ) config = json . load ( config_file ) # Close any open files try : config_file . close ( ) except : pas...
def p_const_vector_elem_list ( p ) : """const _ number _ list : expr"""
if p [ 1 ] is None : return if not is_static ( p [ 1 ] ) : if isinstance ( p [ 1 ] , symbols . UNARY ) : tmp = make_constexpr ( p . lineno ( 1 ) , p [ 1 ] ) else : api . errmsg . syntax_error_not_constant ( p . lexer . lineno ) p [ 0 ] = None return else : tmp = p [ 1 ] p...
def createSessionFile(self, file, verbose=None):
    """Save the current session to a file via the REST API.

    :param file: Session file location as an absolute path
    :param verbose: print more
    :returns: the API response (200: successful operation, with the
        session file location)
    """
    params = set_param(['file'], [file])
    return api(
        url=self.___url + 'session',
        PARAMS=params,
        method="POST",
        verbose=verbose,
    )
def _get_objects ( self , o_type ) : """Get an object list from the scheduler Returns None if the required object type ( ` o _ type ` ) is not known or an exception is raised . Else returns the objects list : param o _ type : searched object type : type o _ type : str : return : objects list : rtype : a...
if o_type not in [ t for t in self . app . sched . pushed_conf . types_creations ] : return None try : _ , _ , strclss , _ , _ = self . app . sched . pushed_conf . types_creations [ o_type ] o_list = getattr ( self . app . sched , strclss ) except Exception : # pylint : disable = broad - except return N...
def _vpc_config ( self ) : """Get VPC config ."""
if self . vpc_enabled : subnets = get_subnets ( env = self . env , region = self . region , purpose = 'internal' ) [ 'subnet_ids' ] [ self . region ] security_groups = self . _get_sg_ids ( ) vpc_config = { 'SubnetIds' : subnets , 'SecurityGroupIds' : security_groups } else : vpc_config = { 'SubnetIds' :...
def _get_dep_to_dot_name_mapping ( dependencies ) : """Creates mapping between Dependency classes and names used in DOT graph"""
dot_name_to_deps = { } for dep in dependencies : dot_name = dep . name if dot_name not in dot_name_to_deps : dot_name_to_deps [ dot_name ] = [ dep ] else : dot_name_to_deps [ dot_name ] . append ( dep ) dep_to_dot_name = { } for dot_name , deps in dot_name_to_deps . items ( ) : if len ( ...
def get_colormap ( cls , names = [ ] , N = 10 , * args , ** kwargs ) : """Open a : class : ` ColormapDialog ` and get a colormap Parameters % ( ColormapModel . parameters ) s Other Parameters ` ` * args , * * kwargs ` ` Anything else that is passed to the ColormapDialog Returns str or matplotlib . col...
names = safe_list ( names ) obj = cls ( names , N , * args , ** kwargs ) vbox = obj . layout ( ) buttons = QDialogButtonBox ( QDialogButtonBox . Ok | QDialogButtonBox . Cancel , parent = obj ) buttons . button ( QDialogButtonBox . Ok ) . setEnabled ( False ) vbox . addWidget ( buttons ) buttons . accepted . connect ( o...
def transition(self, duration, brightness=None):
    """Transition wrapper; short-circuits the transition when possible.

    :param duration: Duration of transition.
    :param brightness: Transition to this brightness.
    """
    if not duration:
        # Instant change: apply the brightness (if given) and skip the fade.
        if brightness is not None:
            self.brightness = brightness
    elif brightness != self.brightness:
        self._transition(duration, brightness)
def start_cluster_server ( ctx , num_gpus = 1 , rdma = False ) : """Function that wraps the creation of TensorFlow ` ` tf . train . Server ` ` for a node in a distributed TensorFlow cluster . This is intended to be invoked from within the TF ` ` map _ fun ` ` , replacing explicit code to instantiate ` ` tf . trai...
import tensorflow as tf from . import gpu_info logging . info ( "{0}: ======== {1}:{2} ========" . format ( ctx . worker_num , ctx . job_name , ctx . task_index ) ) cluster_spec = ctx . cluster_spec logging . info ( "{0}: Cluster spec: {1}" . format ( ctx . worker_num , cluster_spec ) ) if tf . test . is_built_with_cud...
def table_to_source_list ( table , src_type = OutputSource ) : """Convert a table of data into a list of sources . A single table must have consistent source types given by src _ type . src _ type should be one of : class : ` AegeanTools . models . OutputSource ` , : class : ` AegeanTools . models . SimpleSourc...
source_list = [ ] if table is None : return source_list for row in table : # Initialise our object src = src_type ( ) # look for the columns required by our source object for param in src_type . names : if param in table . colnames : # copy the value to our object val = row [ param ]...
def Chemistry(self):
    """Read the cells' chemistry bytes from the device.

    Register 0x79 holds the byte count; the payload bytes start at
    register 0x7A.
    """
    read = self.bus.read_byte_data
    count = read(self.address, 0x79)
    return [read(self.address, 0x7A + offset) for offset in range(count)]
def _traverse ( element , condition = None ) : """Traversal API intended for debugging ."""
if condition is None or condition ( element ) : yield element if isinstance ( element , DictElement ) : for child in element . values ( ) : for _ in BaseElement . _traverse ( child , condition ) : yield _ elif isinstance ( element , ListElement ) : for child in element : for _ in...
def main ( args ) : """API with args object containing configuration parameters"""
global logging , log args = parse_args ( args ) logging . basicConfig ( format = LOG_FORMAT , level = logging . DEBUG if args . verbose else logging . INFO , stream = sys . stdout ) df = cat_tweets ( path = args . path , verbosity = args . verbose + 1 , numtweets = args . numtweets , ignore_suspicious = False ) log . i...
def set_column_width(self, n=0, width=120):
    """Set the n'th column width in pixels.

    Returns self so calls can be chained.
    """
    widget = self._widget
    widget.setColumnWidth(n, width)
    return self
def filter_headers(data):
    """Keep only the headers that take part in request signing.

    Only ``Host``, ``Content-Type`` and headers whose names start with
    ``x``/``X`` are kept.

    :param data (dict): all request headers.
    :return (dict): the headers that are folded into the signature.
    """
    return {
        name: value
        for name, value in data.items()
        # startswith() also copes with an empty header name, which used
        # to crash the old ``name[0]`` check with an IndexError.
        if name in ('Content-Type', 'Host') or name.startswith(('x', 'X'))
    }
def parse_connection_string ( self , connection ) : """Parse string as returned by the ` ` connected _ users _ info ` ` or ` ` user _ sessions _ info ` ` API calls . > > > EjabberdBackendBase ( ) . parse _ connection _ string ( ' c2s _ tls ' ) (0 , True , False ) > > > EjabberdBackendBase ( ) . parse _ connec...
# TODO : Websockets , HTTP Polling if connection == 'c2s_tls' : return CONNECTION_XMPP , True , False elif connection == 'c2s_compressed_tls' : return CONNECTION_XMPP , True , True elif connection == 'http_bind' : return CONNECTION_HTTP_BINDING , None , None elif connection == 'c2s' : return CONNECTION_...
def setLineEdit(self, lineEdit):
    """Set the line edit instance for this label.

    :param lineEdit: <XLineEdit>
    """
    self._lineEdit = lineEdit
    if not lineEdit:
        return
    # Mirror the label's look, watch its events, and start hidden.
    lineEdit.setFont(self.font())
    lineEdit.installEventFilter(self)
    lineEdit.resize(self.size())
    lineEdit.hide()
def del_node ( self , name ) : '''API : del _ node ( self , name ) Description : Removes node from Graph . Input : name : Name of the node . Pre : Graph should contain a node with this name . Post : self . neighbors , self . nodes and self . in _ neighbors are updated .'''
if name not in self . neighbors : raise Exception ( 'Node %s does not exist!' % str ( name ) ) for n in self . neighbors [ name ] : del self . edge_attr [ ( name , n ) ] if self . graph_type == UNDIRECTED_GRAPH : self . neighbors [ n ] . remove ( name ) else : self . in_neighbors [ n ] ....
def search_registered_query_deleted_entities(self, **kwargs):  # noqa: E501
    """Search over a customer's deleted derived metric definitions.  # noqa: E501

    Synchronous by default; pass ``async_req=True`` to get a request
    thread back instead of the response data.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the same helper; with
    # async_req set it returns a request thread rather than the data.
    return self.search_registered_query_deleted_entities_with_http_info(**kwargs)  # noqa: E501
def is_archived ( self , experiment , ignore_missing = True ) : """Convenience function to determine whether the given experiment has been archived already Parameters experiment : str The experiment to check Returns str or None The path to the archive if it has been archived , otherwise None"""
if ignore_missing : if isinstance ( self . config . experiments . get ( experiment , True ) , Archive ) : return self . config . experiments . get ( experiment , True ) else : if isinstance ( self . config . experiments [ experiment ] , Archive ) : return self . config . experiments [ experiment...
def _read_console_output ( self , ws , out ) : """Read Websocket and forward it to the telnet : param ws : Websocket connection : param out : Output stream"""
while True : msg = yield from ws . receive ( ) if msg . tp == aiohttp . WSMsgType . text : out . feed_data ( msg . data . encode ( ) ) elif msg . tp == aiohttp . WSMsgType . BINARY : out . feed_data ( msg . data ) elif msg . tp == aiohttp . WSMsgType . ERROR : log . critical ( "D...
def serialize(self, method="urlencoded", lev=0, **kwargs):
    """Convert this instance to another representation.

    :param method: A serialization method. Presently 'urlencoded',
        'json', 'jwt' and 'dict' are supported.
    :return: whatever the matching ``to_<method>`` method produces.
    """
    serializer = getattr(self, "to_%s" % method)
    return serializer(lev=lev, **kwargs)
def send_reset_password_email ( person ) : """Sends an email to user allowing them to set their password ."""
uid = urlsafe_base64_encode ( force_bytes ( person . pk ) ) . decode ( "ascii" ) token = default_token_generator . make_token ( person ) url = '%s/persons/reset/%s/%s/' % ( settings . REGISTRATION_BASE_URL , uid , token ) context = CONTEXT . copy ( ) context . update ( { 'url' : url , 'receiver' : person , } ) to_email...
def value(self):
    """Value of a reference property: the referenced Part objects.

    Resolves (and caches) the parts behind the stored id dicts on
    first access.

    :return: a list of parts, or None when no reference is set.
    """
    if not self._value:
        return None
    if isinstance(self._value, (list, tuple)) and not self._cached_values:
        part_ids = ','.join(entry.get('id') for entry in self._value)
        self._cached_values = list(
            self._client.parts(id__in=part_ids, category=None)
        )
    return self._cached_values
def get_url_reports ( self , resources ) : """Retrieves a scan report on a given URL . Args : resources : list of URLs . Returns : A dict with the URL as key and the VT report as value ."""
api_name = 'virustotal-url-reports' ( all_responses , resources ) = self . _bulk_cache_lookup ( api_name , resources ) resource_chunks = self . _prepare_resource_chunks ( resources , '\n' ) response_chunks = self . _request_reports ( "resource" , resource_chunks , 'url/report' ) self . _extract_response_chunks ( all_re...
def argument(*args, **kwargs):
    """Decorator to define an argparse option or argument.

    The arguments to this decorator are the same as those of the
    ``ArgumentParser.add_argument`` method.
    """
    def decorator(func):
        # Lazily create the per-function registries on first use.
        if not hasattr(func, '_arguments'):
            func._arguments = []
        if not hasattr(func, '_argnames'):
            func._argnames = []
        func._arguments.append((args, kwargs))
        func._argnames.append(_get_dest(*args, **kwargs))
        return func
    return decorator
def trim ( self : 'Variable' , lower = None , upper = None ) -> None : """Trim the value ( s ) of a | Variable | instance . Usually , users do not need to apply function | trim | directly . Instead , some | Variable | subclasses implement their own ` trim ` methods relying on function | trim | . Model develop...
if hydpy . pub . options . trimvariables : if lower is None : lower = self . SPAN [ 0 ] if upper is None : upper = self . SPAN [ 1 ] type_ = getattr ( self , 'TYPE' , float ) if type_ is float : if self . NDIM == 0 : _trim_float_0d ( self , lower , upper ) els...
def clear_texts(self):
    """Reset the 'texts' map entry to its default value.

    :raise NoAccess: when the texts metadata is read-only
    """
    metadata = self.get_texts_metadata()
    if metadata.is_read_only():
        raise NoAccess()
    default = self._texts_metadata['default_object_values'][0]
    self.my_osid_object_form._my_map['texts'] = default
def in_domain(self, points):
    """Return ``True`` iff every component lies in its sub-domain.

    :param np.ndarray points: An `np.ndarray` of type `self.dtype`.
    :rtype: `bool`
    """
    arrays = separate_struct_array(points, self._dtypes)
    return all(
        domain.in_domain(component)
        for domain, component in zip(self._domains, arrays)
    )
def enable_global_auto_override_decorator(flag=True, retrospective=True):
    """Enable or disable global auto_override mode via decorators.

    In contrast to setting the flag directly, this function provides a
    retrospective option: already-loaded code is caught up when
    ``retrospective`` is True.
    """
    global global_auto_override_decorator
    global_auto_override_decorator = flag
    if import_hook_enabled:
        _install_import_hook()
    # Catch up on code imported before the mode was switched on.
    if retrospective and global_auto_override_decorator:
        _catch_up_global_auto_override_decorator()
    return global_auto_override_decorator
def ccmod_ystep():
    """Do the Y step of the ccmod stage.

    There are no parameters or return values: all inputs and outputs
    are global shared arrays.
    """
    averaged = np.mean(mp_D_X + mp_D_U, axis=0)
    # Assign through the slice so the shared buffer is updated in place.
    mp_D_Y[:] = mp_dprox(averaged)
def _paragraph ( self , sentences ) : """Generate a paragraph"""
paragraph = [ ] for i in range ( sentences ) : sentence = self . _sentence ( random . randint ( 5 , 16 ) ) paragraph . append ( sentence ) return ' ' . join ( paragraph )
def get_core(self):
    """Get an unsatisfiable core if the formula was previously unsatisfied."""
    # A core only exists when a solver is attached and the last solve
    # call came back exactly False (unsatisfiable).
    if not self.maplesat or self.status != False:
        return None
    return pysolvers.maplechrono_core(self.maplesat)
def build_select(query_obj):
    """Given a Query object, return the corresponding SQL."""
    options = dict(
        skip=query_obj.skip,
        limit=query_obj.limit,
        sort=query_obj.sort,
        distinct=query_obj.distinct,
    )
    return build_select_query(
        query_obj.source, query_obj.fields, query_obj.filter, **options
    )
def show_domain ( self , domain_id ) : """This method returns the specified domain . Required parameters domain _ id : Integer or Domain Name ( e . g . domain . com ) , specifies the domain to display ."""
json = self . request ( '/domains/%s' % domain_id , method = 'GET' ) status = json . get ( 'status' ) if status == 'OK' : domain_json = json . get ( 'domain' ) domain = Domain . from_json ( domain_json ) return domain else : message = json . get ( 'message' ) raise DOPException ( '[%s]: %s' % ( stat...
def read_price_data ( files , name_func = None ) : """Convenience function for reading in pricing data from csv files Parameters files : list List of strings refering to csv files to read data in from , first column should be dates name _ func : func A function to apply to the file strings to infer the ...
if name_func is None : def name_func ( x ) : return os . path . split ( x ) [ 1 ] . split ( "." ) [ 0 ] dfs = [ ] for f in files : name = name_func ( f ) df = pd . read_csv ( f , index_col = 0 , parse_dates = True ) df . sort_index ( inplace = True ) df . index = pd . MultiIndex . from_produ...
def get_prefix_dir(archive):
    """Determine a prefix shared by every member of the archive.

    Often all files sit in a single directory, in which case they all
    share that directory as a prefix. ``archive`` is a ZipFile.
    """
    names = archive.namelist()
    shortest = min(names, key=len)
    # Try the longest candidate first; the empty prefix (length 0)
    # matches everything, so the loop always returns.
    for size in range(len(shortest), -1, -1):
        prefix = shortest[:size]
        if all(name.startswith(prefix) for name in names):
            return prefix
def mapper_from_prior_arguments(self, arguments):
    """Create a new model mapper with priors remapped via ``arguments``.

    :param arguments: {Prior: Prior} dictionary mapping existing
        priors to new priors.
    :return: a deep copy of this mapper with updated prior models.
    """
    mapper = copy.deepcopy(self)
    for entry in self.prior_model_tuples:
        updated = entry.prior_model.gaussian_prior_model_for_arguments(arguments)
        setattr(mapper, entry.name, updated)
    return mapper
def from_conll(this_class, stream):
    """Construct a Sentence from lines in CoNLL-X format.

    ``stream`` is an iterable over strings, each a CoNLL-X line. If
    the stream holds multiple sentences, only the first is returned.
    """
    sentence = this_class()
    for raw in iter(stream):
        stripped = raw.strip()
        if stripped:
            sentence.append(Token.from_conll(stripped))
        elif sentence:
            # A blank line after at least one token ends the sentence.
            break
    return sentence
def lu_solve(LU, b):
    r"""Solve :math:`\mathrm A \mathbf x = \mathbf b` given the LU
    factorization of :math:`\mathrm A`.

    Args:
        LU (tuple): LU decomposition as ``(lu, piv)``.
        b (array_like): Right-hand side.

    Returns:
        The solution ``x``.
    """
    from scipy.linalg import lu_solve as sp_lu_solve

    lu, piv = LU
    factorization = (asarray(lu, float), asarray(piv, float))
    rhs = asarray(b, float)
    return sp_lu_solve(factorization, rhs, check_finite=False)
def sort_by ( self , * ids ) : """Update files order . : param ids : List of ids specifying the final status of the list ."""
# Support sorting by file _ ids or keys . files = { str ( f_ . file_id ) : f_ . key for f_ in self } # self . record [ ' _ files ' ] = [ { ' key ' : files . get ( id _ , id _ ) } for id _ in ids ] self . filesmap = OrderedDict ( [ ( files . get ( id_ , id_ ) , self [ files . get ( id_ , id_ ) ] . dumps ( ) ) for id_ in...
def l_constraint ( model , name , constraints , * args ) : """A replacement for pyomo ' s Constraint that quickly builds linear constraints . Instead of model . name = Constraint ( index1 , index2 , . . . , rule = f ) call instead l _ constraint ( model , name , constraints , index1 , index2 , . . . ) w...
setattr ( model , name , Constraint ( * args , noruleinit = True ) ) v = getattr ( model , name ) for i in v . _index : c = constraints [ i ] if type ( c ) is LConstraint : variables = c . lhs . variables + [ ( - item [ 0 ] , item [ 1 ] ) for item in c . rhs . variables ] sense = c . sense ...
def stream(self, device_sid=values.unset, limit=None, page_size=None):
    """Stream KeyInstance records from the API as a generator stream.

    Records are loaded lazily, page by page, until ``limit`` is
    reached, keeping memory usage efficient.
    """
    limits = self._version.read_limits(limit, page_size)
    first_page = self.page(
        device_sid=device_sid,
        page_size=limits['page_size'],
    )
    return self._version.stream(first_page, limits['limit'], limits['page_limit'])
def dot_product_single_head ( q , k , v , gates_q , gates_k , bi ) : """Perform a dot product attention on a single sequence on a single head . This function dispatch the q , k , v and loop over the buckets to compute the attention dot product on each subsequences . Args : q ( tf . Tensor ) : [ length _ q ,...
nb_buckets = gates_q . get_shape ( ) . as_list ( ) [ - 1 ] q_dispatcher = expert_utils . SparseDispatcher ( nb_buckets , gates_q ) k_dispatcher = expert_utils . SparseDispatcher ( nb_buckets , gates_k ) def eventually_dispatch ( dispatcher , value ) : if value is not None : return dispatcher . dispatch ( va...
def compute_n_splits ( cv , X , y = None , groups = None ) : """Return the number of splits . Parameters cv : BaseCrossValidator X , y , groups : array _ like , dask object , or None Returns n _ splits : int"""
if not any ( is_dask_collection ( i ) for i in ( X , y , groups ) ) : return cv . get_n_splits ( X , y , groups ) if isinstance ( cv , ( _BaseKFold , BaseShuffleSplit ) ) : return cv . n_splits elif isinstance ( cv , PredefinedSplit ) : return len ( cv . unique_folds ) elif isinstance ( cv , _CVIterableWrap...
def local_open ( url ) : """Read a local path , with special support for directories"""
scheme , server , path , param , query , frag = urllib . parse . urlparse ( url ) filename = urllib . request . url2pathname ( path ) if os . path . isfile ( filename ) : return urllib . request . urlopen ( url ) elif path . endswith ( '/' ) and os . path . isdir ( filename ) : files = [ ] for f in os . lis...
def bresenham_circle_octant ( radius ) : """Uses Bresenham ' s algorithm to draw a single octant of a circle with thickness 1, centered on the origin and with the given radius . : param radius : The radius of the circle to draw : return : A list of integer coordinates representing pixels . Starts at ( radiu...
x , y = radius , 0 r2 = radius * radius coords = [ ] while x >= y : coords . append ( ( x , y ) ) y += 1 if abs ( ( x - 1 ) * ( x - 1 ) + y * y - r2 ) < abs ( x * x + y * y - r2 ) : x -= 1 # add a point on the line x = y at the end if it ' s not already there . if coords [ - 1 ] [ 0 ] != coords [ - ...
def CopyToDateTimeString ( self ) : """Copies the FILETIME timestamp to a date and time string . Returns : str : date and time value formatted as : " YYYY - MM - DD hh : mm : ss . # # # # # " or None if the timestamp is missing or invalid ."""
if ( self . _timestamp is None or self . _timestamp < 0 or self . _timestamp > self . _UINT64_MAX ) : return None timestamp , remainder = divmod ( self . _timestamp , self . _100NS_PER_SECOND ) number_of_days , hours , minutes , seconds = self . _GetTimeValues ( timestamp ) year , month , day_of_month = self . _Get...
def reverse ( self , query , exactly_one = DEFAULT_SENTINEL , timeout = DEFAULT_SENTINEL , kind = None , ) : """Return an address by location point . : param query : The coordinates for which you wish to obtain the closest human - readable addresses . : type query : : class : ` geopy . point . Point ` , list ...
if exactly_one is DEFAULT_SENTINEL : warnings . warn ( '%s.reverse: default value for `exactly_one` ' 'argument will become True in geopy 2.0. ' 'Specify `exactly_one=False` as the argument ' 'explicitly to get rid of this warning.' % type ( self ) . __name__ , DeprecationWarning , stacklevel = 2 ) exactly_one ...
def get_locations_list(self, lower_bound=0, upper_bound=None):
    """Return a slice of the internal location list.

    Args:
        lower_bound: first index to include (default 0).
        upper_bound: one-past-last index; defaults to
            ``self.nbr_of_sub_locations()``.

    Returns:
        The selected sub-list, or an empty list when the slice cannot
        be taken (e.g. bounds are not valid indices).
    """
    if upper_bound is None:
        upper_bound = self.nbr_of_sub_locations()
    try:
        return self._locations_list[lower_bound:upper_bound]
    # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
    # are no longer swallowed; the best-effort empty-list fallback stays.
    except Exception:
        return []
def want_host_notification ( self , notifways , timeperiods , timestamp , state , n_type , business_impact , cmd = None ) : """Check if notification options match the state of the host : param timestamp : time we want to notify the contact ( usually now ) : type timestamp : int : param state : host or service...
if not self . host_notifications_enabled : return False # If we are in downtime , we do not want notification for downtime in self . downtimes : if downtime . is_in_effect : self . in_scheduled_downtime = True return False self . in_scheduled_downtime = False # Now it ' s all for sub notificatio...
def pitch(self):
    """Calculate the pitch of the quaternion, in radians."""
    numerator = 2 * self.x * self.w - 2 * self.y * self.z
    denominator = 1 - 2 * self.x * self.x - 2 * self.z * self.z
    return math.atan2(numerator, denominator)
def add_codes(err_cls):
    """Add error codes to string messages via class attribute names."""
    class _CodedErrors(object):
        def __getattribute__(self, code):
            # Look the message up on the wrapped class and prefix the
            # attribute name as its code.
            msg = getattr(err_cls, code)
            return '[{code}] {msg}'.format(code=code, msg=msg)
    return _CodedErrors()
def lines(self):
    """List of file lines, read lazily and cached on first access."""
    if self._lines is None:
        with io.open(self.path, 'r', encoding='utf-8') as handle:
            content = handle.read()
        self._lines = content.split('\n')
    return self._lines
def tcache ( parser , token ) : """This will cache the contents of a template fragment for a given amount of time with support tags . Usage : : { % tcache [ expire _ time ] [ fragment _ name ] [ tags = ' tag1 , tag2 ' ] % } . . some expensive processing . . { % endtcache % } This tag also supports varyi...
nodelist = parser . parse ( ( 'endtcache' , ) ) parser . delete_first_token ( ) tokens = token . split_contents ( ) if len ( tokens ) < 3 : raise template . TemplateSyntaxError ( "'%r' tag requires at least 2 arguments." % tokens [ 0 ] ) tags = None if len ( tokens ) > 3 and 'tags=' in tokens [ - 1 ] : tags = p...
def _bounds ( component , glyph_set ) : """Return the ( xmin , ymin ) of the bounds of ` component ` ."""
if hasattr ( component , "bounds" ) : # e . g . defcon return component . bounds [ : 2 ] elif hasattr ( component , "draw" ) : # e . g . ufoLib2 pen = fontTools . pens . boundsPen . BoundsPen ( glyphSet = glyph_set ) component . draw ( pen ) return pen . bounds [ : 2 ] else : raise ValueError ( "Don...
def calc_outuh_quh_v1(self):
    """Calculate the unit hydrograph output (convolution).

    Required derived parameters: |UH|
    Required flux sequences: |Q0| |Q1| |InUH|
    Updated log sequence: |QUH|
    Calculated flux sequence: |OutUH|
    """
    der = self.parameters.derived.fastaccess
    flu = self.sequences.fluxes.fastaccess
    log = self.sequences.logs.fastaccess
    # The first ordinate's share of the input leaves immediately,
    # together with what was already logged for this step.
    flu.outuh = der.uh[0] * flu.inuh + log.quh[0]
    # Shift the remaining logged values one step forward, each picking
    # up its ordinate's share of the current input (ascending order
    # matters: quh[idx] is read before it is overwritten).
    for idx in range(1, len(der.uh)):
        log.quh[idx - 1] = der.uh[idx] * flu.inuh + log.quh[idx]
def start_kex ( self ) : """Start the GSS - API / SSPI Authenticated Diffie - Hellman Key Exchange ."""
self . _generate_x ( ) if self . transport . server_mode : # compute f = g ^ x mod p , but don ' t send it yet self . f = pow ( self . G , self . x , self . P ) self . transport . _expect_packet ( MSG_KEXGSS_INIT ) return # compute e = g ^ x mod p ( where g = 2 ) , and send it self . e = pow ( self . G , se...
def getWindowByTitle ( self , wildcard , order = 0 ) : """Returns a handle for the first window that matches the provided " wildcard " regex"""
EnumWindowsProc = ctypes . WINFUNCTYPE ( ctypes . c_bool , ctypes . POINTER ( ctypes . c_int ) , ctypes . py_object ) def callback ( hwnd , context ) : if ctypes . windll . user32 . IsWindowVisible ( hwnd ) : length = ctypes . windll . user32 . GetWindowTextLengthW ( hwnd ) buff = ctypes . create_un...
def HuntIDToInt ( hunt_id ) : """Convert hunt id string to an integer ."""
# TODO ( user ) : This code is only needed for a brief period of time when we # allow running new rel - db flows with old aff4 - based hunts . In this scenario # parent _ hunt _ id is effectively not used , but it has to be an # integer . Stripping " H : " from hunt ids then makes the rel - db happy . Remove # this cod...
def _find_and_replace ( self , date_string , captures ) : """: warning : when multiple tz matches exist the last sorted capture will trump : param date _ string : : return : date _ string , tz _ string"""
# add timezones to replace cloned_replacements = copy . copy ( REPLACEMENTS ) # don ' t mutate for tz_string in captures . get ( "timezones" , [ ] ) : cloned_replacements . update ( { tz_string : " " } ) date_string = date_string . lower ( ) for key , replacement in cloned_replacements . items ( ) : # we really wan...
def stop_workers ( self , clean ) : """Stop workers and deferred events ."""
with executor_lock : self . executor . shutdown ( clean ) del self . executor with self . worker_lock : if clean : self . pool . close ( ) else : self . pool . terminate ( ) self . pool . join ( ) del self . pool for x in self . events . values ( ) : x . event . cance...
def Minus(self, other):
    """Return a new point which is the componentwise difference ``self - other``.

    Neither operand is modified.
    """
    dx = self.x - other.x
    dy = self.y - other.y
    dz = self.z - other.z
    return Point(dx, dy, dz)
def _handle_429 ( self , data ) : """Handle Lain being helpful"""
ex = IOError ( "Too fast" , data ) self . conn . reraise ( ex )
def _assemble_and_send_request ( self ) : """Fires off the Fedex request . @ warning : NEVER CALL THIS METHOD DIRECTLY . CALL send _ request ( ) , WHICH RESIDES ON FedexBaseService AND IS INHERITED ."""
# We get an exception like this when specifying an IntegratorId : # suds . TypeNotFound : Type not found : ' IntegratorId ' # Setting it to None does not seem to appease it . del self . ClientDetail . IntegratorId self . logger . debug ( self . WebAuthenticationDetail ) self . logger . debug ( self . ClientDetail ) sel...
def get_all_paths_from ( self , start , seen = None ) : '''Return a list of all paths to all nodes from a given start node'''
if seen is None : seen = frozenset ( ) results = [ ( 0 , ( start , ) ) ] if start in seen or start not in self . edges : return results seen = seen | frozenset ( ( start , ) ) for node , edge_weight in self . edges [ start ] . items ( ) : for subpath_weight , subpath in self . get_all_paths_from ( node , se...
def create_cache(name):
    """Create a cache by name. Defaults to `NaiveCache`.

    The name is looked up among the direct subclasses of ``Cache``
    (keyed by each subclass's ``name()``); unknown names fall back to
    ``NaiveCache``.
    """
    registry = {}
    for cls in Cache.__subclasses__():
        registry[cls.name()] = cls
    chosen = registry.get(name, NaiveCache)
    return chosen()
def set_lim(min, max, name):
    """Set the domain bounds of the scale associated with the provided key.

    Parameters
    ----------
    min, max
        New lower/upper domain bounds for the scale.
    name : hashable
        Any variable that can be used as a key for a dictionary.

    Raises
    ------
    KeyError
        When no context figure is associated with the provided key.
    """
    # NOTE: the parameter names shadow the builtins ``min``/``max`` but
    # are kept for backward compatibility with existing callers.
    dimension = _get_attribute_dimension(name)
    scale = _context['scales'][dimension]
    scale.min, scale.max = min, max
    return scale
def feedback(self):
    """Access the feedback.

    :returns: twilio.rest.api.v2010.account.message.feedback.FeedbackList
    :rtype: twilio.rest.api.v2010.account.message.feedback.FeedbackList
    """
    # Lazily build the FeedbackList once and memoize it on the instance.
    cached = self._feedback
    if cached is None:
        cached = FeedbackList(
            self._version,
            account_sid=self._solution['account_sid'],
            message_sid=self._solution['sid'],
        )
        self._feedback = cached
    return cached
def stop_trial(self, trial_id):
    """Requests to stop trial by trial_id.

    Issues a PUT to ``<path>/trials/<trial_id>`` and returns the
    deserialized server response.
    """
    endpoint = urljoin(self._path, "trials/{}".format(trial_id))
    return self._deserialize(requests.put(endpoint))
def get_input ( self ) : """Loads web input , initialise default values and check / sanitise some inputs from users"""
user_input = web . input ( user = [ ] , task = [ ] , aggregation = [ ] , org_tags = [ ] , grade_min = '' , grade_max = '' , sort_by = "submitted_on" , order = '0' , # "0 " for pymongo . DESCENDING , anything else for pymongo . ASCENDING limit = '' , filter_tags = [ ] , filter_tags_presence = [ ] , date_after = '' , dat...
def start(**kwargs):
    '''Start KodeDrive daemon.'''
    # The adapter returns (message, is_error); `err` makes click write
    # to stderr instead of stdout.
    result, is_error = cli_syncthing_adapter.start(**kwargs)
    click.echo("%s" % result, err=is_error)
def check_rules_dict ( rules ) : """Verify the ` rules ` that classes may use for the ` _ rules ` or ` _ binary _ rules ` class attribute . Specifically , ` rules ` must be a : class : ` ~ collections . OrderedDict ` - compatible object ( list of key - value tuples , : class : ` dict ` , : class : ` ~ col...
from qnet . algebra . pattern_matching import Pattern , ProtoExpr if hasattr ( rules , 'items' ) : items = rules . items ( ) # ` rules ` is already a dict / OrderedDict else : items = rules # ` rules ` is a list of ( key , value ) tuples keys = set ( ) for key_rule in items : try : key , rul...
def get_delivery_stats(api_key=None, secure=None, test=None, **request_args):
    '''Get delivery stats for your Postmark account.

    :param api_key: Your Postmark API key. Required, if `test` is not `True`.
    :param secure: Use the https scheme for the Postmark API. Defaults to `True`.
    :param test: Presumably selects the Postmark test API; forwarded as-is.
    :param request_args: Extra keyword arguments forwarded to the request.
    '''
    # Delegate to the module-level default delivery-stats endpoint wrapper.
    return _default_delivery_stats.get(
        api_key=api_key,
        secure=secure,
        test=test,
        **request_args
    )
def read_stream(self, stream_id, since_epoch):
    '''Get the datafeed for one stream.

    Calls the Messages API for messages in ``stream_id`` since
    ``since_epoch`` and returns ``(status_code, response)``.
    '''
    pending = self.__agent__.Messages.get_v4_stream_sid_message(
        sessionToken=self.__session__,
        keyManagerToken=self.__keymngr__,
        sid=stream_id,
        since=since_epoch,
    )
    response, status_code = pending.result()
    self.logger.debug('%s: %s' % (status_code, response))
    return status_code, response
def provision_product(AcceptLanguage=None, ProductId=None, ProvisioningArtifactId=None, PathId=None, ProvisionedProductName=None, ProvisioningParameters=None, Tags=None, NotificationArns=None, ProvisionToken=None):
    """Requests a Provision of a specified product.

    Documentation stub: the parameters mirror the Service Catalog
    ProvisionProduct API but are accepted and ignored; the function
    performs no work and returns ``None``.
    """
    # Intentionally a no-op (generated documentation stub).
    return None
def _seconds_have_elapsed(token, num_seconds):
    """Tests if 'num_seconds' have passed since 'token' was requested.

    Not strictly thread-safe - may log with the wrong frequency if called
    concurrently from multiple threads.  Accuracy depends on the
    resolution of 'timeit.default_timer()'.  Always returns True on the
    first call for a given token.
    """
    now = timeit.default_timer()
    last = _log_timer_per_token.get(token)
    # Guard clause: a known token whose interval has not yet elapsed.
    if last is not None and (now - last) < num_seconds:
        return False
    # First request for this token, or the interval has elapsed: record
    # the new timestamp and report that enough time has passed.
    _log_timer_per_token[token] = now
    return True
def node_received_infos ( node_id ) : """Get all the infos a node has been sent and has received . You must specify the node id in the url . You can also pass the info type ."""
exp = Experiment ( session ) # get the parameters info_type = request_parameter ( parameter = "info_type" , parameter_type = "known_class" , default = models . Info ) if type ( info_type ) == Response : return info_type # check the node exists node = models . Node . query . get ( node_id ) if node is None : ret...
def averageSize(self):
    """Calculate the average size of a mesh.

    This is the mean of the vertex distances from the center of mass,
    estimated on a subsample of at most roughly 10000 vertices.
    """
    center = self.centerOfMass()
    pts = self.coordinates(copy=False)
    n = len(pts)
    if not n:
        return 0
    # Subsample large meshes: visit about 10000 evenly spaced vertices.
    step = int(n / 10000.0) + 1
    total = 0.0
    count = 0.0
    for i in np.arange(0, n, step):
        total += utils.mag(pts[i] - center)
        count += 1
    return total / count
def _get_mapping ( self , section ) : '''mapping will take the section name from a Singularity recipe and return a map function to add it to the appropriate place . Any lines that don ' t cleanly map are assumed to be comments . Parameters section : the name of the Singularity recipe section Returns fun...
# Ensure section is lowercase section = section . lower ( ) mapping = { "environment" : self . _env , "comments" : self . _comments , "runscript" : self . _run , "labels" : self . _labels , "setup" : self . _setup , "files" : self . _files , "from" : self . _from , "post" : self . _post , "test" : self . _test , "help"...
def people(self):
    """Retrieve all people of the company.

    :return: list of people objects
    :rtype: list
    """
    # Fetch the raw XML for this company's people, parse it, then decode
    # the list of Person objects out of it.
    endpoint = self.ENDPOINT + '/' + str(self.id) + '/people'
    raw_xml = self._get_request(endpoint=endpoint).text
    element = self.element_from_string(raw_xml)
    decoder = fields.ListField(name=HightonConstants.PEOPLE, init_class=Person)
    return decoder.decode(element)
def create_eventhub(self, **kwargs):
    """Creates an instance of the Event Hub service.

    todo: make it so the client can be customised to publish/subscribe.
    """
    hub = predix.admin.eventhub.EventHub(**kwargs)
    hub.create()
    hub.add_to_manifest(self)
    hub.grant_client(client_id=self.get_client_id(), **kwargs)
    # NOTE(review): add_to_manifest is invoked a second time after
    # grant_client — presumably to persist grant-related changes; confirm.
    hub.add_to_manifest(self)
    return hub
def sign(ctx, filename):
    """Sign a json-formatted transaction."""
    # Read the raw transaction either from the given file or from stdin.
    raw = filename.read() if filename else sys.stdin.read()
    # SECURITY NOTE(review): eval() executes arbitrary code from the
    # input; never feed untrusted data to this command.
    builder = TransactionBuilder(eval(raw), bitshares_instance=ctx.bitshares)
    builder.appendMissingSignatures()
    builder.sign()
    print_tx(builder.json())
def get_dot_atom_text ( value ) : """dot - text = 1 * atext * ( " . " 1 * atext )"""
dot_atom_text = DotAtomText ( ) if not value or value [ 0 ] in ATOM_ENDS : raise errors . HeaderParseError ( "expected atom at a start of " "dot-atom-text but found '{}'" . format ( value ) ) while value and value [ 0 ] not in ATOM_ENDS : token , value = get_atext ( value ) dot_atom_text . append ( token ) ...
def bundle(self, bundle_id, channel=None):
    '''Get the default data for a bundle.

    @param bundle_id The bundle's id.
    @param channel Optional channel name.
    '''
    # A bundle is just an entity fetched with its file listing included.
    kwargs = {'get_files': True, 'channel': channel}
    return self.entity(bundle_id, **kwargs)
def delete_policy_set(self, policy_set_id):
    """Delete a specific policy set by id.  Method is idempotent."""
    # Resolve the per-policy-set URI, then issue the DELETE through the
    # underlying service client.
    return self.service._delete(self._get_policy_set_uri(guid=policy_set_id))
def database_clone ( targetcall , databasepath , complete = False ) : """Checks to see if the database has already been downloaded . If not , runs the system call to download the database , and writes stdout and stderr to the logfile : param targetcall : system call to download , and possibly set - up the datab...
# Create a file to store the logs ; it will be used to determine if the database was downloaded and set - up completefile = os . path . join ( databasepath , 'complete' ) # Run the system call if the database is not already downloaded if not os . path . isfile ( completefile ) : out , err = run_subprocess ( targetc...
def height_map ( lookup , height_stops , default_height = 0.0 ) : """Return a height value ( in meters ) interpolated from given height _ stops ; for use with vector - based visualizations using fill - extrusion layers"""
# if no height _ stops , use default height if len ( height_stops ) == 0 : return default_height # dictionary to lookup height from match - type height _ stops match_map = dict ( ( x , y ) for ( x , y ) in height_stops ) # if lookup matches stop exactly , return corresponding height ( first priority ) # ( includes ...
def docs ( ** kwargs ) : """Annotate the decorated view function with the specified Swagger attributes . Usage : . . code - block : : python from aiohttp import web @ docs ( tags = [ ' my _ tag ' ] , summary = ' Test method summary ' , description = ' Test method description ' , parameters = [ { '...
def wrapper ( func ) : kwargs [ "produces" ] = [ "application/json" ] if not hasattr ( func , "__apispec__" ) : func . __apispec__ = { "parameters" : [ ] , "responses" : { } } extra_parameters = kwargs . pop ( "parameters" , [ ] ) extra_responses = kwargs . pop ( "responses" , { } ) func . _...
def init_indexes ( self ) : """Create indexes for schemas ."""
state = self . app_state for name , schema in self . schemas . items ( ) : if current_app . testing : storage = TestingStorage ( ) else : index_path = ( Path ( state . whoosh_base ) / name ) . absolute ( ) if not index_path . exists ( ) : index_path . mkdir ( parents = True )...