signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def get_erasure_profile(service, name):
    """Fetch a Ceph erasure-code profile as a dictionary.

    :param service: six.string_types. The Ceph user name to run the command under
    :param name: name of the erasure-code profile to look up
    :return: dict of profile settings, or None if the profile cannot be read
    """
    cmd = ['ceph', '--id', service, 'osd', 'erasure-code-profile',
           'get', name, '--format=json']
    try:
        raw = check_output(cmd)
    except (CalledProcessError, OSError):
        return None
    if six.PY3:
        raw = raw.decode('UTF-8')
    try:
        return json.loads(raw)
    except ValueError:
        # Output was not valid JSON.
        return None
def create_apppool ( name ) : '''Create an IIS application pool . . . note : : This function only validates against the application pool name , and will return True even if the application pool already exists with a different configuration . It will not modify the configuration of an existing application ...
current_apppools = list_apppools ( ) apppool_path = r'IIS:\AppPools\{0}' . format ( name ) if name in current_apppools : log . debug ( "Application pool '%s' already present." , name ) return True ps_cmd = [ 'New-Item' , '-Path' , r"'{0}'" . format ( apppool_path ) ] cmd_ret = _srvmgr ( ps_cmd ) if cmd_ret [ 'r...
def p_l_comma_l(self, p):
    '''l : expression COMMA l'''
    _LOGGER.debug("l -> expresion , l")
    # The trailing symbol must already have been reduced to a typed list.
    if p[3].type != TypedClass.LIST:
        raise TypeError("list expected")
    # Prepend the new expression to the existing list value.
    p[0] = TypedList([p[1]] + p[3].value)
def _add_new_init_method ( cls ) : """Replace the existing cls . _ _ init _ _ ( ) method with a new one which calls the original one and in addition performs the following actions : (1 ) Finds all instances of tohu . BaseGenerator in the namespace and collects them in the dictionary ` self . field _ gens ` ...
orig_init = cls . __init__ def new_init_method ( self , * args , ** kwargs ) : logger . debug ( f"Initialising new {self} (type: {type(self)})" ) # Call original _ _ init _ _ function to ensure we pick up # any tohu generators that are defined there . logger . debug ( f" orig_init: {orig_init}" ) ...
def remap_label_indexers ( data_obj , indexers , method = None , tolerance = None ) : """Given an xarray data object and label based indexers , return a mapping of equivalent location based indexers . Also return a mapping of updated pandas index objects ( in case of multi - index level drop ) ."""
if method is not None and not isinstance ( method , str ) : raise TypeError ( '``method`` must be a string' ) pos_indexers = { } new_indexes = { } dim_indexers = get_dim_indexers ( data_obj , indexers ) for dim , label in dim_indexers . items ( ) : try : index = data_obj . indexes [ dim ] except Key...
def get_validation_description ( view , method ) : """Returns validation description in format : # # # Validation : validate method docstring * field1 name * field1 validation docstring * field2 name * field2 validation docstring"""
if method not in ( 'PUT' , 'PATCH' , 'POST' ) or not hasattr ( view , 'get_serializer' ) : return '' serializer = view . get_serializer ( ) description = '' if hasattr ( serializer , 'validate' ) and serializer . validate . __doc__ is not None : description += formatting . dedent ( smart_text ( serializer . val...
def xyz_to_cielab ( xyz , refwhite ) : """Convert CIE XYZ color values to CIE L * a * b * . * xyz * should be of shape ( * , 3 ) . * refwhite * is the reference white value , of shape ( 3 , ) . Return value will have same shape as * xyz * , but be in CIE L * a * b * coordinates ."""
norm = xyz / refwhite pow = norm ** 0.333333333333333 scale = 7.787037 * norm + 16. / 116 mapped = np . where ( norm > 0.008856 , pow , scale ) cielab = np . empty_like ( xyz ) cielab [ ... , L ] = 116 * mapped [ ... , Y ] - 16 cielab [ ... , A ] = 500 * ( mapped [ ... , X ] - mapped [ ... , Y ] ) cielab [ ... , B ] = ...
def get_xblock_settings(self, default=None):
    """Fetch XBlock-specific settings from the runtime's "settings" service.

    Parameters:
        default: value returned when no settings service is available; it is
            also forwarded to ``get_settings_bucket`` as its ``default``.
    """
    service = self.runtime.service(self, "settings")
    if not service:
        return default
    return service.get_settings_bucket(self, default=default)
def get_redis ( ** options ) : """if no options defined , then it ' ll use settings options # unix _ socket _ path = ' / tmp / redis . sock ' connection _ pool = { ' host ' : ' localhost ' , ' port ' : 6379} # if test after created redis client object test _ first = False"""
from uliweb import settings from uliweb . utils . common import log import redis options = ( options or { } ) options . update ( settings . REDIS ) if 'unix_socket_path' in options : client = redis . Redis ( unix_socket_path = options [ 'unix_socket_path' ] ) else : global __connection_pool__ if not __conne...
def addCodedValue(self, name, code):
    """Add a coded value to the domain (no-op if an identical entry exists).

    Inputs:
        name - name of the domain entry
        code - value
    """
    entry = {"name": name, "code": code}
    if entry in self._codedValues:
        return
    self._codedValues.append(entry)
def reduce_tashkeel ( text ) : """Reduce the Tashkeel , by deleting evident cases . @ param text : the input text fully vocalized . @ type text : unicode . @ return : partially vocalized text . @ rtype : unicode ."""
patterns = [ # delete all fathat , except on waw and yeh u"(?<!(%s|%s))(%s|%s)" % ( WAW , YEH , SUKUN , FATHA ) , # delete damma if followed by waw . u"%s(?=%s)" % ( DAMMA , WAW ) , # delete kasra if followed by yeh . u"%s(?=%s)" % ( KASRA , YEH ) , # delete fatha if followed by alef to reduce yeh maftouha # and waw ma...
def status ( self , name = '' ) : """Return a list of the statuses of the ` name ` service , or if name is omitted , a list of the status of all services for this specific init system . There should be a standardization around the status fields . There currently isn ' t . ` self . services ` is set in ` b...
super ( SystemD , self ) . status ( name = name ) svc_list = sh . systemctl ( '--no-legend' , '--no-pager' , t = 'service' ) svcs_info = [ self . _parse_service_info ( svc ) for svc in svc_list ] if name : names = ( name , name + '.service' ) # return list of one item for specific service svcs_info = [ s fo...
def clear(self):
    """Restart with a clean config."""
    self._config = configparser.RawConfigParser()
    # Command-line overrides are cleared too; they keep winning over the
    # config file even if the file is modified and live-reloaded.
    self._override_config = {}
    self.read_config()
def verify ( self , type_ ) : """Check whether a type implements ` ` self ` ` . Parameters type _ : type The type to check . Raises TypeError If ` ` type _ ` ` doesn ' t conform to our interface . Returns None"""
raw_missing , mistyped , mismatched = self . _diff_signatures ( type_ ) # See if we have defaults for missing methods . missing = [ ] defaults_to_use = { } for name in raw_missing : try : defaults_to_use [ name ] = self . _defaults [ name ] . implementation except KeyError : missing . append ( n...
def geodetic2geocentric ( theta , alt ) : """Conversion from geodetic to geocentric coordinates by using the WGS84 spheroid . : param theta : colatitude ( float , rad ) : param alt : altitude ( float , km ) : return gccolat : geocentric colatitude ( float , rad ) d : gccolat minus theta ( float , rad ) r ...
ct = np . cos ( theta ) st = np . sin ( theta ) a2 = 40680631.6 b2 = 40408296.0 one = a2 * st * st two = b2 * ct * ct three = one + two rho = np . sqrt ( three ) r = np . sqrt ( alt * ( alt + 2.0 * rho ) + ( a2 * one + b2 * two ) / three ) cd = ( alt + rho ) / r sd = ( a2 - b2 ) / rho * ct * st / r one = ct ct = ct * c...
def set_sleep ( minutes ) : '''Sets the amount of idle time until the machine sleeps . Sets the same value for Computer , Display , and Hard Disk . Pass " Never " or " Off " for computers that should never sleep . : param minutes : Can be an integer between 1 and 180 or " Never " or " Off " : ptype : int , ...
value = _validate_sleep ( minutes ) cmd = 'systemsetup -setsleep {0}' . format ( value ) salt . utils . mac_utils . execute_return_success ( cmd ) state = [ ] for check in ( get_computer_sleep , get_display_sleep , get_harddisk_sleep ) : state . append ( salt . utils . mac_utils . confirm_updated ( value , check , ...
def event_return ( events ) : '''Return event to Pg server Requires that configuration be enabled via ' event _ return ' option in master config .'''
with _get_serv ( events , commit = True ) as cur : for event in events : tag = event . get ( 'tag' , '' ) data = event . get ( 'data' , '' ) sql = '''INSERT INTO salt_events (tag, data, master_id, alter_time) VALUES (%s, %s, %s, to_timestamp(%s))''' cur . execute...
def dynamic_load(name):
    """Equivalent of a "from X import Y" statement using dot notation.

    For example, ``dynamic_load("foo.bar.thing")`` returns the attribute
    "thing" from the module "foo.bar".
    """
    mod_name, _sep, attr = name.rpartition('.')
    module = __import__(mod_name, globals(), locals(), [attr])
    return getattr(module, attr)
def port_number_range ( prange ) : """Port number range validation and expansion ."""
# first , try it as a normal port number try : return port_number ( prange ) except ValueError : pass # then , consider it as a range with the format " x - y " and expand it try : bounds = list ( map ( int , re . match ( r'^(\d+)\-(\d+)$' , prange ) . groups ( ) ) ) if bounds [ 0 ] > bounds [ 1 ] : ...
def all_floating_ips(self):
    """List all of the Floating IPs available on the account."""
    if self.api_version != 2:
        # Floating IPs are only exposed by the v2 API.
        raise DoError(v2_api_required_str)
    payload = self.request('/floating_ips')
    return payload['floating_ips']
def _track ( metric_type , metric_call , metric_kwargs , name , description , labels , registry , before = None ) : """Internal method decorator logic . : param metric _ type : the type of the metric from the ` prometheus _ client ` library : param metric _ call : the invocation to execute as a callable with ` ...
if labels is not None and not isinstance ( labels , dict ) : raise TypeError ( 'labels needs to be a dictionary of {labelname: callable}' ) label_names = labels . keys ( ) if labels else tuple ( ) parent_metric = metric_type ( name , description , labelnames = label_names , registry = registry , ** metric_kwargs ) ...
def update_service_profile(self, service_profile, body):
    """Update a Neutron service profile."""
    path = self.service_profile_path % service_profile
    return self.put(path, body=body)
def reconnect ( self , logfile = None , max_timeout = 360 , force_discovery = False , tracefile = None , retry = True ) : """Reconnect to the device . It can be called when after device reloads or the session was disconnected either by device or jumphost . If multiple jumphosts are used then ` reconnect ` start...
self . _enable_logging ( logfile , tracefile ) self . log ( "-" * 20 ) self . log ( "Condoor Version {}" . format ( __version__ ) ) self . log ( "Cache filename: {}" . format ( _CACHE_FILE ) ) self . connection_chains = [ Chain ( self , url_list ) for url_list in normalize_urls ( self . _urls ) ] self . log ( "Connecti...
def write(self, file):
    """Write the image to the open file object.

    See `.save()` if you have a filename. In general this can only be
    called once: the first write streams the source data, which cannot be
    streamed again.
    """
    writer = Writer(**self.info)
    writer.write(file, self.rows)
def mad(y_true, y_pred):
    """Mean absolute deviation between ``y_true`` and ``y_pred``.

    NOTE(review): despite the function's name, this computes the *mean*
    absolute error (``np.mean``), not the median absolute deviation. The
    behaviour is kept for backward compatibility; only the (previously
    wrong) docstring is corrected. NaN positions are masked out by
    ``_mask_nan`` before averaging.
    """
    y_true, y_pred = _mask_nan(y_true, y_pred)
    return np.mean(np.abs(y_true - y_pred))
def _parse_caps_cpu ( node ) : '''Parse the < cpu > element of the domain capabilities'''
result = { } for mode in node . findall ( 'mode' ) : if not mode . get ( 'supported' ) == 'yes' : continue name = mode . get ( 'name' ) if name == 'host-passthrough' : result [ name ] = True elif name == 'host-model' : host_model = { } model_node = mode . find ( 'model' )...
def mles(self):
    """Return the maximum likelihood estimate for each of the energy bins.

    :return: numpy array of length ``self._nx``, one MLE value per bin.
    """
    # The original used np.ndarray((n,)), which allocates *uninitialized*
    # memory and fills it by index; building the array from the computed
    # values directly is the idiomatic (and equivalent) form.
    return np.array([self._loglikes[i].mle() for i in range(self._nx)])
def getSegmentOnCell(self, c, i, segIdx):
    """Return the segment with index ``segIdx`` on cell (c, i).

    The segment has the form:
    [[segmentID, sequenceSegmentFlag, frequency],
     [col1, idx1, perm1], [col2, idx2, perm2], ...]
    """
    cell = self.cells[c][i]
    return cell[segIdx]
def send_message ( self , opcode , message ) : """Send a message to the peer over the socket . : param int opcode : The opcode for the message to send . : param bytes message : The message data to send ."""
if not isinstance ( message , bytes ) : message = message . encode ( 'utf-8' ) length = len ( message ) if not select . select ( [ ] , [ self . handler . wfile ] , [ ] , 0 ) [ 1 ] : self . logger . error ( 'the socket is not ready for writing' ) self . close ( ) return buffer = b'' buffer += struct . pa...
def prox_lim(xy, step, boundary=None):
    """Proximal projection operator.

    ``boundary`` selects the projection: "circle", "line", or anything
    else (identity — the point is returned unchanged).
    """
    if boundary == "circle":
        return prox_circle(xy, step)
    elif boundary == "line":
        return prox_line(xy, step)
    else:
        # No recognised boundary: identity projection.
        return xy
def config_from_prefix ( prefix ) : """Get config from zmq prefix"""
settings = { } if prefix . lower ( ) in ( 'default' , 'auto' , '' ) : settings [ 'zmq_prefix' ] = '' settings [ 'libzmq_extension' ] = False settings [ 'no_libzmq_extension' ] = False elif prefix . lower ( ) in ( 'bundled' , 'extension' ) : settings [ 'zmq_prefix' ] = '' settings [ 'libzmq_extension...
def activate_firmware_and_wait ( self , rollback_override = None , timeout = 2 , interval = 1 ) : """Activate the new uploaded firmware and wait for long running command ."""
try : self . activate_firmware ( rollback_override ) except CompletionCodeError as e : if e . cc == CC_LONG_DURATION_CMD_IN_PROGRESS : self . wait_for_long_duration_command ( constants . CMDID_HPM_ACTIVATE_FIRMWARE , timeout , interval ) else : raise HpmError ( 'activate_firmware CC=0x%02x' ...
def request(self, target):
    """Delete a configuration datastore.

    *target* specifies the name or URL of the configuration datastore to
    delete.

    :seealso: :ref:`srctarget_params`
    """
    node = new_ele("delete-config")
    target_ele = util.datastore_or_url("target", target, self._assert)
    node.append(target_ele)
    return self._request(node)
def getProvince(self, default=None):
    """Return the province ("state") from the physical address, falling
    back to the postal address."""
    # Both getters are evaluated (matching the original), then combined.
    physical = self.getPhysicalAddress().get("state", default)
    postal = self.getPostalAddress().get("state", default)
    return physical if physical else postal
def search_uris ( self , uri , threat_types , retry = google . api_core . gapic_v1 . method . DEFAULT , timeout = google . api_core . gapic_v1 . method . DEFAULT , metadata = None , ) : """This method is used to check whether a URI is on a given threatList . Example : > > > from google . cloud import webrisk _ ...
# Wrap the transport method to add retry and timeout logic . if "search_uris" not in self . _inner_api_calls : self . _inner_api_calls [ "search_uris" ] = google . api_core . gapic_v1 . method . wrap_method ( self . transport . search_uris , default_retry = self . _method_configs [ "SearchUris" ] . retry , default_...
def set_config(config):
    """Replace ``bigchaindb.config`` with the defaults updated by ``config``.

    Sets ``bigchaindb.config`` to a deep copy of the default config dict,
    overlays whatever is in the provided ``config`` dict, then marks
    ``bigchaindb.config['CONFIGURED'] = True``.

    Args:
        config (dict): the config dict to read for changes to the default
            config.
    """
    # Deep-copy so the pristine defaults in bigchaindb._config stay untouched.
    bigchaindb.config = copy.deepcopy(bigchaindb._config)
    # Overlay the caller's settings on top of the defaults.
    update(bigchaindb.config, update_types(config, bigchaindb.config))
    bigchaindb.config['CONFIGURED'] = True
def Browser(driver_name="firefox", *args, **kwargs):
    """Return a driver instance for the given name.

    When no ``driver_name`` is provided, ``firefox`` is used. Raises
    ``DriverNotFoundError`` when no driver is registered under that name.
    Extra positional and keyword arguments are forwarded to the driver.
    """
    try:
        driver_cls = _DRIVERS[driver_name]
    except KeyError:
        raise DriverNotFoundError("No driver for %s" % driver_name)
    return driver_cls(*args, **kwargs)
def write ( file , system ) : """Write data in system to a dm file"""
# TODO : Check for bugs ! ! ! out = list ( ) out . append ( '# DOME format version 1.0' ) ppl = 7 # parameter per line retval = True dev_list = sorted ( system . devman . devices ) for dev in dev_list : model = system . __dict__ [ dev ] if not model . n : continue out . append ( '' ) header = de...
def TableDescriptionParser ( table_description , depth = 0 ) : """Parses the table _ description object for internal use . Parses the user - submitted table description into an internal format used by the Python DataTable class . Returns the flat list of parsed columns . Args : table _ description : A descr...
# For the recursion step , we check for a scalar object ( string or tuple ) if isinstance ( table_description , ( six . string_types , tuple ) ) : parsed_col = DataTable . ColumnTypeParser ( table_description ) parsed_col [ "depth" ] = depth parsed_col [ "container" ] = "scalar" return [ parsed_col ] # ...
def set_ratio(self, new_ratio):
    """Set a new conversion ratio immediately."""
    # Imported lazily so importing this module does not require samplerate.
    from samplerate.lowlevel import src_set_ratio
    return src_set_ratio(self._state, new_ratio)
def _deserialize ( s , proto ) : # type : ( bytes , _ Proto ) - > _ Proto '''Parse bytes into a in - memory proto @ params s is bytes containing serialized proto proto is a in - memory proto object @ return The proto instance filled in by s'''
if not isinstance ( s , bytes ) : raise ValueError ( 'Parameter s must be bytes, but got type: {}' . format ( type ( s ) ) ) if not ( hasattr ( proto , 'ParseFromString' ) and callable ( proto . ParseFromString ) ) : raise ValueError ( 'No ParseFromString method is detected. ' '\ntype is {}' . format ( type ( p...
def umount(self, all=False, force=True):
    """Unmount the container filesystem.

    :param all: bool, unmount all mounted containers
    :param force: bool, force unmounting of the containers' root filesystem
    :return: str, the output from cmd
    """
    # FIXME: handle the error case where the unmount did not work
    options = []
    if force:
        options.append('--force')
    if all:
        options.append('--all')
    # With --all no specific container id is passed.
    target = "" if all else self.get_id()
    cmd = ["podman", "umount"] + options + [target]
    return run_cmd(cmd, return_output=True)
def _finalize_block_blob ( self , ud , metadata ) : # type : ( Uploader , blobxfer . models . upload . Descriptor , dict ) - > None """Finalize Block blob : param Uploader self : this : param blobxfer . models . upload . Descriptor ud : upload descriptor : param dict metadata : metadata dict"""
if not ud . entity . is_encrypted and ud . must_compute_md5 : digest = blobxfer . util . base64_encode_as_string ( ud . md5 . digest ( ) ) else : digest = None blobxfer . operations . azure . blob . block . put_block_list ( ud . entity , ud . last_block_num , digest , metadata ) if blobxfer . util . is_not_empt...
def run_cmds_on_all_switches(self, cmds):
    """Run ``cmds`` on every configured switch.

    Used for ACL and rule creation/deletion, since ACLs and rules must
    exist on all switches.
    """
    for sw in self._switches.values():
        self.run_openstack_sg_cmds(cmds, sw)
def _read ( self , directory , filename , session , path , name , extension , spatial , spatialReferenceID , replaceParamFile ) : """Generic Output Location Read from File Method"""
# Assign file extension attribute to file object self . fileExtension = extension # Open file and parse into a data structure with open ( path , 'r' ) as f : for line in f : sline = line . strip ( ) . split ( ) if len ( sline ) == 1 : self . numLocations = sline [ 0 ] else : # Cr...
def _handle_exception(self, sender, exception=None):
    """Report the exception to Honeybadger, then optionally reset context.

    :param T sender: the object sending the exception event.
    :param Exception exception: the exception to handle.
    """
    honeybadger.notify(exception)
    # Context reset is opt-in via configuration.
    if self.reset_context_after_request:
        self._reset_context()
def is_all_field_none(self):
    """Whether every tracked field of this object is None.

    :rtype: bool
    """
    # Idiom fix: the original repeated `if field is not None: return False`
    # five times; all() over the fields is equivalent and clearer.
    return all(
        field is None
        for field in (
            self._id_,
            self._created,
            self._updated,
            self._label_user_creator,
            self._content,
        )
    )
def _parseElfHeader(self, data):
    """Parse the ELF header out of ``data`` and wrap it in an EhdrData."""
    header = self.__classes.EHDR.from_buffer(data)
    return EhdrData(header=header)
def chunks(items, chunksize):
    """Turn a generator sequence into a sequence of chunk iterators.

    Each yielded chunk lazily produces up to ``chunksize`` items. Whatever
    part of a chunk the consumer leaves unread is drained before the next
    chunk is produced, so chunks never overlap.
    """
    iterator = iter(items)
    while True:
        try:
            head = next(iterator)
        except StopIteration:
            return
        chunk = chain((head,), islice(iterator, chunksize - 1))
        yield chunk
        # Drain any items the consumer left behind in this chunk.
        deque(chunk, 0)
def make_movie ( structures , output_filename = "movie.mp4" , zoom = 1.0 , fps = 20 , bitrate = "10000k" , quality = 1 , ** kwargs ) : """Generate a movie from a sequence of structures using vtk and ffmpeg . Args : structures ( [ Structure ] ) : sequence of structures output _ filename ( str ) : filename for ...
vis = StructureVis ( ** kwargs ) vis . show_help = False vis . redraw ( ) vis . zoom ( zoom ) sigfig = int ( math . floor ( math . log10 ( len ( structures ) ) ) + 1 ) filename = "image{0:0" + str ( sigfig ) + "d}.png" for i , s in enumerate ( structures ) : vis . set_structure ( s ) vis . write_image ( filenam...
def _construct_axes_from_arguments ( self , args , kwargs , require_all = False , sentinel = None ) : """Construct and returns axes if supplied in args / kwargs . If require _ all , raise if all axis arguments are not supplied return a tuple of ( axes , kwargs ) . sentinel specifies the default parameter when...
# construct the args args = list ( args ) for a in self . _AXIS_ORDERS : # if we have an alias for this axis alias = self . _AXIS_IALIASES . get ( a ) if alias is not None : if a in kwargs : if alias in kwargs : raise TypeError ( "arguments are mutually exclusive " "for [%s,%...
def handle ( self , * args , ** opt ) : """Args : * args : * * opt :"""
d1_gmn . app . management . commands . util . util . log_setup ( opt [ "debug" ] ) logging . info ( "Running management command: {}" . format ( __name__ ) # util . get _ command _ name ( ) ) ) d1_gmn . app . management . commands . util . util . exit_if_other_instance_is_running ( __name__ ) self . _opt = opt try : # p...
def _read_geneset ( self , study_fn , pop_fn ) : """Open files containing genes . Return study genes and population genes ."""
pop = set ( _ . strip ( ) for _ in open ( pop_fn ) if _ . strip ( ) ) study = frozenset ( _ . strip ( ) for _ in open ( study_fn ) if _ . strip ( ) ) if next ( iter ( pop ) ) . isdigit ( ) : pop = set ( int ( g ) for g in pop ) study = frozenset ( int ( g ) for g in study ) # some times the pop is a second grou...
def create_insert_dict_string ( self , tblname , d , PKfields = [ ] , fields = None , check_existing = False ) : '''The main function of the insert _ dict functions . This creates and returns the SQL query and parameters used by the other functions but does not insert any data into the database . Simple functio...
if type ( PKfields ) == type ( "" ) : PKfields = [ PKfields ] if fields == None : fields = sorted ( d . keys ( ) ) values = None SQL = None try : # Search for existing records wherestr = [ ] PKvalues = [ ] for PKfield in PKfields : if d [ PKfield ] == None : wherestr . append ( "...
def as_list(self):
    """Return a list of integers or ``None`` for each dimension.

    Returns:
        A list of integers or ``None`` for each dimension.

    Raises:
        ValueError: if ``self`` is an unknown shape with an unknown rank.
    """
    dims = self._dims
    if dims is None:
        raise ValueError("as_list() is not defined on an unknown TensorShape.")
    return [d.value for d in dims]
def image(self, url, title="", width=800):
    """Create an MMD image link.

    **Key Arguments:**
        - ``title`` -- the title for the image
        - ``url`` -- the image URL
        - ``width`` -- the width in pixels of the image. Default *800*

    **Return:**
        - ``imageLink`` -- the MMD image link
    """
    caption = title = title.strip()
    # Timestamp-based figure id keeps references unique across a document.
    figId = datetime.now().strftime("%Y%m%dt%H%M%S.%f")
    if title:
        figId = "%(title)s %(figId)s" % {"title": title, "figId": figId}
    return """\n\n![%(caption)s][%(figId)s] [%(figId)s]: %(url)s "%(title)s" width=%(width)spx\n\n""" % {
        "caption": caption,
        "figId": figId,
        "url": url,
        "title": title,
        "width": width,
    }
def email ( value , whitelist = None ) : """Validate an email address . This validator is based on ` Django ' s email validator ` _ . Returns ` ` True ` ` on success and : class : ` ~ validators . utils . ValidationFailure ` when validation fails . Examples : : > > > email ( ' someone @ example . com ' ) ...
if whitelist is None : whitelist = domain_whitelist if not value or '@' not in value : return False user_part , domain_part = value . rsplit ( '@' , 1 ) if not user_regex . match ( user_part ) : return False if domain_part not in whitelist and not domain_regex . match ( domain_part ) : # Try for possible ID...
def _can_change_or_view ( model , user ) : """Return True iff ` user ` has either change or view permission for ` model ` ."""
model_name = model . _meta . model_name app_label = model . _meta . app_label can_change = user . has_perm ( app_label + '.change_' + model_name ) can_view = user . has_perm ( app_label + '.view_' + model_name ) return can_change or can_view
def SSL_CTX_set_info_callback(ctx, app_info_cb):
    """Set the info callback.

    :param app_info_cb: the Python callback to use
    :return: None
    """
    def py_info_callback(ssl, where, ret):
        # A callback invoked from C must never let an exception propagate
        # back across the FFI boundary, so everything is swallowed here.
        try:
            app_info_cb(SSL(ssl), where, ret)
        except:
            pass
        return

    global _info_callback
    # Keep a reference in the registry so the ctypes wrapper is not
    # garbage-collected while the C side still holds the pointer.
    _info_callback[ctx] = _rvoid_voidp_int_int(py_info_callback)
    _SSL_CTX_set_info_callback(ctx, _info_callback[ctx])
def mod_repo ( repo , ** kwargs ) : '''Modify one or more values for a repo . If the repo does not exist , it will be created , so long as the following values are specified : repo or alias alias by which Zypper refers to the repo url , mirrorlist or baseurl the URL for Zypper to reference enabled Ena...
root = kwargs . get ( 'root' ) or None repos_cfg = _get_configured_repos ( root = root ) added = False # An attempt to add new one ? if repo not in repos_cfg . sections ( ) : url = kwargs . get ( 'url' , kwargs . get ( 'mirrorlist' , kwargs . get ( 'baseurl' ) ) ) if not url : raise CommandExecutionErro...
def reduce_after(method):
    '''Decorator: reduce() the result of this method call (unless the call
    already returned ``self``, i.e. it was already reduced).
    '''
    from functools import wraps

    @wraps(method)  # fix: preserve the wrapped method's name/docstring
    def new_method(self, *args, **kwargs):
        result = method(self, *args, **kwargs)
        if result == self:
            # Already reduced: return it untouched.
            return result
        return result.reduce()

    return new_method
def process_job ( self , job_request ) : """Validate , execute , and run the job request , wrapping it with any applicable job middleware . : param job _ request : The job request : type job _ request : dict : return : A ` JobResponse ` object : rtype : JobResponse : raise : JobError"""
try : # Validate JobRequest message validation_errors = [ Error ( code = error . code , message = error . message , field = error . pointer , ) for error in ( JobRequestSchema . errors ( job_request ) or [ ] ) ] if validation_errors : raise JobError ( errors = validation_errors ) # Add the client ob...
def to_dict(self):
    """Return a dict representation of KnwKBRVAL."""
    kb = self.kb
    # FIXME remove 'id' dependency from invenio modules
    return {
        'id': self.m_key + "_" + str(self.id_knwKB),
        'key': self.m_key,
        'value': self.m_value,
        'kbid': kb.id if kb else None,
        'kbname': kb.name if kb else None,
    }
def _ssh_cmd ( self , * args ) : """Execute a gerrit command over SSH ."""
command = "gerrit {0}" . format ( " " . join ( args ) ) _ , stdout , stderr = self . _client . exec_command ( command ) return ( stdout . readlines ( ) , stderr . readlines ( ) )
def _get_spark_app_ids ( self , running_apps , requests_config , tags ) : """Traverses the Spark application master in YARN to get a Spark application ID . Return a dictionary of { app _ id : ( app _ name , tracking _ url ) } for Spark applications"""
spark_apps = { } for app_id , ( app_name , tracking_url ) in iteritems ( running_apps ) : response = self . _rest_request_to_json ( tracking_url , SPARK_APPS_PATH , SPARK_SERVICE_CHECK , requests_config , tags ) for app in response : app_id = app . get ( 'id' ) app_name = app . get ( 'name' ) ...
def bundle_attacks_with_goal ( sess , model , x , y , adv_x , attack_configs , run_counts , goal , report , report_path , attack_batch_size = BATCH_SIZE , eval_batch_size = BATCH_SIZE ) : """Runs attack bundling , working on one specific AttackGoal . This function is mostly intended to be called by ` bundle _ att...
goal . start ( run_counts ) _logger . info ( "Running criteria for new goal..." ) criteria = goal . get_criteria ( sess , model , adv_x , y , batch_size = eval_batch_size ) assert 'correctness' in criteria _logger . info ( "Accuracy: " + str ( criteria [ 'correctness' ] . mean ( ) ) ) assert 'confidence' in criteria wh...
def read_setup_py_source(self):
    # type: () -> None
    """Read setup.py into ``self.setup_source``.

    If no setup file name is configured, the source is set to the empty
    string and no read is attempted. Otherwise the file is read once and
    cached in ``self.setup_source``.

    :return: None
    """
    if not self.setup_file_name:
        self.setup_source = ""
    elif not self.setup_source:
        # Bug fix: the original used two independent `if`s, so with no file
        # name configured it still fell through and called _read_file() on
        # the empty/None file name.
        self.setup_source = self._read_file(self.setup_file_name)
def indexlist(self):
    """Return the files in alphabetical order for index lookups.

    :return: [(<str> name, <str> url), ...]
    """
    entries = []
    for child in self.children(recursive=True):
        entries.append((child.text(0), child.url()))
    entries.sort()
    return entries
def init_logger ( self ) : """Init logger ."""
if not self . result_logger : if not os . path . exists ( self . local_dir ) : os . makedirs ( self . local_dir ) if not self . logdir : self . logdir = tempfile . mkdtemp ( prefix = "{}_{}" . format ( str ( self ) [ : MAX_LEN_IDENTIFIER ] , date_str ( ) ) , dir = self . local_dir ) elif not...
def del_property ( self , t_property_name , sync = True ) : """delete property from this transport . if this transport has no id then it ' s like sync = False . : param t _ property _ name : property name to remove : param sync : If sync = True ( default ) synchronize with Ariane server . If sync = False , ad...
LOGGER . debug ( "Transport.del_property" ) if not sync or self . id is None : self . properties_2_rm . append ( t_property_name ) else : params = SessionService . complete_transactional_req ( { 'ID' : self . id , 'propertyName' : t_property_name } ) if MappingService . driver_type != DriverFactory . DRIVER...
def submissions(self):
    """List job submissions in this workspace."""
    response = fapi.get_submissions(self.namespace, self.name, self.api_url)
    # Anything other than HTTP 200 raises from the checker.
    fapi._check_response_code(response, 200)
    return response.json()
def description ( self , description ) : """Sets the description of this AdditionalRecipient . The description of the additional recipient . : param description : The description of this AdditionalRecipient . : type : str"""
if description is None : raise ValueError ( "Invalid value for `description`, must not be `None`" ) if len ( description ) > 100 : raise ValueError ( "Invalid value for `description`, length must be less than `100`" ) if len ( description ) < 1 : raise ValueError ( "Invalid value for `description`, length m...
def get_entities ( seq , suffix = False ) : """Gets entities from sequence . Args : seq ( list ) : sequence of labels . Returns : list : list of ( chunk _ type , chunk _ start , chunk _ end ) . Example : > > > from seqeval . metrics . sequence _ labeling import get _ entities > > > seq = [ ' B - PER '...
# for nested list if any ( isinstance ( s , list ) for s in seq ) : seq = [ item for sublist in seq for item in sublist + [ 'O' ] ] prev_tag = 'O' prev_type = '' begin_offset = 0 chunks = [ ] for i , chunk in enumerate ( seq + [ 'O' ] ) : if suffix : tag = chunk [ - 1 ] type_ = chunk . split ( '...
def get_cdn_metadata(self, container):
    """Return a dictionary containing the CDN metadata for the container.

    If the container does not exist, a NotFound exception is raised. If
    the container exists, but is not CDN-enabled, a NotCDNEnabled
    exception is raised.
    """
    uri = "%s/%s" % (self.uri_base, utils.get_name(container))
    resp, resp_body = self.api.cdn_request(uri, "HEAD")
    headers = dict(resp.headers)
    # Strip generic HTTP headers so only the CDN-specific ones remain.
    for plain_header in ("content-length", "content-type", "date"):
        headers.pop(plain_header, None)
    return headers
def set_var_log_arr(self, value):
    """Setter for the variance-log array.

    :param value: replacement array; must be a ``numpy.ndarray``.
    :raises TypeError: if *value* is not a ``numpy.ndarray``.
    """
    if not isinstance(value, np.ndarray):
        # Original raised a bare TypeError(); include the offending type
        # so the failure is diagnosable.
        raise TypeError(
            "value must be a numpy.ndarray, got %s" % type(value).__name__)
    self.__var_log_arr = value
def get ( self , block = True , timeout = None ) : """Get a Task from the queue : param block : Block application until a Task is received : param timeout : Timeout after n seconds : return : : class : ` ~ redisqueue . AbstractTask ` instance : exception : ConnectionError if queue is not connected"""
if not self . connected : raise QueueNotConnectedError ( "Queue is not Connected" ) if block : payload = self . __db . brpop ( self . _key , timeout = timeout ) else : payload = self . __db . rpop ( self . _key ) if not payload : return None task = self . task_class ( payload [ 1 ] ) # if task was marke...
def _read ( self , fp , fpname ) : """A direct copy of the py2.4 version of the super class ' s _ read method to assure it uses ordered dicts . Had to change one line to make it work . Future versions have this fixed , but in fact its quite embarrassing for the guys not to have done it right in the first plac...
cursect = None # None , or a dictionary optname = None lineno = 0 is_multi_line = False e = None # None , or an exception def string_decode ( v ) : if v [ - 1 ] == '\\' : v = v [ : - 1 ] # end cut trailing escapes to prevent decode error if PY3 : return v . encode ( defenc ) . decode ( 'unic...
def resize(self, capacity):
    """Re-size the `Array` by appending new *array elements* or removing
    *array elements* from the end.

    :param int capacity: new capacity of the `Array` in number of
        *array elements* (values below zero are treated as zero).
    """
    target = max(int(capacity), 0)
    current = len(self)
    if target == current:
        return
    if target == 0:
        # Shrinking to nothing: drop everything in a single call.
        self.clear()
    elif target > current:
        for _ in range(target - current):
            self.append()
    else:
        for _ in range(current - target):
            self.pop()
def add_bookmark(self, url, favorite=False, archive=False, allow_duplicates=True):
    """Add the given bookmark for the authenticated user.

    :param url: URL of the article to bookmark
    :param favorite: whether or not the bookmark should be favorited
    :param archive: whether or not the bookmark should be archived
    :param allow_duplicates: whether to allow a duplicate bookmark to be
        created for an already-bookmarked URL
    """
    payload = {
        "url": url,
        "favorite": int(favorite),
        "archive": int(archive),
        "allow_duplicates": int(allow_duplicates),
    }
    return self.post(self._generate_url('bookmarks'), payload)
def create_choice_model ( data , alt_id_col , obs_id_col , choice_col , specification , model_type , intercept_ref_pos = None , shape_ref_pos = None , names = None , intercept_names = None , shape_names = None , nest_spec = None , mixing_id_col = None , mixing_vars = None ) : """Parameters data : string or pandas...
# Make sure the model type is valid ensure_valid_model_type ( model_type , valid_model_types ) # Carry out the appropriate instantiation process for the chosen # choice model model_kwargs = { "intercept_ref_pos" : intercept_ref_pos , "shape_ref_pos" : shape_ref_pos , "names" : names , "intercept_names" : intercept_name...
def emit(self, span_datas):
    """Convert the given spans to envelopes and transmit them.

    :type span_datas: list of
        :class:`~opencensus.trace.span_data.SpanData`
    :param span_datas: SpanData tuples to emit
    """
    envelopes = []
    for span_data in span_datas:
        envelopes.append(self.span_data_to_envelope(span_data))
    result = self._transmit(envelopes)
    # NOTE(review): a positive result appears to signal a retry
    # condition, so the batch is persisted for later — confirm against
    # _transmit's contract.
    if result > 0:
        self.storage.put(envelopes, result)
def pop(self, key, default=None):
    """Standard pop semantics for all mapping types."""
    # Keys are stored internally as tuples; wrap scalar keys.
    normalized = key if isinstance(key, tuple) else (key,)
    return self.data.pop(normalized, default)
def open ( filename , mode = "r" , iline = 189 , xline = 193 , strict = True , ignore_geometry = False , endian = 'big' ) : """Open a segy file . Opens a segy file and tries to figure out its sorting , inline numbers , crossline numbers , and offsets , and enables reading and writing to this file in a simple ...
if 'w' in mode : problem = 'w in mode would truncate the file' solution = 'use r+ to open in read-write' raise ValueError ( ', ' . join ( ( problem , solution ) ) ) endians = { 'little' : 256 , # (1 < < 8) 'lsb' : 256 , 'big' : 0 , 'msb' : 0 , } if endian not in endians : problem = 'unknown endianness {...
def create_ecdsa_public_and_private_from_pem ( pem , password = None ) : """< Purpose > Create public and private ECDSA keys from a private ' pem ' . The public and private keys are strings in PEM format : public : ' - - - - - BEGIN PUBLIC KEY - - - - - . . . - - - - - END PUBLIC KEY - - - - - ' , private :...
# Does ' pem ' have the correct format ? # This check will ensure ' pem ' conforms to # ' securesystemslib . formats . ECDSARSA _ SCHEMA ' . securesystemslib . formats . PEMECDSA_SCHEMA . check_match ( pem ) if password is not None : securesystemslib . formats . PASSWORD_SCHEMA . check_match ( password ) passwo...
async def generate_image ( self , imgtype , face = None , hair = None ) : """Generate a basic image using the auto - image endpoint of weeb . sh . This function is a coroutine . Parameters : imgtype : str - type of the generation to create , possible types are awooo , eyes , or won . face : str - only used ...
if not isinstance ( imgtype , str ) : raise TypeError ( "type of 'imgtype' must be str." ) if face and not isinstance ( face , str ) : raise TypeError ( "type of 'face' must be str." ) if hair and not isinstance ( hair , str ) : raise TypeError ( "type of 'hair' must be str." ) if ( face or hair ) and imgty...
def run_get_clusters_from_file ( self , clusters_infile , all_ref_seqs , rename_dict = None ) : '''Instead of running cdhit , gets the clusters info from the input file .'''
if rename_dict is None : rename_dict = { } # check that every sequence in the clusters file can be # found in the fasta file seq_reader = pyfastaq . sequences . file_reader ( self . infile ) names_list_from_fasta_file = [ seq . id for seq in seq_reader ] names_set_from_fasta_file = set ( names_list_from_fasta_file ...
def read ( self , sensors ) : """Read a set of keys ."""
payload = { 'destDev' : [ ] , 'keys' : list ( set ( [ s . key for s in sensors ] ) ) } if self . sma_sid is None : yield from self . new_session ( ) if self . sma_sid is None : return False body = yield from self . _fetch_json ( URL_VALUES , payload = payload ) # On the first 401 error we close the sess...
def create_pool ( module , max_conns , * args , ** kwargs ) : """Create a connection pool appropriate to the driver module ' s capabilities ."""
if not hasattr ( module , 'threadsafety' ) : raise NotSupported ( "Cannot determine driver threadsafety." ) if max_conns < 1 : raise ValueError ( "Minimum number of connections is 1." ) if module . threadsafety >= 2 : return Pool ( module , max_conns , * args , ** kwargs ) if module . threadsafety >= 1 : ...
def unload(self, keepables=None):
    """Unload all unneeded datasets.

    Datasets are considered unneeded if they weren't directly requested
    or added to the Scene by the user (i.e. are not in the wishlist) and
    are not listed in *keepables*.

    Args:
        keepables (iterable): DatasetIDs to keep even though they are
            not in the wishlist.
    """
    unneeded = []
    for ds_id in self.datasets:
        if ds_id in self.wishlist:
            continue
        if keepables and ds_id in keepables:
            continue
        unneeded.append(ds_id)
    # Collect first, delete after, so the mapping is not mutated while
    # being traversed.
    for ds_id in unneeded:
        LOG.debug("Unloading dataset: %r", ds_id)
        del self.datasets[ds_id]
def factorial(N):
    """Compute the factorial of N.

    If N is small, use the fast ``FACTORIALS`` lookup table; otherwise
    fall back to ``math.factorial``, which is exact arbitrary-precision
    integer arithmetic. (The previous ``int(scipy.special.factorial(N))``
    fallback went through a float gamma function and silently lost
    precision for N >= 23.)

    :param N: non-negative integer.
    :return: N! as an ``int``.
    """
    if N < len(FACTORIALS):
        return FACTORIALS[N]
    import math
    return math.factorial(N)
def _sim_atoi_inner(self, str_addr, region, base=10, read_length=None):
    """Return the result of invoking the atoi simprocedure on `str_addr`."""
    from .. import SIM_PROCEDURES
    # atoi is strtol with sign handling enabled; delegate accordingly.
    strtol_cls = SIM_PROCEDURES['libc']['strtol']
    return strtol_cls.strtol_inner(
        str_addr, self.state, region, base, True, read_length=read_length)
def GetRunlevelsNonLSB ( states ) : """Accepts a string and returns a list of strings of numeric LSB runlevels ."""
if not states : return set ( ) convert_table = { "0" : "0" , "1" : "1" , "2" : "2" , "3" : "3" , "4" : "4" , "5" : "5" , "6" : "6" , # SysV , Gentoo , Solaris , HP - UX all allow an alpha variant # for single user . https : / / en . wikipedia . org / wiki / Runlevel "S" : "1" , "s" : "1" } _LogInvalidRunLevels ( st...
def findall_operations_with_gate_type(
        self,
        gate_type: Type[T_DESIRED_GATE_TYPE]
) -> Iterable[Tuple[int, ops.GateOperation, T_DESIRED_GATE_TYPE]]:
    """Find the locations of all gate operations of a given type.

    Args:
        gate_type: The type of gate to find, e.g. XPowGate or
            MeasurementGate.

    Yields:
        ``(moment index, gate operation, gate)`` tuples for every
        operation whose gate matches *gate_type*.
    """
    def has_desired_gate(operation) -> bool:
        return bool(ops.op_gate_of_type(operation, gate_type))

    for index, op in self.findall_operations(has_desired_gate):
        gate_op = cast(ops.GateOperation, op)
        yield index, gate_op, cast(T_DESIRED_GATE_TYPE, gate_op.gate)
def adjust_text ( texts , x = None , y = None , add_objects = None , ax = None , expand_text = ( 1.05 , 1.2 ) , expand_points = ( 1.05 , 1.2 ) , expand_objects = ( 1.05 , 1.2 ) , expand_align = ( 1.05 , 1.2 ) , autoalign = 'xy' , va = 'center' , ha = 'center' , force_text = ( 0.1 , 0.25 ) , force_points = ( 0.2 , 0.5 )...
plt . draw ( ) if ax is None : ax = plt . gca ( ) r = get_renderer ( ax . get_figure ( ) ) orig_xy = [ get_text_position ( text , ax = ax ) for text in texts ] orig_x = [ xy [ 0 ] for xy in orig_xy ] orig_y = [ xy [ 1 ] for xy in orig_xy ] force_objects = float_to_tuple ( force_objects ) force_text = float_to_tuple...
def get_date_yyyymmdd(yyyymmdd):
    """Return datetime.date parsed from a 'YYYYMMDD' string."""
    year = int(yyyymmdd[:4])
    month = int(yyyymmdd[4:6])
    day = int(yyyymmdd[6:])
    return date(year, month, day)
def pr_num(self):
    """Return the PR number or None if not on a PR"""
    # Prefer the repo-derived number; fall back to the Travis
    # environment when the repo lookup yields nothing.
    number = get_pr_num(repo=self.repo)
    if number is not None:
        return number
    return get_travis_pr_num()
def _parseline ( self , line ) : """All lines come to this method . : param line : a to parse : returns : the number of rows to jump and parse the next data line or return the code error - 1"""
sline = line . split ( SEPARATOR ) segment = sline [ 0 ] handlers = { SEGMENT_HEADER : self . _handle_header , SEGMENT_EOF : self . _handle_eof , SEGMENT_RESULT : self . _handle_result_line , SEGMENT_OBSERVATION_ORDER : self . _handle_new_record } handler = handlers . get ( segment ) if handler : return handler ( s...
def adapters ( self , adapters ) : """Sets the number of Ethernet adapters for this VMware VM instance . : param adapters : number of adapters"""
# VMware VMs are limited to 10 adapters if adapters > 10 : raise VMwareError ( "Number of adapters above the maximum supported of 10" ) self . _ethernet_adapters . clear ( ) for adapter_number in range ( 0 , adapters ) : self . _ethernet_adapters [ adapter_number ] = EthernetAdapter ( ) self . _adapters = len (...
def msjd(theta):
    """Mean squared jumping distance.

    Sums, over every field of the structured array *theta*, the squared
    differences between consecutive rows.
    """
    return sum(
        (np.sum(np.diff(theta[field], axis=0) ** 2)
         for field in theta.dtype.names),
        0.,
    )
def StreamFile(self, filedesc, offset=0, amount=None):
    """Streams chunks of a given file starting at given offset.

    Args:
      filedesc: A `file` object to stream.
      offset: An integer offset at which the file stream should start on.
      amount: An upper bound on number of bytes to read.

    Returns:
      The stream produced by `Stream` over a `FileReader` wrapping
      *filedesc*.
    """
    return self.Stream(FileReader(filedesc, offset=offset), amount=amount)