signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def collections ( self ) :
"""List top - level collections of the client ' s database .
Returns :
Sequence [ ~ . firestore _ v1beta1 . collection . CollectionReference ] :
iterator of subcollections of the current document .""" | iterator = self . _firestore_api . list_collection_ids ( self . _database_string , metadata = self . _rpc_metadata )
iterator . client = self
iterator . item_to_value = _item_to_collection_ref
return iterator |
def subsol ( datetime ) :
"""Finds subsolar geocentric latitude and longitude .
Parameters
datetime : : class : ` datetime . datetime `
Returns
sbsllat : float
Latitude of subsolar point
sbsllon : float
Longitude of subsolar point
Notes
Based on formulas in Astronomical Almanac for the year 1996 ,... | # convert to year , day of year and seconds since midnight
year = datetime . year
doy = datetime . timetuple ( ) . tm_yday
ut = datetime . hour * 3600 + datetime . minute * 60 + datetime . second
if not 1601 <= year <= 2100 :
raise ValueError ( 'Year must be in [1601, 2100]' )
yr = year - 2000
nleap = int ( np . fl... |
def file_cmd ( context , yes , file_id ) :
"""Delete a file .""" | file_obj = context . obj [ 'store' ] . File . get ( file_id )
if file_obj . is_included :
question = f"remove file from file system and database: {file_obj.full_path}"
else :
question = f"remove file from database: {file_obj.full_path}"
if yes or click . confirm ( question ) :
if file_obj . is_included and ... |
def agent_version ( self ) :
"""Get the version of the Juju machine agent .
May return None if the agent is not yet available .""" | version = self . safe_data [ 'agent-status' ] [ 'version' ]
if version :
return client . Number . from_json ( version )
else :
return None |
def getWindowTitle ( self , hwnd ) :
"""Gets the title for the specified window""" | for w in self . _get_window_list ( ) :
if "kCGWindowNumber" in w and w [ "kCGWindowNumber" ] == hwnd :
return w [ "kCGWindowName" ] |
def save_list ( key , * values ) :
"""Convert the given list of parameters to a JSON object .
JSON object is of the form :
{ key : [ values [ 0 ] , values [ 1 ] , . . . ] } ,
where values represent the given list of parameters .""" | return json . dumps ( { key : [ _get_json ( value ) for value in values ] } ) |
def character_span ( self ) :
"""Returns the character span of the token""" | begin , end = self . token_span
return ( self . sentence [ begin ] . character_span [ 0 ] , self . sentence [ end - 1 ] . character_span [ - 1 ] ) |
def _dens ( self , R , z , phi = 0. , t = 0. ) :
"""NAME :
_ dens
PURPOSE :
evaluate the density for this potential
INPUT :
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT :
the density
HISTORY :
2010-08-08 - Written - Bovy ( NYU )""" | # Cylindrical distance
Rdist = _cylR ( R , phi , self . _orb . R ( t ) , self . _orb . phi ( t ) )
# Difference vector
( xd , yd , zd ) = _cyldiff ( self . _orb . R ( t ) , self . _orb . phi ( t ) , self . _orb . z ( t ) , R , phi , z )
# Return the density
return evaluateDensities ( self . _pot , Rdist , zd , use_phys... |
def bake ( self ) :
"""Bake a ` ` shell ` ` command so it ' s ready to execute and returns None .
: return : None""" | command_list = self . command . split ( ' ' )
command , args = command_list [ 0 ] , command_list [ 1 : ]
self . _sh_command = getattr ( sh , command )
# Reconstruct command with remaining args .
self . _sh_command = self . _sh_command . bake ( args , _env = self . env , _out = LOG . out , _err = LOG . error ) |
def singleaxis ( self , apparent_zenith , apparent_azimuth ) :
"""Get tracking data . See : py : func : ` pvlib . tracking . singleaxis ` more
detail .
Parameters
apparent _ zenith : float , 1d array , or Series
Solar apparent zenith angles in decimal degrees .
apparent _ azimuth : float , 1d array , or S... | tracking_data = singleaxis ( apparent_zenith , apparent_azimuth , self . axis_tilt , self . axis_azimuth , self . max_angle , self . backtrack , self . gcr )
return tracking_data |
def page ( self , log = values . unset , message_date_before = values . unset , message_date = values . unset , message_date_after = values . unset , page_token = values . unset , page_number = values . unset , page_size = values . unset ) :
"""Retrieve a single page of NotificationInstance records from the API .
... | params = values . of ( { 'Log' : log , 'MessageDate<' : serialize . iso8601_date ( message_date_before ) , 'MessageDate' : serialize . iso8601_date ( message_date ) , 'MessageDate>' : serialize . iso8601_date ( message_date_after ) , 'PageToken' : page_token , 'Page' : page_number , 'PageSize' : page_size , } )
respons... |
def collision_encode ( self , src , id , action , threat_level , time_to_minimum_delta , altitude_minimum_delta , horizontal_minimum_delta ) :
'''Information about a potential collision
src : Collision data source ( uint8 _ t )
id : Unique identifier , domain based on src field ( uint32 _ t )
action : Action ... | return MAVLink_collision_message ( src , id , action , threat_level , time_to_minimum_delta , altitude_minimum_delta , horizontal_minimum_delta ) |
def get_next ( self , label ) :
"""Get the next section with the given label""" | while self . _get_current_label ( ) != label :
self . _skip_section ( )
return self . _read_section ( ) |
def assert_instance_deleted ( self , model_class , ** kwargs ) :
"""Checks if the model instance was deleted from the database .
For example : :
> > > with self . assert _ instance _ deleted ( Article , slug = ' lorem - ipsum ' ) :
. . . Article . objects . get ( slug = ' lorem - ipsum ' ) . delete ( )""" | return _InstanceContext ( self . assert_instance_exists , self . assert_instance_does_not_exist , model_class , ** kwargs ) |
def _clean_multiple_def ( self , ready ) :
"""Cleans the list of variable definitions extracted from the definition text to
get hold of the dimensions and default values .""" | result = [ ]
for entry in ready :
if isinstance ( entry , list ) : # This variable declaration has a default value specified , which is in the
# second slot of the list .
default = self . _collapse_default ( entry [ 1 ] )
# For hard - coded array defaults , add the parenthesis back in .
... |
def address ( cls , name , description = None , unit = '' , default = None , initial_status = None ) :
"""Instantiate a new IP address sensor object .
Parameters
name : str
The name of the sensor .
description : str
A short description of the sensor .
units : str
The units of the sensor value . May be... | return cls ( cls . ADDRESS , name , description , unit , None , default , initial_status ) |
def get ( self , block = True , timeout = None ) :
"""get .""" | try :
item = super ( ) . get ( block , timeout )
self . _getsocket . recv ( 1 )
return item
except queue . Empty :
raise queue . Empty |
def _sethex ( self , hexstring ) :
"""Reset the bitstring to have the value given in hexstring .""" | hexstring = tidy_input_string ( hexstring )
# remove any 0x if present
hexstring = hexstring . replace ( '0x' , '' )
length = len ( hexstring )
if length % 2 :
hexstring += '0'
try :
try :
data = bytearray . fromhex ( hexstring )
except TypeError : # Python 2.6 needs a unicode string ( a bug ) . 2.7... |
def execute ( self , sql , params = None ) :
"""Execute given SQL .
Calls ` rollback ` if there ' s a DB error and re - raises the exception .
Calls ` commit ` if autocommit is True and there was no error .
Returns number of rows affected ( for commands that affect rows ) .
> > > import getpass
> > > s = ... | logging . debug ( sql )
try :
self . _cursor . execute ( sql , params )
if self . autocommit :
self . _conn . commit ( )
if self . _cursor . rowcount > 0 :
return self . _cursor . rowcount
except psycopg2 . Error , error :
logging . debug ( 'PG error ({}): {}' . format ( error . pgcode ,... |
def add_observations ( self , ins_file , out_file = None , pst_path = None , inschek = True ) :
"""add new parameters to a control file
Parameters
ins _ file : str
instruction file
out _ file : str
model output file . If None , then ins _ file . replace ( " . ins " , " " ) is used . Default is None
pst ... | assert os . path . exists ( ins_file ) , "{0}, {1}" . format ( os . getcwd ( ) , ins_file )
if out_file is None :
out_file = ins_file . replace ( ".ins" , "" )
assert ins_file != out_file , "doh!"
# get the parameter names in the template file
obsnme = pst_utils . parse_ins_file ( ins_file )
sobsnme = set ( obsnme ... |
def dist ( self , * args , ** kwargs ) :
"""NAME :
dist
PURPOSE :
return distance from the observer
INPUT :
t - ( optional ) time at which to get dist ( can be Quantity )
obs = [ X , Y , Z ] - ( optional ) position of observer ( in kpc ; entries can be Quantity )
( default = [ 8.0,0 . , 0 . ] ) OR Orb... | out = self . _orb . dist ( * args , ** kwargs )
if len ( out ) == 1 :
return out [ 0 ]
else :
return out |
def on_connected ( self , headers , body ) :
"""Once the connection is established , and ' heart - beat ' is found in the headers , we calculate the real
heartbeat numbers ( based on what the server sent and what was specified by the client ) - if the heartbeats
are not 0 , we start up the heartbeat loop accord... | if 'heart-beat' in headers :
self . heartbeats = utils . calculate_heartbeats ( headers [ 'heart-beat' ] . replace ( ' ' , '' ) . split ( ',' ) , self . heartbeats )
if self . heartbeats != ( 0 , 0 ) :
self . send_sleep = self . heartbeats [ 0 ] / 1000
# by default , receive gets an additional g... |
def _setup ( self ) :
"""Run setup tasks after initialization""" | self . _populate_local ( )
try :
self . _populate_latest ( )
except Exception as e :
self . log . exception ( 'Unable to retrieve latest %s version information' , self . meta_name )
self . _sort ( ) |
def iter_referents ( self ) :
"""Generates target sets that are compatible with the current beliefstate .""" | tlow , thigh = self [ 'targetset_arity' ] . get_tuple ( )
clow , chigh = self [ 'contrast_arity' ] . get_tuple ( )
referents = list ( self . iter_singleton_referents ( ) )
t = len ( referents )
low = max ( 1 , tlow )
high = min ( [ t , thigh ] )
for targets in itertools . chain . from_iterable ( itertools . combination... |
def within_n_mads ( n , series ) :
"""Return true if all values in sequence are within n MADs""" | mad_score = ( series - series . mean ( ) ) / series . mad ( )
return ( mad_score . abs ( ) <= n ) . all ( ) |
def overviews ( self ) :
"""This method returns the properties overviews .
: return :""" | overviews = [ ]
try :
list_items = self . _ad_page_content . select ( "#overview li" )
except Exception as e :
if self . _debug :
logging . error ( "Error getting overviews. Error message: " + e . args [ 0 ] )
return
for li in list_items :
overviews . append ( li . text )
return overviews |
def bandwidth ( self ) :
"""Target bandwidth in bits / sec""" | self . _bandwidth = self . lib . iperf_get_test_rate ( self . _test )
return self . _bandwidth |
def get_relative_positions_of_waypoints ( transition_v ) :
"""This method takes the waypoints of a connection and returns all relative positions of these waypoints .
: param canvas : Canvas to check relative position in
: param transition _ v : Transition view to extract all relative waypoint positions
: retu... | handles_list = transition_v . handles ( )
rel_pos_list = [ ]
for handle in handles_list :
if handle in transition_v . end_handles ( include_waypoints = True ) :
continue
rel_pos = transition_v . canvas . get_matrix_i2i ( transition_v , transition_v . parent ) . transform_point ( * handle . pos )
rel... |
def _parse_downloadpage_html ( self , doc ) :
"""解析下载页面 , 返回下载链接""" | soup = bs4 . BeautifulSoup ( doc , 'lxml' )
a = soup . select ( '.download-box > a.btn-click' )
if a :
a = a [ 0 ]
link = a . get ( 'href' )
return link
return '' |
def _set_repository_view ( self , session ) :
"""Sets the underlying repository view to match current view""" | if self . _repository_view == COMPARATIVE :
try :
session . use_comparative_repository_view ( )
except AttributeError :
pass
else :
try :
session . use_plenary_repository_view ( )
except AttributeError :
pass |
def decompose_seconds_in_day ( seconds ) :
"""Decomposes seconds in day into hour , minute and second components .
Arguments
seconds : int
A time of day by the number of seconds passed since midnight .
Returns
hour : int
The hour component of the given time of day .
minut : int
The minute component ... | if seconds > SECONDS_IN_DAY :
seconds = seconds - SECONDS_IN_DAY
if seconds < 0 :
raise ValueError ( "seconds param must be non-negative!" )
hour = int ( seconds / 3600 )
leftover = seconds - hour * 3600
minute = int ( leftover / 60 )
second = leftover - minute * 60
return hour , minute , second |
def AddATR ( self , readernode , atr ) :
"""Add an ATR to a reader node .""" | capchild = self . AppendItem ( readernode , atr )
self . SetPyData ( capchild , None )
self . SetItemImage ( capchild , self . cardimageindex , wx . TreeItemIcon_Normal )
self . SetItemImage ( capchild , self . cardimageindex , wx . TreeItemIcon_Expanded )
self . Expand ( capchild )
return capchild |
def upload ( self , file_obj ) :
"""Replace the content of this object .
: param file file _ obj : The file ( or file - like object ) to upload .""" | return self . _client . upload_object ( self . _instance , self . _bucket , self . name , file_obj ) |
def read ( self , size = - 1 ) :
"""Read bytes and call the callback""" | bites = self . file . read ( size )
self . bytes_read += len ( bites )
self . callback ( len ( bites ) , self . bytes_read )
return bites |
def get_references ( profile_path , role , profile_name , server ) :
"""Get display and return the References for the path provided , ResultClass
CIM _ ReferencedProfile , and the role provided .""" | references_for_profile = server . conn . References ( ObjectName = profile_path , ResultClass = "CIM_ReferencedProfile" , Role = role )
if VERBOSE :
print ( 'References for profile=%s, path=%s, ResultClass=' 'CIM_ReferencedProfile, Role=%s' % ( profile_name , profile_path , role ) )
for ref in references_for_pr... |
def raise_freshness_log_entry ( self , t_stale_by ) :
"""Raise freshness alert entry ( warning level )
Example : " The freshness period of host ' host _ name ' is expired
by 0d 0h 17m 6s ( threshold = 0d 1h 0m 0s ) .
Attempt : 1 / 1.
I ' m forcing the state to freshness state ( d / HARD ) "
: param t _ st... | logger . warning ( "The freshness period of %s '%s' is expired by %ss " "(threshold=%ss + %ss). Attempt: %s / %s. " "I'm forcing the state to freshness state (%s / %s)." , self . my_type , self . get_full_name ( ) , t_stale_by , self . freshness_threshold , self . additional_freshness_latency , self . attempt , self . ... |
def _initialize_mtf_dimension_name_to_size_gcd ( self , mtf_graph ) :
"""Initializer for self . _ mtf _ dimension _ name _ to _ size _ gcd .
Args :
mtf _ graph : an mtf . Graph .
Returns :
A { string : int } , mapping the name of an MTF dimension to the greatest
common divisor of all the sizes it has . Al... | mtf_dimension_name_to_size_gcd = { }
for mtf_operation in mtf_graph . operations :
for mtf_tensor in mtf_operation . outputs :
for mtf_dimension in mtf_tensor . shape . dims :
mtf_dimension_name_to_size_gcd [ mtf_dimension . name ] = fractions . gcd ( mtf_dimension_name_to_size_gcd . get ( mtf_d... |
def update ( self , value = None ) :
'Updates the ProgressBar to a new value .' | if value is not None and value is not UnknownLength :
if ( self . maxval is not UnknownLength and not 0 <= value <= self . maxval and not value < self . currval ) :
raise ValueError ( 'Value out of range' )
self . currval = value
if self . start_time is None :
self . start ( )
self . update ( va... |
def _subcommand ( group , * args , ** kwargs ) :
"""Decorator to define a subcommand .
This decorator is used for the group ' s @ command decorator .""" | def decorator ( f ) :
if 'help' not in kwargs :
kwargs [ 'help' ] = f . __doc__
_parser_class = group . _subparsers . _parser_class
if 'parser' in kwargs : # use a copy of the given parser
group . _subparsers . _parser_class = _CopiedArgumentParser
if 'parents' in kwargs :
if not... |
def clean_text ( self , text , preserve_space ) :
"""Text cleaning as per https : / / www . w3 . org / TR / SVG / text . html # WhiteSpace""" | if text is None :
return
if preserve_space :
text = text . replace ( '\r\n' , ' ' ) . replace ( '\n' , ' ' ) . replace ( '\t' , ' ' )
else :
text = text . replace ( '\r\n' , '' ) . replace ( '\n' , '' ) . replace ( '\t' , ' ' )
text = text . strip ( )
while ( ' ' in text ) :
text = text . r... |
def Overlay_setShowDebugBorders ( self , show ) :
"""Function path : Overlay . setShowDebugBorders
Domain : Overlay
Method name : setShowDebugBorders
Parameters :
Required arguments :
' show ' ( type : boolean ) - > True for showing debug borders
No return value .
Description : Requests that backend s... | assert isinstance ( show , ( bool , ) ) , "Argument 'show' must be of type '['bool']'. Received type: '%s'" % type ( show )
subdom_funcs = self . synchronous_command ( 'Overlay.setShowDebugBorders' , show = show )
return subdom_funcs |
def pitch_contour ( times , frequencies , fs , amplitudes = None , function = np . sin , length = None , kind = 'linear' ) :
'''Sonify a pitch contour .
Parameters
times : np . ndarray
time indices for each frequency measurement , in seconds
frequencies : np . ndarray
frequency measurements , in Hz .
No... | fs = float ( fs )
if length is None :
length = int ( times . max ( ) * fs )
# Squash the negative frequencies .
# wave ( 0 ) = 0 , so clipping here will un - voice the corresponding instants
frequencies = np . maximum ( frequencies , 0.0 )
# Build a frequency interpolator
f_interp = interp1d ( times * fs , 2 * np .... |
def checkPos ( self ) :
"""check all positions""" | soup = BeautifulSoup ( self . css1 ( path [ 'movs-table' ] ) . html , 'html.parser' )
poss = [ ]
for label in soup . find_all ( "tr" ) :
pos_id = label [ 'id' ]
# init an empty list
# check if it already exist
pos_list = [ x for x in self . positions if x . id == pos_id ]
if pos_list : # and update ... |
def profiles ( weeks ) :
"""Number of weeks to build .
Starting with the current week .""" | profiles = Profiles ( store )
weeks = get_last_weeks ( weeks ) if isinstance ( weeks , int ) else weeks
print ( weeks )
profiles . create ( weeks ) |
def _parse_the_ned_object_results ( self ) :
"""* parse the ned results *
* * Key Arguments : * *
* * Return : * *
- None
. . todo : :
- @ review : when complete , clean _ parse _ the _ ned _ results method
- @ review : when complete add logging""" | self . log . info ( 'starting the ``_parse_the_ned_results`` method' )
results = [ ]
headers = [ "objectName" , "objectType" , "raDeg" , "decDeg" , "redshift" , "redshiftFlag" ]
if self . nedResults :
pathToReadFile = self . nedResults
try :
self . log . debug ( "attempting to open the file %s" % ( path... |
def get_postadres_by_huisnummer ( self , huisnummer ) :
'''Get the ` postadres ` for a : class : ` Huisnummer ` .
: param huisnummer : The : class : ` Huisnummer ` for which the ` postadres ` is wanted . OR A huisnummer id .
: rtype : A : class : ` str ` .''' | try :
id = huisnummer . id
except AttributeError :
id = huisnummer
def creator ( ) :
res = crab_gateway_request ( self . client , 'GetPostadresByHuisnummerId' , id )
if res == None :
raise GatewayResourceNotFoundException ( )
return res . Postadres
if self . caches [ 'short' ] . is_configure... |
def _compute_e2_factor ( self , imt , vs30 ) :
"""Compute and return e2 factor , equation 19 , page 80.""" | e2 = np . zeros_like ( vs30 )
if imt . name == "PGV" :
period = 1
elif imt . name == "PGA" :
period = 0
else :
period = imt . period
if period < 0.35 :
return e2
else :
idx = vs30 <= 1000
if period >= 0.35 and period <= 2.0 :
e2 [ idx ] = ( - 0.25 * np . log ( vs30 [ idx ] / 1000 ) * np ... |
def session ( self , master = '' , config = None ) :
"""Takes care of starting any local servers and stopping queues on exit .
In general , the Runner is designed to work with any user provided session ,
but this provides a convenience for properly stopping the queues .
Args :
master : The master session to... | session_manager = SESSION_MANAGER_FACTORY ( )
# Initialization is handled manually at a later point and session _ manager
# is just used for distributed compatibility .
with session_manager . prepare_session ( master , None , config = config , init_fn = lambda _ : None ) as sess :
try :
yield sess
final... |
def _normalize_stmt_idx ( self , block_addr , stmt_idx ) :
"""For each statement ID , convert ' default ' to ( last _ stmt _ idx + 1)
: param block _ addr : The block address .
: param stmt _ idx : Statement ID .
: returns : New statement ID .""" | if type ( stmt_idx ) is int :
return stmt_idx
if stmt_idx == DEFAULT_STATEMENT :
vex_block = self . project . factory . block ( block_addr ) . vex
return len ( vex_block . statements )
raise AngrBackwardSlicingError ( 'Unsupported statement ID "%s"' % stmt_idx ) |
def listen ( self ) :
"""Blocking call on widgets .""" | while self . _listen :
key = u''
key = self . term . inkey ( timeout = 0.2 )
try :
if key . code == KEY_ENTER :
self . on_enter ( key = key )
elif key . code in ( KEY_DOWN , KEY_UP ) :
self . on_key_arrow ( key = key )
elif key . code == KEY_ESCAPE or key == c... |
def send ( self , to , from_ , body , dm = False ) :
"""Send BODY as an @ message from FROM to TO
If we don ' t have the access tokens for FROM , raise AccountNotFoundError .
If the tweet resulting from ' @ { 0 } { 1 } ' . format ( TO , BODY ) is > 140 chars
raise TweetTooLongError .
If we want to send this... | tweet = '@{0} {1}' . format ( to , body )
if from_ not in self . accounts :
raise AccountNotFoundError ( )
if len ( tweet ) > 140 :
raise TweetTooLongError ( )
self . auth . set_access_token ( * self . accounts . get ( from_ ) )
api = tweepy . API ( self . auth )
if dm :
api . send_direct_message ( screen_n... |
def ready ( self ) :
"""Configure global XRay recorder based on django settings
under XRAY _ RECORDER namespace .
This method could be called twice during server startup
because of base command and reload command .
So this function must be idempotent""" | if not settings . AWS_XRAY_TRACING_NAME :
raise SegmentNameMissingException ( 'Segment name is required.' )
xray_recorder . configure ( daemon_address = settings . AWS_XRAY_DAEMON_ADDRESS , sampling = settings . SAMPLING , sampling_rules = settings . SAMPLING_RULES , context_missing = settings . AWS_XRAY_CONTEXT_MI... |
def is_forced_retry ( self , method , status_code ) :
"""Is this method / status code retryable ? ( Based on method / codes whitelists )""" | if self . method_whitelist and method . upper ( ) not in self . method_whitelist :
return False
return self . status_forcelist and status_code in self . status_forcelist |
def _prepare_client ( client_or_address ) :
""": param client _ or _ address : one of :
* None
* verbatim : ' local '
* string address
* a Client instance
: return : a tuple : ( Client instance , shutdown callback function ) .
: raises : ValueError if no valid client input was provided .""" | if client_or_address is None or str ( client_or_address ) . lower ( ) == 'local' :
local_cluster = LocalCluster ( diagnostics_port = None )
client = Client ( local_cluster )
def close_client_and_local_cluster ( verbose = False ) :
if verbose :
print ( 'shutting down client and local clus... |
def AddDischargingBattery ( self , device_name , model_name , percentage , seconds_to_empty ) :
'''Convenience method to add a discharging battery object
You have to specify a device name which must be a valid part of an object
path , e . g . " mock _ ac " , an arbitrary model name , the charge percentage , and... | path = '/org/freedesktop/UPower/devices/' + device_name
self . AddObject ( path , DEVICE_IFACE , { 'PowerSupply' : dbus . Boolean ( True , variant_level = 1 ) , 'IsPresent' : dbus . Boolean ( True , variant_level = 1 ) , 'Model' : dbus . String ( model_name , variant_level = 1 ) , 'Percentage' : dbus . Double ( percent... |
def load ( self , filename ) :
"""load data from a saved . lcopt file""" | if filename [ - 6 : ] != ".lcopt" :
filename += ".lcopt"
try :
savedInstance = pickle . load ( open ( "{}" . format ( filename ) , "rb" ) )
except FileNotFoundError :
savedInstance = pickle . load ( open ( fix_mac_path_escapes ( os . path . join ( storage . model_dir , "{}" . format ( filename ) ) ) , "rb" ... |
def source_uris ( self ) :
"""The fully - qualified URIs that point to your data in Google Cloud Storage .
Each URI can contain one ' * ' wildcard character and it must come after the ' bucket ' name .""" | return [ x . path for x in luigi . task . flatten ( self . input ( ) ) ] |
def index_all_layers ( self ) :
"""Index all layers in search engine .""" | from hypermap . aggregator . models import Layer
if not settings . REGISTRY_SKIP_CELERY :
layers_cache = set ( Layer . objects . filter ( is_valid = True ) . values_list ( 'id' , flat = True ) )
deleted_layers_cache = set ( Layer . objects . filter ( is_valid = False ) . values_list ( 'id' , flat = True ) )
... |
def queryWorkitems ( self , query_str , projectarea_id = None , projectarea_name = None , returned_properties = None , archived = False ) :
"""Query workitems with the query string in a certain
: class : ` rtcclient . project _ area . ProjectArea `
At least either of ` projectarea _ id ` and ` projectarea _ nam... | pa_id = ( self . rtc_obj . _pre_get_resource ( projectarea_id = projectarea_id , projectarea_name = projectarea_name ) )
self . log . info ( "Start to query workitems with query string: %s" , query_str )
query_str = urlquote ( query_str )
rp = returned_properties
return ( self . rtc_obj . _get_paged_resources ( "Query"... |
def attributes ( self , ** kwargs ) : # pragma : no cover
"""Retrieve the attribute configuration object .
Retrieves a mapping that identifies the custom directory
attributes configured for the Directory SyncService instance ,
and the mapping of the custom attributes to standard directory
attributes .
Arg... | path = "/directory-sync-service/v1/attributes"
r = self . _httpclient . request ( method = "GET" , path = path , url = self . url , ** kwargs )
return r |
def __fire_callback ( self , type_ , * args , ** kwargs ) :
"""Returns True if at least one callback was called""" | called = False
plain_submit = self . __threadpool . submit
with self . __callbacks :
submit = self . __crud_threadpool . submit if type_ in _CB_CRUD_TYPES else plain_submit
for func , serialised_if_crud in self . __callbacks [ type_ ] :
called = True
# allow CRUD callbacks to not be serialised i... |
def create_variable ( descriptor ) :
"""Creates a variable from a dictionary descriptor""" | if descriptor [ 'type' ] == 'continuous' :
return ContinuousVariable ( descriptor [ 'name' ] , descriptor [ 'domain' ] , descriptor . get ( 'dimensionality' , 1 ) )
elif descriptor [ 'type' ] == 'bandit' :
return BanditVariable ( descriptor [ 'name' ] , descriptor [ 'domain' ] , descriptor . get ( 'dimensionali... |
def get_project ( self , projectname ) :
"""Get the project details from Slurm .""" | cmd = [ "list" , "accounts" , "where" , "name=%s" % projectname ]
results = self . _read_output ( cmd )
if len ( results ) == 0 :
return None
elif len ( results ) > 1 :
logger . error ( "Command returned multiple results for '%s'." % projectname )
raise RuntimeError ( "Command returned multiple results for ... |
def load_pdb ( pdb , path = True , pdb_id = '' , ignore_end = False ) :
"""Converts a PDB file into an AMPAL object .
Parameters
pdb : str
Either a path to a PDB file or a string containing PDB
format structural data .
path : bool , optional
If ` true ` , flags ` pdb ` as a path and not a PDB string .
... | pdb_p = PdbParser ( pdb , path = path , pdb_id = pdb_id , ignore_end = ignore_end )
return pdb_p . make_ampal ( ) |
def _pick_lead_item ( items ) :
"""Choose lead item for a set of samples .
Picks tumors for tumor / normal pairs and first sample for batch groups .""" | paired = vcfutils . get_paired ( items )
if paired :
return paired . tumor_data
else :
return list ( items ) [ 0 ] |
def _save_or_delete_workflow ( self ) :
"""Calls the real save method if we pass the beggining of the wf""" | if not self . current . task_type . startswith ( 'Start' ) :
if self . current . task_name . startswith ( 'End' ) and not self . are_we_in_subprocess ( ) :
self . wf_state [ 'finished' ] = True
self . wf_state [ 'finish_date' ] = datetime . now ( ) . strftime ( settings . DATETIME_DEFAULT_FORMAT )
... |
async def update ( self , db = None , data = None ) :
'''Update the entire document by replacing its content with new data , retaining its primary key''' | db = db or self . db
if data : # update model explicitely with a new data structure
# merge the current model ' s data with the new data
self . import_data ( data )
# prepare data for database update
data = self . prepare_data ( )
# data = { x : ndata [ x ] for x in ndata if x in data or x = = self . pr... |
def channels_history ( self , room_id , ** kwargs ) :
"""Retrieves the messages from a channel .""" | return self . __call_api_get ( 'channels.history' , roomId = room_id , kwargs = kwargs ) |
def parse_model ( self , model ) :
"""Split the given model _ name into controller and model parts .
If the controller part is empty , the current controller will be used .
If the model part is empty , the current model will be used for
the controller .
The returned model name will always be qualified with ... | # TODO if model is empty , use $ JUJU _ MODEL environment variable .
if model and ':' in model : # explicit controller given
controller_name , model_name = model . split ( ':' )
else : # use the current controller if one isn ' t explicitly given
controller_name = self . current_controller ( )
model_name = m... |
def next_fat ( self , current ) :
"""Helper gives you seekable position of next FAT sector . Should not be
called from external code .""" | sector_size = self . header . sector_size // 4
block = current // sector_size
difat_position = 76
if block >= 109 :
block -= 109
sector = self . header . difat_sector_start
while block >= sector_size :
position = ( sector + 1 ) << self . header . sector_shift
position += self . header . sect... |
def call ( cmd , stdout = PIPE , stderr = PIPE , on_error = 'raise' , ** kwargs ) :
"""Call out to the shell using ` subprocess . Popen `
Parameters
stdout : ` file - like ` , optional
stream for stdout
stderr : ` file - like ` , optional
stderr for stderr
on _ error : ` str ` , optional
what to do wh... | if isinstance ( cmd , ( list , tuple ) ) :
cmdstr = ' ' . join ( cmd )
kwargs . setdefault ( 'shell' , False )
else :
cmdstr = str ( cmd )
kwargs . setdefault ( 'shell' , True )
proc = Popen ( cmd , stdout = stdout , stderr = stderr , ** kwargs )
out , err = proc . communicate ( )
if proc . returncode :... |
def rows ( self ) -> List [ List [ str ] ] :
"""Returns the table rows .""" | return [ list ( d . values ( ) ) for d in self . data ] |
def get_status ( self , instance ) :
"""Retrives a status of a field from cache . Fields in state ' error ' and
' complete ' will not retain the status after the call .""" | status_key , status = self . _get_status ( instance )
if status [ 'state' ] in [ 'complete' , 'error' ] :
cache . delete ( status_key )
return status |
def _gettype ( self ) :
'''Return current type of this struct
: returns : a typedef object ( e . g . nstruct )''' | current = self
lastname = getattr ( current . _parser , 'typedef' , None )
while hasattr ( current , '_sub' ) :
current = current . _sub
tn = getattr ( current . _parser , 'typedef' , None )
if tn is not None :
lastname = tn
return lastname |
def extend_regex2(regexpr, reflags=0):
    """Like ``extend_regex``, but also preprocesses inline flags.

    Supports the vim-style ``\\c`` prefix, which forces a
    case-insensitive match.

    :returns: tuple of (processed regex string, combined re flags)
    """
    regexpr = extend_regex(regexpr)
    prefix = '\\c'
    if regexpr.startswith(prefix):
        # strip the vim-like ignore-case marker and set the flag instead
        regexpr = regexpr[len(prefix):]
        reflags |= re.IGNORECASE
    return regexpr, reflags
def get_numpy_array ( self ) :
"""Dump this color into NumPy array .""" | # This holds the obect ' s spectral data , and will be passed to
# numpy . array ( ) to create a numpy array ( matrix ) for the matrix math
# that will be done during the conversion to XYZ .
values = [ ]
# Use the required value list to build this dynamically . Default to
# 0.0 , since that ultimately won ' t affect th... |
def detect_missing_relations ( self , obj , exc ) :
"""Parse error messages and collect the missing - relationship errors
as a dict of Resource - > { id set }""" | missing = defaultdict ( set )
for name , err in exc . error_dict . items ( ) : # check if it was a relationship that doesnt exist locally
pattern = r".+ with id (\d+) does not exist.+"
m = re . match ( pattern , str ( err ) )
if m :
field = obj . _meta . get_field ( name )
res = self . get_r... |
def local_fehdist(feh):
    """[Fe/H] PDF based on the local SDSS distribution.

    Two-Gaussian fit based on Casagrande (2011); from Jo Bovy:
    https://github.com/jobovy/apogee/blob/master/apogee/util/__init__.py#L3
    """
    def gaussian_term(weight, mean, sigma):
        # un-normalized Gaussian with peak amplitude weight / sigma
        return weight / sigma * np.exp(-0.5 * ((feh - mean) / sigma) ** 2)

    return gaussian_term(0.8, 0.016, 0.15) + gaussian_term(0.2, -0.15, 0.22)
def get_data(self, path, **params):
    """Fetch a service path and return the API's XML response parsed as
    a dict structure.

    Keyword arguments are forwarded as request parameters.
    """
    xml = self.get_response(path, **params)
    try:
        return parse(xml)
    except Exception as err:
        # Dump the request details before re-raising to ease debugging.
        print(path)
        print(params)
        print(err)
        raise
def on_edited_dataframe_sync ( cell_renderer , iter , new_value , column , df_py_dtypes , list_store , df_data ) :
'''Handle the ` ' edited ' ` signal from a ` gtk . CellRenderer ` to :
* Update the corresponding entry in the list store .
* Update the corresponding entry in the provided data frame instance .
... | # Extract name of column ( name of TreeView column must match data frame
# column name ) .
column_name = column . get_name ( )
# Look up the list store column index and data type for column .
i , dtype = df_py_dtypes . ix [ column_name ]
# Update the list store with the new value .
if dtype == float :
value = si_pa... |
def _makeTextWidgets(self):
    """Build the text widget for this element and return it in a list."""
    widget = urwid.Text(self.text)
    self.textWidget = widget
    return [widget]
def message_text ( self , m_data ) :
'''Raises ValueError if a value doesn ' t work out , and TypeError if
this isn ' t a message type''' | if m_data . get ( 'type' ) != 'message' :
raise TypeError ( 'This is not a message' )
# Edited messages have text in message
_text = m_data . get ( 'text' , None ) or m_data . get ( 'message' , { } ) . get ( 'text' , None )
try :
log . info ( 'Message is %s' , _text )
# this can violate the ascii codec
exce... |
def generate_patches ( self ) :
"""Generates a list of patches for each file underneath
self . root _ directory
that satisfy the given conditions given
query conditions , where patches for
each file are suggested by self . suggestor .""" | start_pos = self . start_position or Position ( None , None )
end_pos = self . end_position or Position ( None , None )
path_list = Query . _walk_directory ( self . root_directory )
path_list = Query . _sublist ( path_list , start_pos . path , end_pos . path )
path_list = ( path for path in path_list if Query . _path_l... |
def get_disk_usage ( self , path = None ) :
"""Return the total , used and free disk space in bytes as named tuple ,
or placeholder values simulating unlimited space if not set .
. . note : : This matches the return value of shutil . disk _ usage ( ) .
Args :
path : The disk space is returned for the file s... | DiskUsage = namedtuple ( 'usage' , 'total, used, free' )
if path is None :
mount_point = self . mount_points [ self . root . name ]
else :
mount_point = self . _mount_point_for_path ( path )
if mount_point and mount_point [ 'total_size' ] is not None :
return DiskUsage ( mount_point [ 'total_size' ] , mount... |
def false_negatives(links_true, links_pred):
    """Count the number of False Negatives (FN).

    A false negative is a true link that was predicted as a non-link.

    Parameters
    ----------
    links_true : pandas.MultiIndex
        The true links.
    links_pred : pandas.MultiIndex
        The predicted links.

    Returns
    -------
    int
        The number of true links missing from the predictions.
    """
    true_index = _get_multiindex(links_true)
    pred_index = _get_multiindex(links_pred)
    missed = true_index.difference(pred_index)
    return len(missed)
def force_bytes(bytes_or_unicode, encoding='utf-8', errors='backslashreplace'):
    """Convert the passed string type to bytes, if necessary.

    Values that are already ``bytes`` are returned unchanged; anything
    else is encoded with the given encoding and error handler.
    """
    already_bytes = isinstance(bytes_or_unicode, bytes)
    return bytes_or_unicode if already_bytes else bytes_or_unicode.encode(encoding, errors)
def det_curve ( self , cost_miss = 100 , cost_fa = 1 , prior_target = 0.01 , return_latency = False ) :
"""DET curve
Parameters
cost _ miss : float , optional
Cost of missed detections . Defaults to 100.
cost _ fa : float , optional
Cost of false alarms . Defaults to 1.
prior _ target : float , optional... | if self . latencies is None :
y_true = np . array ( [ trial [ 'target' ] for _ , trial in self ] )
scores = np . array ( [ trial [ 'score' ] for _ , trial in self ] )
fpr , fnr , thresholds , eer = det_curve ( y_true , scores , distances = False )
fpr , fnr , thresholds = fpr [ : : - 1 ] , fnr [ : : - 1... |
def return_future(fn):
    """Decorator that turns a synchronous function into one returning a future.

    Apply only to non-blocking functions. The wrapper resolves the
    future with the return value, or with the exception info if an
    exception is raised.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        return gen.maybe_future(fn(*args, **kwargs))
    return wrapper
def __get_state_by_id(cls, job_id):
    """Get job state by id.

    Args:
      job_id: job id.

    Returns:
      model.MapreduceState for the job.

    Raises:
      ValueError: if the job state is missing.
    """
    job_state = model.MapreduceState.get_by_job_id(job_id)
    if job_state is None:
        raise ValueError("Job state for job %s is missing." % job_id)
    return job_state
def require_condition(cls, expr, message, *format_args, **format_kwds):
    """Assert a certain state: raise this exception type if ``expr`` is falsy.

    :param expr: a value evaluated for truthiness
    :param message: the failure message to attach to the raised Buzz
    :param format_args: positional args forwarded to the exception
    :param format_kwds: keyword args forwarded to the exception
    """
    if expr:
        return
    raise cls(message, *format_args, **format_kwds)
def get_next_action ( self , request , application , roles ) :
"""Retrieve the next state .""" | # Check for serious errors in submission .
# Should only happen in rare circumstances .
errors = application . check_valid ( )
if len ( errors ) > 0 :
for error in errors :
messages . error ( request , error )
return 'error'
# approve application
approved_by = request . user
created_person , created_acc... |
def removeProdable ( self , prodable : Prodable = None , name : str = None ) -> Optional [ Prodable ] :
"""Remove the specified Prodable object from this Looper ' s list of Prodables
: param prodable : the Prodable to remove""" | if prodable :
self . prodables . remove ( prodable )
return prodable
elif name :
for p in self . prodables :
if hasattr ( p , "name" ) and getattr ( p , "name" ) == name :
prodable = p
break
if prodable :
self . prodables . remove ( prodable )
return proda... |
def where ( self , other , cond , align = True , errors = 'raise' , try_cast = False , axis = 0 , transpose = False ) :
"""evaluate the block ; return result block ( s ) from the result
Parameters
other : a ndarray / object
cond : the condition to respect
align : boolean , perform alignment on other / cond ... | import pandas . core . computation . expressions as expressions
assert errors in [ 'raise' , 'ignore' ]
values = self . values
orig_other = other
if transpose :
values = values . T
other = getattr ( other , '_values' , getattr ( other , 'values' , other ) )
cond = getattr ( cond , 'values' , cond )
# If the default... |
def _server_connect ( self , s ) :
"""Sets up a TCP connection to the server .""" | self . _socket = socket . socket ( socket . AF_INET , socket . SOCK_STREAM )
self . _socket . setblocking ( 0 )
self . _socket . settimeout ( 1.0 )
if self . options [ "tcp_nodelay" ] :
self . _socket . setsockopt ( socket . IPPROTO_TCP , socket . TCP_NODELAY , 1 )
self . io = tornado . iostream . IOStream ( self .... |
def from_env ( parser_modules : t . Optional [ t . Union [ t . List [ str ] , t . Tuple [ str ] ] ] = DEFAULT_PARSER_MODULES , env : t . Optional [ t . Dict [ str , str ] ] = None , silent : bool = False , suppress_logs : bool = False , extra : t . Optional [ dict ] = None ) -> 'ConfigLoader' :
"""Creates an instan... | env = env or os . environ
extra = extra or { }
environment_parser = EnvironmentParser ( scope = 'config' , env = env )
silent = environment_parser . get ( 'silent' , silent , coerce_type = bool )
suppress_logs = environment_parser . get ( 'suppress_logs' , suppress_logs , coerce_type = bool )
env_parsers = environment_... |
def outlook ( self , qs ) :
"""CSV format suitable for importing into outlook""" | csvf = writer ( sys . stdout )
columns = [ 'Name' , 'E-mail Address' , 'Notes' , 'E-mail 2 Address' , 'E-mail 3 Address' , 'Mobile Phone' , 'Pager' , 'Company' , 'Job Title' , 'Home Phone' , 'Home Phone 2' , 'Home Fax' , 'Home Address' , 'Business Phone' , 'Business Phone 2' , 'Business Fax' , 'Business Address' , 'Oth... |
def traverse ( self ) :
"""Traverse proposal kernel""" | if self . verbose > 1 :
print_ ( '\t' + self . _id + ' Running Traverse proposal kernel' )
# Mask for values to move
phi = self . phi
theta = self . traverse_theta
# Calculate beta
if ( random ( ) < ( theta - 1 ) / ( 2 * theta ) ) :
beta = exp ( 1 / ( theta + 1 ) * log ( random ( ) ) )
else :
beta = exp ( 1... |
def get_tabular_rows(self, url, dict_rows=False, **kwargs):
    # type: (str, bool, Any) -> Iterator[Dict]
    """Get an iterator for reading rows from tabular data.

    Args:
        url (str): URL to download.
        dict_rows (bool): Return each row as a dict (requires a headers
            parameter) rather than as a list. Defaults to False.
        **kwargs: Forwarded to ``get_tabular_stream``.

    Returns:
        Iterator over the rows.
    """
    stream = self.get_tabular_stream(url, **kwargs)
    return stream.iter(keyed=dict_rows)
def params(self, **kwargs):
    """Return a clone with query params to be used when executing the search.

    All keyword arguments override the current values. See
    https://elasticsearch-py.readthedocs.io/en/master/api.html#elasticsearch.Elasticsearch.search
    for all available parameters.
    """
    clone = self._clone()
    clone._params.update(kwargs)
    return clone
def join_where(self, table, one, operator, two, type='inner'):
    """Add a "join where" clause to the query.

    :param table: The table to join with, can also be a JoinClause instance
    :type table: str or JoinClause
    :param one: The first column of the join condition
    :type one: str
    :param operator: The comparison operator of the join condition
    :param two: The second column of the join condition
    :param type: The join type, defaults to 'inner'

    :returns: the result of the underlying join
    """
    # NOTE(review): the trailing True presumably flags this as a
    # "where" join in join() -- confirm against join()'s signature.
    return self.join(table, one, operator, two, type, True)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.