signature stringlengths 29 44.1k | implementation stringlengths 0 85.2k |
|---|---|
def anglesep_meeus(lon0: float, lat0: float, lon1: float, lat1: float, deg: bool = True) -> float:
    """Angular separation between two points on a sphere (Meeus' haversine method).

    Parameters
    ----------
    lon0, lat0 : float or numpy.ndarray of float
        longitude and latitude of the first point
    lon1, lat1 : float or numpy.ndarray of float
        longitude and latitude of the second point
    deg : bool
        if True, inputs and the returned separation are in degrees

    Returns
    -------
    float or numpy.ndarray of float
        angular separation between the two points
    """
    if deg:
        lon0, lat0 = radians(lon0), radians(lat0)
        lon1, lat1 = radians(lon1), radians(lat1)
    # hav(sep) = hav(dlat) + cos(lat0) * cos(lat1) * hav(dlon)
    hav_sep = haversine(lat0 - lat1) + cos(lat0) * cos(lat1) * haversine(lon0 - lon1)
    sep_rad = 2 * arcsin(sqrt(hav_sep))
    return degrees(sep_rad) if deg else sep_rad
def create ( self , name , ** params ) :
"""Creates a new entity in this collection .
This function makes either one or two roundtrips to the
server , depending on the type of entities in this
collection , plus at most two more if
the ` ` autologin ` ` field of : func : ` connect ` is set to ` ` True ` ` . ... | if not isinstance ( name , basestring ) :
raise InvalidNameException ( "%s is not a valid name for an entity." % name )
if 'namespace' in params :
namespace = params . pop ( 'namespace' )
params [ 'owner' ] = namespace . owner
params [ 'app' ] = namespace . app
params [ 'sharing' ] = namespace . sha... |
def versatile_options():
    """Return a tuple of option names that can be changed at any time
    (not only at initialization); the list might not be entirely up
    to date.

    An option is 'versatile' when its default value contains the
    marker string ' #v '.
    """
    defaults = CMAOptions.defaults()
    versatile = [name for name, value in defaults.items() if value.find(' #v ') > 0]
    return tuple(sorted(versatile))
def get_binding(self, schema, data):
    """For a given schema, get a binding mediator providing links to the
    RDF terms matching that schema."""
    resolved = self.parent.get_schema(schema)
    return Binding(resolved, self.parent.resolver, data=data)
def getValue(self):
    """Return the PromisedRequirement value by unpickling the stored
    function and calling it with the stored arguments."""
    fn = dill.loads(self._func)
    return fn(*self._args)
def random_line_data(chars_per_line=80):
    """Create a line of random ASCII letters.

    Args:
        chars_per_line: how many characters to return.
    Returns:
        A string of ``chars_per_line`` randomly chosen letters.
    """
    letters = __string.ascii_letters
    return ''.join(__random.choice(letters) for _ in range(chars_per_line))
def remove_all ( self , locator ) :
"""Removes all component references that match the specified locator .
: param locator : a locator to remove reference by .
: return : a list , containing all removed references .""" | components = [ ]
if locator == None :
return components
self . _lock . acquire ( )
try :
for reference in reversed ( self . _references ) :
if reference . match ( locator ) :
self . _references . remove ( reference )
components . append ( reference . get_component ( ) )
finally :... |
def getMonitorByName ( self , monitorFriendlyName ) :
"""Returns monitor status and alltimeuptimeratio for a MonitorFriendlyName .""" | url = self . baseUrl
url += "getMonitors?apiKey=%s" % self . apiKey
url += "&noJsonCallback=1&format=json"
success , response = self . requestApi ( url )
if success :
monitors = response . get ( 'monitors' ) . get ( 'monitor' )
for i in range ( len ( monitors ) ) :
monitor = monitors [ i ]
if mo... |
def run_subprocess ( command , return_code = False , ** kwargs ) :
"""Run command using subprocess . Popen
Run command and wait for command to complete . If the return code was zero
then return , otherwise raise CalledProcessError .
By default , this will also add stdout = and stderr = subproces . PIPE
to t... | # code adapted with permission from mne - python
use_kwargs = dict ( stderr = subprocess . PIPE , stdout = subprocess . PIPE )
use_kwargs . update ( kwargs )
p = subprocess . Popen ( command , ** use_kwargs )
output = p . communicate ( )
# communicate ( ) may return bytes , str , or None depending on the kwargs
# passe... |
def script ( container , script_path , fail_nonzero = False , upload_dir = False , ** kwargs ) :
"""Runs a script inside a container , which is created with all its dependencies . The container is removed after it
has been run , whereas the dependencies are not destroyed . The output is printed to the console .
... | full_script_path = os . path . abspath ( script_path )
prefix , name = os . path . split ( full_script_path )
with temp_dir ( ) as remote_tmp :
if upload_dir :
prefix_path , prefix_name = os . path . split ( prefix )
remote_script = posixpath . join ( remote_tmp , prefix_name , name )
put ( ... |
def match_replace_binary ( cls , ops , kwargs ) :
"""Similar to func : ` match _ replace ` , but for arbitrary length operations ,
such that each two pairs of subsequent operands are matched pairwise .
> > > A = wc ( " A " )
> > > class FilterDupes ( Operation ) :
. . . _ binary _ rules = {
. . . ' filter... | assert assoc in cls . simplifications , ( cls . __name__ + " must be associative to use match_replace_binary" )
assert hasattr ( cls , '_neutral_element' ) , ( cls . __name__ + " must define a neutral element to use " "match_replace_binary" )
fops = _match_replace_binary ( cls , list ( ops ) )
if len ( fops ) == 1 :
... |
def snap(self, instruction):
    """Return a new MayaDT object modified by the given instruction.

    Powered by snaptime; see https://github.com/zartstrom/snaptime
    for complete documentation of the instruction syntax."""
    snapped = snaptime.snap(self.datetime(), instruction)
    return self.from_datetime(snapped)
def read(self, size=None):
    """Return the next ``size`` bytes from the stream.

    If ``size`` is None, return all bytes of the stream up to EOF.
    Advances ``self.pos`` by the number of bytes actually returned.
    """
    if size is None:
        # Drain the underlying stream in bufsize chunks; join once at the end.
        chunks = []
        chunk = self._read(self.bufsize)
        while chunk:
            chunks.append(chunk)
            chunk = self._read(self.bufsize)
        buf = "".join(chunks)
    else:
        buf = self._read(size)
    self.pos += len(buf)
    return buf
def get_slo_url(self):
    """Gets the SLO URL.

    :returns: the SLO endpoint of the IdP, or None when the IdP
        settings do not define a single-logout service URL
    :rtype: string or None
    """
    idp_data = self.__settings.get_idp_data()
    # Idiom fix: membership test directly on the dict instead of the
    # redundant (and, on py2, O(n)) ``in idp_data.keys()``.
    slo_service = idp_data.get('singleLogoutService', {})
    if 'url' in slo_service:
        return slo_service['url']
    return None
def SetValue ( self , Channel , Parameter , Buffer ) :
"""Returns a descriptive text of a given TPCANStatus error
code , in any desired language
Remarks :
Parameters can be present or not according with the kind
of Hardware ( PCAN Channel ) being used . If a parameter is not available ,
a PCAN _ ERROR _ I... | try :
if Parameter == PCAN_LOG_LOCATION or Parameter == PCAN_LOG_TEXT or Parameter == PCAN_TRACE_LOCATION :
mybuffer = create_string_buffer ( 256 )
else :
mybuffer = c_int ( 0 )
mybuffer . value = Buffer
res = self . __m_dllBasic . CAN_SetValue ( Channel , Parameter , byref ( mybuffer ) ... |
def _is_visible ( cls , property_name ) :
"""private method to check visible object property to be visible""" | if isinstance ( property_name , list ) :
return [ cls . _is_visible ( p ) for p in property_name ]
if property_name . startswith ( '__' ) and property_name . endswith ( '__' ) :
return False
return property_name . startswith ( cls . STARTS_WITH ) and property_name . endswith ( cls . ENDS_WITH ) |
def status ( self , migration_rqst_id = "" , block_name = "" , dataset = "" , user = "" ) :
"""Interface to query status of a migration request
In this preference order of input parameters :
migration _ rqst _ id , block , dataset , user
( if multi parameters are provided , only the precedence order is follow... | try :
return self . dbsMigrate . listMigrationRequests ( migration_rqst_id , block_name , dataset , user )
except dbsException as de :
dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . serverError )
except Exception as ex :
sError = "DBSMigrateModle/status. %s\n Exception tr... |
def node_label_absent ( name , node , ** kwargs ) :
'''Ensures that the named label is absent from the node .
name
The name of the label
node
The name of the node''' | ret = { 'name' : name , 'changes' : { } , 'result' : False , 'comment' : '' }
labels = __salt__ [ 'kubernetes.node_labels' ] ( node , ** kwargs )
if name not in labels :
ret [ 'result' ] = True if not __opts__ [ 'test' ] else None
ret [ 'comment' ] = 'The label does not exist'
return ret
if __opts__ [ 'test... |
def windspeed ( self , t ) :
"""Return the wind speed list at time ` t `""" | ws = [ 0 ] * self . n
for i in range ( self . n ) :
q = ceil ( t / self . dt [ i ] )
q_prev = 0 if q == 0 else q - 1
r = t % self . dt [ i ]
r = 0 if abs ( r ) < 1e-6 else r
if r == 0 :
ws [ i ] = self . speed [ i ] [ q ]
else :
t1 = self . time [ i ] [ q_prev ]
s1 = self... |
def index ( request , template_name = "tagging_ext/index.html" , min_size = 0 , limit = 10 ) :
"""min _ size : Smallest size count accepted for a tag
order _ by : asc or desc by count
limit : maximum number of tags to display
TODO : convert the hand - written query to an ORM call . Right now I know
this wor... | query = """
SELECT tag_item.tag_id as tag_id, COUNT(tag_item.tag_id) as counter
FROM tagging_taggeditem as tag_item
GROUP BY tag_id
HAVING COUNT(tag_item.tag_id) > %s
ORDER BY counter desc
LIMIT %s
"""
cursor = connection . cursor ( )
cursor . execute ( query , [ mi... |
def make_list ( args ) :
"""Generates . lst file .
Parameters
args : object that contains all the arguments""" | image_list = list_image ( args . root , args . recursive , args . exts )
image_list = list ( image_list )
if args . shuffle is True :
random . seed ( 100 )
random . shuffle ( image_list )
N = len ( image_list )
chunk_size = ( N + args . chunks - 1 ) // args . chunks
for i in range ( args . chunks ) :
chunk ... |
def reshape_line_plot(df, x, y):
    """Reshape data from long form to "line plot form".

    Line plot form has the x value as the index with one column per
    line; each column holds the data points and all metadata becomes
    column headers."""
    index_cols = list(df.columns.drop(y))
    if df.duplicated(index_cols).any():
        warnings.warn('Duplicated index found.')
        df = df.drop_duplicates(index_cols, keep='last')
    return df.set_index(index_cols)[y].unstack(x).T
def computePWCorrelations ( spikeTrains , removeAutoCorr ) :
"""Computes pairwise correlations from spikeTrains
@ param spikeTrains ( array ) spike trains obtained from the activation of cells in the TM
the array dimensions are : numCells x timeSteps
@ param removeAutoCorr ( boolean ) if true , auto - correla... | numCells = np . shape ( spikeTrains ) [ 0 ]
corrMatrix = np . zeros ( ( numCells , numCells ) )
numNegPCC = 0
for i in range ( numCells ) :
for j in range ( numCells ) :
if i == j and removeAutoCorr == True :
continue
if not all ( spikeTrains [ i , : ] == 0 ) and not all ( spikeTrains [ ... |
def cli ( env , identifier ) :
"""Cancel a dedicated host server immediately""" | mgr = SoftLayer . DedicatedHostManager ( env . client )
host_id = helpers . resolve_id ( mgr . resolve_ids , identifier , 'dedicated host' )
if not ( env . skip_confirmations or formatting . no_going_back ( host_id ) ) :
raise exceptions . CLIAbort ( 'Aborted' )
mgr . cancel_host ( host_id )
click . secho ( 'Dedica... |
def ggplot2_style ( ax ) :
"""Styles an axes to appear like ggplot2
Must be called after all plot and axis manipulation operations have been
carried out ( needs to know final tick spacing )""" | # set the style of the major and minor grid lines , filled blocks
ax . grid ( True , 'major' , color = 'w' , linestyle = '-' , linewidth = 1.4 )
ax . grid ( True , 'minor' , color = '0.92' , linestyle = '-' , linewidth = 0.7 )
ax . patch . set_facecolor ( '0.85' )
ax . set_axisbelow ( True )
# set minor tick spacing to... |
def accelerated_dtw ( x , y , dist , warp = 1 ) :
"""Computes Dynamic Time Warping ( DTW ) of two sequences in a faster way .
Instead of iterating through each element and calculating each distance ,
this uses the cdist function from scipy ( https : / / docs . scipy . org / doc / scipy / reference / generated /... | assert len ( x )
assert len ( y )
if ndim ( x ) == 1 :
x = x . reshape ( - 1 , 1 )
if ndim ( y ) == 1 :
y = y . reshape ( - 1 , 1 )
r , c = len ( x ) , len ( y )
D0 = zeros ( ( r + 1 , c + 1 ) )
D0 [ 0 , 1 : ] = inf
D0 [ 1 : , 0 ] = inf
D1 = D0 [ 1 : , 1 : ]
D0 [ 1 : , 1 : ] = cdist ( x , y , dist )
C = D1 . co... |
def annotate_bed ( self , bt , name , col_name , complete = None , df_col = None , ) :
"""Annotate the input bed file using one of the annotation beds .
Parameters
bt : pybedtools . BedTool
BedTool for either one of the anchors , the loops ,
or the loop inners .
name : str
The key for the annoation bed ... | import numpy as np
import pandas as pd
has_name_col = len ( self . annot_beds [ name ] [ 0 ] . fields ) > 3
print ( 'one' )
if complete :
res = bt . intersect ( self . annot_beds [ name ] , sorted = True , wo = True , F = 1 )
else :
res = bt . intersect ( self . annot_beds [ name ] , sorted = True , wo = True )... |
def new_feed(self, name: str, layer_shape: tuple):
    """Creates a feed layer. This is usually the first layer in the network.

    :param name: name of the layer
    :param layer_shape: shape of the float32 placeholder tensor
    :return: None
    """
    placeholder = tf.placeholder(tf.float32, layer_shape, 'input')
    self.__network.add_layer(name, layer_output=placeholder)
def table ( self , datatype = None ) :
"Deprecated method to convert any Element to a Table ." | if config . future_deprecations :
self . param . warning ( "The table method is deprecated and should no " "longer be used. Instead cast the %s to a " "a Table directly." % type ( self ) . __name__ )
if datatype and not isinstance ( datatype , list ) :
datatype = [ datatype ]
from . . element import Table
retur... |
def hasColumn(self, column, recurse=True, flags=0):
    """Return whether or not this column exists within the list of
    columns for this schema.

    :return: <bool>
    """
    known = self.columns(recurse=recurse, flags=flags)
    return column in known
def transform_velocity_array ( array , pos_array , vel , euler , rotation_vel = ( 0 , 0 , 0 ) ) :
"""Transform any Nx3 velocity vector array by adding the center - of - mass ' vel ' ,
accounting for solid - body rotation , and applying an euler transformation .
: parameter array array : numpy array of Nx3 veloc... | trans_matrix = euler_trans_matrix ( * euler )
# v _ { rot , i } = omega x r _ i with omega = rotation _ vel
rotation_component = np . cross ( rotation_vel , pos_array , axisb = 1 )
orbital_component = np . asarray ( vel )
if isinstance ( array , ComputedColumn ) :
array = array . for_computations
new_vel = np . dot... |
def list_presubscriptions(self, **kwargs):
    """Get a list of pre-subscription data.

    :returns: a list of `Presubscription` objects
    :rtype: list of mbed_cloud.presubscription.Presubscription
    """
    api = self._get_api(mds.SubscriptionsApi)
    raw = api.get_pre_subscriptions(**kwargs)
    return [Presubscription(item) for item in raw]
def update ( self , path , node ) :
'''Update the dict with a new color using a ' path ' through the dict . You can either pass an existing path e . g .
' Scaffold . mutations ' to override a color or part of the hierarchy or you can add a new leaf node or dict .''' | assert ( type ( path ) == type ( self . name ) )
assert ( type ( node ) == type ( self . name ) or type ( node ) == type ( predefined ) )
d = self . color_scheme
tokens = path . split ( '.' )
for t in tokens [ : - 1 ] :
d = d . get ( t )
if d == None :
raise Exception ( "Path '%s' not found." )
d [ toke... |
def from_dict ( data , ctx ) :
"""Instantiate a new UnitsAvailable from a dict ( generally from loading a
JSON response ) . The data used to instantiate the UnitsAvailable is a
shallow copy of the dict passed in , with any complex child types
instantiated appropriately .""" | data = data . copy ( )
if data . get ( 'default' ) is not None :
data [ 'default' ] = ctx . order . UnitsAvailableDetails . from_dict ( data [ 'default' ] , ctx )
if data . get ( 'reduceFirst' ) is not None :
data [ 'reduceFirst' ] = ctx . order . UnitsAvailableDetails . from_dict ( data [ 'reduceFirst' ] , ctx... |
def _updateConstructorAndMembers ( self ) :
"""We overwrite constructor and accessors every time because the constructor might have to consume all
members even if their decorator is below the " synthesizeConstructor " decorator and it also might need to update
the getters and setters because the naming conventi... | syntheticMetaData = self . _syntheticMetaData ( )
constructor = self . _constructorFactory . makeConstructor ( syntheticMetaData . originalConstructor ( ) , syntheticMetaData . syntheticMemberList ( ) , syntheticMetaData . doesConsumeArguments ( ) )
self . _class . __init__ = constructor
for syntheticMember in syntheti... |
def two_lorentzian(freq, freq0_1, freq0_2, area1, area2, hwhm1, hwhm2,
                   phase1, phase2, offset, drift):
    """A two-Lorentzian model.

    Simply the sum of two Lorentzian functions in some part of the
    spectrum. Each individual Lorentzian has its own peak frequency,
    area, hwhm and phase; offset and drift are shared."""
    first = lorentzian(freq, freq0_1, area1, hwhm1, phase1, offset, drift)
    second = lorentzian(freq, freq0_2, area2, hwhm2, phase2, offset, drift)
    return first + second
def _extract_methods ( self ) :
"""Obtains the methods used in the service .""" | service = self . _service
all_urls = set ( )
urls_with_options = set ( )
if not service . http :
return
for rule in service . http . rules :
http_method , url = _detect_pattern_option ( rule )
if not url or not http_method or not rule . selector :
_logger . error ( u'invalid HTTP binding encountered... |
async def on_isupport_excepts(self, value):
    """Server allows ban exceptions."""
    # Fall back to the default mode letter when the server sent none.
    mode = value or BAN_EXCEPT_MODE
    self._channel_modes.add(mode)
    self._channel_modes_behaviour[rfc1459.protocol.BEHAVIOUR_LIST].add(mode)
def execution_time(self, value):
    """Force the execution_time to always be a datetime.

    Falsy values (None, empty string, 0) are silently ignored and
    leave the current value untouched.

    :param value: a datetime, or a ``type_check`` instance that
        ``parse`` can convert to one
    :return: None
    """
    # NOTE(review): ``parse`` and ``type_check`` are defined elsewhere;
    # presumably dateutil.parser.parse and a string type — confirm at
    # the import site.
    if value:
        self._execution_time = parse(value) if isinstance(value, type_check) else value
def read ( self , ** keys ) :
"""read a data from an ascii table HDU
By default , all rows are read . Send rows = to select subsets of the
data . Table data are read into a recarray for multiple columns ,
plain array for a single column .
parameters
columns : list / array
An optional set of columns to r... | rows = keys . get ( 'rows' , None )
columns = keys . get ( 'columns' , None )
# if columns is None , returns all . Guaranteed to be unique and sorted
colnums = self . _extract_colnums ( columns )
if isinstance ( colnums , int ) : # scalar sent , don ' t read as a recarray
return self . read_column ( columns , ** ke... |
def run ( self ) :
"""Run install main logic .""" | try :
if not self . _is_rpm_all_lib_include_files_installed ( ) :
self . _make_lib_file_symbolic_links ( )
self . _copy_each_include_files_to_include_dir ( )
self . _make_dep_lib_file_sym_links_and_copy_include_files ( )
self . setup_py . add_patchs_to_build_without_pkg_config ( self... |
def split_data(X, y, ratio=(0.8, 0.1, 0.1)):
    """Splits data into a training, validation, and test set.

    Args:
        X: text data
        y: data labels
        ratio: (train, validation, test) fractions; must have three
            entries summing to 1. Default: (0.8, 0.1, 0.1)
    Returns:
        split data: X_train, X_val, X_test, y_train, y_val, y_test
    Raises:
        ValueError: if ratio is not three fractions summing to 1.
    """
    # Explicit validation instead of assert (asserts vanish under -O),
    # and isclose instead of == (exact float equality fails even for
    # the default ratio due to rounding).
    if len(ratio) != 3 or not math.isclose(sum(ratio), 1.0):
        raise ValueError("ratio must contain three fractions summing to 1")
    X_train, X_rest, y_train, y_rest = train_test_split(X, y, train_size=ratio[0])
    # BUG FIX: the validation fraction must be expressed relative to the
    # remaining data, not the whole data set. Passing ratio[1] directly
    # made the validation split far too small (e.g. 2% of the data
    # instead of 10% with the default ratio).
    val_fraction = ratio[1] / (ratio[1] + ratio[2])
    X_val, X_test, y_val, y_test = train_test_split(X_rest, y_rest, train_size=val_fraction)
    return X_train, X_val, X_test, y_train, y_val, y_test
def ReadClientFullInfo(self, client_id):
    """Reads full client information for a single client.

    Args:
        client_id: A GRR client id string, e.g. "C.ea3b2b71840d6fa7".
    Returns:
        A `ClientFullInfo` instance for the given client.
    Raises:
        UnknownClientError: if no client with such id was found.
    """
    infos = self.MultiReadClientFullInfo([client_id])
    if client_id not in infos:
        raise UnknownClientError(client_id)
    return infos[client_id]
def _makeKey(self, usern):
    """Make a new, probably unique key. This key will be sent in an email to
    the user and is used to access the password change form.

    :param usern: the username the key is generated for
    :return: hex digest of an MD5 hash over (username, current time,
        random float), as unicode

    NOTE(review): Python 2 only (uses ``unicode``). ``random.random()``
    is not cryptographically secure and MD5 is weak; for a
    security-sensitive token consider ``os.urandom``/``secrets`` —
    TODO confirm the threat model before changing.
    """
    return unicode(hashlib.md5(str((usern, time.time(), random.random()))).hexdigest())
def TR ( self , ** kwargs ) : # pragma : no cover
"""NAME :
TR
PURPOSE :
Calculate the radial period for a power - law rotation curve
INPUT :
scipy . integrate . quadrature keywords
OUTPUT :
T _ R ( R , vT , vT ) * vc / ro + estimate of the error
HISTORY :
2010-12-01 - Written - Bovy ( NYU )""" | if hasattr ( self , '_TR' ) :
return self . _TR
( rperi , rap ) = self . calcRapRperi ( ** kwargs )
if nu . fabs ( rap - rperi ) / rap < 10. ** - 4. : # Rough limit
self . _TR = 2. * m . pi / epifreq ( self . _pot , self . _R , use_physical = False )
return self . _TR
Rmean = m . exp ( ( m . log ( rperi ) +... |
def write(self, data):
    '''Write method used by internal tarfile instance to output data.
    This method blocks tarfile execution once internal buffer is full.
    As this method is blocking, it is used inside the same thread of
    :meth:`fill`.

    :param data: bytes to write to internal buffer
    :type data: bytes
    :return: number of bytes written
    :rtype: int
    '''
    # Block until the consumer signals there is room in the buffer.
    self._add.wait()
    self._data += data
    if len(self._data) > self._want:
        # Buffer has enough data: pause further writes and signal the
        # consumer that a result is ready. Order matters: clear the
        # write gate before setting the result event.
        self._add.clear()
        self._result.set()
    return len(data)
def add_forward_workflow ( self , dag , sections , satisfies = None ) :
'''Add a forward - workflow , return number of nodes added''' | dag . new_forward_workflow ( )
if 'DAG' in env . config [ 'SOS_DEBUG' ] or 'ALL' in env . config [ 'SOS_DEBUG' ] :
env . log_to_file ( 'DAG' , f'Adding mini-workflow with {len(sections)} sections' )
default_input : sos_targets = sos_targets ( [ ] )
for idx , section in enumerate ( sections ) :
res = analyze_sec... |
def _find(expr, sub, start=0, end=None):
    """Return lowest indexes in each string in the sequence or scalar
    where the substring is fully contained between [start:end].
    Return -1 on failure. Equivalent to standard str.find().

    :param expr: string sequence or scalar expression
    :param sub: substring being searched
    :param start: left boundary of the search window
    :param end: right boundary of the search window (None = end)
    :return: int64 expression of lowest matching indexes
    """
    window = dict(_sub=sub, _start=start, _end=end)
    return _string_op(expr, Find, output_type=types.int64, **window)
def add_send_last_message(self, connection, send_last_message):
    """Adds a send_last_message function to the Dispatcher's
    dictionary of functions indexed by connection.

    Args:
        connection (str): A locally unique identifier provided by the
            receiver of messages.
        send_last_message (fn): the function to register for that
            connection.
    """
    self._send_last_message[connection] = send_last_message
    LOGGER.debug("Added send_last_message function for connection %s", connection)
def possible_public_pairs_for_signature ( self , value , signature , y_parity = None ) :
""": param : value : an integer value
: param : signature : an ` ` ( r , s ) ` ` pair of integers representing an ecdsa signature of ` ` value ` `
: param : y _ parity : ( optional ) for a given value and signature , there ... | r , s = signature
try :
points = self . points_for_x ( r )
except ValueError :
return [ ]
if y_parity is not None :
if y_parity & 1 :
points = points [ 1 : ]
else :
points = points [ : 1 ]
inv_r = self . inverse ( r )
s_over_r = s * inv_r
minus_E_over_r = - ( inv_r * value ) * self
try :... |
def _error_dm ( self , m , dm , s ) :
"""Error function .
Once self . goal has been defined , compute the error
of input using the generalized forward model .""" | pred = self . fmodel . predict_given_context ( np . hstack ( ( m , dm ) ) , s , range ( len ( s ) ) )
err_v = pred - self . goal
error = sum ( e * e for e in err_v )
return error |
def get_start_time_str(self):
    """
    :return:
        |attr_start_datetime| as |str| formatted with
        |attr_start_time_format|.
        Return |NaT| if the value or the format is invalid.
    :rtype: str
    """
    try:
        formatted = self.start_datetime.strftime(self.start_time_format)
    except AttributeError:
        # start_datetime is None/NaT-like and has no strftime.
        return self.NOT_A_TIME_STR
    return formatted
def linspace2(a, b, n, dtype=None):
    """Similar to numpy.linspace but excluding the boundaries.

    Normal numpy.linspace:
    >>> print(linspace(0, 1, 5))
    [ 0.    0.25  0.5   0.75  1.  ]
    This excludes the boundaries (bin midpoints):
    >>> print(linspace2(0, 1, 5))
    [ 0.1  0.3  0.5  0.7  0.9]
    """
    points = linspace(a, b, n + 1, dtype=dtype)[:-1]
    if len(points) > 1:
        # Shift every point by half a step to land on bin centers.
        half_step = ((points[1] - points[0]) / 2).astype(points.dtype)
        points += half_step
    return points
def listdict_to_listlist_and_matrix(sparse):
    """Transforms the adjacency list representation of a graph
    of type listdict into the listlist + weight matrix representation.

    :param sparse: graph in listdict representation
    :returns: couple with listlist representation, and weight matrix
    :complexity: linear
    """
    n = len(sparse)
    graph = [[] for _ in range(n)]
    weight = [[None] * n for _ in range(n)]
    for u, neighbors in enumerate(sparse):
        for v in neighbors:
            graph[u].append(v)
            weight[u][v] = neighbors[v]
    return graph, weight
def return_file_objects ( connection , container , prefix = 'database' ) :
"""Given connecton and container find database dumps""" | options = [ ]
meta_data = objectstore . get_full_container_list ( connection , container , prefix = 'database' )
env = ENV . upper ( )
for o_info in meta_data :
expected_file = f'database.{ENV}'
if o_info [ 'name' ] . startswith ( expected_file ) :
dt = dateparser . parse ( o_info [ 'last_modified' ] )
... |
def on_add_vrf_conf ( self , evt ) :
"""Event handler for new VrfConf .
Creates a VrfTable to store routing information related to new Vrf .
Also arranges for related paths to be imported to this VrfTable .""" | vrf_conf = evt . value
route_family = vrf_conf . route_family
assert route_family in vrfs . SUPPORTED_VRF_RF
# Create VRF table with given configuration .
vrf_table = self . _table_manager . create_and_link_vrf_table ( vrf_conf )
# Attach VrfConf change listeners .
vrf_conf . add_listener ( ConfWithStats . UPDATE_STATS... |
def zip_offset ( * iterables , offsets , longest = False , fillvalue = None ) :
"""` ` zip ` ` the input * iterables * together , but offset the ` i ` - th iterable
by the ` i ` - th item in * offsets * .
> > > list ( zip _ offset ( ' 0123 ' , ' abcdef ' , offsets = ( 0 , 1 ) ) )
[ ( ' 0 ' , ' b ' ) , ( ' 1 '... | if len ( iterables ) != len ( offsets ) :
raise ValueError ( "Number of iterables and offsets didn't match" )
staggered = [ ]
for it , n in zip ( iterables , offsets ) :
if n < 0 :
staggered . append ( chain ( repeat ( fillvalue , - n ) , it ) )
elif n > 0 :
staggered . append ( islice ( it ... |
def seek(self, pos=0):
    """Set the stream's file pointer to pos.

    Negative (backward) seeking is forbidden and raises StreamError.
    """
    delta = pos - self.pos
    if delta < 0:
        raise StreamError("seeking backwards is not allowed")
    # Skip forward by reading and discarding whole buffers, then the rest.
    full_buffers, remainder = divmod(delta, self.bufsize)
    for _ in range(full_buffers):
        self.read(self.bufsize)
    self.read(remainder)
    return self.pos
def insert_column ( self , data_array , ckey = 'temp' , index = None ) :
"""This will insert / overwrite a new column and fill it with the data from the
the supplied array .
Parameters
data _ array
Data ; can be a list , but will be converted to numpy array
ckey
Name of the column ; if an integer is sup... | # if it ' s an integer , use the ckey from the list
if type ( ckey ) in [ int , int ] :
ckey = self . ckeys [ ckey ]
# append / overwrite the column value
self . columns [ ckey ] = _n . array ( data_array )
if not ckey in self . ckeys :
if index is None :
self . ckeys . append ( ckey )
else :
... |
def list_conversions ( api_key , api_secret , video_key , ** kwargs ) :
"""Function which retrieves a list of a video object ' s conversions .
: param api _ key : < string > JWPlatform api - key
: param api _ secret : < string > JWPlatform shared - secret
: param video _ key : < string > Video ' s object ID .... | jwplatform_client = jwplatform . Client ( api_key , api_secret )
logging . info ( "Querying for video conversions." )
try :
response = jwplatform_client . videos . conversions . list ( video_key = video_key , ** kwargs )
except jwplatform . errors . JWPlatformError as e :
logging . error ( "Encountered an error... |
def get_tdata ( t_format , files ) :
"""Get the time information from file names
Parameters
t _ format : str
The string that can be used to get the time information in the files .
Any numeric datetime format string ( e . g . % Y , % m , % H ) can be used , but
not non - numeric strings like % b , etc . Se... | def median ( arr ) :
return arr . min ( ) + ( arr . max ( ) - arr . min ( ) ) / 2
import re
from pandas import Index
t_pattern = t_format
for fmt , patt in t_patterns . items ( ) :
t_pattern = t_pattern . replace ( fmt , patt )
t_pattern = re . compile ( t_pattern )
time = list ( range ( len ( files ) ) )
for i... |
def get_sigma_mu_adjustment ( self , C , imt , rup , dists ) :
"""Returns the sigma mu adjustment factor""" | if imt . name in "PGA PGV" : # PGA and PGV are 2D arrays of dimension [ nmags , ndists ]
sigma_mu = getattr ( self , imt . name . lower ( ) )
if rup . mag <= self . mags [ 0 ] :
sigma_mu_m = sigma_mu [ 0 , : ]
elif rup . mag >= self . mags [ - 1 ] :
sigma_mu_m = sigma_mu [ - 1 , : ]
else... |
def get ( self , path ) :
"""get renders the notebook template if a name is given , or
redirects to the ' / files / ' handler if the name is not given .""" | path = path . strip ( '/' )
self . log . info ( 'Appmode get: %s' , path )
# Abort if the app path is not below configured trusted _ path .
if not path . startswith ( self . trusted_path ) :
self . log . warn ( 'Appmode refused to launch %s outside trusted path %s.' , path , self . trusted_path )
raise web . HT... |
def delete_asset(self):
    """Delete asset from the release.

    :rtype: bool
    """
    headers, data = self._requester.requestJsonAndCheck("DELETE", self.url)
    # Response payload is unused; the call is made for its side effect.
    return True
def load_js ( js_url = None , version = '5.2.0' ) :
"""Load Dropzone ' s js resources with given version .
. . versionadded : : 1.4.4
: param js _ url : The JS url for Dropzone . js .
: param version : The version of Dropzone . js .""" | js_filename = 'dropzone.min.js'
serve_local = current_app . config [ 'DROPZONE_SERVE_LOCAL' ]
if serve_local :
js = '<script src="%s"></script>\n' % url_for ( 'dropzone.static' , filename = js_filename )
else :
js = '<script src="https://cdn.jsdelivr.net/npm/dropzone@%s/dist/%s"></script>\n' % ( version , js_fi... |
def step_next(self):
    """Go to the next step."""
    # NOTE(review): the new window start adds window_length DIVIDED BY
    # window_step. That may be intentional (fractional stepping), but it
    # reads like it was meant to be "+ window_step" or a multiplication;
    # confirm against the overview widget's semantics before changing.
    window_start = around(self.parent.value('window_start') + self.parent.value('window_length') / self.parent.value('window_step'), 2)
    self.parent.overview.update_position(window_start)
def new ( cls , username , password = None , email = None , first_name = "" , last_name = "" , login_method = None , role = "MEMBER" ) :
"""Create a new user
: param username : str
: param password : str
: param email : str
: param first _ name : str
: param last _ name : str
: param login _ method : st... | data = { "first_name" : first_name , "last_name" : last_name , "email" : email }
if not password :
password = utils . generate_random_string ( )
username = username . strip ( ) . lower ( )
if "@" in username and not email :
if not utils . is_email_valid ( username ) :
exceptions . AuthError ( _ ( "Inval... |
def readPrefs_dms_tools_format ( f ) :
"""Reads the amino - acid preferences written by ` dms _ tools v1 < http : / / jbloomlab . github . io / dms _ tools / > ` _ .
This is an exact copy of the same code from
` dms _ tools . file _ io . ReadPreferences ` . It is copied because
` dms _ tools v1 < http : / / j... | charmatch = re . compile ( '^PI_([A-z\*\-]+)$' )
if isinstance ( f , str ) :
f = open ( f )
lines = f . readlines ( )
f . close ( )
else :
lines = f . readlines ( )
characters = [ ]
sites = [ ]
wts = { }
pi_means = { }
pi_95credint = { }
h = { }
for line in lines :
if line . isspace ( ) :
co... |
def get_generator ( tweet ) :
"""Get information about the application that generated the Tweet
Args :
tweet ( Tweet ) : A Tweet object ( or a dictionary )
Returns :
dict : keys are ' link ' and ' name ' , the web link and the name
of the application
Example :
> > > from tweet _ parser . getter _ meth... | if is_original_format ( tweet ) :
if sys . version_info [ 0 ] == 3 and sys . version_info [ 1 ] >= 4 :
parser = GeneratorHTMLParser ( convert_charrefs = True )
else :
parser = GeneratorHTMLParser ( )
parser . feed ( tweet [ "source" ] )
return { "link" : parser . generator_link , "name" ... |
def uhash ( self , val ) :
"""Calculate hash from unicode value and return hex value as unicode""" | if not isinstance ( val , string_types ) :
raise _TypeError ( "val" , "str" , val )
return codecs . encode ( self . hash ( val . encode ( "utf-8" ) ) , "hex_codec" ) . decode ( "utf-8" ) |
def round_to_x_digits ( number , digits ) :
"""Returns ' number ' rounded to ' digits ' digits .""" | return round ( number * math . pow ( 10 , digits ) ) / math . pow ( 10 , digits ) |
def all_origins ( m ) :
'''Generate all unique statement origins in the given model''' | seen = set ( )
for link in m . match ( ) :
origin = link [ ORIGIN ]
if origin not in seen :
seen . add ( origin )
yield origin |
def override ( func ) :
"""THIS DECORATOR WILL PUT ALL PARAMETERS INTO THE ` kwargs ` PARAMETER AND
THEN PUT ALL ` kwargs ` PARAMETERS INTO THE FUNCTION PARAMETERS . THIS HAS
THE BENEFIT OF HAVING ALL PARAMETERS IN ONE PLACE ( kwargs ) , PLUS ALL
PARAMETERS ARE EXPLICIT FOR CLARITY .
OF COURSE , THIS MEANS ... | func_name = get_function_name ( func )
params = get_function_arguments ( func )
if not get_function_defaults ( func ) :
defaults = { }
else :
defaults = { k : v for k , v in zip ( reversed ( params ) , reversed ( get_function_defaults ( func ) ) ) }
def raise_error ( e , packed ) :
err = text_type ( e )
... |
def delete_topics ( self , topics , timeout_ms = None ) :
"""Delete topics from the cluster .
: param topics : A list of topic name strings .
: param timeout _ ms : Milliseconds to wait for topics to be deleted
before the broker returns .
: return : Appropriate version of DeleteTopicsResponse class .""" | version = self . _matching_api_version ( DeleteTopicsRequest )
timeout_ms = self . _validate_timeout ( timeout_ms )
if version <= 1 :
request = DeleteTopicsRequest [ version ] ( topics = topics , timeout = timeout_ms )
response = self . _send_request_to_controller ( request )
else :
raise NotImplementedErro... |
def set_weather_from_metar ( metar : typing . Union [ Metar . Metar , str ] , in_file : typing . Union [ str , Path ] , out_file : typing . Union [ str , Path ] = None ) -> typing . Tuple [ typing . Union [ str , None ] , typing . Union [ str , None ] ] :
"""Applies the weather from a METAR object to a MIZ file
A... | error , metar = custom_metar . CustomMetar . get_metar ( metar )
if error :
return error , None
if metar :
LOGGER . debug ( 'METAR: %s' , metar . code )
in_file = elib . path . ensure_file ( in_file )
if out_file is None :
out_file = in_file
else :
out_file = elib . path . ensure_file ( out_file , must_... |
def property_as_list ( self , property_name ) :
"""property ( ) but encapsulates it in a list , if it ' s a
single - element property .""" | try :
res = self . _a_tags [ property_name ]
except KeyError :
return [ ]
if type ( res ) == list :
return res
else :
return [ res ] |
def from_arrays ( cls , arrays , sortorder = None , names = None ) :
"""Convert arrays to MultiIndex .
Parameters
arrays : list / sequence of array - likes
Each array - like gives one level ' s value for each data point .
len ( arrays ) is the number of levels .
sortorder : int or None
Level of sortedne... | error_msg = "Input must be a list / sequence of array-likes."
if not is_list_like ( arrays ) :
raise TypeError ( error_msg )
elif is_iterator ( arrays ) :
arrays = list ( arrays )
# Check if elements of array are list - like
for array in arrays :
if not is_list_like ( array ) :
raise TypeError ( err... |
def power_method_opnorm ( op , xstart = None , maxiter = 100 , rtol = 1e-05 , atol = 1e-08 , callback = None ) :
r"""Estimate the operator norm with the power method .
Parameters
op : ` Operator `
Operator whose norm is to be estimated . If its ` Operator . range `
range does not coincide with its ` Operato... | if maxiter is None :
maxiter = np . iinfo ( int ) . max
maxiter , maxiter_in = int ( maxiter ) , maxiter
if maxiter <= 0 :
raise ValueError ( '`maxiter` must be positive, got {}' '' . format ( maxiter_in ) )
if op . domain == op . range :
use_normal = False
ncalls = maxiter
else : # Do the power iterati... |
def handle_input ( self , input_hdr ) :
"""This method tries to ensure that the input data has the correct dimensions .
INPUTS :
input _ hdr ( no default ) Header from which data shape is to be extracted .""" | input_slice = input_hdr [ 'NAXIS' ] * [ 0 ]
for i in range ( input_hdr [ 'NAXIS' ] ) :
if input_hdr [ 'CTYPE%d' % ( i + 1 ) ] . startswith ( "RA" ) :
input_slice [ - 1 ] = slice ( None )
if input_hdr [ 'CTYPE%d' % ( i + 1 ) ] . startswith ( "DEC" ) :
input_slice [ - 2 ] = slice ( None )
return i... |
def _remote_folder ( dirpath , remotes , syn ) :
"""Retrieve the remote folder for files , creating if necessary .""" | if dirpath in remotes :
return remotes [ dirpath ] , remotes
else :
parent_dir , cur_dir = os . path . split ( dirpath )
parent_folder , remotes = _remote_folder ( parent_dir , remotes , syn )
s_cur_dir = syn . store ( synapseclient . Folder ( cur_dir , parent = parent_folder ) )
remotes [ dirpath ]... |
def feed_appdata ( self , data , offset = 0 ) :
"""Feed plaintext data into the pipe .
Return an ( ssldata , offset ) tuple . The ssldata element is a list of
buffers containing record level data that needs to be sent to the
remote SSL instance . The offset is the number of plaintext bytes that
were process... | if self . _state == self . S_UNWRAPPED : # pass through data in unwrapped mode
return ( [ data [ offset : ] ] if offset < len ( data ) else [ ] , len ( data ) )
ssldata = [ ]
view = memoryview ( data )
while True :
self . _need_ssldata = False
try :
if offset < len ( view ) :
offset += s... |
def argsort ( self , axis = - 1 , kind = 'quicksort' , order = None ) :
"""Returns the indices that would sort an array .
. . note : :
This method wraps ` numpy . argsort ` . This documentation is
modified from that of ` numpy . argsort ` .
Perform an indirect sort along the given axis using the algorithm
... | index_array = np . core . fromnumeric . _wrapit ( self , 'argsort' , axis , kind , order )
index_array = index_array . view ( np . ndarray )
return index_array |
def delete_all ( self ) :
'''Delete all books from the index''' | def delete_action_gen ( ) :
scanner = scan ( self . es , index = self . index_name , query = { 'query' : { 'match_all' : { } } } )
for v in scanner :
yield { '_op_type' : 'delete' , '_index' : self . index_name , '_type' : v [ '_type' ] , '_id' : v [ '_id' ] , }
bulk ( self . es , delete_action_gen ( ) ... |
def find_id ( self , element_id ) :
"""Find a single element with the given ID .
Parameters
element _ id : str
ID of the element to find
Returns
found element""" | element = _transform . FigureElement . find_id ( self , element_id )
return Element ( element . root ) |
def level_i18n_name ( self ) :
"""In use within templates for dynamic translations .""" | for level , name in spatial_granularities :
if self . level == level :
return name
return self . level_name |
def tauc_from_mass ( mass_g ) :
"""Estimate the convective turnover time from mass , using the method described
in Cook + ( 2014ApJ . . . 785 . . . 10C ) .
mass _ g - UCD mass in grams .
Returns : the convective turnover timescale in seconds .
Masses larger than 1.3 Msun are out of range and yield NaN . If ... | m = mass_g / cgs . msun
return np . piecewise ( m , [ m < 1.3 , m < 0.82 , m < 0.65 , m < 0.1 ] , [ lambda x : 61.7 - 44.7 * x , 25. , lambda x : 86.9 - 94.3 * x , 70. , np . nan ] ) * 86400. |
def action_display ( self ) :
'''The action text , with any hyperlinked related entities .''' | action = self [ 'action' ]
annotations = [ ]
abbr = self . bill [ settings . LEVEL_FIELD ]
if 'related_entities' in self :
for entity in self [ 'related_entities' ] :
name = entity [ 'name' ]
_id = entity [ 'id' ]
# If the importer couldn ' t ID the entity ,
# skip .
if _id i... |
def export_to_dicts ( table , * args , ** kwargs ) :
"""Export a ` rows . Table ` to a list of dicts""" | field_names = table . field_names
return [ { key : getattr ( row , key ) for key in field_names } for row in table ] |
def _index_range ( self , version , symbol , date_range = None , ** kwargs ) :
"""Given a version , read the segment _ index and return the chunks associated
with the date _ range . As the segment index is ( id - > last datetime )
we need to take care in choosing the correct chunks .""" | if date_range and 'segment_index' in version : # index is read - only but it ' s never written to
index = np . frombuffer ( decompress ( version [ 'segment_index' ] ) , dtype = INDEX_DTYPE )
dtcol = self . _datetime64_index ( index )
if dtcol and len ( index ) :
dts = index [ dtcol ]
start ,... |
def _determine_filtered_package_names ( self ) :
"""Return a list of package names to be filtered base on the configuration
file .""" | # This plugin only processes packages , if the line in the packages
# configuration contains a PEP440 specifier it will be processed by the
# blacklist release filter . So we need to remove any packages that
# are not applicable for this plugin .
filtered_packages = set ( )
try :
lines = self . configuration [ "bla... |
def from_api ( cls , api ) :
"""create an application description for the todo app ,
that based on the api can use either tha api or the ux for interaction""" | ux = TodoUX ( api )
from . pseudorpc import PseudoRpc
rpc = PseudoRpc ( api )
return cls ( { ViaAPI : api , ViaUX : ux , ViaRPC : rpc } ) |
def formatted ( self ) :
"""str : The IBAN formatted in blocks of 4 digits .""" | return ' ' . join ( self . compact [ i : i + 4 ] for i in range ( 0 , len ( self . compact ) , 4 ) ) |
def normalize_value ( text ) :
"""This removes newlines and multiple spaces from a string .""" | result = text . replace ( '\n' , ' ' )
result = re . subn ( '[ ]{2,}' , ' ' , result ) [ 0 ]
return result |
def get_name_locations ( self , name ) :
"""Return a list of ` ` ( resource , lineno ) ` ` tuples""" | result = [ ]
for module in self . names :
if name in self . names [ module ] :
try :
pymodule = self . project . get_module ( module )
if name in pymodule :
pyname = pymodule [ name ]
module , lineno = pyname . get_definition_location ( )
... |
def K_gate_valve_Crane ( D1 , D2 , angle , fd = None ) :
r'''Returns loss coefficient for a gate valve of types wedge disc , double
disc , or plug type , as shown in [ 1 ] _ .
If β = 1 and θ = 0:
. . math : :
K = K _ 1 = K _ 2 = 8f _ d
If β < 1 and θ < = 45 ° :
. . math : :
K _ 2 = \ frac { K + \ sin ... | angle = radians ( angle )
beta = D1 / D2
if fd is None :
fd = ft_Crane ( D2 )
K1 = 8.0 * fd
# This does not refer to upstream loss per se
if beta == 1 or angle == 0 :
return K1
# upstream and down
else :
beta2 = beta * beta
one_m_beta2 = 1.0 - beta2
if angle <= 0.7853981633974483 :
K = (... |
def update_from_dict ( self , keywords ) :
"""Update metadata value from a keywords dictionary .
: param keywords :
: return :""" | super ( ImpactLayerMetadata , self ) . update_from_dict ( keywords )
if 'if_provenance' in list ( keywords . keys ( ) ) :
if_provenance = keywords [ 'if_provenance' ]
for provenance_step in if_provenance :
self . provenance . append_provenance_step ( provenance_step ) |
def call_method ( self , method , * args ) :
"""Call a JSON - RPC method and wait for its result .
The * method * is called with positional arguments * args * .
On success , the ` ` result ` ` field from the JSON - RPC response is
returned . On error , a : class : ` JsonRpcError ` is raised , which you can
... | message = self . _version . create_request ( method , args )
msgid = message [ 'id' ]
try :
with switch_back ( self . _timeout ) as switcher :
self . _method_calls [ msgid ] = switcher
self . send_message ( message )
args , _ = self . _hub . switch ( )
finally :
self . _method_calls . po... |
def GlobForPaths ( self , paths , pathtype = "OS" , root_path = None , process_non_regular_files = False , collect_ext_attrs = False ) :
"""Starts the Glob .
This is the main entry point for this flow mixin .
First we convert the pattern into regex components , and then we
interpolate each component . Finally... | patterns = [ ]
if not paths : # Nothing to do .
return
self . state . pathtype = pathtype
self . state . root_path = root_path
self . state . process_non_regular_files = process_non_regular_files
self . state . collect_ext_attrs = collect_ext_attrs
# Transform the patterns by substitution of client attributes . Whe... |
def main ( ) :
"""Return 0 on success .""" | args = parse_args ( )
if not args . files :
return 0
with enable_sphinx_if_possible ( ) :
status = 0
pool = multiprocessing . Pool ( multiprocessing . cpu_count ( ) )
try :
if len ( args . files ) > 1 :
results = pool . map ( _check_file , [ ( name , args ) for name in args . files ]... |
def get_symbol_returns_from_yahoo ( symbol , start = None , end = None ) :
"""Wrapper for pandas . io . data . get _ data _ yahoo ( ) .
Retrieves prices for symbol from yahoo and computes returns
based on adjusted closing prices .
Parameters
symbol : str
Symbol name to load , e . g . ' SPY '
start : pan... | try :
px = web . get_data_yahoo ( symbol , start = start , end = end )
px [ 'date' ] = pd . to_datetime ( px [ 'date' ] )
px . set_index ( 'date' , drop = False , inplace = True )
rets = px [ [ 'adjclose' ] ] . pct_change ( ) . dropna ( )
except Exception as e :
warnings . warn ( 'Yahoo Finance read... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.