signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def blogroll ( request , btype ) : 'View that handles the generation of blogrolls .'
response , site , cachekey = initview ( request ) if response : return response [ 0 ] template = loader . get_template ( 'feedjack/{0}.xml' . format ( btype ) ) ctx = dict ( ) fjlib . get_extra_context ( site , ctx ) ctx = Context ( ctx ) response = HttpResponse ( template . render ( ctx ) , content_type = 'text/xm...
def get_scale_text(self):
    """Report the current scaling in a human-readable form.

    Returns
    -------
    str
        ``'<num>x'`` when the scale factor is >= 1.0 (enlarged),
        otherwise ``'1/<num>x'`` (shrunken).
    """
    factor = self.get_scale_max()
    if factor < 1.0:
        return '1/%.2fx' % (1.0 / factor)
    return '%.2fx' % factor
def mknts ( self , add_dct ) : """Add information from add _ dct to a new copy of namedtuples stored in nts ."""
nts = [ ] assert len ( add_dct ) == len ( self . nts ) flds = list ( next ( iter ( self . nts ) ) . _fields ) + list ( next ( iter ( add_dct ) ) . keys ( ) ) ntobj = cx . namedtuple ( "ntgoea" , " " . join ( flds ) ) for dct_new , ntgoea in zip ( add_dct , self . nts ) : dct_curr = ntgoea . _asdict ( ) for key ...
def power_chisq_at_points_from_precomputed(corr, snr, snr_norm, bins, indices):
    """Calculate the chisq timeseries from precomputed values for
    selected points only.

    Each bin is explicitly time shifted and summed for the requested
    indices; no FFT is involved.
    """
    nbins = len(bins) - 1
    shifted = shift_sum(corr, indices, bins)  # pylint:disable=assignment-from-no-return
    snr_power = (snr.conj() * snr).real
    return (shifted * nbins - snr_power) * snr_norm ** 2.0
def _coo_to_sparse_series ( A , dense_index = False ) : """Convert a scipy . sparse . coo _ matrix to a SparseSeries . Use the defaults given in the SparseSeries constructor ."""
s = Series ( A . data , MultiIndex . from_arrays ( ( A . row , A . col ) ) ) s = s . sort_index ( ) s = s . to_sparse ( ) # TODO : specify kind ? if dense_index : # is there a better constructor method to use here ? i = range ( A . shape [ 0 ] ) j = range ( A . shape [ 1 ] ) ind = MultiIndex . from_product ...
def convert_data_to_ndarray(self):
    """Convert the stored data from DataFrame to ndarray format.

    Assumption: DataFrame columns become ndarray layers (3rd dimension).

    Returns:
        self, allowing call chaining.

    Raises:
        TypeError: if the current data structure is not a DataFrame.
    """
    if self._data_structure != "DataFrame":
        # TypeError is more precise than the bare Exception raised
        # before; it is still an Exception subclass, so existing
        # ``except Exception`` callers keep working.
        raise TypeError(f"Data is not a DataFrame but {self._data_structure}.")
    self._data = self._convert_to_ndarray(self._data)
    self._update_data_structure()
    return self
def lstm_seq2seq_internal ( inputs , targets , hparams , train ) : """The basic LSTM seq2seq model , main step used for training ."""
with tf . variable_scope ( "lstm_seq2seq" ) : if inputs is not None : inputs_length = common_layers . length_from_embedding ( inputs ) # Flatten inputs . inputs = common_layers . flatten4d3d ( inputs ) # LSTM encoder . inputs = tf . reverse_sequence ( inputs , inputs_length ,...
def _get_win_argv ( ) : """Returns a unicode argv under Windows and standard sys . argv otherwise Returns : List [ ` fsnative ` ]"""
assert is_win argc = ctypes . c_int ( ) try : argv = winapi . CommandLineToArgvW ( winapi . GetCommandLineW ( ) , ctypes . byref ( argc ) ) except WindowsError : return [ ] if not argv : return [ ] res = argv [ max ( 0 , argc . value - len ( sys . argv ) ) : argc . value ] winapi . LocalFree ( argv ) return...
def do_install ( ctx , verbose , fake ) : """Installs legit git aliases ."""
click . echo ( 'The following git aliases will be installed:\n' ) aliases = cli . list_commands ( ctx ) output_aliases ( aliases ) if click . confirm ( '\n{}Install aliases above?' . format ( 'FAKE ' if fake else '' ) , default = fake ) : for alias in aliases : cmd = '!legit ' + alias system_command...
def get_vmpolicy_macaddr_output_vmpolicy_macaddr_name ( self , ** kwargs ) : """Auto Generated Code"""
config = ET . Element ( "config" ) get_vmpolicy_macaddr = ET . Element ( "get_vmpolicy_macaddr" ) config = get_vmpolicy_macaddr output = ET . SubElement ( get_vmpolicy_macaddr , "output" ) vmpolicy_macaddr = ET . SubElement ( output , "vmpolicy-macaddr" ) name = ET . SubElement ( vmpolicy_macaddr , "name" ) name . text...
def _getMethodsVoc ( self ) : """Return the registered methods as DisplayList"""
methods = api . search ( { "portal_type" : "Method" , "is_active" : True } , "bika_setup_catalog" ) items = map ( lambda m : ( api . get_uid ( m ) , api . get_title ( m ) ) , methods ) items . sort ( lambda x , y : cmp ( x [ 1 ] , y [ 1 ] ) ) items . insert ( 0 , ( "" , _ ( "Not specified" ) ) ) return DisplayList ( li...
def p_inputunit ( p ) : '''inputunit : simple _ list simple _ list _ terminator | NEWLINE | error NEWLINE | EOF'''
# XXX if p . lexer . _parserstate & flags . parser . CMDSUBST : p . lexer . _parserstate . add ( flags . parser . EOFTOKEN ) if isinstance ( p [ 1 ] , ast . node ) : p [ 0 ] = p [ 1 ] # accept right here in case the input contains more lines that are # not part of the current command p . accept ( )
def authorize(self, me, state=None, next_url=None, scope='read'):
    """Authorize a user via Micropub.

    Args:
        me (string): the authing user's URL. If it does not begin with
            https?://, http:// will be prepended.
        state (string, optional): passed through the whole auth process.
        next_url (string, optional): used in place of ``state`` when no
            state is given.
        scope (string): the requested scope (default ``'read'``).
    """
    endpoint = self.flask_endpoint_for_function(self._authorized_handler)
    redirect_url = flask.url_for(endpoint, _external=True)
    return self._start_indieauth(me, redirect_url, state or next_url, scope)
def read(cls, source, *args, **kwargs):
    """Read data into a `StateVector`.

    Parameters
    ----------
    source : `str`, `list`
        Source of data, any of the following:
        - `str` path of a single data file,
        - `str` path of a LAL-format cache file,
        - `list` of paths.

    Extra positional and keyword arguments are forwarded unchanged to
    the parent class's ``read`` implementation.
    """
    # Pure delegation: all actual reading is done by the superclass.
    return super(StateVector, cls).read(source, *args, **kwargs)
def to_pandas(self):
    """Convert to a pandas Index.

    Returns
    -------
    pandas.Index

    Raises
    ------
    ValueError
        If this index has not been evaluated (``is_raw()`` is false).
    """
    if not self.is_raw():
        raise ValueError('Cannot convert to pandas Index if not evaluated.')
    # import deferred so pandas is only required on this code path
    from pandas import Index as _PdIndex
    return _PdIndex(self.values, self.dtype, name=self.name)
def halt(self):
    """Halt the current endpoint.

    The underlying ``_halt`` call is expected to fail with ``EBADMSG``;
    success, or any other error code, is treated as a failure.
    """
    saw_ebadmsg = False
    try:
        self._halt()
    except IOError as exc:
        if exc.errno != errno.EBADMSG:
            raise
        saw_ebadmsg = True
    if not saw_ebadmsg:
        raise ValueError('halt did not return EBADMSG ?')
    self._halted = True
def triangle_area(pt1, pt2, pt3):
    r"""Return the area of a triangle.

    Parameters
    ----------
    pt1 : (X, Y) ndarray
        Starting vertex of a triangle
    pt2 : (X, Y) ndarray
        Second vertex of a triangle
    pt3 : (X, Y) ndarray
        Ending vertex of a triangle

    Returns
    -------
    area : float
        Area of the given triangle (shoelace formula).
    """
    x1, y1 = pt1[0], pt1[1]
    x2, y2 = pt2[0], pt2[1]
    x3, y3 = pt3[0], pt3[1]
    signed = (x1 * y2 - x2 * y1) + (x2 * y3 - x3 * y2) + (x3 * y1 - x1 * y3)
    return abs(signed) / 2
def require_ajax_logged_in ( func ) : """Check if ajax API is logged in and login if not"""
@ functools . wraps ( func ) def inner_func ( self , * pargs , ** kwargs ) : if not self . _ajax_api . logged_in : logger . info ( 'Logging into AJAX API for required meta method' ) if not self . has_credentials : raise ApiLoginFailure ( 'Login is required but no credentials were provide...
def key_usage(self):
    """The :py:class:`~django_ca.extensions.KeyUsage` extension, or
    ``None`` if it doesn't exist."""
    extensions = self.x509.extensions
    try:
        raw = extensions.get_extension_for_oid(ExtensionOID.KEY_USAGE)
    except x509.ExtensionNotFound:
        return None
    return KeyUsage(raw)
def get(self, **kwargs):
    """Return the first object matching the specified lookup parameters.

    >>> site_list.get(id=1)
    {'url': 'http://site1.tld/', 'published': False, 'id': 1}

    Raises:
        QueryList.NotFound: if no element matches the lookup.
    """
    for element in self:
        if self._check_element(kwargs, element):
            return element
    pretty = self._stringify_kwargs(kwargs)
    raise QueryList.NotFound("Element not found with attributes: %s" % pretty)
def stream_create_default_file_stream ( fname , isa_read_stream ) : """Wraps openjp2 library function opj _ stream _ create _ default _ vile _ stream . Sets the stream to be a file stream . This function is only valid for the 2.1 version of the openjp2 library . Parameters fname : str Specifies a file . ...
ARGTYPES = [ ctypes . c_char_p , ctypes . c_int32 ] OPENJP2 . opj_stream_create_default_file_stream . argtypes = ARGTYPES OPENJP2 . opj_stream_create_default_file_stream . restype = STREAM_TYPE_P read_stream = 1 if isa_read_stream else 0 file_argument = ctypes . c_char_p ( fname . encode ( ) ) stream = OPENJP2 . opj_st...
def heightmap_rain_erosion(
    hm: np.ndarray,
    nbDrops: int,
    erosionCoef: float,
    sedimentationCoef: float,
    rnd: Optional[tcod.random.Random] = None,
) -> None:
    """Simulate the effect of rain drops on the terrain, causing erosion.

    ``nbDrops`` should be at least ``hm.size``.

    Args:
        hm: heightmap array (modified in place by the C library).
        nbDrops: number of rain drops to simulate.
        erosionCoef: erosion coefficient.
        sedimentationCoef: sedimentation coefficient.
        rnd: optional random number generator; the library default is
            used when ``None``.
    """
    rng_handle = rnd.random_c if rnd else ffi.NULL
    lib.TCOD_heightmap_rain_erosion(
        _heightmap_cdata(hm),
        nbDrops,
        erosionCoef,
        sedimentationCoef,
        rng_handle,
    )
def _insert_uow ( self , freerun_entry , flow_request = None ) : """creates unit _ of _ work and inserts it into the DB : raise DuplicateKeyError : if unit _ of _ work with given parameters already exists"""
process_entry = context . process_context [ freerun_entry . process_name ] arguments = process_entry . arguments arguments . update ( freerun_entry . arguments ) if flow_request : schedulable_name = flow_request . schedulable_name timeperiod = flow_request . timeperiod start_timeperiod = flow_request . star...
def xymatch ( outfile , filenames , tol = 2 ) : """Given a list of MOPfiles merge them based on x / y coordinates matching . ."""
import math import sys output = { } files = [ ] for filename in filenames : this_file = read ( filename ) # # match files based on the ' X ' and ' Y ' column . # # if those don ' t exist then skip this file if not this_file [ 'data' ] . has_key ( 'X' ) or not this_file [ 'data' ] . has_key ( 'Y' ) : ...
def crop_by_percent ( cmap , per , which = 'both' , N = None ) : '''Crop end or ends of a colormap by per percent . : param cmap : A colormap object , like cmocean . cm . matter . : param per : Percent of colormap to remove . If which = = ' both ' , take this percent off both ends of colormap . If which = = '...
if which == 'both' : # take percent off both ends of cmap vmin = - 100 ; vmax = 100 ; pivot = 0 dmax = per elif which == 'min' : # take percent off bottom of cmap vmax = 10 ; pivot = 5 vmin = ( 0 + per / 100 ) * 2 * pivot dmax = None elif which == 'max' : # take percent off top of cmap ...
def sendRequest ( self , socket , cmd , args = ( ) , timeout = 10 ) : '''Perform client request / reply Request is a ZMQ multipart message : - command string - pickled argument list Reply is a pickled object'''
self . _logger . debug ( "sending request %s %s" % ( cmd , args ) ) t0 = time . time ( ) self . _sendMultiPartWithBarrierTimeout ( socket , [ cmd . encode ( ) , pickle . dumps ( args , Constants . PICKLE_PROTOCOL ) ] , timeout ) toBeReturned = self . receiveWithTimeout ( socket , timeout ) retObj = pickle . loads ( toB...
def relative_humidity_from_dewpoint(temperature, dewpt):
    r"""Calculate the relative humidity.

    Uses temperature and dewpoint to compute relative humidity as the
    ratio of the vapor pressure (at the dewpoint) to the saturation
    vapor pressure (at the temperature).

    Parameters
    ----------
    temperature : `pint.Quantity`
        The temperature.
    dewpt : `pint.Quantity`
        The dewpoint temperature.

    Returns
    -------
    `pint.Quantity`
        The relative humidity.
    """
    vapor_pressure = saturation_vapor_pressure(dewpt)
    saturation_pressure = saturation_vapor_pressure(temperature)
    return vapor_pressure / saturation_pressure
def difference(self, *args):
    """Take the difference between one array and any number of others.

    Only the elements present in just the first array remain.

    Returns:
        The wrapped result containing the elements of ``self.obj`` that
        appear in none of the arrays in ``args``.
    """
    # set.difference accepts any number of iterables directly; the old
    # loop used enumerate but ignored the value and re-indexed args[i].
    remaining = set(self.obj).difference(*args)
    return self._wrap(self._clean._toOriginal(remaining))
def _ensure_config_file_exists():
    """Make sure the config file exists.

    :raises ConfigFileNotFoundError: when the configured path is missing.
    """
    config_path = Path(ELIBConfig.config_file_path).absolute()
    if config_path.exists():
        return
    raise ConfigFileNotFoundError(ELIBConfig.config_file_path)
def sipdir_is_finished ( sipdir ) : """Return the state of modeling and inversion for a given SIP dir . The result does not take into account sensitivities or potentials , as optionally generated by CRMod . Parameters sipdir : string Directory to check Returns crmod _ is _ finished : bool True if al...
if not is_sipdir ( sipdir ) : raise Exception ( 'Directory is not a valid SIP directory!' ) subdirs_raw = sorted ( glob . glob ( sipdir + os . sep + 'invmod' + os . sep + '*' ) ) subdirs = [ x for x in subdirs_raw if os . path . isdir ( x ) ] crmod_finished = True crtomo_finished = True for subdir in subdirs : ...
def interval_timer(interval, func, *args, **kwargs):
    '''Interval timer function.

    Taken from:
    http://stackoverflow.com/questions/22498038/improvement-on-interval-python/22498708

    Returns a callable that stops the timer when invoked.
    '''
    stopped = Event()

    def _runner():
        # wait() returns False on timeout and True once the event is
        # set, so the first call happens only after one full interval.
        while not stopped.wait(interval):
            func(*args, **kwargs)

    Thread(name='IntervalTimerThread', target=_runner).start()
    return stopped.set
def _get_entropy ( reference_beats , estimated_beats , bins ) : """Helper function for information gain ( needs to be run twice - once backwards , once forwards ) Parameters reference _ beats : np . ndarray reference beat times , in seconds estimated _ beats : np . ndarray query beat times , in seconds ...
beat_error = np . zeros ( estimated_beats . shape [ 0 ] ) for n in range ( estimated_beats . shape [ 0 ] ) : # Get index of closest annotation to this beat beat_distances = estimated_beats [ n ] - reference_beats closest_beat = np . argmin ( np . abs ( beat_distances ) ) absolute_error = beat_distances [ cl...
def _parameterize_string ( raw ) : """Substitute placeholders in a string using CloudFormation references Args : raw ( ` str ` ) : String to be processed . Byte strings are not supported ; decode them before passing them to this function . Returns : ` str ` | : class : ` troposphere . GenericHelperFn ` : ...
parts = [ ] s_index = 0 for match in _PARAMETER_PATTERN . finditer ( raw ) : parts . append ( raw [ s_index : match . start ( ) ] ) parts . append ( { u"Ref" : match . group ( 1 ) } ) s_index = match . end ( ) if not parts : return GenericHelperFn ( raw ) parts . append ( raw [ s_index : ] ) return Gene...
def load_from_remote ( remote_name , owner = None ) : """Loads the data from a remote repository . : param remote _ name : The name of the dataset in the remote repository : param owner : ( optional ) The owner of the dataset . If nothing is provided , the current user is used . For public datasets use ' publ...
from . . import GMQLDataset pmg = get_python_manager ( ) remote_manager = get_remote_manager ( ) parser = remote_manager . get_dataset_schema ( remote_name , owner ) source_table = get_source_table ( ) id = source_table . search_source ( remote = remote_name ) if id is None : id = source_table . add_source ( remote...
def register_patches ( self ) : """Registers the patches . : return : Method success . : rtype : bool"""
if not self . __paths : return False unregistered_patches = [ ] for path in self . paths : for file in foundations . walkers . files_walker ( path , ( "\.{0}$" . format ( self . __extension ) , ) , ( "\._" , ) ) : name = foundations . strings . get_splitext_basename ( file ) if not self . regist...
def add_file_to_repo(filename):
    """Add a file to the git repo.

    This method does the same as::

        $ git add filename

    Keyword Arguments:
        :filename: (str) -- name of the file to commit

    Returns:
        <nothing>
    """
    try:
        repo = Repo()
        repo.index.add([_delta_dir() + filename])
    except Exception as e:
        # BUG FIX: exceptions have no ``.message`` attribute on Python 3,
        # so the old handler itself raised AttributeError. Interpolating
        # the exception directly works on both Python 2 and 3. The
        # best-effort print-and-continue contract is preserved.
        print("exception while gitadding file: %s" % e)
async def pair ( self ) : """Pair pyatv as a remote control with an Apple TV ."""
# Connect using the specified protocol # TODO : config should be stored elsewhere so that API is same for both protocol = self . atv . service . protocol if protocol == const . PROTOCOL_DMAP : await self . atv . pairing . start ( zeroconf = Zeroconf ( ) , name = self . args . remote_name , pairing_guid = self . arg...
def cancel_lb(self, loadbal_id):
    """Cancel the specified load balancer.

    :param int loadbal_id: Load Balancer ID to be cancelled.
    """
    billing_item_id = self.lb_svc.getBillingItem(id=loadbal_id)['id']
    return self.client['Billing_Item'].cancelService(id=billing_item_id)
def _get_gpu ( ) : """* DEPRECATED * . Allocates first available GPU using cudaSetDevice ( ) , or returns 0 otherwise ."""
# Note : this code executes , but Tensorflow subsequently complains that the " current context was not created by the StreamExecutor cuda _ driver API " system = platform . system ( ) if system == "Linux" : libcudart = ct . cdll . LoadLibrary ( "libcudart.so" ) elif system == "Darwin" : libcudart = ct . cdll . ...
def input_dialog(self, title="Enter a value", message="Enter a value", default="", **kwargs):
    """Show an input dialog.

    Usage: C{dialog.input_dialog(title="Enter a value", message="Enter a value", default="", **kwargs)}

    @param title: window title for the dialog
    @param message: message displayed above the input box
    @param default: default value for the input box
    """
    zenity_args = ["--entry", "--text", message, "--entry-text", default]
    return self._run_zenity(title, zenity_args, kwargs)
def get_commit_message(self, commit_sha):
    """Return the commit message for the given commit hash, replacing
    ``#<PRID>`` references with ``GH-<PRID>``.

    :param commit_sha: hash of the commit to inspect.
    :returns: the (optionally rewritten) commit message as str.
    """
    cmd = ["git", "show", "-s", "--format=%B", commit_sha]
    output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    message = output.strip().decode("utf-8")
    if self.config["fix_commit_msg"]:
        import re
        # Only rewrite PR references (a '#' immediately followed by
        # digits). The previous blanket replace("#", "GH-") also mangled
        # every other '#' in the message (e.g. Markdown headings),
        # contradicting the documented behavior.
        return re.sub(r"#(?=\d)", "GH-", message)
    else:
        return message
def iter_qs ( qs , adapter ) : '''Safely iterate over a DB QuerySet yielding ES documents'''
for obj in qs . no_cache ( ) . no_dereference ( ) . timeout ( False ) : if adapter . is_indexable ( obj ) : try : doc = adapter . from_model ( obj ) . to_dict ( include_meta = True ) yield doc except Exception as e : model = adapter . model . __name__ ...
def chmod ( path , mode = None , user = None , group = None , other = None , recursive = False ) : """Changes file mode permissions . > > > if chmod ( ' / tmp / one ' , 0755 ) : . . . print ( ' OK ' ) OK NOTE : The precending ` ` 0 ` ` is required when using a numerical mode ."""
successful = True mode = _ops_mode ( mode ) if user is not None : mode . user = user if group is not None : mode . group = group if other is not None : mode . other = other if recursive : for p in find ( path , no_peek = True ) : successful = _chmod ( p , mode ) and successful else : success...
def logs(self):
    """Return an object to work with the site logs, or ``None`` when the
    site exposes no ``logs`` resource."""
    if self._resources is None:
        self.__init()
    if "logs" not in self._resources:
        return None
    return _logs.Log(
        url=self._url + "/logs",
        securityHandler=self._securityHandler,
        proxy_url=self._proxy_url,
        proxy_port=self._proxy_port,
        initialize=True,
    )
def writeTable(self, tableName):
    """Write the table corresponding to the specified name, equivalent
    to the AMPL statement:

    .. code-block:: ampl

        write table tableName;

    Args:
        tableName: Name of the table to be written.
    """
    def _do_write():
        return self._impl.writeTable(tableName)

    lock_and_call(_do_write, self._lock)
def delete(self, item):
    """Delete a single item from this SingleBlockManager.

    Ensures that ``self.blocks`` doesn't become empty.
    """
    position = self.items.get_loc(item)
    self._block.delete(position)
    self.axes[0] = self.axes[0].delete(position)
def pack ( self ) : """The ` CodeAttribute ` in packed byte string form ."""
with io . BytesIO ( ) as file_out : file_out . write ( pack ( '>HHI' , self . max_stack , self . max_locals , len ( self . _code ) ) ) file_out . write ( self . _code ) file_out . write ( pack ( '>H' , len ( self . exception_table ) ) ) for exception in self . exception_table : file_out . write ...
def _updateCanvasDraw ( self ) : """Overload of the draw function that update axes position before each draw"""
fn = self . canvas . draw def draw2 ( * a , ** k ) : self . _updateGridSpec ( ) return fn ( * a , ** k ) self . canvas . draw = draw2
def _create_sagemaker_pipeline_model ( self , instance_type ) : """Create a SageMaker Model Entity Args : instance _ type ( str ) : The EC2 instance type that this Model will be used for , this is only used to determine if the image needs GPU support or not . accelerator _ type ( str ) : Type of Elastic Inf...
if not self . sagemaker_session : self . sagemaker_session = Session ( ) containers = self . pipeline_container_def ( instance_type ) self . name = self . name or name_from_image ( containers [ 0 ] [ 'Image' ] ) self . sagemaker_session . create_model ( self . name , self . role , containers , vpc_config = self . v...
def dict_get_path ( data , path , default = None ) : """Returns the value inside nested structure of data located at period delimited path When traversing a list , as long as that list is containing objects of type dict , items in that list will have their " name " and " type " values tested against the cur...
keys = path . split ( "." ) for k in keys : if type ( data ) == list : found = False for item in data : name = item . get ( "name" , item . get ( "type" ) ) if name == k : found = True data = item break if not found : ...
def get_total_alignment_score(bam):
    '''Return the total of AS: tags in the input BAM.

    Reads that carry no AS tag are skipped.

    :param bam: path to a BAM file.
    :returns: int total of all AS tag values.
    '''
    sam_reader = pysam.Samfile(bam, "rb")
    total = 0
    try:
        for sam in sam_reader.fetch(until_eof=True):
            try:
                # opt() raises KeyError when the tag is absent; the old
                # bare ``except`` also hid every unrelated error.
                total += sam.opt('AS')
            except KeyError:
                pass
    finally:
        # the old code leaked the file handle
        sam_reader.close()
    return total
def help_text ( cls ) : """Return a slack - formatted list of commands with their usage ."""
docs = [ cmd_func . __doc__ for cmd_func in cls . commands . values ( ) ] # Don ' t want to include ' usage : ' or explanation . usage_lines = [ doc . partition ( '\n' ) [ 0 ] for doc in docs ] terse_lines = [ line [ len ( 'Usage: ' ) : ] for line in usage_lines ] terse_lines . sort ( ) return '\n' . join ( [ 'Availabl...
def calculate_auc_diff ( auc_structure_1 , auc_structure_2 , sort_order ) : """returns the absolute value of the difference in ROC AUC values and corresponding statistics . specifically , | AUC1 - AUC2 | , the 95 % confidence interval , and the 2 - sided p - value : param sort _ order : : param auc _ structur...
# determine auc and variance values for both sets auc1 = calculate_auc ( auc_structure_1 , sort_order , 'diff' ) var1a , var1d = calculate_auc_var ( auc_structure_1 ) auc2 = calculate_auc ( auc_structure_2 , sort_order , 'diff' ) var2a , var2d = calculate_auc_var ( auc_structure_2 ) # determine covariances between sets...
def not_ ( self ) : '''Negates this instance ' s query expression using MongoDB ' s ` ` $ not ` ` operator * * Example * * : ` ` ( User . name = = ' Jeff ' ) . not _ ( ) ` ` . . note : : Another usage is via an operator , but parens are needed to get past precedence issues : ` ` ~ ( User . name = = ' Jeff '...
ret_obj = { } for k , v in self . obj . items ( ) : if not isinstance ( v , dict ) : ret_obj [ k ] = { '$ne' : v } continue num_ops = len ( [ x for x in v if x [ 0 ] == '$' ] ) if num_ops != len ( v ) and num_ops != 0 : raise BadQueryException ( '$ operator used in field name' ) ...
def fit ( self , X , y = None , ** kwargs ) : """Fits n KMeans models where n is the length of ` ` self . k _ values _ ` ` , storing the silhouette scores in the ` ` self . k _ scores _ ` ` attribute . The " elbow " and silhouette score corresponding to it are stored in ` ` self . elbow _ value ` ` and ` ` se...
self . k_scores_ = [ ] self . k_timers_ = [ ] if self . locate_elbow : self . elbow_value_ = None self . elbow_score_ = None for k in self . k_values_ : # Compute the start time for each model start = time . time ( ) # Set the k value and fit the model self . estimator . set_params ( n_clusters = k ...
def axis_names(self) -> Tuple[str, ...]:
    """Names of axes (stored in meta-data).

    Falls back to ``axis0 .. axisN-1`` when no (truthy) names are stored.
    """
    stored = self._meta_data.get("axis_names", None)
    if stored:
        return tuple(stored)
    return tuple("axis{0}".format(i) for i in range(self.ndim))
def variables(names, **kwargs):
    """Convenience function for the creation of multiple variables.

    For more control, consider using
    ``symbols(names, cls=Variable, **kwargs)`` directly.

    :param names: string of variable names.
        Example: ``x, y = variables('x, y')``
    :param kwargs: extra keyword arguments forwarded to ``symbols``.
    :return: tuple of variables.
    """
    created = symbols(names, cls=Variable, seq=True, **kwargs)
    return created
def copy(self):
    """Create a shallow copy of the collection.

    # Returns
    `Collection`

    > A copy of the `Collection`
    """
    # local import keeps the ``copy`` module usable even though this
    # method shadows the name
    from copy import copy as _shallow

    duplicate = _shallow(self)
    duplicate._collection = _shallow(duplicate._collection)
    # BUG FIX: the old code re-assigned ``self._collectedTypes`` and
    # ``self._allowedTypes`` on the ORIGINAL object instead of the copy,
    # mutating the original's attributes during a copy operation.
    duplicate._collectedTypes = _shallow(self._collectedTypes)
    duplicate._allowedTypes = _shallow(self._allowedTypes)
    duplicate.errors = _shallow(duplicate.errors)
    return duplicate
def clear_max_string_length(self):
    """Reset ``maxStringLength`` to its metadata default.

    :raises NoAccess: when the metadata is read-only or required.
    """
    # assumes the metadata getter is pure -- TODO confirm
    metadata = self.get_max_string_length_metadata()
    if metadata.is_read_only() or metadata.is_required():
        raise NoAccess()
    default = metadata.get_default_cardinal_values()[0]
    self.my_osid_object_form._my_map['maxStringLength'] = default
def get_valid_user_by_email(email):
    """Return the user instance wrapped in ``Ok``, or an ``Err`` when the
    user is missing or not valid."""
    user = get_user(email)
    if not user:
        return Err("user not exists")
    if user.valid is False:
        return Err("user not valid")
    return Ok(user)
def get_deployment_targets ( self , project , deployment_group_id , tags = None , name = None , partial_name_match = None , expand = None , agent_status = None , agent_job_result = None , continuation_token = None , top = None , enabled = None , property_filters = None ) : """GetDeploymentTargets . [ Preview API ...
route_values = { } if project is not None : route_values [ 'project' ] = self . _serialize . url ( 'project' , project , 'str' ) if deployment_group_id is not None : route_values [ 'deploymentGroupId' ] = self . _serialize . url ( 'deployment_group_id' , deployment_group_id , 'int' ) query_parameters = { } if t...
def get_tables ( self ) : """Adds tables to the network . Example > > > writer = UAIWriter ( model ) > > > writer . get _ tables ( )"""
if isinstance ( self . model , BayesianModel ) : cpds = self . model . get_cpds ( ) cpds . sort ( key = lambda x : x . variable ) tables = [ ] for cpd in cpds : values = list ( map ( str , cpd . values . ravel ( ) ) ) tables . append ( values ) return tables elif isinstance ( self . ...
def convert_dms_string_to_dd ( dms ) : """Convert a degrees , minutes , and seconds ( DMS ) string representation , such as 38 ° 53 ' 23 " N , to a decimal degrees ( DD ) , which expresses latitude and longitude geographic coordinates as decimal fraction . @ param dms : degrees , minutes , and seconds ( DMS )...
degree_mark_offset = dms . find ( u'°' ) degrees = float ( dms [ : degree_mark_offset ] . strip ( ) ) minute_mark_offset = dms . find ( u"'" ) minutes = float ( dms [ degree_mark_offset + 1 : minute_mark_offset ] . strip ( ) ) second_mark_offset = dms . find ( u'"' ) seconds = float ( dms [ minute_mark_offset + 1 : sec...
def prepare ( self , configuration_folder , args_dict , environment ) : """Make a temporary configuration file from the files in our folder"""
self . configuration_folder = configuration_folder if not os . path . isdir ( configuration_folder ) : raise BadOption ( "Specified configuration folder is not a directory!" , wanted = configuration_folder ) available = [ os . path . join ( configuration_folder , name ) for name in os . listdir ( configuration_fold...
def _apply_BCs ( self ) : r"""Applies all the boundary conditions that have been specified , by adding values to the * A * and * b * matrices ."""
if 'pore.bc_rate' in self . keys ( ) : # Update b ind = np . isfinite ( self [ 'pore.bc_rate' ] ) self . b [ ind ] = self [ 'pore.bc_rate' ] [ ind ] if 'pore.bc_value' in self . keys ( ) : f = np . abs ( self . A . data ) . mean ( ) # Update b ( impose bc values ) ind = np . isfinite ( self [ 'pore....
def load_into_collections_from_zipfile ( collections , zipfile ) : """Loads resources contained in the given ZIP archive into each of the given collections . The ZIP file is expected to contain a list of file names obtained with the : func : ` get _ collection _ filename ` function , each pointing to a file ...
with ZipFile ( zipfile ) as zipf : names = zipf . namelist ( ) name_map = dict ( [ ( os . path . splitext ( name ) [ 0 ] , index ) for ( index , name ) in enumerate ( names ) ] ) for coll in collections : coll_name = get_collection_name ( coll ) index = name_map . get ( coll_name ) i...
def to_frame(self, data, state):
    """Extract a single frame from the data buffer.

    The whole buffer is treated as one frame: its contents are returned
    as bytes and the buffer is emptied.

    :param data: A ``bytearray`` instance containing the data read so
                 far.
    :param state: An instance of per-connection state (unused here).

    :returns: The frame as a byte string.
    """
    # bytes() is the builtin equivalent of six.binary_type on both
    # Python 2 and 3, so the six shim is unnecessary here.
    frame = bytes(data)
    # Clear the buffer: the consumed data must be removed
    del data[:]
    return frame
def Handle(self, args, token=None):
    """Render the specified config option.

    :raises ValueError: when ``args.name`` is empty.
    """
    if not args.name:
        raise ValueError("Name not specified.")
    option = ApiConfigOption()
    return option.InitFromConfigOption(args.name)
def remove_parameters_all(self, twig=None, **kwargs):
    """Remove all :class:`Parameter` s matching the search from this
    ParameterSet.

    Any Parameter that a :func:`filter` call with the same arguments
    would return is removed.

    :param twig: (optional) twig search string
    :param kwargs: additional filter criteria
    """
    matches = self.filter(twig=twig, check_visible=False,
                          check_default=False, **kwargs)
    for parameter in matches.to_list():
        self._remove_parameter(parameter)
def viewport(value):
    """2-element list of ints: dimensions of the viewport.

    The viewport is a bounding box containing the visualization; when
    the visualization is larger than the viewport, it becomes
    scrollable. If undefined, the full visualization is shown.

    :raises ValueError: for a wrong number of dimensions or negative
        dimension values.
    """
    if len(value) != 2:
        raise ValueError('viewport must have 2 dimensions')
    for dim in value:
        _assert_is_type('viewport dimension', dim, int)
        if dim < 0:
            raise ValueError('viewport dimensions cannot be negative')
def create ( self , stream , start , parameters , sources , end = None ) : """Create a hitorics preview job . Uses API documented at http : / / dev . datasift . com / docs / api / rest - api / endpoints / previewcreate : param stream : hash of the CSDL filter to create the job for : type stream : str : para...
if len ( sources ) == 0 : raise HistoricSourcesRequired ( ) if isinstance ( sources , six . string_types ) : sources = [ sources ] params = { 'hash' : stream , 'start' : start , 'sources' : ',' . join ( sources ) , 'parameters' : ',' . join ( parameters ) } if end : params [ 'end' ] = end return self . requ...
def normalize_pipeline_name(name=''):
    """Translate unsafe characters to underscores."""
    unsafe = '\\/?%#'
    # single-pass translation instead of chained .replace() calls
    return name.translate(str.maketrans(unsafe, '_' * len(unsafe)))
def write_hector_input ( scenario , path = None ) : """Writes a scenario DataFrame to a CSV emissions file as used in Hector . Parameters scenario : DataFrame DataFrame with emissions . path : file - like object or path Returns out : str If no path is given a String of the output is returned ."""
# Output header format : # ; Scenario name # ; Generated with pyhector # ; UNITS : GtC / yr GtC / yr [ . . . ] # Date ffi _ emissions luc _ emissions [ . . . ] out = "" try : name = "; " + scenario . name + "\n" except AttributeError : name = "; Hector Scenario\n" out += name out += "; Written with pyhector\n" ...
def from_dict(posterior=None, *, posterior_predictive=None, sample_stats=None,
              prior=None, prior_predictive=None, sample_stats_prior=None,
              observed_data=None, coords=None, dims=None):
    """Convert dictionary data into an InferenceData object.

    All groups are optional dictionaries of arrays; ``coords`` and ``dims``
    describe the named coordinates/dimensions shared by the groups.

    :return: an ``InferenceData`` built by :class:`DictConverter`
    """
    converter = DictConverter(
        posterior=posterior,
        posterior_predictive=posterior_predictive,
        sample_stats=sample_stats,
        prior=prior,
        prior_predictive=prior_predictive,
        sample_stats_prior=sample_stats_prior,
        observed_data=observed_data,
        coords=coords,
        dims=dims,
    )
    return converter.to_inference_data()
def save ( self ) : """Saves dictionary to disk in JSON format ."""
if self . filename is None : raise StoreException ( "Filename must be set to write store to disk" ) # We need an atomic way of re - writing the settings , we also need to # prevent only overwriting part of the settings file ( see bug # 116 ) . # Create a temp file and only then re - name it to the config filename =...
def revoke_tokens(self):
    """Revoke this authorization token and all tokens generated using it.

    Marks the token inactive, persists the change, then cascades the
    revocation through the associated refresh token.
    """
    # Deactivate and save first, so this token is already persisted as
    # invalid before the cascaded revocation runs.
    self.is_active = False
    self.save()
    self.refresh_token.revoke_tokens()
def _process_bracket ( self , trial_runner , bracket , trial ) : """This is called whenever a trial makes progress . When all live trials in the bracket have no more iterations left , Trials will be successively halved . If bracket is done , all non - running trials will be stopped and cleaned up , and duri...
action = TrialScheduler . PAUSE if bracket . cur_iter_done ( ) : if bracket . finished ( ) : bracket . cleanup_full ( trial_runner ) return TrialScheduler . STOP good , bad = bracket . successive_halving ( self . _reward_attr ) # kill bad trials self . _num_stopped += len ( bad ) for...
def get_uids(self, filename=None):
    """Return a list of UIDs, one per entry in the address book.

    :param filename: unused, accepted for API compatibility only
    """
    self._update()
    book = self._book
    return [Abook._gen_uid(book[section]) for section in book.sections()]
def remove_description_by_language ( self , language_type ) : """Removes the specified description . raise : NoAccess - ` ` Metadata . isRequired ( ) ` ` is ` ` true ` ` or ` ` Metadata . isReadOnly ( ) ` ` is ` ` true ` ` * compliance : mandatory - - This method must be implemented . *"""
if self . get_descriptions_metadata ( ) . is_read_only ( ) : raise NoAccess ( ) if not isinstance ( language_type , Type ) : raise InvalidArgument ( 'language_type must be instance of Type' ) self . my_osid_object_form . _my_map [ 'descriptions' ] = [ t for t in self . my_osid_object_form . _my_map [ 'descripti...
def get_normalized_term ( term_id : str , equivalents : list , namespace_targets : dict ) -> str : """Get normalized term"""
if equivalents and len ( equivalents ) > 0 : for start_ns in namespace_targets : if re . match ( start_ns , term_id ) : for target_ns in namespace_targets [ start_ns ] : for e in equivalents : if e and target_ns in e [ "namespace" ] and e [ "primary" ] : ...
def tm_header ( filename , ppdesc ) : """Parse the TM abinit header . Example : Troullier - Martins psp for element Fm Thu Oct 27 17:28:39 EDT 1994 100.00000 14.00000 940714 zatom , zion , pspdat 1 1 3 0 2001 . 00000 pspcod , pspxc , lmax , lloc , mmax , r2well 0 4.085 6.246 0 2.8786493 l , e99.0 , e99.9 , ...
lines = _read_nlines ( filename , - 1 ) header = [ ] for lineno , line in enumerate ( lines ) : header . append ( line ) if lineno == 2 : # Read lmax . tokens = line . split ( ) pspcod , pspxc , lmax , lloc = map ( int , tokens [ : 4 ] ) mmax , r2well = map ( float , tokens [ 4 : 6 ] ) ...
def register ( self , model_alias , code = 'general' , name = None , order = None , display_filter = None ) : """Register new tab : param model _ alias : : param code : : param name : : param order : : return :"""
model_alias = self . get_model_alias ( model_alias ) def wrapper ( create_layout ) : item = TabItem ( code = code , create_layout = create_layout , name = name , order = order , display_filter = display_filter ) if item in self . tabs [ model_alias ] : raise Exception ( "Tab {} already registered for mo...
def get_unbound_arg_names(arg_names, arg_binding_keys):
    """Determines which args have no arg binding keys.

    Args:
        arg_names: a sequence of the names of possibly bound args.
        arg_binding_keys: a sequence of ArgBindingKey, each of whose arg
            names is in arg_names.

    Returns:
        list: the arg names (in their original order) that are not bound
        by any of arg_binding_keys.
    """
    # A set gives O(1) membership tests; the original scanned a list for
    # every arg name, i.e. O(len(arg_names) * len(arg_binding_keys)).
    bound_arg_names = {abk._arg_name for abk in arg_binding_keys}
    return [arg_name for arg_name in arg_names
            if arg_name not in bound_arg_names]
def extract_war_version(war):
    '''Extract the version from the war file name.

    There does not seem to be a standard for encoding the version into the
    `war file name`_.

    .. _`war file name`:
        https://tomcat.apache.org/tomcat-6.0-doc/deployer-howto.html

    :param war: path or filename of the war file
    :return: the trailing ``-<version>`` fragment (digits, dots, dashes)
        of the basename without its extension, or None if there is none
    '''
    basename = os.path.basename(war)
    war_package = os.path.splitext(basename)[0]  # remove '.war'
    # The pattern is anchored at the end of the string, so it can match at
    # most once: re.search replaces the original findall + length check.
    # Raw string avoids relying on '\\d' surviving string escaping.
    match = re.search(r"-([\d.-]+)$", war_package)
    return match.group(1) if match else None
def get_tags(self):
    """Return a sorted list of tag sets, one frozenset per meta graph."""
    tag_sets = (frozenset(graph.meta_info_def.tags)
                for graph in self.meta_graphs)
    return sorted(tag_sets)
def transform(row, table):
    """Build a dict from *row*: collect links from its "project" field into
    a "links" entry, then strip HTML from every text value (the joined
    links included)."""
    record = row._asdict()
    record["links"] = " ".join(extract_links(row.project))
    for field in record:
        if isinstance(record[field], six.text_type):
            record[field] = extract_text(record[field])
    return record
def computeNoCall ( fileName ) : """Computes the number of no call . : param fileName : the name of the file : type fileName : str Reads the ` ` ped ` ` file created by Plink using the ` ` recodeA ` ` options ( see : py : func : ` createPedChr24UsingPlink ` ) and computes the number and percentage of no c...
outputFile = None try : outputFile = open ( fileName + ".noCall" , "w" ) except IOError : msg = "%s: can't write file" % fileName + ".noCall" raise ProgramError ( msg ) print >> outputFile , "\t" . join ( [ "PED" , "ID" , "SEX" , "nbGeno" , "nbNoCall" ] ) try : toPrint = [ ] with open ( fileName , "...
async def enter_async_context(self, cm):
    """Enter the supplied async context manager.

    On success, push its ``__aexit__`` method as an exit callback and
    return the result of ``__aenter__``.
    """
    # Look up __aexit__ on the type (and before entering) so a context
    # manager missing __aexit__ fails early, without being entered.
    cm_cls = type(cm)
    aexit = cm_cls.__aexit__
    value = await cm_cls.__aenter__(cm)
    self._push_async_cm_exit(cm, aexit)
    return value
def populate_metadata(model, MetadataClass):
    """Ensure metadata exists for every instance of *model*.

    :param model: Django model class whose instances are scanned
    :param MetadataClass: metadata class to create instances of
    """
    for obj in model.objects.all():
        create_metadata_instance(MetadataClass, obj)
def add_user(self, user, first_name=None, last_name=None, email=None,
             password=None):
    """Add a new user via the project service.

    Args:
        user (string): User name.
        first_name (optional[string]): User's first name. Defaults to None.
        last_name (optional[string]): User's last name. Defaults to None.
        email (optional[string]): User's email address. Defaults to None.
        password (optional[string]): User's password. Defaults to None.
    """
    service = self.project_service
    service.set_auth(self._token_project)
    service.add_user(user, first_name, last_name, email, password)
def read(section: str = 'DEFAULT'):
    """Read the given ``section`` of ``~/.datadog.ini``.

    Only keys present in the module-level ``allowed_properties`` are kept
    (e.g. ``api_key``, ``app_key``, ``proxies``).

    :param section: name of the INI section to read
    :return: dict mapping each allowed property name to its value
    """
    config = ConfigParser()
    config.read(path.expanduser('~/.datadog.ini'))
    entries = config.items(section)
    return {name: value for name, value in entries
            if name in allowed_properties}
def find_by(self, **kwargs):
    """Return the first record matching the restrictions in ``kwargs``.

    :raises RecordNotFound: if no matching record exists
    """
    match = self.where(**kwargs).first()
    if not match:
        raise RecordNotFound(kwargs)
    return match
def get_mod_func(class_string):
    """Split a dotted path into (module path, attribute name).

    'django.views.news.stories.story_detail' ->
    ('django.views.news.stories', 'story_detail'); a string containing no
    dot yields ``(class_string, '')``.

    Taken from django.core.urlresolvers.
    """
    head, sep, tail = class_string.rpartition('.')
    if not sep:
        return class_string, ''
    return head, tail
def first_time_setup ( self ) : """First time running Open Sesame ? Create keyring and an auto - unlock key in default keyring . Make sure these things don ' t already exist ."""
if not self . _auto_unlock_key_position ( ) : pw = password . create_passwords ( ) [ 0 ] attrs = { 'application' : self . keyring } gkr . item_create_sync ( self . default_keyring , gkr . ITEM_GENERIC_SECRET , self . keyring , attrs , pw , True ) found_pos = self . _auto_unlock_key_position ( ) item_info = ...
def cmdvel2Twist(vel):
    '''Translate a JderobotTypes CMDVel into a ROS TwistStamped.

    @param vel: JderobotTypes CMDVel to translate
    @type vel: JdeRobotTypes.CMDVel
    @return: a TwistStamped whose linear part is (vx, vy, vz) and whose
        angular part is (ax, ay, az) from vel
    '''
    tw = TwistStamped()
    tw.twist.linear.x, tw.twist.linear.y, tw.twist.linear.z = \
        vel.vx, vel.vy, vel.vz
    tw.twist.angular.x, tw.twist.angular.y, tw.twist.angular.z = \
        vel.ax, vel.ay, vel.az
    return tw
def wait_for_all_futures ( futures , print_traceback = False ) : """Wait indefinitely for all futures in the input iterable to complete . Use a timeout to enable interrupt handling . Call os . _ exit ( ) in case of KeyboardInterrupt . Otherwise , the atexit registered handler in concurrent . futures . thread ...
try : while True : waited_futures = concurrent . futures . wait ( futures , timeout = 60 ) if len ( waited_futures . not_done ) == 0 : break except KeyboardInterrupt : if print_traceback : traceback . print_stack ( ) else : print ( '' ) os . _exit ( os . EX_IO...
def extract ( self , member , path = "" , set_attrs = True ) : """Extract a member from the archive to the current working directory , using its full name . Its file information is extracted as accurately as possible . ` member ' may be a filename or a TarInfo object . You can specify a different directory us...
self . _check ( "r" ) if isinstance ( member , str ) : tarinfo = self . getmember ( member ) else : tarinfo = member # Prepare the link target for makelink ( ) . if tarinfo . islnk ( ) : tarinfo . _link_target = os . path . join ( path , tarinfo . linkname ) try : self . _extract_member ( tarinfo , os ....
def _get_unapplied_migrations ( self , loader ) : """Output a list of unapplied migrations in the form [ [ ' migration1 ' , migration2 ' ] , . . . ] . This implementation is mostly copied from the Django ' showmigrations ' mgmt command . https : / / github . com / django / django / blob / stable / 1.8 . x / dja...
unapplied = [ ] graph = loader . graph plan = [ ] seen = set ( ) # Generate the plan , in the order that migrations have been / should be applied . for target in graph . leaf_nodes ( ) : for migration in graph . forwards_plan ( target ) : if migration not in seen : plan . append ( graph . nodes ...
def to_pinyin ( s , accented = True ) : """Convert * s * to Pinyin . If * accented * is ` ` True ` ` , diacritics are added to the Pinyin syllables . If it ' s ` ` False ` ` , numbers are used to indicate tone ."""
identity = identify ( s ) if identity == PINYIN : if _has_accented_vowels ( s ) : return s if accented else accented_to_numbered ( s ) else : return numbered_to_accented ( s ) if accented else s elif identity == ZHUYIN : return zhuyin_to_pinyin ( s , accented = accented ) elif identity == IP...
def apply(self, window_length, samples=True, func1d=None):
    """Run a 1-D function over a sliding window of the curve.

    Args:
        window_length (int): the window length. Required. Interpreted as a
            sample count when ``samples`` is True; otherwise as metres and
            converted to samples via ``self.step``.
        samples (bool): whether window_length is in samples. Use False for
            a window length given in metres.
        func1d (function): function applied to each window; defaults to
            ``np.mean``.

    Returns:
        Curve: the windowed result, carrying a copy of this curve's
        attributes as params.
    """
    if func1d is None:
        func1d = np.mean
    length = window_length if samples else window_length / self.step
    params = self.__dict__.copy()
    values = self._rolling_window(int(length), func1d)
    return Curve(values, params=params)