signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def _determine_timeinterval ( self ) : """Return a dictionary with two datetime objects , start _ time and end _ time , covering the interval of the training job"""
description = self . _sage_client . describe_training_job ( TrainingJobName = self . name ) start_time = self . _start_time or description [ u'TrainingStartTime' ] # datetime object # Incrementing end time by 1 min since CloudWatch drops seconds before finding the logs . # This results in logs being searched in the tim...
def clear_waiting_coordinators(self, cancel=False):
    """Drain the waiting queue.

    When `cancel` is true, every queued coordinator is first notified of
    cancellation; the queue is emptied in either case.
    """
    with self._lockw:
        if cancel:
            for pending in self._waiting_transfer_coordinators:
                pending.notify_cancelled("Clear Waiting Queue", False)
        self._waiting_transfer_coordinators.clear()
def cd ( * args ) : """An argument of - is equivalent to $ OLDPWD . If - is the first argument , and the directory change is successful , the absolute pathname of the new working directory is written to the standard output ."""
if args [ 0 ] == "-" : try : newpwd , os . environ [ "OLDPWD" ] = os . environ [ "OLDPWD" ] , os . getcwd ( ) except KeyError as e : # $ OLDPWD initially not set raise e else : os . chdir ( newpwd ) print ( newpwd ) else : os . environ [ "OLDPWD" ] = os . getcwd ( ) o...
def _sanitize_column_names(data):
    """Map each column name of `data` to a copy with illegal characters replaced by '_'."""
    return {col: _ILLEGAL_CHARACTER_PAT.sub("_", col) for col in data.columns}
def get_job_output(self, job_id):
    """GetJobOutput
    https://apidocs.joyent.com/manta/api.html#GetJobOutput
    """
    log.debug("GetJobOutput %r", job_id)
    path = "/%s/jobs/%s/live/out" % (self.account, job_id)
    res, content = self._request(path, "GET")
    if res["status"] != "200":
        raise errors.MantaAPIError(res, content)
    return content.splitlines(False)
def inverse_cdf(su, W):
    """Inverse CDF algorithm for a finite distribution.

    Parameters
    ----------
    su : (M,) ndarray
        M sorted uniform variates (i.e. M ordered points in [0, 1]).
    W : (N,) ndarray
        a vector of N normalized weights (>= 0 and summing to one).

    Returns
    -------
    A : (M,) ndarray of int
        indices such that su[n] falls in the cumulative-weight bin of W[A[n]].
    """
    n_samples = su.shape[0]
    indices = np.empty(n_samples, 'int')
    bin_idx = 0
    cum_weight = W[0]
    for n in range(n_samples):
        # advance through the CDF until it covers the current variate
        while su[n] > cum_weight:
            bin_idx += 1
            cum_weight += W[bin_idx]
        indices[n] = bin_idx
    return indices
def _append_date ( self , value , _file ) : """Call this function to write date contents . Keyword arguments : * value - dict , content to be dumped * _ file - FileIO , output file"""
_tabs = '\t' * self . _tctr _text = value . strftime ( '%Y-%m-%dT%H:%M:%S.%fZ' ) _labs = '{tabs}<date>{text}</date>\n' . format ( tabs = _tabs , text = _text ) _file . write ( _labs )
def banlist ( self , channel ) : """Get the channel banlist . Required arguments : * channel - Channel of which to get the banlist for ."""
with self . lock : self . is_in_channel ( channel ) self . send ( 'MODE %s b' % channel ) bans = [ ] while self . readable ( ) : msg = self . _recv ( expected_replies = ( '367' , '368' ) ) if msg [ 0 ] == '367' : banmask , who , timestamp = msg [ 2 ] . split ( ) [ 1 : ] ...
def portfolio_value ( portfolio , date , price = 'close' ) : """Total value of a portfolio ( dict mapping symbols to numbers of shares ) $ CASH used as symbol for USD"""
value = 0.0 for ( sym , sym_shares ) in portfolio . iteritems ( ) : sym_price = None if sym_shares : sym_price = get_price ( symbol = sym , date = date , price = price ) # print sym , sym _ shares , sym _ price # print last _ date , k , price if sym_price != None : if np . isnan ( sy...
def runner_doc ( * args ) : '''Return the docstrings for all runners . Optionally , specify a runner or a function to narrow the selection . The strings are aggregated into a single document on the master for easy reading . Multiple runners / functions can be specified . . . versionadded : : 2014.7.0 CL...
run_ = salt . runner . Runner ( __opts__ ) docs = { } if not args : for fun in run_ . functions : docs [ fun ] = run_ . functions [ fun ] . __doc__ return _strip_rst ( docs ) for module in args : _use_fnmatch = False if '*' in module : target_mod = module _use_fnmatch = True ...
def dim_dtau ( self , pars ) : r""": math : ` \ frac { \ partial \ hat { \ rho ' ' } ( \ omega ) } { \ partial \ tau } = \ rho _ 0 \ frac { - m \ omega ^ c c \ tau ^ { c - 1 } sin ( \ frac { c \ pi } { 2 } } { 1 + 2 ( \ omega \ tau ) ^ c cos ( \ frac { c \ pi } { 2 } ) + ( \ omega \ tau ) ^ { 2 c } } + \ rho ...
self . _set_parameters ( pars ) # term1 nom1 = - self . m * np . sin ( self . ang ) * self . w ** self . c * self . c * self . tau ** ( self . c - 1 ) term1 = nom1 / self . denom # term2 nom2 = ( self . m * self . otc * np . sin ( self . ang ) ) * ( 2 * self . w ** self . c * self . c * self . tau ** ( self . c - 1 ) *...
def joint ( node ) : """Merge the bodies of primal and adjoint into a single function . Args : node : A module with the primal and adjoint function definitions as returned by ` reverse _ ad ` . Returns : func : A ` Module ` node with a single function definition containing the combined primal and adjoin...
node , _ , _ = _fix ( node ) body = node . body [ 0 ] . body [ : - 1 ] + node . body [ 1 ] . body func = gast . Module ( body = [ gast . FunctionDef ( name = node . body [ 0 ] . name , args = node . body [ 1 ] . args , body = body , decorator_list = [ ] , returns = None ) ] ) # Clean up anno . clearanno ( func ) return...
async def _process_auth_form ( self , html : str ) -> ( str , str ) : """Parsing data from authorization page and filling the form and submitting the form : param html : html page : return : url and html from redirected page"""
# Parse page p = AuthPageParser ( ) p . feed ( html ) p . close ( ) # Get data from hidden inputs form_data = dict ( p . inputs ) form_url = p . url form_data [ 'email' ] = self . login form_data [ 'pass' ] = self . password if p . message : # Show form errors raise VkAuthError ( 'invalid_data' , p . message , form...
def randomMails(self, count=1):
    """Return random e-mails.

    :rtype: list
    :returns: list of `count` random e-mails built as nick@domain
    """
    self.check_count(count)
    nicks = self.rn.random_nicks(count=count)
    domains = sample(self.dmails, count)
    return [nick.lower() + "@" + domain for nick, domain in zip(nicks, domains)]
def load_data(self, df):
    """Wraps the LOAD DATA DDL statement.

    Loads data into a MapD table from a pandas.DataFrame or pyarrow.Table.

    Parameters
    ----------
    df : pandas.DataFrame or pyarrow.Table

    Returns
    -------
    query : MapDQuery
    """
    load_stmt = ddl.LoadData(self._qualified_name, df)
    return self._execute(load_stmt)
def handle_job_and_work_save(self, sender, instance, **kwargs):
    """Custom save handler: re-dispatch the save to the instance's project."""
    project = instance.project
    self.handle_save(project.__class__, project)
def mknod ( name , ntype , major = 0 , minor = 0 , user = None , group = None , mode = '0600' ) : '''Create a special file similar to the ' nix mknod command . The supported device types are ` ` p ` ` ( fifo pipe ) , ` ` c ` ` ( character device ) , and ` ` b ` ` ( block device ) . Provide the major and minor n...
name = os . path . expanduser ( name ) ret = { 'name' : name , 'changes' : { } , 'comment' : '' , 'result' : False } if not name : return _error ( ret , 'Must provide name to file.mknod' ) if ntype == 'c' : # Check for file existence if __salt__ [ 'file.file_exists' ] ( name ) : ret [ 'comment' ] = ( 'F...
def get_alignment_data(self, section):
    """Get the alignment SAM and Fasta, if present.

    :param section: Can be 'template', 'complement', or '2d'.
    :return: A tuple of (SAM string, aligned reference section string),
        or None if either dataset is absent for the given section.
    """
    group = '{}/Aligned_{}'.format(self.group_name, section)
    sam = self.handle.get_analysis_dataset(group, 'SAM')
    fasta = self.handle.get_analysis_dataset(group, 'Fasta')
    if sam is None or fasta is None:
        return None
    # the reference sequence is the second line of the Fasta record
    return sam, fasta.split('\n')[1]
def get_font_matrix(self):
    """Copies the scaled font's font matrix.

    :returns: A new :class:`Matrix` object.
    """
    result = Matrix()
    cairo.cairo_scaled_font_get_font_matrix(self._pointer, result._pointer)
    self._check_status()
    return result
def read(self, addr, size):
    '''Read `size` bytes of dummy memory starting at `addr`.

    Parameters
    ----------
    addr : int
        The register address.
    size : int
        Length of data to be read (number of bytes).

    Returns
    -------
    array : array
        Byte array read from memory; 0 for every address never written to.
    '''
    logger.debug("Dummy SiTransferLayer.read addr: %s size: %s" % (hex(addr), size))
    values = (self.mem[a] if a in self.mem else 0 for a in range(addr, addr + size))
    return array.array('B', values)
def RegisterSourceType(cls, source_type_class):
    """Registers a source type.

    Source types are identified based on their type indicator.

    Args:
        source_type_class (type): source type.

    Raises:
        KeyError: if a source type is already set for the type indicator.
    """
    indicator = source_type_class.TYPE_INDICATOR
    if indicator in cls._source_type_classes:
        raise KeyError('Source type already set for type: {0:s}.'.format(indicator))
    cls._source_type_classes[indicator] = source_type_class
def do_copy(self, subcmd, opts, *args):
    """Duplicate something in working copy or repository, remembering history.

    usage: copy SRC DST

    Echoes the received subcommand, options and arguments.
    """
    # The original used Python-2 `print` statements, a SyntaxError under
    # Python 3 (which this file otherwise targets: async def, type hints).
    print("'svn %s' opts: %s" % (subcmd, opts))
    print("'svn %s' args: %s" % (subcmd, args))
def scale ( self , scaled_cx , scaled_cy ) : """Return scaled image dimensions in EMU based on the combination of parameters supplied . If * scaled _ cx * and * scaled _ cy * are both | None | , the native image size is returned . If neither * scaled _ cx * nor * scaled _ cy * is | None | , their values are r...
image_cx , image_cy = self . _native_size if scaled_cx is None and scaled_cy is None : scaled_cx = image_cx scaled_cy = image_cy elif scaled_cx is None : scaling_factor = float ( scaled_cy ) / float ( image_cy ) scaled_cx = int ( round ( image_cx * scaling_factor ) ) elif scaled_cy is None : scaling...
def serialize_model ( model ) : """Serialize the HTK model into a file . : param model : Model to be serialized"""
result = '' # First serialize the macros for macro in model [ 'macros' ] : if macro . get ( 'options' , None ) : result += '~o ' for option in macro [ 'options' ] [ 'definition' ] : result += _serialize_option ( option ) elif macro . get ( 'transition' , None ) : result += '~...
def project_meta ( self , attributes ) : """Projects the specified metadata attributes to new region fields : param attributes : a list of metadata attributes : return : a new GDataframe with additional region fields"""
if not isinstance ( attributes , list ) : raise TypeError ( 'attributes must be a list' ) meta_to_project = self . meta [ attributes ] . applymap ( lambda l : ", " . join ( l ) ) new_regs = self . regs . merge ( meta_to_project , left_index = True , right_index = True ) return GDataframe ( regs = new_regs , meta = ...
def delete_one_word ( self , word = RIGHT ) : """Delete one word the right or the the left of the cursor ."""
assert word in ( self . RIGHT , self . LEFT ) if word == self . RIGHT : papy = self . text . find ( ' ' , self . cursor ) + 1 if not papy : papy = len ( self . text ) self . text = self . text [ : self . cursor ] + self . text [ papy : ] else : papy = self . text . rfind ( ' ' , 0 , self . curso...
def haplotype_caller ( align_bams , items , ref_file , assoc_files , region = None , out_file = None ) : """Call variation with GATK ' s HaplotypeCaller . This requires the full non open - source version of GATK ."""
if out_file is None : out_file = "%s-variants.vcf.gz" % utils . splitext_plus ( align_bams [ 0 ] ) [ 0 ] if not utils . file_exists ( out_file ) : num_cores = dd . get_num_cores ( items [ 0 ] ) broad_runner , params = _shared_gatk_call_prep ( align_bams , items , ref_file , region , out_file , num_cores ) ...
def _get_path(dataset_name):
    """Return the path where checksums are stored for `dataset_name`.

    Raises:
        AssertionError: if no checksums file is registered for the dataset.
    """
    path = _checksum_paths().get(dataset_name)
    if path:
        return path
    # Fixed grammar of the original message ("could be find" -> "could be found").
    msg = ('No checksums file could be found for dataset %s. Please create one in '
           'one of: %s') % (dataset_name, ', '.join(_CHECKSUM_DIRS))
    raise AssertionError(msg)
def filter_records(root, head, update, filters=()):
    """Apply each filter in turn to the frozen (root, head, update) triple."""
    state = (freeze(root), freeze(head), freeze(update))
    for record_filter in filters:
        state = record_filter(*state)
    root, head, update = state
    return thaw(root), thaw(head), thaw(update)
def remove_comments ( code ) : """Remove C - style comment from GLSL code string ."""
pattern = r"(\".*?\"|\'.*?\')|(/\*.*?\*/|//[^\r\n]*\n)" # first group captures quoted strings ( double or single ) # second group captures comments ( / / single - line or / * multi - line * / ) regex = re . compile ( pattern , re . MULTILINE | re . DOTALL ) def do_replace ( match ) : # if the 2nd group ( capturing comm...
def signature(self, cmd):
    '''Convenience function that returns a dict of function signature(s)
    specified by cmd.

    Routes to the master client when the module path is a dotted
    runner/wheel reference, otherwise to the minion client, then delegates
    to the low-level `_signature` helper.
    '''
    parts = cmd['module'].split('.')
    client = 'minion'
    if len(parts) > 2 and parts[0] in ['runner', 'wheel']:
        client = 'master'
    cmd['client'] = client
    return self._signature(cmd)
def logs ( self , num = None , source = None , ps = None , tail = False ) : """Returns the requested log ."""
# Bootstrap payload package . payload = { 'logplex' : 'true' } if num : payload [ 'num' ] = num if source : payload [ 'source' ] = source if ps : payload [ 'ps' ] = ps if tail : payload [ 'tail' ] = 1 # Grab the URL of the logplex endpoint . r = self . _h . _http_resource ( method = 'GET' , resource = (...
def find ( whatever = None , language = None , iso639_1 = None , iso639_2 = None , native = None ) : """Find data row with the language . : param whatever : key to search in any of the following fields : param language : key to search in English language name : param iso639_1 : key to search in ISO 639-1 code...
if whatever : keys = [ u'name' , u'iso639_1' , u'iso639_2_b' , u'iso639_2_t' , u'native' ] val = whatever elif language : keys = [ u'name' ] val = language elif iso639_1 : keys = [ u'iso639_1' ] val = iso639_1 elif iso639_2 : keys = [ u'iso639_2_b' , u'iso639_2_t' ] val = iso639_2 elif n...
def _get_activation ( self , F , inputs , activation , ** kwargs ) : """Get activation function . Convert if is string"""
func = { 'tanh' : F . tanh , 'relu' : F . relu , 'sigmoid' : F . sigmoid , 'softsign' : F . softsign } . get ( activation ) if func : return func ( inputs , ** kwargs ) elif isinstance ( activation , string_types ) : return F . Activation ( inputs , act_type = activation , ** kwargs ) elif isinstance ( activati...
def get(self, url, **opt):
    '''Convert the resource url to a complete url and then fetch the data from it.

    Args:
        url: The url of an OEmbed resource.
        **opt: Parameters passed to the url.

    Returns:
        OEmbedResponse object according to data fetched.
    '''
    request = self.request(url, **opt)
    return self.fetch(request)
def tlog_inv(y, th=1, r=_display_max, d=_l_mmax):
    """Inverse truncated log10 transform.

    Parameters
    ----------
    y : num | num iterable
        Values to be transformed.
    th : num
        Inverse values below `th` are clipped to `th`. Must be positive.
    r : num
        Maximal transformed value (module default `_display_max`).
    d : num
        Scale parameter (module default `_l_mmax`).
        NOTE(review): exact meaning inferred from the formula 10**(y*d/r);
        confirm against the forward transform.
    """
    if th <= 0:
        raise ValueError('Threshold value must be positive. %s given.' % th)
    result = 10 ** (y * 1. * d / r)
    try:
        # ndarray input: clip in place
        result[result < th] = th
    except TypeError:
        # scalar input
        if result < th:
            result = th
    return result
def scrypt_mcf_check ( mcf , password ) : """Returns True if the password matches the given MCF hash"""
if isinstance ( password , unicode ) : password = password . encode ( 'utf8' ) elif not isinstance ( password , bytes ) : raise TypeError ( 'password must be a unicode or byte string' ) if not isinstance ( mcf , bytes ) : raise TypeError ( 'MCF must be a byte string' ) if mcf_mod . _scrypt_mcf_7_is_standard...
def assignment_propagation ( node ) : """Perform assignment propagation . Assignment propagation is not a compiler optimization as much as a readability optimization . If a variable name is used only once , it gets renamed when possible e . g . ` y = x ; z = y ` will become ` z = x ` . Args : node : The A...
n_reads = read_counts ( node ) to_remove = [ ] for succ in gast . walk ( node ) : # We found an assignment of the form a = b # - Left - hand side is a Name , right - hand side is a Name . if ( isinstance ( succ , gast . Assign ) and isinstance ( succ . value , gast . Name ) and len ( succ . targets ) == 1 and isins...
def client_id_from_id_token(id_token):
    """Extracts the client id ('aud' claim) from a JSON Web Token (JWT).

    Returns a string or None.
    """
    aud = JWT().unpack(id_token).payload().get('aud')
    if aud is None:
        return None
    # 'aud' may be a single value or a list; take the first entry if a list
    return aud[0] if isinstance(aud, list) else aud
def ofp_instruction_from_str ( ofproto , action_str ) : """Parse an ovs - ofctl style action string and return a list of jsondict representations of OFPInstructionActions , which can then be passed to ofproto _ parser . ofp _ instruction _ from _ jsondict . Please note that this is for making transition from ...
action_re = re . compile ( r"([a-z_]+)(\([^)]*\)|[^a-z_,()][^,()]*)*" ) result = [ ] while len ( action_str ) : m = action_re . match ( action_str ) if not m : raise ryu . exception . OFPInvalidActionString ( action_str = action_str ) action_name = m . group ( 1 ) this_action = m . group ( 0 ) ...
def mapping_to_frozenset ( mapping ) : """Be aware that this treats any sequence type with the equal members as equal . As it is used to identify equality of schemas , this can be considered okay as definitions are semantically equal regardless the container type ."""
mapping = mapping . copy ( ) for key , value in mapping . items ( ) : if isinstance ( value , Mapping ) : mapping [ key ] = mapping_to_frozenset ( value ) elif isinstance ( value , Sequence ) : value = list ( value ) for i , item in enumerate ( value ) : if isinstance ( item ...
def add_model ( self , allow_alias = False , ** kwargs ) : """Add a ` Model ` instance to this entry ."""
if not allow_alias and MODEL . ALIAS in kwargs : err_str = "`{}` passed in kwargs, this shouldn't happen!" . format ( SOURCE . ALIAS ) self . _log . error ( err_str ) raise RuntimeError ( err_str ) # Set alias number to be + 1 of current number of models if MODEL . ALIAS not in kwargs : kwargs [ MODEL ....
def init_widget ( self ) : """Initialize the underlying widget ."""
super ( AndroidTextureView , self ) . __init__ ( self ) w = self . widget w . setSurfaceTextureListener ( w . getId ( ) ) w . onSurfaceTextureAvailable . connect ( self . on_surface_texture_available ) w . onSurfaceTextureDestroyed . connect ( self . on_surface_texture_destroyed ) w . onSurfaceTextureChanged . connect ...
def read(self):
    """Read command result from blink(1), low-level internal use.

    Receive USB Feature Report 0x01 from blink(1) with 8-byte payload.
    Note: buf must be 8 bytes or bad things happen.
    """
    buf = self.dev.get_feature_report(REPORT_ID, 9)
    hex_dump = ",".join('0x%02x' % v for v in buf)
    log.debug("blink1read: " + hex_dump)
    return buf
def alter_partition ( self , spec , location = None , format = None , tbl_properties = None , serde_properties = None , ) : """Change setting and parameters of an existing partition Parameters spec : dict or list The partition keys for the partition being modified location : string , optional format : str...
part_schema = self . partition_schema ( ) def _run_ddl ( ** kwds ) : stmt = ddl . AlterPartition ( self . _qualified_name , spec , part_schema , ** kwds ) return self . _execute ( stmt ) return self . _alter_table_helper ( _run_ddl , location = location , format = format , tbl_properties = tbl_properties , serd...
def reduce ( self , dimensions = None , function = None , spread_fn = None , ** reduce_map ) : """Applies reduction to elements along the specified dimension ( s ) . Allows reducing the values along one or more key dimension with the supplied function . Supports two signatures : Reducing with a list of dimens...
if util . config . future_deprecations : self . param . warning ( 'The HoloMap.reduce method is deprecated, ' 'for equivalent functionality use ' 'HoloMap.apply.reduce().collapse().' ) from . . element import Table reduced_items = [ ( k , v . reduce ( dimensions , function , spread_fn , ** reduce_map ) ) for k , v ...
def main ( ) : """Remove unused imports Unsafe ! Only tested on our codebase , which uses simple absolute imports on the form , " import a . b . c " ."""
parser = argparse . ArgumentParser ( description = __doc__ , formatter_class = argparse . RawDescriptionHelpFormatter ) parser . add_argument ( "path" , nargs = "+" , help = "File or directory path" ) parser . add_argument ( "--exclude" , nargs = "+" , help = "Exclude glob patterns" ) parser . add_argument ( "--no-recu...
def winddir_text ( pts ) : "Convert wind direction from 0 . . 15 to compass point text"
global _winddir_text_array if pts is None : return None if not isinstance ( pts , int ) : pts = int ( pts + 0.5 ) % 16 if not _winddir_text_array : _ = pywws . localisation . translation . ugettext _winddir_text_array = ( _ ( u'N' ) , _ ( u'NNE' ) , _ ( u'NE' ) , _ ( u'ENE' ) , _ ( u'E' ) , _ ( u'ESE' )...
def profile_list(**kwargs):
    """Show uploaded profiles."""
    ctx = Context(**kwargs)
    storage = ctx.repo.create_secure_service('storage')
    ctx.execute_action('profile:list', storage=storage)
def readline ( self , timeout = 500 ) : """Try to read our I / O for ' timeout ' milliseconds , return None otherwise . This makes calling and reading I / O non blocking !"""
poll_result = self . poller . poll ( timeout ) if poll_result : line = self . io . readline ( ) . strip ( ) if self . io == sys . stdin and line == "[" : # skip first event line wrt issue # 19 line = self . io . readline ( ) . strip ( ) try : # python3 compatibility code line = line . decode...
def scanf ( format , s = None , collapseWhitespace = True ) : """scanf supports the following formats : % c One character %5c 5 characters % d , % i int value %7d , % 7i int value with length 7 % f float value % o octal value % X , % x hex value % s string terminated by whitespace Examples : > >...
if s is None : s = sys . stdin if hasattr ( s , "readline" ) : s = s . readline ( ) format_re , casts = scanf_compile ( format , collapseWhitespace ) found = format_re . search ( s ) if found : groups = found . groups ( ) return tuple ( [ casts [ i ] ( groups [ i ] ) for i in range ( len ( groups ) ) ] ...
def read_10x_mtx ( path , var_names = 'gene_symbols' , make_unique = True , cache = False , gex_only = True ) -> AnnData : """Read 10x - Genomics - formatted mtx directory . Parameters path : ` str ` Path to directory for ` . mtx ` and ` . tsv ` files , e . g . ' . / filtered _ gene _ bc _ matrices / hg19 /...
path = Path ( path ) genefile_exists = ( path / 'genes.tsv' ) . is_file ( ) read = _read_legacy_10x_mtx if genefile_exists else _read_v3_10x_mtx adata = read ( str ( path ) , var_names = var_names , make_unique = make_unique , cache = cache , ) if genefile_exists or not gex_only : return adata else : gex_rows =...
def setup_default_wrappers ( self ) : """Setup defaulf wrappers . Wrappers are applied when view method does not return instance of Response . In this case nefertari renderers call wrappers and handle response generation ."""
# Index self . _after_calls [ 'index' ] = [ wrappers . wrap_in_dict ( self . request ) , wrappers . add_meta ( self . request ) , wrappers . add_object_url ( self . request ) , ] # Show self . _after_calls [ 'show' ] = [ wrappers . wrap_in_dict ( self . request ) , wrappers . add_meta ( self . request ) , wrappers . ad...
def exception(self, ncode):
    """Looks up the exception in error_dictionary and raises it.

    Required arguments:
    * ncode - Error numerical code.
    """
    error = self.error_dictionary[ncode]
    error_msg = self._buffer[self._index - 1].split(None, 3)[3]
    # Resolve the exception class by attribute lookup instead of exec():
    # building raise-statement source text breaks whenever the message
    # contains quotes and is an injection hazard.
    exc_class = getattr(self, error)
    raise exc_class('%s: %s' % (error, error_msg))
def get_global_compatibility_level(self):
    """Gets the global compatibility level."""
    response = requests.get(self._url('/config'), headers=HEADERS)
    raise_if_failed(response)
    return response.json()['compatibility']
def scale_T ( T , P_I , I_F ) : """Scale T with a block diagonal matrix . Helper function that scales T with a right multiplication by a block diagonal inverse , so that T is the identity at C - node rows . Parameters T : { bsr _ matrix } Tentative prolongator , with square blocks in the BSR data structur...
if not isspmatrix_bsr ( T ) : raise TypeError ( 'Expected BSR matrix T' ) elif T . blocksize [ 0 ] != T . blocksize [ 1 ] : raise TypeError ( 'Expected BSR matrix T with square blocks' ) if not isspmatrix_bsr ( P_I ) : raise TypeError ( 'Expected BSR matrix P_I' ) elif P_I . blocksize [ 0 ] != P_I . blocksi...
def astype ( self , dtype ) : """Cast DataFrame columns to given dtype . Parameters dtype : numpy . dtype or dict Dtype or column _ name - > dtype mapping to cast columns to . Note index is excluded . Returns DataFrame With casted columns ."""
if isinstance ( dtype , np . dtype ) : new_data = OrderedDict ( ( column . name , column . astype ( dtype ) ) for column in self . _iter ( ) ) return DataFrame ( new_data , self . index ) elif isinstance ( dtype , dict ) : check_inner_types ( dtype . values ( ) , np . dtype ) new_data = OrderedDict ( se...
def build_D3treeStandard ( old , MAX_DEPTH , level = 1 , toplayer = None ) : """For d3s examples all we need is a json with name , children and size . . eg " name " : " flare " , " children " : [ " name " : " analytics " , " children " : [ " name " : " cluster " , " children " : [ { " name " : " Agglo...
out = [ ] if not old : old = toplayer for x in old : d = { } # print " * " * level , x . label d [ 'qname' ] = x . qname d [ 'name' ] = x . bestLabel ( quotes = False ) . replace ( "_" , " " ) d [ 'objid' ] = x . id if x . children ( ) and level < MAX_DEPTH : d [ 'size' ] = len ( x ....
def sample ( self , N = 1 ) : """Sample N trajectories from the posterior . Note Performs the forward step in case it has not been performed ."""
if not self . filt : self . forward ( ) paths = np . empty ( ( len ( self . filt ) , N ) , np . int ) paths [ - 1 , : ] = rs . multinomial ( self . filt [ - 1 ] , M = N ) log_trans = np . log ( self . hmm . trans_mat ) for t , f in reversed ( list ( enumerate ( self . filt [ : - 1 ] ) ) ) : for n in range ( N )...
def host(self, hostname, owner=None, **kwargs):
    """Create the Host TI object.

    Args:
        hostname: host name for the indicator.
        owner: owner of the indicator.
        **kwargs: additional arguments forwarded to Host.

    Return:
        Host instance.
    """
    return Host(self.tcex, hostname, owner=owner, **kwargs)
def get_age_levels ( self ) : """Method to add a " level " column to the ages table . Finds the lowest filled in level ( i . e . , specimen , sample , etc . ) for that particular row . I . e . , a row with both site and sample name filled in is considered a sample - level age . Returns self . tables [ '...
def get_level ( ser , levels = ( 'specimen' , 'sample' , 'site' , 'location' ) ) : for level in levels : if pd . notnull ( ser [ level ] ) : if len ( ser [ level ] ) : # guard against empty strings return level return # get available levels in age table possible_levels = [ 's...
def _unescape_token ( escaped_token ) : """Inverse of _ escape _ token ( ) . Args : escaped _ token : a unicode string Returns : token : a unicode string"""
def match ( m ) : if m . group ( 1 ) is None : return u"_" if m . group ( 0 ) == u"\\u" else u"\\" try : return six . unichr ( int ( m . group ( 1 ) ) ) except ( ValueError , OverflowError ) : return "" trimmed = escaped_token [ : - 1 ] if escaped_token . endswith ( "_" ) else escape...
def post_message(plugin, polled_time, identity, message):
    """Post a single message to the user identified by `identity`.

    :type plugin: errbot.BotPlugin
    :type polled_time: datetime.datetime (unused here)
    :type identity: str
    :type message: str
    """
    recipient = plugin.build_identifier(identity)
    return plugin.send(recipient, message)
def guinieranalysis ( samplenames , qranges = None , qmax_from_shanum = True , prfunctions_postfix = '' , dist = None , plotguinier = True , graph_extension = '.png' , dmax = None , dmax_from_shanum = False ) : """Perform Guinier analysis on the samples . Inputs : samplenames : list of sample names qranges : ...
figpr = plt . figure ( ) ip = get_ipython ( ) axpr = figpr . add_subplot ( 1 , 1 , 1 ) if qranges is None : qranges = { '__default__' : ( 0 , 1000000 ) } if dmax is None : dmax = { '__default__' : None } if '__default__' not in qranges : qranges [ '__default__' ] = ( 0 , 1000000 ) if '__default__' not in dm...
def format_measure ( measure ) : """Get format and units for data coming from profiler task ."""
# Convert to a positive value . measure = abs ( measure ) # For number of calls if isinstance ( measure , int ) : return to_text_string ( measure ) # For time measurements if 1.e-9 < measure <= 1.e-6 : measure = u"{0:.2f} ns" . format ( measure / 1.e-9 ) elif 1.e-6 < measure <= 1.e-3 : measure = u"{0:.2f} u...
def same_unit ( self , other : Union [ UnitTypeId , Set [ UnitTypeId ] , List [ UnitTypeId ] , Dict [ UnitTypeId , Any ] ] ) -> "Units" : """Usage : ' self . units . same _ tech ( UnitTypeId . COMMANDCENTER ) ' returns CommandCenter and CommandCenterFlying , ' self . units . same _ tech ( UnitTypeId . ORBITAL...
if isinstance ( other , UnitTypeId ) : other = { other } unit_alias_types = set ( other ) for unitType in other : unit_alias = self . game_data . units [ unitType . value ] . unit_alias if unit_alias : unit_alias_types . add ( unit_alias ) return self . filter ( lambda unit : unit . type_id in unit_...
def get_expression_engine(self, name):
    """Return the expression engine instance registered under `name`.

    Raises InvalidEngineError for an unknown engine name.
    """
    try:
        return self.expression_engines[name]
    except KeyError:
        raise InvalidEngineError(
            "Unsupported expression engine: {}".format(name)
        )
def _configure_logger_handler ( cls , log_dest , log_filename ) : """Return a logging handler for the specified ` log _ dest ` , or ` None ` if ` log _ dest ` is ` None ` ."""
if log_dest is None : return None msg_format = '%(asctime)s-%(name)s-%(message)s' if log_dest == 'stderr' : # Note : sys . stderr is the default stream for StreamHandler handler = logging . StreamHandler ( ) handler . setFormatter ( logging . Formatter ( msg_format ) ) elif log_dest == 'file' : if not l...
def rsa_public_key_pkcs8_to_pkcs1(pkcs8_key):
    """Convert a PKCS8-encoded RSA public key to PKCS1.

    The original docstring said "private key", but the code decodes a
    PublicKeyInfo structure and returns the inner 'publicKey' octets.

    :param pkcs8_key: DER-encoded PKCS8 public key bytes.
    :returns: the PKCS1 public key as bytes.
    :raises ValueError: if the input cannot be decoded as a public key.
    """
    decoded_values = decoder.decode(pkcs8_key, asn1Spec=PublicKeyInfo())
    try:
        decoded_key = decoded_values[0]
    except IndexError:
        raise ValueError("Invalid public key encoding.")
    return decoded_key["publicKey"].asOctets()
def gelman_rubin ( x , return_var = False ) : """Returns estimate of R for a set of traces . The Gelman - Rubin diagnostic tests for lack of convergence by comparing the variance between multiple chains to the variance within each chain . If convergence has been achieved , the between - chain and within - cha...
if np . shape ( x ) < ( 2 , ) : raise ValueError ( 'Gelman-Rubin diagnostic requires multiple chains of the same length.' ) try : m , n = np . shape ( x ) except ValueError : return [ gelman_rubin ( np . transpose ( y ) ) for y in np . transpose ( x ) ] # Calculate between - chain variance B_over_n = np . s...
def close_socket(self):
    """Close the socket while holding the socket lock.

    Uses ``with`` so the lock is released even if the forced close
    raises; the original acquire()/release() pair leaked the lock on
    error, deadlocking any later caller.
    """
    with self._socket_lock:
        self._force_close_session()
def _get(self, *args, **kwargs):
    """Issue an HTTP GET through the shared request helper."""
    http_get = requests.get
    return self._request(http_get, *args, **kwargs)
def get_dimord ( measure , calc = None , community = None ) : """Get the dimension order of a network measure . Parameters measure : str Name of funciton in teneto . networkmeasures . calc : str , default = None Calc parameter for the function community : bool , default = None If not null , then commu...
if not calc : calc = '' else : calc = '_' + calc if not community : community = '' else : community = 'community' if 'community' in calc and 'community' in community : community = '' if calc == 'community_avg' or calc == 'community_pairs' : community = '' dimord_dict = { 'temporal_closeness_cent...
def _usernamesToSidObjects ( cls , val , ** kwargs ) : '''converts a list of usernames to sid objects'''
if not val : return val if isinstance ( val , six . string_types ) : val = val . split ( ',' ) sids = [ ] for _user in val : try : sid = win32security . LookupAccountName ( '' , _user ) [ 0 ] sids . append ( sid ) # This needs to be more specific except Exception as e : log ....
def input_loop ( ) : '''wait for user input'''
global operation_takeoff global time_init_operation_takeoff global time_end_operation_takeoff while mpstate . status . exit != True : try : if mpstate . status . exit != True : if mpstate . udp . bound ( ) : line = mpstate . udp . readln ( ) mpstate . udp . writel...
def submit ( self , data , runtime_dir , argv ) : """Run process with SLURM . For details , see : meth : ` ~ resolwe . flow . managers . workload _ connectors . base . BaseConnector . submit ` ."""
limits = data . process . get_resource_limits ( ) logger . debug ( __ ( "Connector '{}' running for Data with id {} ({})." , self . __class__ . __module__ , data . id , repr ( argv ) ) ) # Compute target partition . partition = getattr ( settings , 'FLOW_SLURM_PARTITION_DEFAULT' , None ) if data . process . slug in get...
def rows(self):
    """Yield one list per input line, splitting each line on tabs."""
    with self.input().open('r') as handle:
        yield from (record.strip('\n').split('\t') for record in handle)
def is_sparse_vector(x):
    """Return True if *x* is a 2-D sparse matrix with a single row (1, n)."""
    if not sp.issparse(x):
        return False
    shape = x.shape
    return len(shape) == 2 and shape[0] == 1
def max_tab_name_length_changed ( self , settings , key , user_data ) : """If the gconf var max _ tab _ name _ length be changed , this method will be called and will set the tab name length limit ."""
# avoid get window title before terminal is ready if self . guake . notebook_manager . get_current_notebook ( ) . get_current_terminal ( ) is None : return # avoid get window title before terminal is ready if self . guake . notebook_manager . get_current_notebook ( ) . get_current_terminal ( ) . get_window_title ( ...
def _get_cache_key ( self , obj ) : """Derive cache key for given object ."""
if obj is not None : # Make sure that key is REALLY unique . return '{}-{}' . format ( id ( self ) , obj . pk ) return "{}-None" . format ( id ( self ) )
def get_upload_key_metadata ( self ) : """Generate metadata dictionary from a bucket key ."""
key = self . get_upload_key ( ) metadata = key . metadata . copy ( ) # Some http header properties which are stored on the key need to be # copied to the metadata when updating headers = { # http header name , key attribute name 'Cache-Control' : 'cache_control' , 'Content-Type' : 'content_type' , 'Content-Disposition'...
def do_fish_complete ( cli , prog_name ) : """Do the fish completion Parameters cli : click . Command The main click Command of the program prog _ name : str The program name on the command line Returns bool True if the completion was successful , False otherwise"""
commandline = os . environ [ 'COMMANDLINE' ] args = split_args ( commandline ) [ 1 : ] if args and not commandline . endswith ( ' ' ) : incomplete = args [ - 1 ] args = args [ : - 1 ] else : incomplete = '' for item , help in get_choices ( cli , prog_name , args , incomplete ) : if help : echo (...
def find_existing_record ( env , zone_id , dns_name , check_key = None , check_value = None ) : """Check if a specific DNS record exists . Args : env ( str ) : Deployment environment . zone _ id ( str ) : Route53 zone id . dns _ name ( str ) : FQDN of application ' s dns entry to add / update . check _ ke...
client = boto3 . Session ( profile_name = env ) . client ( 'route53' ) pager = client . get_paginator ( 'list_resource_record_sets' ) existingrecord = None for rset in pager . paginate ( HostedZoneId = zone_id ) : for record in rset [ 'ResourceRecordSets' ] : if check_key : if record [ 'Name' ] ...
def get_uniformly_controlled_rotation_matrix(k):
    """Return the matrix :math:`M_{ij}` of arXiv:quant-ph/0407010.

    Converts the angles of :math:`k`-fold uniformly controlled rotations
    to the angles of the efficient gate decomposition.

    :param int k: number of control qubits.
    :return: a ``(2**k, 2**k)`` numpy array.
    """
    size = 2 ** k
    matrix = np.full((size, size), 2 ** -k)
    for row in range(size):
        gray = row ^ (row >> 1)  # Gray code of the row index
        for col in range(size):
            # Sign is the parity of the bitwise overlap of col and gray.
            parity = bin(col & gray).count("1") % 2
            matrix[row, col] *= -1 if parity else 1
    return matrix
def get_vars_in_expression ( source ) : '''Get list of variable names in a python expression .'''
import compiler from compiler . ast import Node # @ brief Internal recursive function . # @ param node An AST parse Node . # @ param var _ list Input list of variables . # @ return An updated list of variables . def get_vars_body ( node , var_list = [ ] ) : if isinstance ( node , Node ) : if node . __class_...
def Builder ( ** kw ) : """A factory for builder objects ."""
composite = None if 'generator' in kw : if 'action' in kw : raise UserError ( "You must not specify both an action and a generator." ) kw [ 'action' ] = SCons . Action . CommandGeneratorAction ( kw [ 'generator' ] , { } ) del kw [ 'generator' ] elif 'action' in kw : source_ext_match = kw . get (...
def download(branch=None, build=True, installdir="MalmoPlatform"):
    """Clone Malmo from GitHub and (by default) build the Minecraft mod.

    Example::

        import malmoenv.bootstrap; malmoenv.bootstrap.download()

    :param branch: branch to clone; defaults to the release ``malmo_version``.
    :param build: whether to build after cloning.
    :param installdir: directory to clone into.
    :return: whatever :func:`setup` returns.
    """
    target_branch = malmo_version if branch is None else branch
    subprocess.check_call(
        ["git", "clone", "-b", target_branch,
         "https://github.com/Microsoft/malmo.git", installdir]
    )
    return setup(build=build, installdir=installdir)
def get_port_channel_detail_output_lacp_admin_key ( self , ** kwargs ) : """Auto Generated Code"""
config = ET . Element ( "config" ) get_port_channel_detail = ET . Element ( "get_port_channel_detail" ) config = get_port_channel_detail output = ET . SubElement ( get_port_channel_detail , "output" ) lacp = ET . SubElement ( output , "lacp" ) admin_key = ET . SubElement ( lacp , "admin-key" ) admin_key . text = kwargs...
def _check_rows ( rows , check , in_range = True , return_test = 'any' ) : """Check all rows to be in / out of a certain range and provide testing on return values based on provided conditions Parameters rows : pd . DataFrame data rows check : dict dictionary with possible values of ' up ' , ' lo ' , an...
valid_checks = set ( [ 'up' , 'lo' , 'year' ] ) if not set ( check . keys ( ) ) . issubset ( valid_checks ) : msg = 'Unknown checking type: {}' raise ValueError ( msg . format ( check . keys ( ) - valid_checks ) ) where_idx = set ( rows . index [ rows [ 'year' ] == check [ 'year' ] ] ) if 'year' in check else s...
def handler_view ( self , request , resource_name , ids = None ) : """Handler for resources . . . versionadded : : 0.5.7 Content - Type check : return django . http . HttpResponse"""
signal_request . send ( sender = self , request = request ) time_start = time . time ( ) self . update_urls ( request , resource_name = resource_name , ids = ids ) resource = self . resource_map [ resource_name ] allowed_http_methods = resource . Meta . allowed_methods if request . method not in allowed_http_methods : ...
def list_tar(archive, compression, cmd, verbosity, interactive):
    """Build the command list for listing a TAR archive.

    ``interactive`` is accepted for interface compatibility with the
    other archive handlers but is not used here.
    """
    command = [cmd, '--list']
    add_tar_opts(command, compression, verbosity)
    command += ["--file", archive]
    return command
def __initialize_model ( self ) : """Initializes the Model ."""
LOGGER . debug ( "> Initializing model." ) self . beginResetModel ( ) self . root_node = umbra . ui . nodes . DefaultNode ( name = "InvisibleRootNode" ) self . __default_project_node = ProjectNode ( name = self . __default_project , parent = self . root_node , node_flags = int ( Qt . ItemIsEnabled ) , attributes_flags ...
def get_param(self, name):
    """Read a WinDivert parameter (wraps ``WinDivertGetParam``).

    See pydivert.Param for the list of parameters.

    :param name: the WINDIVERT_PARAM to query.
    :return: the parameter's value as a Python int.
    """
    result = c_uint64(0)
    # Out-parameter: the DLL writes the value into `result`.
    windivert_dll.WinDivertGetParam(self._handle, name, byref(result))
    return result.value
def mkdir(self, path, mode=o777):
    """Create a folder (directory) named ``path`` with numeric mode ``mode``.

    The default mode is 0777 (octal).  On some systems, mode is ignored.
    Where it is used, the current umask value is first masked out.

    :param str path: name of the folder to create.
    :param int mode: permissions (posix-style) for the newly-created folder.
    """
    path = self._adjust_cwd(path)
    self._log(DEBUG, "mkdir({!r}, {!r})".format(path, mode))
    dir_attrs = SFTPAttributes()
    dir_attrs.st_mode = mode
    self._request(CMD_MKDIR, path, dir_attrs)
def traced_function ( func = None , name = None , on_start = None , require_active_trace = False ) : """A decorator that enables tracing of the wrapped function or Tornado co - routine provided there is a parent span already established . . . code - block : : python @ traced _ function def my _ function1 ( ...
if func is None : return functools . partial ( traced_function , name = name , on_start = on_start , require_active_trace = require_active_trace ) if name : operation_name = name else : operation_name = func . __name__ @ functools . wraps ( func ) def decorator ( * args , ** kwargs ) : parent_span = get...
def uncomment ( name , regex , char = '#' , backup = '.bak' ) : '''Uncomment specified commented lines in a file name The full path to the file to be edited regex A regular expression used to find the lines that are to be uncommented . This regex should not include the comment character . A leading ` ` ^ ...
name = os . path . expanduser ( name ) ret = { 'name' : name , 'changes' : { } , 'result' : False , 'comment' : '' } if not name : return _error ( ret , 'Must provide name to file.uncomment' ) check_res , check_msg = _check_file ( name ) if not check_res : return _error ( ret , check_msg ) # Make sure the patte...
def split_scoped_hparams ( scopes , merged_hparams ) : """Split single HParams with scoped keys into multiple ."""
split_values = { scope : { } for scope in scopes } merged_values = merged_hparams . values ( ) for scoped_key , value in six . iteritems ( merged_values ) : scope = scoped_key . split ( "." ) [ 0 ] key = scoped_key [ len ( scope ) + 1 : ] split_values [ scope ] [ key ] = value return [ hparam . HParams ( **...
def _get_object_as_soft ( self ) : """Get the object as SOFT formated string ."""
soft = [ "^%s = %s" % ( self . geotype , self . name ) , self . _get_metadata_as_string ( ) , self . _get_columns_as_string ( ) , self . _get_table_as_string ( ) ] return "\n" . join ( soft )
def construct(self, response_args, request, **kwargs):
    """Build the response object.

    Runs the pre-construct hook over *response_args*, instantiates
    ``self.response_cls`` from the result, then runs the post-construct
    hook on the new instance.

    :param response_args: response arguments.
    :param request: the parsed request, a ``self.request_cls`` instance.
    :param kwargs: extra keyword arguments forwarded to both hooks.
    :return: an instance of ``self.response_cls``.
    """
    prepared = self.do_pre_construct(response_args, request, **kwargs)
    response = self.response_cls(**prepared)
    return self.do_post_construct(response, request, **kwargs)
def extract_row(self, row):
    """Return the tiles of row number *row*, left to right, as a list.

    :param row: zero-based row index.
    :return: list of tile values for that row.
    """
    # List comprehension replaces the manual append loop (same order).
    return [self.get_tile(row, col) for col in range(self.get_grid_width())]