signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def cc ( project , detect_project = False ) : """Return a clang that hides CFLAGS and LDFLAGS . This will generate a wrapper script in the current directory and return a complete plumbum command to it . Args : cflags : The CFLAGS we want to hide . ldflags : The LDFLAGS we want to hide . func ( optional ...
from benchbuild . utils import cmd cc_name = str ( CFG [ "compiler" ] [ "c" ] ) wrap_cc ( cc_name , compiler ( cc_name ) , project , detect_project = detect_project ) return cmd [ "./{}" . format ( cc_name ) ]
def records ( self , sc , group_by = 'greedy' , limit = None , sample = 1 , seed = 42 , decode = None , summaries = None ) : """Retrieve the elements of a Dataset : param sc : a SparkContext object : param group _ by : specifies a partition strategy for the objects : param limit : maximum number of objects to...
decode = decode or message_parser . parse_heka_message summaries = summaries or self . summaries ( sc , limit ) # Calculate the sample if summaries is not empty and limit is not set if summaries and limit is None and sample != 1 : if sample < 0 or sample > 1 : raise ValueError ( 'sample must be between 0 an...
def intervalSum(self, a, b):
    """Sum of the underlying table over the closed range [a, b].

    :param int a b: with 1 <= a <= b
    :returns: t[a] + ... + t[b]
    """
    upper = self.prefixSum(b)
    lower = self.prefixSum(a - 1)
    return upper - lower
def mapCellsToPoints(self):
    """Transform cell data (data specified per cell) into point data
    (data specified at cell points).

    The transformation averages the data values of all cells using a
    particular point.
    """
    converter = vtk.vtkCellDataToPointData()
    converter.SetInputData(self.polydata(False))
    converter.Update()
    return self.updateMesh(converter.GetOutput())
def _make_digest(k, **kwargs):
    """Create a digest suitable for use within an :class:`phyles.FSCache`
    object from the key object `k`.

    >>> adict = {'a': {'b': 1}, 'f': []}
    >>> make_digest(adict)
    'a2VKynHgDrUIm17r6BQ5QcA5XVmqpNBmiKbZ9kTu0A'
    """
    digest_map = _make_digest_dict(k, **kwargs)
    if digest_map is None:
        return 'default'
    # Sort by key so the digest is deterministic for equal inputs.
    parts = ['{}_{}'.format(key, h) for key, h in sorted(digest_map.items())]
    return '.'.join(parts)
def get_signatures_with_results(vcs):
    """Return the list of signatures for which test results are saved.

    Args:
        vcs (easyci.vcs.base.Vcs)

    Returns:
        List[str]
    """
    results_dir = os.path.join(vcs.private_dir(), 'results')
    if not os.path.exists(results_dir):
        return []
    # Only directories under 'results' count as saved signatures.
    return [
        entry for entry in os.listdir(results_dir)
        if os.path.isdir(os.path.join(results_dir, entry))
    ]
def publish ( self ) : """Publishes the object . The decorator ` assert _ draft ` makes sure that you cannot publish a published object . : param self : The object to tbe published . : return : The published object ."""
if self . is_draft : # If the object has previously been linked then patch the # placeholder data and remove the previously linked object . # Otherwise set the published date . if self . publishing_linked : self . patch_placeholders ( ) # Unlink draft and published copies then delete published . ...
def detect_format ( self , candidates ) : """Detects the format of the fileset from a list of possible candidates . If multiple candidates match the potential files , e . g . NiFTI - X ( see dcm2niix ) and NiFTI , then the first matching candidate is selected . If a ' format _ name ' was specified when the ...
if self . _format is not None : raise ArcanaFileFormatError ( "Format has already been set for {}" . format ( self ) ) matches = [ c for c in candidates if c . matches ( self ) ] if not matches : raise ArcanaFileFormatError ( "None of the candidate file formats ({}) match {}" . format ( ', ' . join ( str ( c ) ...
def decode_offset_response(cls, response):
    """Decode OffsetResponse into OffsetResponsePayloads.

    Arguments:
        response: OffsetResponse

    Returns:
        list of OffsetResponsePayloads
    """
    payloads = []
    for topic, partitions in response.topics:
        for partition, error, offsets in partitions:
            payloads.append(
                kafka.structs.OffsetResponsePayload(
                    topic, partition, error, tuple(offsets)))
    return payloads
def watch_prefix(self, key_prefix, **kwargs):
    """The same as ``watch``, but watches the whole range of keys
    sharing ``key_prefix``."""
    # The range end is the prefix with its last byte incremented, so the
    # watch covers every key that starts with the prefix.
    range_end = _increment_last_byte(key_prefix)
    kwargs['range_end'] = range_end
    return self.watch(key_prefix, **kwargs)
def flash(self, flash=True):
    """Turn on or off flashing of the device's LED for physical
    identification purposes."""
    action = (canstat.kvLED_ACTION_ALL_LEDS_ON if flash
              else canstat.kvLED_ACTION_ALL_LEDS_OFF)
    try:
        kvFlashLeds(self._read_handle, action, 30000)
    except (CANLIBError, NotImplementedError) as exc:
        # Best-effort: some hardware does not support LED control.
        log.error('Could not flash LEDs (%s)', exc)
def init(lang, domain):
    """Initialize translations for a language code."""
    trans_dir = _get_translations_dir()
    dom = _get_translations_domain(domain)
    pot_path = os.path.join(trans_dir, f'{dom}.pot')
    return _run(f'init -i {pot_path} -d {trans_dir} -l {lang} --domain={dom}')
def onBinaryMessage(self, msg, fromClient):
    """Copy an incoming binary message into the queue, silently dropping
    it when the queue is full."""
    payload = bytearray(msg)
    # Disabled debug output (was a no-op string literal in the original):
    # self.print_debug("message length: {}".format(len(payload)))
    # self.print_debug("message data: {}".format(hexlify(payload)))
    try:
        self.queue.put_nowait(payload)
    except asyncio.QueueFull:
        pass
def _assign_numbers ( self ) : """Assign numbers in preparation for validating these receipts . WARNING : Don ' t call the method manually unless you know what you ' re doing !"""
first = self . select_related ( 'point_of_sales' , 'receipt_type' ) . first ( ) next_num = Receipt . objects . fetch_last_receipt_number ( first . point_of_sales , first . receipt_type , ) + 1 for receipt in self . filter ( receipt_number__isnull = True ) : # Atomically update receipt number Receipt . objects . fil...
def non_fluents(self) -> Dict[str, PVariable]:
    '''Returns non-fluent pvariables, keyed by their string representation.'''
    result = {}
    for pvar in self.pvariables:
        if pvar.is_non_fluent():
            result[str(pvar)] = pvar
    return result
def clone ( url , path = None , remove = True ) : """Clone a local repo from that URL to that path If path is not given , then use the git default : same as repo name If path is given and remove is True then the path is removed before cloning Because this is run from a script it is assumed that user should ...
clean = True if path and os . path . isdir ( path ) : if not remove : clean = False else : shutil . rmtree ( path ) if clean : stdout = run ( 'clone %s %s' % ( url , path or '' ) ) into = stdout . splitlines ( ) [ 0 ] . split ( "'" ) [ 1 ] path_to_clone = os . path . realpath ( into ...
def write(self):
    """Run the qvality program found on PATH with the configured options."""
    out_path = self.create_outfilepath(self.fn, self.outsuffix)
    args = ['qvality']
    args += self.qvalityoptions
    args += [self.scores['target']['fn'], self.scores['decoy']['fn'],
             '-o', out_path]
    subprocess.call(args)
def remove_watcher(self, issue, watcher):
    """Remove a user from an issue's watch list.

    :param issue: ID or key of the issue affected
    :param watcher: username of the user to remove from the watchers list
    :rtype: Response
    """
    url = self._get_url('issue/' + str(issue) + '/watchers')
    return self._session.delete(url, params={'username': watcher})
def start ( self ) : """Start discovering and listing to connections ."""
if self . _state == CLOSED : raise NSQException ( 'producer already closed' ) if self . is_running : self . logger . warn ( 'producer already started' ) return self . logger . debug ( 'starting producer...' ) self . _state = RUNNING for address in self . nsqd_tcp_addresses : address , port = address . s...
def static_get_type_attr(t, name):
    """Look up ``name`` on ``t``'s MRO without triggering the descriptor
    protocol (e.g. properties are returned as property objects)."""
    for klass in t.mro():
        namespace = vars(klass)
        if name in namespace:
            return namespace[name]
    raise AttributeError(name)
def _add_dictlist_to_database_via_load_in_file ( masterListIndex , log , dbTablename , dbSettings , dateModified = False ) : """* load a list of dictionaries into a database table with load data infile * * * Key Arguments : * * - ` ` masterListIndex ` ` - - the index of the sharedList of dictionary lists to pro...
log . debug ( 'starting the ``_add_dictlist_to_database_via_load_in_file`` function' ) global sharedList dictList = sharedList [ masterListIndex ] [ 0 ] count = sharedList [ masterListIndex ] [ 1 ] if count > totalCount : count = totalCount ltotalCount = totalCount # SETUP ALL DATABASE CONNECTIONS dbConn = database...
def find_bounding_indices ( arr , values , axis , from_below = True ) : """Find the indices surrounding the values within arr along axis . Returns a set of above , below , good . Above and below are lists of arrays of indices . These lists are formulated such that they can be used directly to index into a numpy...
# The shape of generated indices is the same as the input , but with the axis of interest # replaced by the number of values to search for . indices_shape = list ( arr . shape ) indices_shape [ axis ] = len ( values ) # Storage for the found indices and the mask for good locations indices = np . empty ( indices_shape ,...
def crt_prf_ftr_tc ( aryMdlRsp , aryTmpExpInf , varNumVol , varTr , varTmpOvsmpl , switchHrfSet , tplPngSize , varPar , dctPrm = None , lgcPrint = True ) : """Create all spatial x feature prf time courses . Parameters aryMdlRsp : 2d numpy array , shape [ n _ x _ pos * n _ y _ pos * n _ sd , n _ cond ] Respons...
# Identify number of unique features vecFeat = np . unique ( aryTmpExpInf [ : , 3 ] ) vecFeat = vecFeat [ np . nonzero ( vecFeat ) [ 0 ] ] # Preallocate the output array aryPrfTc = np . zeros ( ( aryMdlRsp . shape [ 0 ] , 0 , varNumVol ) , dtype = np . float32 ) # Loop over unique features for indFtr , ftr in enumerate...
def xlim_as_gps ( func ) : """Wrap ` ` func ` ` to handle pass limit inputs through ` gwpy . time . to _ gps `"""
@ wraps ( func ) def wrapped_func ( self , left = None , right = None , ** kw ) : if right is None and numpy . iterable ( left ) : left , right = left kw [ 'left' ] = left kw [ 'right' ] = right gpsscale = self . get_xscale ( ) in GPS_SCALES for key in ( 'left' , 'right' ) : if gpssc...
def get_dataset(self, key, info):
    """Load a dataset.

    Returns None when ``info['file_key']`` is not present in the file;
    otherwise returns the variable with its attributes replaced by
    ``info`` merged over the original attrs.
    """
    logger.debug('Reading %s.', key.name)
    file_key = info['file_key']
    try:
        variable = self.nc[file_key]
    except KeyError:
        return None
    info.update(variable.attrs)
    variable.attrs = info
    return variable
def parse_version(str_):
    """Parse a ``major.minor.patch`` version out of *str_*.

    :param str_: text containing a version declaration.
    :returns: the first version found, as a string.
    :raises KeyError: when no version can be found (a diagnostic is
        printed first; KeyError kept for backward compatibility).
    """
    # Escape the dots: the original pattern r"\d+.\d+.\d+" used bare '.',
    # which matches ANY character and would accept strings like '1a2b3'.
    matches = re.findall(r"\d+\.\d+\.\d+", str_)
    if matches:
        return matches[0]
    print("cannot parse string {}".format(str_))
    raise KeyError
def grad_numerical(self, x, func, epsilon=None):
    """Symmetric (central) finite-difference gradient of ``func`` at ``x``."""
    if epsilon is None:
        eps = 1e-8 * (1 + abs(x))
    else:
        eps = epsilon
    n = len(x)
    grad = np.zeros(n)
    step = np.zeros(n)  # float is 1.6 times faster than int
    for i in rglen(x):
        step[i] = eps[i]
        grad[i] = (func(x + step) - func(x - step)) / (2 * eps[i])
        step[i] = 0
    return grad
async def getFile(self, file_id):
    """See: https://core.telegram.org/bots/api#getfile"""
    # NOTE: locals() is captured before any other local is created, so the
    # request payload contains exactly the method's arguments.
    params = _strip(locals())
    return await self._api_request('getFile', _rectify(params))
def _rebind_variables ( self , new_inputs ) : """Return self . _ expr with all variables rebound to the indices implied by new _ inputs ."""
expr = self . _expr # If we have 11 + variables , some of our variable names may be # substrings of other variable names . For example , we might have x _ 1, # x _ 10 , and x _ 100 . By enumerating in reverse order , we ensure that # every variable name which is a substring of another variable name is # processed after...
def sorted(self, by, **kwargs):
    """Return a copy of this array sorted by a column.

    Parameters
    ----------
    by : str
        Name of the column to sort by (e.g. 'time').
    """
    order = np.argsort(self[by], **kwargs)
    return self.__class__(
        self[order],
        h5loc=self.h5loc,
        split_h5=self.split_h5,
        name=self.name,
    )
def copy_fs_if_newer(
    src_fs,           # type: Union[FS, Text]
    dst_fs,           # type: Union[FS, Text]
    walker=None,      # type: Optional[Walker]
    on_copy=None,     # type: Optional[_OnCopy]
    workers=0,        # type: int
):
    # type: (...) -> None
    """Copy the contents of one filesystem to another, skipping files whose
    destination copy is at least as new as the source (delegates to
    ``copy_dir_if_newer`` over the root directories)."""
    return copy_dir_if_newer(
        src_fs, "/", dst_fs, "/",
        walker=walker, on_copy=on_copy, workers=workers,
    )
def get_subgraph ( self , starting_node , block_addresses ) : """Get a sub - graph out of a bunch of basic block addresses . : param CFGNode starting _ node : The beginning of the subgraph : param iterable block _ addresses : A collection of block addresses that should be included in the subgraph if there is ...
graph = networkx . DiGraph ( ) if starting_node not in self . graph : raise AngrCFGError ( 'get_subgraph(): the specified "starting_node" %s does not exist in the current CFG.' % starting_node ) addr_set = set ( block_addresses ) graph . add_node ( starting_node ) queue = [ starting_node ] while queue : node = ...
def FanOut ( self , obj , parent = None ) : """Expand values from various attribute types . Strings are returned as is . Dictionaries are returned with a key string , and an expanded set of values . Other iterables are expanded until they flatten out . Other items are returned in string format . Args : ...
# Catch cases where RDFs are iterable but return themselves . if parent and obj == parent : results = [ utils . SmartUnicode ( obj ) . strip ( ) ] elif isinstance ( obj , ( string_types , rdf_structs . EnumNamedValue ) ) : results = [ utils . SmartUnicode ( obj ) . strip ( ) ] elif isinstance ( obj , rdf_protod...
def append(self, value):
    """Validate ``value`` and add it to the end of the list."""
    valid_value = self._ensure_value_is_valid(value)
    return super(Collection, self).append(valid_value)
def is_type(type_, *p):
    """Return True when every argument in ``p`` has ``type_`` as its
    ``type_`` attribute.

    An empty ``p`` yields True.  Arguments lacking the attribute (or any
    error during comparison) yield False.
    """
    try:
        for operand in p:
            if operand.type_ != type_:
                return False
        return True
    except Exception:
        # The original used a bare 'except:', which also swallowed
        # SystemExit and KeyboardInterrupt.
        return False
def edges_between_two_vertices(self, vertex1, vertex2, keys=False):
    """Iterate over edges between two supplied vertices in the current
    :class:`BreakpointGraph`.

    Proxies a call to
    :meth:`Breakpoint._Breakpoint__edges_between_two_vertices`.

    :param vertex1: the first vertex
    :param vertex2: the second vertex
    :param keys: whether to yield edge keys as well
    """
    yield from self.__edges_between_two_vertices(
        vertex1=vertex1, vertex2=vertex2, keys=keys)
def run ( ) : """Installs required development dependencies . Uses git to checkout other modularcrypto repos for more accurate coverage data ."""
deps_dir = os . path . join ( build_root , 'modularcrypto-deps' ) if os . path . exists ( deps_dir ) : shutil . rmtree ( deps_dir , ignore_errors = True ) os . mkdir ( deps_dir ) try : print ( "Staging ci dependencies" ) _stage_requirements ( deps_dir , os . path . join ( package_root , 'requires' , 'ci' ) ...
def importGurobiSolution(self, grbmodel):
    """Import the solution from a gurobipy.Model object.

    Args:
        grbmodel: A solved :class:`gurobipy.Model` object.
    """
    # Variables with '$' in their name are AMPL-internal and skipped.
    assignments = (
        'let {} := {};'.format(var.VarName, var.X)
        for var in grbmodel.getVars()
        if '$' not in var.VarName
    )
    self.eval(''.join(assignments))
def i4_sobol_generate(dim_num, n, skip=1):
    """Generate a Sobol dataset.

    Parameters:
        Input, integer dim_num, the spatial dimension.
        Input, integer n, the number of points to generate.
        Input, integer skip, the number of initial points to skip.
        Output, real r(n, dim_num), the points.
    """
    r = np.full((n, dim_num), np.nan)
    for j in range(n):
        row, _next_seed = i4_sobol(dim_num, j + skip)
        r[j, 0:dim_num] = row
    return r
def segments ( self ) : """Return a list of ordered tuple objects , representing contiguous occupied data addresses . Each tuple has a length of two and follows the semantics of the range and xrange objects . The second entry of the tuple is always an integer greater than the first entry ."""
addresses = self . addresses ( ) if not addresses : return [ ] elif len ( addresses ) == 1 : return ( [ ( addresses [ 0 ] , addresses [ 0 ] + 1 ) ] ) adjacent_differences = [ ( b - a ) for ( a , b ) in zip ( addresses [ : - 1 ] , addresses [ 1 : ] ) ] breaks = [ i for ( i , x ) in enumerate ( adjacent_differenc...
def server_by_name(self, name):
    '''Find a server by its name'''
    server_info = self.server_list().get(name, {})
    return self.server_show_libcloud(server_info.get('id', ''))
def remove_users(self, user_ids, nid=None):
    """Remove users from a network `nid`.

    :type user_ids: list of str
    :param user_ids: a list of user ids, as returned by get_all_users.
    :type nid: str
    :param nid: the ID of the network to remove users from.
    """
    response = self.request(
        method="network.update",
        data={"remove_users": user_ids},
        nid=nid,
        nid_key="id",
    )
    return self._handle_error(response, "Could not remove users.")
def ReadVarInt ( self , max = sys . maxsize ) : """Read a variable length integer from the stream . The NEO network protocol supports encoded storage for space saving . See : http : / / docs . neo . org / en - us / node / network - protocol . html # convention Args : max ( int ) : ( Optional ) maximum number ...
fb = self . ReadByte ( ) if fb is 0 : return fb value = 0 if hex ( fb ) == '0xfd' : value = self . ReadUInt16 ( ) elif hex ( fb ) == '0xfe' : value = self . ReadUInt32 ( ) elif hex ( fb ) == '0xff' : value = self . ReadUInt64 ( ) else : value = fb if value > max : raise Exception ( "Invalid form...
def _find_matching_collections_externally ( collections , record ) : """Find matching collections with percolator engine . : param collections : set of collections where search : param record : record to match"""
index , doc_type = RecordIndexer ( ) . record_to_index ( record ) body = { "doc" : record . dumps ( ) } results = current_search_client . percolate ( index = index , doc_type = doc_type , allow_no_indices = True , ignore_unavailable = True , body = body ) prefix_len = len ( 'collection-' ) for match in results [ 'match...
def float(self, item, default=None):
    """Return value of key as a float.

    :param item: key of value to transform
    :param default: value to return if item does not exist
    :return: float of value
    """
    try:
        value = self.__getattr__(item)
    except AttributeError:
        if default is not None:
            return default
        raise
    return float(value)
def find_binutils_libs ( self , libdir , lib_ext ) : """Find Binutils libraries ."""
bfd_expr = re . compile ( "(lib(?:bfd)|(?:opcodes))(.*?)\%s" % lib_ext ) libs = { } for root , dirs , files in os . walk ( libdir ) : for f in files : m = bfd_expr . search ( f ) if m : lib , version = m . groups ( ) fp = os . path . join ( root , f ) if version i...
def insert_loudest_triggers_option_group ( parser , coinc_options = True ) : """Add options to the optparser object for selecting templates in bins . Parameters parser : object OptionParser instance ."""
opt_group = insert_bank_bins_option_group ( parser ) opt_group . title = "Options for finding loudest triggers." if coinc_options : opt_group . add_argument ( "--statmap-file" , default = None , help = "HDF format clustered coincident trigger " "result file." ) opt_group . add_argument ( "--statmap-group" , def...
def cpp_checker(code, working_directory):
    """Return checker."""
    # Honor the CXX environment variable, falling back to g++.
    compiler_cmd = [os.getenv('CXX', 'g++'), '-std=c++0x']
    return gcc_checker(code, '.cpp', compiler_cmd + INCLUDE_FLAGS,
                       working_directory=working_directory)
def validate_book ( body ) : '''This does not only accept / refuse a book . It also returns an ENHANCED version of body , with ( mostly fts - related ) additional fields . This function is idempotent .'''
if '_language' not in body : raise ValueError ( 'language needed' ) if len ( body [ '_language' ] ) > 2 : raise ValueError ( 'invalid language: %s' % body [ '_language' ] ) # remove old _ text _ * fields for k in body . keys ( ) : if k . startswith ( '_text' ) : del ( body [ k ] ) allfields = collec...
def logger(self):
    """The logger for this class."""
    # sys._getframe is CPython-specific, but astonishingly faster than the
    # portable alternatives.
    caller = sys._getframe(1)
    module_name = _get_module(caller.f_code.co_filename)
    return logging.getLogger(module_name)
def modname_source_to_target ( self , spec , modname , source ) : """Create a target file name from the input module name and its source file name . The result should be a path relative to the build _ dir , and this is derived directly from the modname with NO implicit convers of path separators ( i . e . ' /...
loaderplugin_registry = spec . get ( CALMJS_LOADERPLUGIN_REGISTRY ) if '!' in modname and loaderplugin_registry : handler = loaderplugin_registry . get ( modname ) if handler : return handler . modname_source_to_target ( self , spec , modname , source ) if ( source . endswith ( self . filename_suffix ) ...
def compile ( self , compass ) : """Calls the compass script specified in the compass extension with the paths provided by the config . rb ."""
try : output = subprocess . check_output ( [ compass . compass_path , 'compile' , '-q' ] , cwd = self . base_dir ) os . utime ( self . dest , None ) compass . log . debug ( output ) except OSError , e : if e . errno == errno . ENOENT : compass . log . error ( "Compass could not be found in the P...
def _compile_operation_rule ( self , rule , left , right , result_class ) : """Compile given operation rule , when possible for given compination of operation operands ."""
# Make sure variables always have constant with correct datatype on the # opposite side of operation . if isinstance ( left , VariableRule ) and isinstance ( right , ( ConstantRule , ListRule ) ) : return self . _cor_compile ( rule , left , right , result_class , clean_variable ( left . value ) , self . compilation...
def get_token_async ( self , refresh = False ) : """Get an authentication token . The token is cached in memcache , keyed by the scopes argument . Uses a random token expiration headroom value generated in the constructor to eliminate a burst of GET _ ACCESS _ TOKEN API requests . Args : refresh : If True...
key = '%s,%s' % ( self . service_account_id , ',' . join ( self . scopes ) ) ts = yield _AE_TokenStorage_ . get_by_id_async ( key , use_cache = True , use_memcache = self . retry_params . memcache_access_token , use_datastore = self . retry_params . save_access_token ) if refresh or ts is None or ts . expires < ( time ...
def _sample_points ( X , centers , oversampling_factor , random_state ) : r"""Sample points independently with probability . . math : : p _ x = \ frac { \ ell \ cdot d ^ 2 ( x , \ mathcal { C } ) } { \ phi _ X ( \ mathcal { C } ) }"""
# re - implement evaluate _ cost here , to avoid redundant computation distances = pairwise_distances ( X , centers ) . min ( 1 ) ** 2 denom = distances . sum ( ) p = oversampling_factor * distances / denom draws = random_state . uniform ( size = len ( p ) , chunks = p . chunks ) picked = p > draws new_idxs , = da . wh...
def new(cls) -> 'Generator':
    """Creates and returns a random generator point that satisfies BLS
    algorithm requirements.

    :return: BLS generator
    """
    log = logging.getLogger(__name__)
    log.debug("Generator::new: >>>")
    handle = c_void_p()
    do_call(cls.new_handler, byref(handle))
    res = cls(handle)
    log.debug("Generator::new: <<< res: %r", res)
    return res
def set_mode_flag ( self , flag , enable ) : '''Enables / disables MAV _ MODE _ FLAG @ param flag The mode flag , see MAV _ MODE _ FLAG enum @ param enable Enable the flag , ( True / False )'''
if self . mavlink10 ( ) : mode = self . base_mode if ( enable == True ) : mode = mode | flag elif ( enable == False ) : mode = mode & ~ flag self . mav . command_long_send ( self . target_system , self . target_component , mavlink . MAV_CMD_DO_SET_MODE , 0 , mode , 0 , 0 , 0 , 0 , 0 , 0 ...
def state_to_modelparams(self, state):
    """Convert a QuTiP-represented state into a model parameter vector.

    :param qutip.Qobj state: State to be converted.
    :rtype: :class:`np.ndarray`
    :return: The representation of the given state in this basis, as a
        vector of real parameters.
    """
    basis = self.flat()
    flat_data = state.data.todense().view(np.ndarray).flatten()
    # NB: assumes Hermitian state and basis!
    projection = np.dot(basis.conj(), flat_data)
    return np.real(projection)
def stmt_type(obj, mk=True):
    """Return a standardized, backwards-compatible object type.

    For Statement instances (with ``mk`` true) the class itself is
    returned; otherwise the class name string is returned, keeping type
    comparisons and matches-keys backwards compatible.
    """
    cls = type(obj)
    if mk and isinstance(obj, Statement):
        return cls
    return cls.__name__
def apply_patch ( self , patch ) : """Applies given patch . : param patch : Patch . : type patch : Patch : return : Method success . : rtype : bool"""
history_file = File ( self . __history_file ) patches_history = history_file . cache ( ) and [ line . strip ( ) for line in history_file . content ] or [ ] if patch . uid not in patches_history : LOGGER . debug ( "> Applying '{0}' patch!" . format ( patch . name ) ) if patch . apply ( ) : history_file ....
def _collect_settings ( self , apps ) : """Iterate over given apps or INSTALLED _ APPS and collect the content of each ' s settings file , which is expected to be in JSON format ."""
contents = { } if apps : for app in apps : if app not in settings . INSTALLED_APPS : raise CommandError ( "Application '{0}' not in settings.INSTALLED_APPS" . format ( app ) ) else : apps = settings . INSTALLED_APPS for app in apps : module = import_module ( app ) for module_dir in m...
def do_delete(endpoint, access_token):
    '''Do an HTTP DELETE request and return the response.

    (The original docstring said "GET", but the code performs a DELETE.)

    Args:
        endpoint (str): Azure Resource Manager management endpoint.
        access_token (str): A valid Azure authentication token.

    Returns:
        HTTP response.
    '''
    headers = {
        "Authorization": 'Bearer ' + access_token,
        'User-Agent': get_user_agent(),
    }
    return requests.delete(endpoint, headers=headers)
def gauss_box_model(x, amplitude=1.0, mean=0.0, stddev=1.0, hpix=0.5):
    """Integrate a Gaussian profile over a pixel of half-width ``hpix``."""
    z = (x - mean) / stddev
    half = hpix / stddev
    # Difference of the normal CDF at the pixel edges.
    return amplitude * (norm.cdf(z + half) - norm.cdf(z - half))
def check_db_for_missing_notifications ( ) : """Check the database for missing notifications ."""
aws_access_key_id = os . environ [ 'aws_access_key_id' ] aws_secret_access_key = os . environ [ 'aws_secret_access_key' ] if config . getboolean ( 'Shell Parameters' , 'launch_in_sandbox_mode' ) : conn = MTurkConnection ( aws_access_key_id = aws_access_key_id , aws_secret_access_key = aws_secret_access_key , host =...
def configure_default_logger ( self , log_freq = 'midnight' , log_total = 30 , log_level = 'INFO' , log_format = ReportingFormats . DEFAULT . value , custom_args = '' ) : """default logger that every Prosper script should use ! ! Args : log _ freq ( str ) : TimedRotatingFileHandle _ str - - https : / / docs . p...
# # Override defaults if required # # log_freq = self . config . get_option ( 'LOGGING' , 'log_freq' , None , log_freq ) log_total = self . config . get_option ( 'LOGGING' , 'log_total' , None , log_total ) # # Set up log file handles / name # # log_filename = self . log_name + '.log' log_abspath = path . join ( self ....
def get_resources ( self , ids , cache = True ) : """Retrieve ecs resources for serverless policies or related resources Requires arns in new format . https : / / docs . aws . amazon . com / AmazonECS / latest / userguide / ecs - resource - ids . html"""
cluster_resources = { } for i in ids : _ , ident = i . rsplit ( ':' , 1 ) parts = ident . split ( '/' , 2 ) if len ( parts ) != 3 : raise PolicyExecutionError ( "New format ecs arn required" ) cluster_resources . setdefault ( parts [ 1 ] , [ ] ) . append ( parts [ 2 ] ) results = [ ] client = lo...
def reshape(attrs, inputs, proto_obj):
    """Reshape the given array by the shape attribute."""
    # With a single input, the shape comes from the attrs unchanged.
    if len(inputs) == 1:
        return 'reshape', attrs, inputs[0]
    raw_shape = proto_obj._params[inputs[1].name].asnumpy()
    target_shape = [int(dim) for dim in list(raw_shape)]
    return 'reshape', {'shape': target_shape}, inputs[:1]
def libvlc_media_list_event_manager(p_ml):
    '''Get libvlc_event_manager from this media list instance.
    The p_event_manager is immutable, so you don't have to hold the lock.
    @param p_ml: a media list instance.
    @return: libvlc_event_manager.
    '''
    f = _Cfunctions.get('libvlc_media_list_event_manager', None)
    if f is None:
        f = _Cfunction('libvlc_media_list_event_manager', ((1,),),
                       class_result(EventManager), ctypes.c_void_p, MediaList)
    return f(p_ml)
def decode ( self ) : """Decompress compressed UTF16 value ."""
hi = self . enc_byte ( ) flagbits = 0 while self . encpos < len ( self . encdata ) : if flagbits == 0 : flags = self . enc_byte ( ) flagbits = 8 flagbits -= 2 t = ( flags >> flagbits ) & 3 if t == 0 : self . put ( self . enc_byte ( ) , 0 ) elif t == 1 : self . put ( s...
def print_input_output(opts):
    """Print the input and output locations to the console.

    :param opts: namespace that contains printable 'input' and 'output'
        fields (plus an 'is_dir' flag).
    """
    if opts.is_dir:
        print(f"Root input directory:\t{opts.input}")
        print(f"Outputting to:\t\t{opts.output}\n")
    else:
        print(f"Input file:\t\t{opts.input}")
        print(f"Outputting to:\t\t{opts.output}{opts.input}\n")
def check_wide_data_for_blank_choices(choice_col, wide_data):
    """Check `wide_data` for null values in the choice column, raising a
    helpful ValueError if any are found.

    Parameters
    ----------
    choice_col : str.
        Denotes the column in `wide_data` that records each observation's
        choice.
    """
    if wide_data[choice_col].isnull().any():
        raise ValueError(
            "One or more of the values in wide_data[choice_col] is null."
            " Remove null values in the choice column or fill them in.")
    return None
def _set_version ( self , v , load = False ) : """Setter method for version , mapped from YANG variable / interface / port _ channel / hide _ vrrp _ holer / vrrp / version ( uint8) If this variable is read - only ( config : false ) in the source YANG file , then _ set _ version is considered as a private meth...
parent = getattr ( self , "_parent" , None ) if parent is not None and load is False : raise AttributeError ( "Cannot set keys directly when" + " within an instantiated list" ) if hasattr ( v , "_utype" ) : v = v . _utype ( v ) try : t = YANGDynClass ( v , base = RestrictedClassType ( base_type = Restricted...
def patch ( self , item , byte_order = BYTEORDER ) : """Returns a memory : class : ` Patch ` for the given * item * that shall be patched in the ` data source ` . : param item : item to patch . : param byte _ order : encoding : class : ` Byteorder ` for the item . : type byte _ order : : class : ` Byteorder...
# Re - index the data object self . index_data ( ) if is_container ( item ) : length = item . container_size ( ) if length [ 1 ] is not 0 : # Incomplete container raise ContainerLengthError ( item , length ) field = item . first_field ( ) if field is None : # Empty container ? return Non...
def issue_date(self):
    """Date when the DOI was issued (:class:`datetime.datetime.Datetime`).

    Scans the record's date entries and parses the first one whose
    ``@dateType`` is ``'Issued'``; returns None implicitly when no such
    entry exists.
    """
    for entry in _pluralize(self._r['dates'], 'date'):
        if entry['@dateType'] == 'Issued':
            # Dates are stored as ISO-style text, e.g. "2015-03-02".
            return datetime.datetime.strptime(entry['#text'], '%Y-%m-%d')
def action(self, action_id, **kwargs):
    """Query an action, passing the action's parameters as keyword arguments.

    Two keyword arguments are consumed here rather than forwarded:
    ``method`` ('GET' by default, or 'POST') selects the HTTP method, and
    ``encoding`` ('utf-8' by default) selects the response character set.
    Everything else in ``kwargs`` is handed to :meth:`request` as the
    action's parameters.
    """
    # pop() both reads and removes the control keywords so they are not
    # forwarded as action parameters.
    method = kwargs.pop('method', 'GET')
    encoding = kwargs.pop('encoding', 'utf-8')
    return self.request('actions/' + action_id, method, kwargs, False, encoding)
def check_and_set_unreachability ( self , hosts , services ) : """Check if all dependencies are down , if yes set this object as unreachable . todo : this function do not care about execution _ failure _ criteria ! : param hosts : hosts objects , used to get object in act _ depend _ of : type hosts : aligna...
parent_is_down = [ ] for ( dep_id , _ , _ , _ ) in self . act_depend_of : if dep_id in hosts : dep = hosts [ dep_id ] else : dep = services [ dep_id ] if dep . state in [ 'd' , 'DOWN' , 'c' , 'CRITICAL' , 'u' , 'UNKNOWN' , 'x' , 'UNREACHABLE' ] : parent_is_down . append ( True ) ...
def split_none(self):
    "Skip splitting: use all the data for training with an empty validation set."
    # Index with an empty list to produce a zero-length view of the data.
    empty_valid = self[[]]
    empty_valid.ignore_empty = True
    return self._split(self.path, self, empty_valid)
def get_timestamp(str_len=13):
    """Return the current Unix time as a digit string of length ``str_len``.

    ``str_len`` must be an integer in the range 1-16 inclusive; any other
    value raises :class:`ParamsError`.
    """
    # Guard clause: reject anything that is not an int in (0, 17).
    if not isinstance(str_len, integer_types) or not 0 < str_len < 17:
        raise ParamsError("timestamp length can only between 0 and 16.")
    # time.time() looks like "1622547800.123456"; dropping the dot yields
    # a pure digit string which is then truncated to the requested length.
    return builtin_str(time.time()).replace(".", "")[:str_len]
def pretty_spaces(level):
    """Return a newline followed by indentation spaces.

    :type level: int or None
    :param level: nesting depth; None disables pretty printing entirely
    :rtype: unicode
    :return: empty string when level is None, otherwise an optional
        newline (only for level >= 0) plus ``INDENT * level`` spaces
    """
    if level is None:
        return u''
    newline = os.linesep if level >= 0 else u''
    return newline + u' ' * (INDENT * level)
def save_semantic_data_for_state ( state , state_path_full ) : """Saves the semantic data in a separate json file . : param state : The state of which the script file should be saved : param str state _ path _ full : The path to the file system storage location of the state"""
destination_script_file = os . path . join ( state_path_full , SEMANTIC_DATA_FILE ) try : storage_utils . write_dict_to_json ( state . semantic_data , destination_script_file ) except IOError : logger . exception ( "Storing of semantic data for state {0} failed! Destination path: {1}" . format ( state . get_pat...
def _sysfs_attr(name, value=None, log_lvl=None, log_msg=None):
    '''Simple wrapper with logging around sysfs.attr.

    ``name`` may be a single path string or a list of path components;
    components are joined before calling the ``sysfs.attr`` execution
    module. On failure, ``log_msg`` is logged at ``log_lvl`` when both
    are supplied.
    '''
    components = [name] if isinstance(name, six.string_types) else name
    result = __salt__['sysfs.attr'](os.path.join(*components), value)
    if not result and log_lvl is not None and log_msg is not None:
        log.log(LOG[log_lvl], log_msg)
    return result
def rotation_df(ATT):
    '''Return the current DCM rotation matrix built from the ATT message's
    Roll/Pitch/Yaw Euler angles (given in degrees).'''
    dcm = Matrix3()
    dcm.from_euler(radians(ATT.Roll), radians(ATT.Pitch), radians(ATT.Yaw))
    return dcm
def ssl_required ( allow_non_ssl = False ) : """Views decorated with this will always get redirected to https except when allow _ non _ ssl is set to true ."""
def wrapper ( view_func ) : def _checkssl ( request , * args , ** kwargs ) : # allow _ non _ ssl = True lets non - https requests to come # through to this view ( and hence not redirect ) if hasattr ( settings , 'SSL_ENABLED' ) and settings . SSL_ENABLED and not request . is_secure ( ) and not allow_non...
def create_temporary_table(self, table_name, custom_sql):
    """Create a temporary table from a SQL query, to be used as the basis
    for executing expectations.

    WARNING: this feature is new in v0.4. It hasn't been tested in all SQL
    dialects, and may change based on community feedback.

    :param table_name: name of the temporary table to create
    :param custom_sql: the SELECT statement whose result populates the table
    """
    ddl = "CREATE TEMPORARY TABLE {table_name} AS {custom_sql}".format(
        table_name=table_name, custom_sql=custom_sql)
    self.engine.execute(ddl)
def last_first_initial(self):
    """Return a name in the format of: Lastname, F [(Nickname)]

    Note: a trailing space always follows the last/initial part, and the
    nickname part (when present) also carries a trailing space.
    """
    if self.first_name:
        initial_part = ", " + self.first_name[:1] + "."
    else:
        initial_part = ""
    result = "{}{} ".format(self.last_name, initial_part)
    if self.nickname:
        result += "({}) ".format(self.nickname)
    return result
def stack_trace ( depth = None ) : """returns a print friendly stack trace at the current frame , without aborting the application . : param depth : The depth of the stack trace . if omitted , the entire stack will be printed . usage : : print stack _ trace ( 10)"""
frames = inspect . stack ( ) [ 2 : ] if depth : frames = frames [ : depth ] result = StringIO ( ) result . write ( "----------------------------------------------------\n" ) for ( frame , file , line , context , code , status ) in frames : result . write ( "In %s from %s\n%s %s" % ( context , file , line , "\n"...
def addDataToQueue(self, displacement, reset=False):
    """Add the given displacement to the region's internal queue.

    Calls to compute will cause items in the queue to be dequeued in FIFO
    order.

    :param displacement: two floats representing the translation vector
        [dx, dy]
    :param reset: whether a reset signal should accompany this entry
        (coerced to bool)
    """
    entry = {
        "dataOut": list(displacement),
        "reset": bool(reset),
    }
    self.queue.appendleft(entry)
def plot_losses ( self , skip_start : int = 0 , skip_end : int = 0 , return_fig : bool = None ) -> Optional [ plt . Figure ] : "Plot training and validation losses ."
fig , ax = plt . subplots ( 1 , 1 ) losses = self . _split_list ( self . losses , skip_start , skip_end ) iterations = self . _split_list ( range_of ( self . losses ) , skip_start , skip_end ) ax . plot ( iterations , losses , label = 'Train' ) val_iter = self . _split_list_val ( np . cumsum ( self . nb_batches ) , ski...
def __grant_generate ( grant , database , user , host = 'localhost' , grant_option = False , escape = True , ssl_option = False ) : '''Validate grants and build the query that could set the given grants Note that this query contains arguments for user and host but not for grants or database .'''
# TODO : Re - order the grant so it is according to the # SHOW GRANTS for xxx @ yyy query ( SELECT comes first , etc ) grant = re . sub ( r'\s*,\s*' , ', ' , grant ) . upper ( ) grant = __grant_normalize ( grant ) db_part = database . rpartition ( '.' ) dbc = db_part [ 0 ] table = db_part [ 2 ] if escape : if dbc !...
def supported_languages(self, task=None):
    """Languages that are covered by a specific task.

    Args:
        task (string): Task name; when falsy, report the languages of all
            language-prefixed collections instead.
    """
    if not task:
        # No task given: derive language names from the collection titles
        # of every language-prefixed collection.
        return [c.name.split()[0]
                for c in self.collections()
                if Downloader.LANG_PREFIX in c.id]
    collection = self.get_collection(task=task)
    # Package ids look like "<prefix>.<iso-code>..."; map the iso code
    # to a human-readable language name.
    return [isoLangs[pkg.id.split('.')[1]]["name"]
            for pkg in collection.packages]
def connect ( self , username = None , passcode = None , wait = False , headers = None , ** keyword_headers ) : """Start a connection . : param str username : the username to connect with : param str passcode : the password used to authenticate with : param bool wait : if True , wait for the connection to be ...
cmd = CMD_STOMP headers = utils . merge_headers ( [ headers , keyword_headers ] ) headers [ HDR_ACCEPT_VERSION ] = self . version if self . transport . vhost : headers [ HDR_HOST ] = self . transport . vhost if username is not None : headers [ HDR_LOGIN ] = username if passcode is not None : headers [ HDR_P...
def smart_search ( cls , query_string , search_options = None , extra_query = None ) : """Perform a smart VRF search . Maps to the function : py : func : ` nipap . backend . Nipap . smart _ search _ vrf ` in the backend . Please see the documentation for the backend function for information regarding input ...
if search_options is None : search_options = { } xmlrpc = XMLRPCConnection ( ) try : smart_result = xmlrpc . connection . smart_search_vrf ( { 'query_string' : query_string , 'search_options' : search_options , 'auth' : AuthOptions ( ) . options , 'extra_query' : extra_query } ) except xmlrpclib . Fault as xml_...
def plural(self, text, count=None):
    """Return the plural of text.

    If count is supplied, then return text unchanged if count is one of:
    1, a, an, one, each, every, this, that; otherwise return the plural.

    Whitespace at the start and end is preserved.
    """
    pre, word, post = self.partition_word(text)
    if not word:
        return text
    # Try the specialised inflections first; fall back to the generic
    # noun pluralisation.
    inflected = (self._pl_special_adjective(word, count)
                 or self._pl_special_verb(word, count)
                 or self._plnoun(word, count))
    return "{}{}{}".format(pre, self.postprocess(word, inflected), post)
def move ( self , target ) : """Moves this DriveItem to another Folder . Can ' t move between different Drives . : param target : a Folder , Drive item or Item Id string . If it ' s a drive the item will be moved to the root folder . : type target : drive . Folder or DriveItem or str : return : Success / ...
if isinstance ( target , Folder ) : target_id = target . object_id elif isinstance ( target , Drive ) : # we need the root folder id root_folder = target . get_root_folder ( ) if not root_folder : return False target_id = root_folder . object_id elif isinstance ( target , str ) : target_id =...
def _main ( ) : """Some demo ."""
if sys . argv [ 1 : ] == [ "test" ] : for k , v in sorted ( globals ( ) . items ( ) ) : if not k . startswith ( "test_" ) : continue print ( "running: %s()" % k ) v ( ) print ( "ok." ) sys . exit ( ) elif sys . argv [ 1 : ] == [ "debug_shell" ] : debug_shell ( locals ...
def EncodeForCSV(x):
    """Encode one text value for CSV output.

    Returns the UTF-8 encoded bytes of *x*. Per RFC 4180, a field that
    contains a comma, a double quote, or a line break is wrapped in double
    quotes, with embedded quotes doubled.

    :param x: text value to encode
    :return: the encoded field as bytes
    """
    k = x.encode('utf-8')
    # Test membership on the text value, not the encoded bytes: on
    # Python 3, `',' in k` would raise TypeError (str vs bytes). Line
    # breaks also force quoting, which the old code missed.
    if any(ch in x for ch in (',', '"', '\n', '\r')):
        return b'"' + k.replace(b'"', b'""') + b'"'
    return k
def from_string(cls, s, space):
    """Produce a TopNumber by hashing a string.

    :param s: text or bytes to hash; text is UTF-8 encoded first, since
        hashlib requires bytes on Python 3 (passing str raised TypeError).
    :param space: forwarded unchanged to :meth:`from_hex`.
    :return: whatever ``cls.from_hex`` produces for the SHA-1 hex digest.
    """
    import hashlib
    if not isinstance(s, bytes):
        s = s.encode('utf-8')
    hs = hashlib.sha1(s).hexdigest()
    return cls.from_hex(hs, space)
def children(self, p_todo, p_only_direct=False):
    """Returns a list of child todos that the given todo (in)directly
    depends on.

    :param p_todo: the todo whose dependencies are looked up (keyed by
        its hash in the dependency graph)
    :param p_only_direct: when True, only immediate children are returned
    """
    neighbor_hashes = self._depgraph.outgoing_neighbors(
        hash(p_todo), not p_only_direct)
    return [self._tododict[h] for h in neighbor_hashes]
def histogram_phase ( phase_slices , phase , histbins = 200 , show_plot = False ) : """histograms the phase slices such as to build a histogram of the position distribution at each phase value . Parameters phase _ slices : ndarray 2d array containing slices from many oscillations at each phase phase : nda...
counts_array = _np . zeros ( [ len ( phase ) , histbins ] ) histedges = [ phase_slices . min ( ) , phase_slices . max ( ) ] for i , phase_slice in enumerate ( phase_slices ) : # for each value of phase counts , bin_edges = _np . histogram ( phase_slice , bins = histbins , range = histedges ) # histogram the pos...
def resolve_format(format, path):
    """Looks at a file's extension and format (if any) and returns format.

    An explicit ``format`` wins (lowercased); otherwise the format is
    inferred from the path's extension ('.yml'/'.yaml' -> 'yaml',
    '.tsv' -> 'tsv'), and None is returned when nothing matches.
    """
    if format is not None:
        return format.lower()
    # The `.+` prefix requires at least one character before the extension,
    # so a bare ".yml" filename does not match.
    if re.match(r'.+\.(yml|yaml)$', path):
        return 'yaml'
    if re.match(r'.+\.tsv$', path):
        return 'tsv'
    return None