signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def ctrl_c(self, pre_dl=None, post_dl=None):
    """Press Ctrl+C, usually for copy.

    :param pre_dl: delay applied before the key combination.
    :param post_dl: delay applied after the key combination.
    """
    self.delay(pre_dl)
    self.k.press_key(self.k.control_key)
    self.k.tap_key("c")
    self.k.release_key(self.k.control_key)
    self.delay(post_dl)
def _load ( cls , prefix , user_agent_config_yaml , user_agent_lookup = None ) : # type : ( str , str , Optional [ str ] ) - > str """Load user agent YAML file Args : prefix ( str ) : Text to put at start of user agent user _ agent _ config _ yaml ( str ) : Path to user agent YAML file user _ agent _ lookup...
if not user_agent_config_yaml : user_agent_config_yaml = cls . default_user_agent_config_yaml logger . info ( 'No user agent or user agent config file given. Using default user agent config file: %s.' % user_agent_config_yaml ) if not isfile ( user_agent_config_yaml ) : raise UserAgentError ( "User_agent sh...
def ftr_process ( url = None , content = None , config = None , base_url = None ) : u"""process an URL , or some already fetched content from a given URL . : param url : The URL of article to extract . Can be ` ` None ` ` , but only if you provide both ` ` content ` ` and ` ` config ` ` parameters . : type ...
if url is None and content is None and config is None : raise RuntimeError ( 'At least one of url or the couple content/config ' 'argument must be present.' ) if content is not None and url is None and config is None : raise RuntimeError ( 'Passing content only will not give any result.' ) if content is None : ...
def _delete_dir ( self ) : """Delete old folder if exists before start build"""
if not self . auto and os . path . isdir ( self . meta . build_path + self . prgnam ) : shutil . rmtree ( self . meta . build_path + self . prgnam )
def k(self, symbol='', begin=None, end=None):
    """Read K-line (candlestick) data.

    :param symbol: security code to query.
    :param begin: start of the requested range.
    :param end: end of the requested range.
    :return: ``pd.DataFrame`` or ``None``.
    """
    with self.client.connect(*self.bestip):
        return self.client.get_k_data(symbol, begin, end)
def ReportConfiguration ( self , file ) : """: param file : Destination for report details : return : None"""
global encodingpar print >> file , BuildReportLine ( "FAM FILE" , self . fam_details ) print >> file , BuildReportLine ( "IMPUTE_ARCHIVES" , "%s:%s" % ( str ( self . chroms [ 0 ] ) , self . archives [ 0 ] ) ) idx = 0 for arch in self . archives [ 1 : ] : print >> file , BuildReportLine ( "" , "%s:%s" % ( str ( self...
def setup_method_options ( method , tuning_options ) : """prepare method specific options"""
kwargs = { } # pass size of parameter space as max iterations to methods that support it # it seems not all methods iterpret this value in the same manner maxiter = numpy . prod ( [ len ( v ) for v in tuning_options . tune_params . values ( ) ] ) kwargs [ 'maxiter' ] = maxiter if method in [ "Nelder-Mead" , "Powell" ] ...
def _get_field_schema(self):
    """Return the default field definitions for this query type.

    Switches the query to the detailed format, then returns the
    ``fields`` entry from the first result that carries one.
    """
    self.update_format(DetailedFormat())
    for result in self.execute():
        if 'fields' in result:
            return result['fields']
def init(driverName=None, debug=False):
    """Construct a new TTS engine instance or reuse the cached one.

    :param driverName: name of the platform-specific driver to use;
        ``None`` selects the default driver for the operating system.
    :param debug: enable debug output for the engine.
    :return: the shared ``Engine`` for *driverName*.
    """
    try:
        engine = _activeEngines[driverName]
    except KeyError:
        # First request for this driver: create and cache it.
        engine = Engine(driverName, debug)
        _activeEngines[driverName] = engine
    return engine
def star ( n , alpha = 'faced' , center = ( 1 , 1 ) ) : """Create the star points of various design matrices Parameters n : int The number of variables in the design Optional alpha : str Available values are ' faced ' ( default ) , ' orthogonal ' , or ' rotatable ' center : array A 1 - by - 2 array ...
# Star points at the center of each face of the factorial if alpha == 'faced' : a = 1 elif alpha == 'orthogonal' : nc = 2 ** n # factorial points nco = center [ 0 ] # center points to factorial na = 2 * n # axial points nao = center [ 1 ] # center points to axial design # value o...
async def get_authenticated_user ( self , http_client : httpclient . AsyncHTTPClient = None ) -> Dict [ str , Any ] : """Gets the OAuth authorized user and access token . This method should be called from the handler for your OAuth callback URL to complete the registration process . We run the callback with t...
handler = cast ( RequestHandler , self ) request_key = escape . utf8 ( handler . get_argument ( "oauth_token" ) ) oauth_verifier = handler . get_argument ( "oauth_verifier" , None ) request_cookie = handler . get_cookie ( "_oauth_request_token" ) if not request_cookie : raise AuthError ( "Missing OAuth request toke...
def set_cmap_cb(self, w, index):
    """Callback invoked when the user selects a new color map
    from the preferences pane."""
    chosen = cmap.get_names()[index]
    self.t_.set(color_map=chosen)
def _get_si ( ) : '''Authenticate with vCenter server and return service instance object .'''
url = config . get_cloud_config_value ( 'url' , get_configured_provider ( ) , __opts__ , search_global = False ) username = config . get_cloud_config_value ( 'user' , get_configured_provider ( ) , __opts__ , search_global = False ) password = config . get_cloud_config_value ( 'password' , get_configured_provider ( ) , ...
def score_large_straight_yatzy(dice: List[int]) -> int:
    """Score a large straight (2-3-4-5-6) according to yatzy rules.

    :param dice: the rolled dice values.
    :return: the dice sum (20) for a large straight, otherwise 0.
    """
    # Built-in set equality replaces the hand-rolled comparison helper.
    if set(dice) == {2, 3, 4, 5, 6}:
        return sum(dice)
    return 0
def _sheet_meta_from_prompts ( sheets , old_name , name , ct_paleo , ct_chron ) : """Guide the user to create a proper , standardized sheet name : param list sheets : Running list of sheet metadata : param str old _ name : Original sheet name : param str name : Data set name : param int ct _ paleo : Running...
cont = True # Loop until valid sheet name is built , or user gives up while cont : try : pc = input ( "Is this a (p)aleo or (c)hronology sheet?" ) . lower ( ) if pc in ( "p" , "c" , "paleo" , "chron" , "chronology" ) : tt = input ( "Is this a (d)istribution, (e)nsemble, (m)easurement, or...
def start_client ( self , event = None ) : """Negotiate a new SSH2 session as a client . This is the first step after creating a new L { Transport } . A separate thread is created for protocol negotiation . If an event is passed in , this method returns immediately . When negotiation is done ( successful or...
self . active = True if event is not None : # async , return immediately and let the app poll for completion self . completion_event = event self . start ( ) return # synchronous , wait for a result self . completion_event = event = threading . Event ( ) self . start ( ) Random . atfork ( ) while True : ...
def find_include_file(self, t):
    """Locate the file referenced by an ``#include`` preprocessor tuple.

    ``t[1]`` selects which search path list to use and ``t[2]`` is the
    file name.  Returns the resolved path, or ``None`` if not found.
    """
    fname = t[2]
    for directory in self.searchpath[t[1]]:
        if directory == os.curdir:
            candidate = fname
        else:
            candidate = os.path.join(directory, fname)
        if os.path.isfile(candidate):
            return candidate
    return None
def _unique_constrains(cls):
    """Collect all (single- and multi-column) unique constraints.

    Returns a list of column-name sets: one per multi-column
    ``UniqueConstraint`` in ``__table_args__``, plus a singleton set
    for each individually unique column.
    """
    unique = [
        {column.name for column in constraint.columns}
        for constraint in cls.__table_args__
        if isinstance(constraint, UniqueConstraint)
    ]
    unique.extend(
        {column.name} for column in cls.__table__.columns if column.unique
    )
    return unique
def validate(self, value):
    """Validate that *value* is an instance of the configured data type.

    :param value: model value.
    :return: result of the parent class validation.
    :raises BadValueError: when *value* is not of ``self.data_type``.
    """
    if value is not None and not isinstance(value, self.data_type):
        raise datastore_errors.BadValueError(
            "Property %s must be convertible to a %s instance (%s)"
            % (self.name, self.data_type, value))
    return super(JsonProperty, self).validate(value)
def coordinator_dead(self, error):
    """Mark the current coordinator as dead."""
    if self.coordinator_id is None:
        return
    log.warning("Marking the coordinator dead (node %s) for group %s: %s.",
                self.coordinator_id, self.group_id, error)
    self.coordinator_id = None
def _formatOntologyTermObject ( self , terms , element_type ) : """Formats the ontology term object for query"""
elementClause = None if not isinstance ( terms , collections . Iterable ) : terms = [ terms ] elements = [ ] for term in terms : if term . term_id : elements . append ( '?{} = <{}> ' . format ( element_type , term . term_id ) ) else : elements . append ( '?{} = <{}> ' . format ( element_type...
def encrypt(self, k, a, m):
    """Encrypt *m* with AES-GCM using key *k* and additional data *a*.

    :param k: encryption key.
    :param a: additional authenticated data.
    :param m: plaintext.
    :return: ``(iv, ciphertext, tag)`` tuple.
    """
    iv = _randombits(96)
    encryptor = Cipher(algorithms.AES(k), modes.GCM(iv),
                       backend=self.backend).encryptor()
    encryptor.authenticate_additional_data(a)
    ciphertext = encryptor.update(m) + encryptor.finalize()
    return (iv, ciphertext, encryptor.tag)
def addons ( cls , recurse = True ) : """Returns a dictionary containing all the available addons for this mixin class . If the optional recurse flag is set to True , then all the base classes will be searched for the given addon as well . : param recurse | < bool > : return { < str > name : < variant > add...
cls . initAddons ( ) prop = '_{0}__addons' . format ( cls . __name__ ) out = { } # lookup base classes if recurse : for base in cls . __bases__ : if issubclass ( base , AddonManager ) : out . update ( base . addons ( recurse ) ) # always use the highest level for any given key out . update ( get...
async def workerTypeHealth(self, *args, **kwargs):
    """Look up the resource health for a workerType.

    Gives output ``v1/health.json#``.  This method is ``experimental``.
    """
    endpoint = self.funcinfo["workerTypeHealth"]
    return await self._makeApiCall(endpoint, *args, **kwargs)
def submit_as_gauge_and_monotonic_count ( self , metric_suffix , metric , scraper_config ) : """submit a kube _ dns metric both as a gauge ( for compatibility ) and as a monotonic _ count"""
metric_name = scraper_config [ 'namespace' ] + metric_suffix for sample in metric . samples : # Explicit shallow copy of the instance tags _tags = list ( scraper_config [ 'custom_tags' ] ) for label_name , label_value in iteritems ( sample [ self . SAMPLE_LABELS ] ) : _tags . append ( '{}:{}' . format (...
def median(self, *args, **kwargs):
    """Masked median of the underlying raster.

    Thin wrapper around ``np.ma.median``; positional and keyword
    arguments (e.g. ``axis``, ``out``, ``overwrite_input``) are
    forwarded unchanged.
    """
    return np.ma.median(self.raster, *args, **kwargs)
def get_endpoint_map(self):
    """Return the device's API version and endpoint map."""
    log.debug("getting end points...")
    command, url = DEVICE_URLS["get_endpoint_map"]
    return self._exec(command, url)
def to ( self , * args ) : '''get / set the ' device ' to which messages are sent . Valid targets are : string filenames : ' / tmp / test . log ' remote hosts : ' pretoria : 1701' system devices : sys . stdout , sys . stderr special names : ' stdout ' file handles : open ( ' / tmp / test . log ' )'''
if len ( args ) : self . _logFile = args [ 0 ] if self . _logHandle and self . _logHandle != sys . stdout : self . _logHandle . close ( ) # if type ( self . _ logFile ) is types . FileType : if isinstance ( self . _logFile , IOBase ) : self . _logHandle = self . _logFile elif self . ...
def webui_schematics_panels_panel_properties_height ( self , ** kwargs ) : """Auto Generated Code"""
config = ET . Element ( "config" ) webui = ET . SubElement ( config , "webui" , xmlns = "http://tail-f.com/ns/webui" ) schematics = ET . SubElement ( webui , "schematics" ) panels = ET . SubElement ( schematics , "panels" ) panel = ET . SubElement ( panels , "panel" ) name_key = ET . SubElement ( panel , "name" ) name_...
def __add_symbols ( self , cmd ) : """Add all additional defined and undefined symbols ."""
if self . __config . define_symbols : symbols = self . __config . define_symbols cmd . append ( '' . join ( [ ' -D"%s"' % def_symbol for def_symbol in symbols ] ) ) if self . __config . undefine_symbols : un_symbols = self . __config . undefine_symbols cmd . append ( '' . join ( [ ' -U"%s"' % undef_symb...
def make_all_dirs ( path , mode = 0o777 ) : """Ensure local dir , with all its parent dirs , are created . Unlike os . makedirs ( ) , will not fail if the path already exists ."""
# Avoid races inherent to doing this in two steps ( check then create ) . # Python 3 has exist _ ok but the approach below works for Python 2 + 3. # https : / / stackoverflow . com / questions / 600268 / mkdir - p - functionality - in - python try : os . makedirs ( path , mode = mode ) except OSError as e : if ...
def _float ( value ) : """Conversion of state vector field , with automatic unit handling"""
if "[" in value : # There is a unit field value , sep , unit = value . partition ( "[" ) unit = sep + unit # As defined in the CCSDS Orbital Data Message Blue Book , the unit should # be the same as defined in table 3-3 which are for km and km / s for position and # velocity respectively . Thus , th...
def setup(app):
    '''Required Sphinx extension setup function.'''
    roles = (
        ('bokeh-commit', bokeh_commit),
        ('bokeh-issue', bokeh_issue),
        ('bokeh-pull', bokeh_pull),
        ('bokeh-tree', bokeh_tree),
    )
    for name, role in roles:
        app.add_role(name, role)
def check_service ( self , info ) : """Checks the network for a unique service name , modifying the ServiceInfo passed in if it is not unique ."""
now = current_time_millis ( ) next_time = now i = 0 while i < 3 : for record in self . cache . entries_with_name ( info . type ) : if record . type == _TYPE_PTR and not record . is_expired ( now ) and record . alias == info . name : if ( info . name . find ( '.' ) < 0 ) : info . ...
def unzip(input_layer, split_dim=0, num_splits=2):
    """Unzip this Tensor along *split_dim* into *num_splits* equal chunks.

    Example: ``[1, 2, 3, 4] -> [1, 3], [2, 4]`` with the defaults.
    """
    shape = input_layer.shape
    _check_split_dims(num_splits, split_dim, shape)
    chunks = functions.unzip(input_layer, split_dim, shape[split_dim], num_splits)
    return input_layer.with_sequence(chunks)
def unapostrophe(text):
    """Strip a trailing apostrophe (and optional 's') from *text*."""
    pattern = r'[%s]s?$' % ''.join(APOSTROPHES)
    return re.sub(pattern, '', text)
def clean_dictkeys ( ddict , exclusions = None ) : """Exclude chars in dict keys and return a clean dictionary ."""
exclusions = [ ] if exclusions is None else exclusions if not isinstance ( ddict , dict ) : return { } for key in list ( ddict . keys ( ) ) : if [ incl for incl in exclusions if incl in key ] : data = ddict . pop ( key ) clean_key = exclude_chars ( key , exclusions ) if clean_key : ...
def scp_put(files, remote_path=None, recursive=False, preserve_times=False,
            saltenv='base', **kwargs):
    '''
    .. versionadded:: 2019.2.0

    Transfer files and directories to a remote network device over SCP,
    using the netmiko connection arguments resolved from *kwargs*.
    '''
    conn_args = netmiko_args(**kwargs)
    conn_args['hostname'] = conn_args['host']
    kwargs.update(conn_args)
    return __salt__['scp.put'](files,
                               remote_path=remote_path,
                               recursive=recursive,
                               preserve_times=preserve_times,
                               saltenv=saltenv,
                               **kwargs)
def bind(self, sock):
    """Wrap the given socket in an SSL connection and return it."""
    if self.context is None:
        self.context = self.get_context()
    wrapped = SSLConnection(self.context, sock)
    self._environ = self.get_environ()
    return wrapped
def connect_put_node_proxy_with_path(self, name, path, **kwargs):
    """Connect PUT requests to the proxy of a Node.

    Synchronous by default; pass ``async_req=True`` to get the request
    thread back instead of the response data.
    """
    kwargs['_return_http_data_only'] = True
    # Both sync and async paths delegate to the same _with_http_info
    # call; only the return value semantics differ for the caller.
    return self.connect_put_node_proxy_with_path_with_http_info(name, path, **kwargs)
def get_tdms_files(directory):
    """Recursively find '.tdms' project files under *directory*.

    Fluorescence data trace files ending in ``_traces.tdms`` are
    excluded.  Returns a sorted list of paths.
    """
    root = pathlib.Path(directory).resolve()
    return sorted(
        p for p in root.rglob("*.tdms")
        if p.is_file() and not p.name.endswith("_traces.tdms")
    )
def reset_object(self, driver_wrapper=None):
    """Reset each page element object.

    :param driver_wrapper: driver wrapper instance to propagate to
        every page element (kept unchanged when falsy).
    """
    if driver_wrapper:
        self.driver_wrapper = driver_wrapper
    for page_element in self._page_elements:
        page_element.reset_object(driver_wrapper)
    self._web_elements = []
    self._page_elements = []
def seen_tasks(self):
    """Print the list of seen task types, one per line."""
    task_types = self._stub.seen_tasks(clearly_pb2.Empty()).task_types
    print('\n'.join(task_types))
def removeSinglePixels ( img ) : '''img - boolean array remove all pixels that have no neighbour'''
gx = img . shape [ 0 ] gy = img . shape [ 1 ] for i in range ( gx ) : for j in range ( gy ) : if img [ i , j ] : found_neighbour = False for ii in range ( max ( 0 , i - 1 ) , min ( gx , i + 2 ) ) : for jj in range ( max ( 0 , j - 1 ) , min ( gy , j + 2 ) ) : ...
def speakerDiarization ( filename , n_speakers , mt_size = 2.0 , mt_step = 0.2 , st_win = 0.05 , lda_dim = 35 , plot_res = False ) : '''ARGUMENTS : - filename : the name of the WAV file to be analyzed - n _ speakers the number of speakers ( clusters ) in the recording ( < = 0 for unknown ) - mt _ size ( opt )...
[ fs , x ] = audioBasicIO . readAudioFile ( filename ) x = audioBasicIO . stereo2mono ( x ) duration = len ( x ) / fs [ classifier_1 , MEAN1 , STD1 , classNames1 , mtWin1 , mtStep1 , stWin1 , stStep1 , computeBEAT1 ] = aT . load_model_knn ( os . path . join ( os . path . dirname ( os . path . realpath ( __file__ ) ) , ...
def extract_twin_values ( triples , traits , gender = None ) : """Calculate the heritability of certain traits in triplets . Parameters triples : ( a , b , " Female / Male " ) triples . The sample IDs are then used to query the traits dictionary . traits : sample _ id = > value dictionary Returns tuples...
# Construct the pairs of trait values traitValuesAbsent = 0 nanValues = 0 genderSkipped = 0 twinValues = [ ] for a , b , t in triples : if gender is not None and t != gender : genderSkipped += 1 continue if not ( a in traits and b in traits ) : traitValuesAbsent += 1 continue ...
def get_blocks_struct(self):
    """Return a dictionary mapping (x, y, z) coordinates to block ids.

    Coordinates advance y-fastest (0-127), then z (0-15), then x,
    matching the storage order of ``blocksList``.
    """
    blocks = {}
    x = y = z = 0
    for block_id in self.blocksList:
        blocks[(x, y, z)] = block_id
        y += 1
        if y > 127:
            y = 0
            z += 1
            if z > 15:
                z = 0
                x += 1
    return blocks
def _valid_ip ( ip_address ) : '''Check if the IP address is valid and routable Return either True or False'''
try : address = ipaddress . IPv4Address ( ip_address ) except ipaddress . AddressValueError : return False if address . is_unspecified or address . is_loopback or address . is_link_local or address . is_multicast or address . is_reserved : return False return True
def first_of(obj, *attrs):
    """Return the first truthy attribute of *obj* among *attrs*.

    Attributes are looked up via ``resolve`` (calling them as functions
    when necessary); implicitly returns ``None`` if none are truthy.
    """
    for attr in attrs:
        value = resolve(obj, attr)
        if value:
            return value
def set_keyboard_focus(self, move_up, move_down, select):
    """Set the keyboard as the object that controls the menu.

    :param move_up: pygame key constant that moves the selection up.
    :param move_down: pygame key constant that moves the selection down.
    :param select: pygame key constant that activates the selection.
    """
    self.input_focus = StateTypes.KEYBOARD
    self.move_up_button = move_up
    self.move_down_button = move_down
    self.select_button = select
def hide ( self , fromQtmacs : bool = False ) : """Overloaded ` ` hide ( ) ` ` function to avoid calling it accidentally . This method is a ( weak ) security mechanism to prevent programmers from using the ` ` hide ` ` method as originally intended by Qt , ie . to hide a window . However , in Qtmacs , app...
if not fromQtmacs : # Log a warning message if someone tries to call the # native hide ( ) method . msg = ( 'hide() command for applet <b>{}</b> ignored. Use ' ' qteNewApplet instead.' . format ( self . qteAppletID ( ) ) ) self . qteLogger . warning ( msg ) else : # If we are explicitly requested to hide ( ) th...
def FqdnUrl(v):
    """Verify that the value is a fully qualified domain name URL.

    >>> s = Schema(FqdnUrl())
    >>> with raises(MultipleInvalid, 'expected a Fully qualified domain name URL'):
    ...   s("http://localhost/")
    >>> s('http://w3.org')
    'http://w3.org'
    """
    try:
        parsed_url = _url_validation(v)
        if "." not in parsed_url.netloc:
            raise UrlInvalid("must have a domain name in URL")
        return v
    except Exception:
        # A bare ``except:`` would also swallow SystemExit and
        # KeyboardInterrupt; only convert genuine failures.
        raise ValueError
def _get_violations ( self , query , record ) : """Reverse - engineer the query to figure out why a record was selected . : param query : MongoDB query : type query : MongQuery : param record : Record in question : type record : dict : return : Reasons why bad : rtype : list ( ConstraintViolation )"""
# special case , when no constraints are given if len ( query . all_clauses ) == 0 : return [ NullConstraintViolation ( ) ] # normal case , check all the constraints reasons = [ ] for clause in query . all_clauses : var_name = None key = clause . constraint . field . name op = clause . constraint . op ...
def print_spelling_errors(filename, encoding='utf8'):
    """Print misspelled words returned by sphinxcontrib-spelling.

    :param filename: path to the word list produced by the builder.
    :param encoding: text encoding of that file.
    :return: 1 if the file is non-empty (errors found), else 0.
    """
    filesize = os.stat(filename).st_size
    if filesize:
        sys.stdout.write('Misspelled Words:\n')
        with io.open(filename, encoding=encoding) as wordlist:
            for line in wordlist:
                sys.stdout.write(' ' + line)
    return 1 if filesize else 0
def view_creatr ( filename ) : """Name of the View File to be created"""
if not check ( ) : click . echo ( Fore . RED + 'ERROR: Ensure you are in a bast app to run the create:view command' ) return path = os . path . abspath ( '.' ) + '/public/templates' if not os . path . exists ( path ) : os . makedirs ( path ) filename_ = str ( filename + ".html" ) . lower ( ) view_file = ope...
def unsafe(self):
    """True if the mapping is unsafe for an update.

    Applies only to a local source.  Returns True when the source and
    destination paths are the same, or when one is a component of the
    other path.
    """
    (scheme, netloc, path, params, query, frag) = urlparse(self.src_uri)
    if scheme != '':
        # Remote source: never unsafe.
        return False
    s = os.path.normpath(self.src_uri)
    d = os.path.normpath(self.dst_path)
    # os.path.commonprefix is character-based, so it would wrongly
    # treat '/a/b' and '/a/bc' as overlapping; compare whole path
    # components instead.
    return (s == d
            or d.startswith(s + os.sep)
            or s.startswith(d + os.sep))
def validate(self, obj):
    """Validate convertibility to the internal representation.

    :return: True if *obj* matches the internal type.
    :raises ValidationError: if the validation fails.
    """
    if isinstance(obj, self.internal_type):
        return True
    raise ValidationError(obj, self.internal_type)
def replace_widgets ( self , widgets , team_context , dashboard_id , eTag = None ) : """ReplaceWidgets . [ Preview API ] Replace the widgets on specified dashboard with the supplied widgets . : param [ Widget ] widgets : Revised state of widgets to store for the dashboard . : param : class : ` < TeamContext >...
project = None team = None if team_context is not None : if team_context . project_id : project = team_context . project_id else : project = team_context . project if team_context . team_id : team = team_context . team_id else : team = team_context . team route_values = {...
async def add(self, full, valu):
    '''Atomically increment a node's value.'''
    node = await self.open(full)
    oldv = node.valu
    newv = oldv + valu
    node.valu = await self.storNodeValu(full, newv)
    await node.fire('hive:set', path=full, valu=valu, oldv=oldv)
    return newv
def _read_precursor ( precursor , sps ) : """Load precursor file for that species"""
hairpin = defaultdict ( str ) name = None with open ( precursor ) as in_handle : for line in in_handle : if line . startswith ( ">" ) : if hairpin [ name ] : hairpin [ name ] = hairpin [ name ] + "NNNNNNNNNNNN" name = line . strip ( ) . replace ( ">" , " " ) . split (...
def closed_loop_edge_lengths_via_footpoint ( glats , glons , alts , dates , direction , vector_direction , step_size = None , max_steps = None , edge_length = 25. , edge_steps = 5 ) : """Forms closed loop integration along mag field , satrting at input points and goes through footpoint . At footpoint , steps alon...
if step_size is None : step_size = 100. if max_steps is None : max_steps = 1000 steps = np . arange ( max_steps ) if direction == 'south' : direct = - 1 elif direction == 'north' : direct = 1 # use spacecraft location to get ECEF ecef_xs , ecef_ys , ecef_zs = geodetic_to_ecef ( glats , glons , alts ) # ...
def list_domains ( ) : '''Return a list of virtual machine names on the minion CLI Example : . . code - block : : bash salt ' * ' virt . list _ domains'''
data = __salt__ [ 'vmadm.list' ] ( keyed = True ) vms = [ "UUID TYPE RAM STATE ALIAS" ] for vm in data : vms . append ( "{vmuuid}{vmtype}{vmram}{vmstate}{vmalias}" . format ( vmuuid = vm . ljust ( 38 ) , vmtype = data [ vm ] [ 'type' ] . ljust ( 6 ) , vmram = data ...
def _n_parameters ( self ) : """Return the number of free parameters in the model ."""
ndim = self . means_ . shape [ 1 ] if self . covariance_type == 'full' : cov_params = self . n_components * ndim * ( ndim + 1 ) / 2. elif self . covariance_type == 'diag' : cov_params = self . n_components * ndim elif self . covariance_type == 'tied' : cov_params = ndim * ( ndim + 1 ) / 2. elif self . covar...
def rpc_get_historic_names_by_address ( self , address , offset , count , ** con_info ) : """Get the list of names owned by an address throughout history Return { ' status ' : True , ' names ' : [ { ' name ' : . . . , ' block _ id ' : . . . , ' vtxindex ' : . . . } ] } on success Return { ' error ' : . . . } on...
if not check_address ( address ) : return { 'error' : 'Invalid address' , 'http_status' : 400 } if not check_offset ( offset ) : return { 'error' : 'invalid offset' , 'http_status' : 400 } if not check_count ( count , 10 ) : return { 'error' : 'invalid count' , 'http_status' : 400 } db = get_db_state ( self...
def update_spec(self):
    """Update the source specification with line numbers taken from the
    datafile reader's info, when the datafile exists.

    NOTE(review): the original docstring claimed values are only set
    when not already present, but this code assigns unconditionally —
    confirm against callers.
    """
    if not self.datafile.exists:
        return
    with self.datafile.reader as r:
        info = r.info
        self.header_lines = info['header_rows']
        self.comment_lines = info['comment_rows']
        self.start_line = info['data_start_row']
        self.end_line = info['data_end_row']
def retrieve_token(self, token):
    """Retrieve details for a specific token.

    :param token: the identifier of the token.
    :return: the API response for ``GET /tokens/<token>``.
    """
    headers = self.client._get_private_headers()
    url = self.client.URL_BASE + '/tokens/{}'.format(token)
    return self.client._get(url, headers=headers)
def get_abs ( msrc , mrec , srcazm , srcdip , recazm , recdip , verb ) : r"""Get required ab ' s for given angles . This check - function is called from one of the modelling routines in : mod : ` model ` . Consult these modelling routines for a detailed description of the input parameters . Parameters msr...
# Get required ab ' s ( 9 at most ) ab_calc = np . array ( [ [ 11 , 12 , 13 ] , [ 21 , 22 , 23 ] , [ 31 , 32 , 33 ] ] ) if msrc : ab_calc += 3 if mrec : ab_calc += 30 # Switch < ab > using reciprocity . if msrc : # G ^ mm _ ab ( s , r , e , z ) = - G ^ ee _ ab ( s , r , - z , - e ) ab_calc -= 33...
def fetch ( self , is_dl_forced = True ) : """Fetches data from udp collaboration server , see top level comments for class for more information : return :"""
username = config . get_config ( ) [ 'dbauth' ] [ 'udp' ] [ 'user' ] password = config . get_config ( ) [ 'dbauth' ] [ 'udp' ] [ 'password' ] credentials = ( username , password ) # Get patient map file : patient_id_map = self . open_and_parse_yaml ( self . map_files [ 'patient_ids' ] ) udp_internal_ids = patient_id_ma...
def load_devices(self):
    """Load stored devices from the local JSON file.

    :return: the list of loaded devices (empty if no file exists).
    """
    self._devices = []
    if os.path.exists(self._devices_filename):
        log.debug("loading devices from '{}'...".format(self._devices_filename))
        with codecs.open(self._devices_filename, "rb", "utf-8") as f:
            self._devices = json.load(f)
    return self._devices
def run_netsh_command(netsh_args):
    """Execute a netsh command, discarding its output.

    :param netsh_args: argument string appended to
        ``netsh interface ipv4``.
    :return: the command's exit code as an int.
    """
    command_raw = 'netsh interface ipv4 ' + netsh_args
    # subprocess.DEVNULL avoids the handle leak of the previous,
    # never-closed open(os.devnull, 'w').
    return int(subprocess.call(command_raw, stdout=subprocess.DEVNULL))
def _set_up_savefolder ( self ) : """Create catalogs for different file output to clean up savefolder . Non - public method Parameters None Returns None"""
if self . savefolder == None : return self . cells_path = os . path . join ( self . savefolder , 'cells' ) if RANK == 0 : if not os . path . isdir ( self . cells_path ) : os . mkdir ( self . cells_path ) self . figures_path = os . path . join ( self . savefolder , 'figures' ) if RANK == 0 : if not o...
def get_1D_overlap ( eclusters , depth = 1 ) : """Find blocks that are 1D overlapping , returns cliques of block ids that are in conflict"""
overlap_set = set ( ) active = set ( ) ends = [ ] for i , ( chr , left , right ) in enumerate ( eclusters ) : ends . append ( ( chr , left , 0 , i ) ) # 0/1 for left / right - ness ends . append ( ( chr , right , 1 , i ) ) ends . sort ( ) chr_last = "" for chr , pos , left_right , i in ends : if chr != ...
def handle_hooks(stage, hooks, provider, context, dump, outline):
    """Handle pre/post hooks.

    :param stage: name of the hook stage (pre_build / post_build).
    :param hooks: list of dictionaries describing the hooks to execute.
    :param provider: provider instance.
    :param context: stacker context.
    :param dump: True when dumping templates (hooks are skipped).
    :param outline: True when outlining (hooks are skipped).
    """
    if outline or dump or not hooks:
        return
    utils.handle_hooks(stage=stage, hooks=hooks,
                       provider=provider, context=context)
def download_image(self, image_type, image):
    """Read an image file of a project and download it.

    :param image_type: image type.
    :param image: the path of the image.
    :returns: a file stream.
    :raises aiohttp.web.HTTPNotFound: when the compute lacks the image.
    """
    url = self._getUrl("/{}/images/{}".format(image_type, image))
    response = yield from self._session().request("GET", url, auth=self._auth)
    if response.status == 404:
        raise aiohttp.web.HTTPNotFound(text="{} not found on compute".format(image))
    return response
def get_all_clients(self, params=None):
    """Get all clients.

    This will iterate over all pages until it gets all elements.
    So if the rate limit is exceeded it will throw an Exception and you
    will get nothing.

    :param params: search params
    :return: list
    """
    extra_kwargs = {'params': params}
    return self._iterate_through_pages(
        get_function=self.get_clients_per_page,
        resource=CLIENTS,
        **extra_kwargs
    )
def set_deployment_run_id ( self ) : """Sets the deployment run ID from deployment properties : return : None"""
log = logging . getLogger ( self . cls_logger + '.set_deployment_run_id' ) deployment_run_id_val = self . get_value ( 'cons3rt.deploymentRun.id' ) if not deployment_run_id_val : log . debug ( 'Deployment run ID not found in deployment properties' ) return try : deployment_run_id = int ( deployment_run_id_va...
def is_shown(self, request):
    """If there aren't any visible items in the submenu, don't bother to
    show this menu item."""
    # Visible as soon as a single registered child item is visible.
    return any(
        item.is_shown(request)
        for item in self.menu._registered_menu_items
    )
def scaled_dimensions ( self , width = None , height = None ) : """Return a ( cx , cy ) 2 - tuple representing the native dimensions of this image scaled by applying the following rules to * width * and * height * . If both * width * and * height * are specified , the return value is ( * width * , * height * ...
if width is None and height is None : return self . width , self . height if width is None : scaling_factor = float ( height ) / float ( self . height ) width = round ( self . width * scaling_factor ) if height is None : scaling_factor = float ( width ) / float ( self . width ) height = round ( self...
def latmio_dir ( R , itr , D = None , seed = None ) : '''This function " latticizes " a directed network , while preserving the in - and out - degree distributions . In weighted networks , the function preserves the out - strength but not the in - strength distributions . Parameters R : NxN np . ndarray d...
rng = get_rng ( seed ) n = len ( R ) ind_rp = rng . permutation ( n ) # randomly reorder matrix R = R . copy ( ) R = R [ np . ix_ ( ind_rp , ind_rp ) ] # create distance to diagonal matrix if not specified by user if D is None : D = np . zeros ( ( n , n ) ) un = np . mod ( range ( 1 , n ) , n ) um = np . mo...
def get_precursor_mz(exact_mass, precursor_type):
    """Calculate precursor mz based on exact mass and precursor type.

    Args:
        exact_mass (float): exact mass of compound of interest
        precursor_type (str): Precursor type (currently only works with
            '[M-H]-', '[M+H]+' and '[M+H-H2O]+')

    Returns:
        float: the precursor m/z, or ``False`` when the precursor type is
        not recognised (the missing key is printed).
    """
    # Mass offsets taken from what was present in the massbank .msp file
    # for entries missing the exact mass.
    proton = 1.007276
    offsets = {
        '[M-H]-': -proton,
        '[M+H]+': proton,
        '[M+H-H2O]+': proton - ((proton * 2) + 15.9949),
    }
    try:
        return exact_mass + offsets[precursor_type]
    except KeyError as e:
        print(e)
        return False
def make_module(self, vars=None, shared=False, locals=None):
    """This method works like the :attr:`module` attribute when called
    without arguments but it will evaluate the template on every call
    rather than caching it.  It's also possible to provide a dict which
    is then used as context.  The arguments are the same as for the
    :meth:`new_context` method.
    """
    context = self.new_context(vars, shared, locals)
    return TemplateModule(self, context)
def get_hour_dirs(root=None):
    """Gets the directories under selfplay_dir that match YYYY-MM-DD-HH."""
    root = root or selfplay_dir()
    # Hoist the pattern; only names whose *prefix* matches the timestamp
    # shape are kept (re.match anchors at the start only).
    hour_pattern = re.compile(r"\d{4}-\d{2}-\d{2}-\d{2}")
    return [name for name in gfile.ListDirectory(root)
            if hour_pattern.match(name)]
def _add_redundancy_router_interfaces ( self , context , router , itfc_info , new_port , redundancy_router_ids = None , ha_settings_db = None , create_ha_group = True ) : """To be called in add _ router _ interface ( ) AFTER interface has been added to router in DB ."""
# There are essentially three cases where we add interface to a # redundancy router : # 1 . HA is enabled on a user visible router that has one or more # interfaces . # 2 . Redundancy level is increased so one or more redundancy routers # are added . # 3 . An interface is added to a user visible router . # For 1 : An H...
def async_send(self, url, data, headers, success_cb, failure_cb):
    """Spawn an async request to a remote webserver."""
    # NOTE: this could be optimized by making a custom self.send that does
    # not read the response, since the response is never used here.
    self._lock.acquire()
    parent_send = super(GeventedHTTPTransport, self).send
    greenlet = gevent.spawn(parent_send, url, data, headers)
    # _done receives the finished greenlet and forwards to the callbacks
    # (presumably it also releases the lock — not visible here).
    return greenlet.link(lambda g: self._done(g, success_cb, failure_cb))
def _CompileProtos ( ) : """Compiles all Fleetspeak protos ."""
proto_files = [ ] for dir_path , _ , filenames in os . walk ( THIS_DIRECTORY ) : for filename in filenames : if filename . endswith ( ".proto" ) : proto_files . append ( os . path . join ( dir_path , filename ) ) if not proto_files : return protoc_command = [ "python" , "-m" , "grpc_tools.pr...
def loglike(self, y, f, n):
    r"""Binomial log likelihood.

    Parameters
    ----------
    y : ndarray
        array of 0, 1 valued integers of targets
    f : ndarray
        latent function from the GLM prior
        (:math:`\mathbf{f} = \boldsymbol\Phi \mathbf{w}`)
    n : ndarray
        the total number of observations

    Returns
    -------
    ndarray
        the log likelihood of each y given each f under this likelihood.
    """
    # Map the latent function through the logistic sigmoid to get the
    # per-trial success probability, then score under the binomial pmf.
    success_prob = expit(f)
    return binom.logpmf(y, n=n, p=success_prob)
def dataframe ( self ) : """Returns a pandas DataFrame containing all other class properties and values . The index for the DataFrame is the string abbreviation of the team , such as ' DET ' ."""
fields_to_include = { 'abbreviation' : self . abbreviation , 'average_age' : self . average_age , 'games_played' : self . games_played , 'goals_against' : self . goals_against , 'goals_for' : self . goals_for , 'losses' : self . losses , 'name' : self . name , 'overtime_losses' : self . overtime_losses , 'pdo_at_even_s...
def blog_authors(*args):
    """Put a list of authors (users) for blog posts into the template
    context."""
    published = BlogPost.objects.published()
    authors = (
        User.objects
        .filter(blogposts__in=published)
        .annotate(post_count=Count("blogposts"))
    )
    return list(authors)
def _resize_with_dtype ( arr , dtype ) : """This function will transform arr into an array with the same type as dtype . It will do this by filling new columns with zeros ( or NaNs , if it is a float column ) . Also , columns that are not in the new dtype will be dropped ."""
structured_arrays = dtype . names is not None and arr . dtype . names is not None old_columns = arr . dtype . names or [ ] new_columns = dtype . names or [ ] # In numpy 1.9 the ndarray . astype method used to handle changes in number of fields . The code below # should replicate the same behaviour the old astype used t...
def _deps_only_toggled ( self , widget , data = None ) : """Function deactivate options in case of deps _ only and opposite"""
active = widget . get_active ( ) self . dir_name . set_sensitive ( not active ) self . entry_project_name . set_sensitive ( not active ) self . dir_name_browse_btn . set_sensitive ( not active ) self . run_btn . set_sensitive ( active or not self . project_name_shown or self . entry_project_name . get_text ( ) != "" )
def _get_xref ( self , line ) : """Given line , return optional attribute xref value in a dict of sets ."""
# Ex : Wikipedia : Zygotene # Ex : Reactome : REACT _ 22295 " Addition of a third mannose to . . . " mtch = self . attr2cmp [ 'xref' ] . match ( line ) return mtch . group ( 1 ) . replace ( ' ' , '' )
def show_formats ( ) : """Print a list of all the file formats that are supported for writing . The file formats are determined by their extensions . Returns None"""
fmts = { "ann" : "Kvis annotation" , "reg" : "DS9 regions file" , "fits" : "FITS Binary Table" , "csv" : "Comma separated values" , "tab" : "tabe separated values" , "tex" : "LaTeX table format" , "html" : "HTML table" , "vot" : "VO-Table" , "xml" : "VO-Table" , "db" : "Sqlite3 database" , "sqlite" : "Sqlite3 database"...
def md5(text):
    """Returns the md5 hash of a string."""
    # Normalise to text via the project's _unicode helper, then hash the
    # UTF-8 encoding.
    payload = _unicode(text).encode("utf-8")
    return hashlib.md5(payload).hexdigest()
def format_exception ( cls , instance , trcback , context = 1 ) : """| Formats given exception . | The code produce a similar output to : func : ` traceback . format _ exception ` except that it allows frames to be excluded from the stack if the given stack trace frame tag is found in the frame locals and set *...
stack = extract_stack ( get_inner_most_frame ( trcback ) , context = context ) output = [ ] output . append ( "Traceback (most recent call last):" ) for frame , file_name , line_number , name , context , index in stack : output . append ( " File \"{0}\", line {1}, in {2}" . format ( file_name , line_number , name ...
def set_screen_layout ( self , screen_layout_mode , guest_screen_info ) : """Set video modes for the guest screens . in screen _ layout _ mode of type : class : ` ScreenLayoutMode ` in guest _ screen _ info of type : class : ` IGuestScreenInfo `"""
if not isinstance ( screen_layout_mode , ScreenLayoutMode ) : raise TypeError ( "screen_layout_mode can only be an instance of type ScreenLayoutMode" ) if not isinstance ( guest_screen_info , list ) : raise TypeError ( "guest_screen_info can only be an instance of type list" ) for a in guest_screen_info [ : 10 ...
def _ParsePathSpecification ( self , knowledge_base , searcher , file_system , path_specification , path_separator ) : """Parses a file system for a preprocessing attribute . Args : knowledge _ base ( KnowledgeBase ) : to fill with preprocessing information . searcher ( dfvfs . FileSystemSearcher ) : file sys...
try : file_entry = searcher . GetFileEntryByPathSpec ( path_specification ) except IOError as exception : relative_path = searcher . GetRelativePath ( path_specification ) if path_separator != file_system . PATH_SEPARATOR : relative_path_segments = file_system . SplitPath ( relative_path ) r...
def are_same_file_types(objs):
    """Are given (maybe) file objs same type (extension)?

    :param objs: A list of file path or file(-like) objects

    >>> are_same_file_types([])
    False
    >>> are_same_file_types(["a.conf"])
    True
    """
    if not objs:
        return False
    head, rest = objs[0], objs[1:]
    ext = _try_to_get_extension(head)
    if ext is None:
        # Cannot determine a reference extension.
        return False
    return all(_try_to_get_extension(obj) == ext for obj in rest)
def dtool(debug):
    """Tool to work with datasets."""
    # Verbose logging only when the debug flag is set.
    level = logging.DEBUG if debug else logging.WARNING
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=level)
def unsubscribe ( self , future ) : """Terminates the subscription given by a future Args : future ( Future ) : The future of the original subscription"""
assert future not in self . _pending_unsubscribes , "%r has already been unsubscribed from" % self . _pending_unsubscribes [ future ] subscribe = self . _requests [ future ] self . _pending_unsubscribes [ future ] = subscribe # Clear out the subscription self . _subscriptions . pop ( subscribe . id ) request = Unsubscr...
def export_svgs ( obj , filename = None , height = None , width = None , webdriver = None , timeout = 5 ) : '''Export the SVG - enabled plots within a layout . Each plot will result in a distinct SVG file . If the filename is not given , it is derived from the script name ( e . g . ` ` / foo / myplot . py ` `...
svgs = get_svgs ( obj , height = height , width = width , driver = webdriver , timeout = timeout ) if len ( svgs ) == 0 : log . warning ( "No SVG Plots were found." ) return if filename is None : filename = default_filename ( "svg" ) filenames = [ ] for i , svg in enumerate ( svgs ) : if i == 0 : ...