signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def Columns(iterable):
    """Return a sorted, backtick-quoted column list for MySQL INSERTs.

    Sorting makes the output deterministic for iterables with undefined
    order (e.g. dicts before Python 3.6).

    Examples:
        >>> Columns({"password": "foo", "name": "bar"})
        '(`name`, `password`)'
    """
    quoted = ("`{}`".format(name) for name in sorted(iterable))
    return "({})".format(", ".join(quoted))
def list_loadbalancers ( call = None ) : '''Return a list of the loadbalancers that are on the provider'''
if call == 'action' : raise SaltCloudSystemExit ( 'The avail_images function must be called with ' '-f or --function, or with the --list-loadbalancers option' ) ret = { } conn = get_conn ( ) datacenter = get_datacenter ( conn ) for item in conn . list_loadbalancers ( datacenter [ 'id' ] ) [ 'items' ] : lb = { '...
def strip_from_ansi_esc_sequences ( text ) : """find ANSI escape sequences in text and remove them : param text : str : return : list , should be passed to ListBox"""
# esc [ + values + control character # h , l , p commands are complicated , let ' s ignore them seq_regex = r"\x1b\[[0-9;]*[mKJusDCBAfH]" regex = re . compile ( seq_regex ) start = 0 response = "" for match in regex . finditer ( text ) : end = match . start ( ) response += text [ start : end ] start = match...
def metatab_derived_handler ( m ) : """Create local Zip , Excel and Filesystem packages : param m : : param skip _ if _ exists : : return :"""
from metapack . exc import PackageError from metapack . util import get_materialized_data_cache from shutil import rmtree create_list = [ ] url = None doc = MetapackDoc ( m . mt_file ) env = get_lib_module_dict ( doc ) package_dir = m . package_root if m . args . package_directory : # If this is set , the FS package wi...
def _python_psk_client_callback(ssl_id, hint):
    """Called by _sslpsk.c to return the (psk, identity) tuple for the
    socket with the specified ssl id."""
    try:
        callback = _callbacks[ssl_id]
    except KeyError:
        # No callback registered for this socket: empty psk/identity.
        return ("", "")
    result = callback(hint)
    if isinstance(result, tuple):
        return result
    # A bare psk was returned; pair it with an empty identity.
    return (result, "")
def valid_ip_prefix(ip_prefix):
    """Perform a sanity check on ip_prefix.

    Arguments:
        ip_prefix (str): The IP-Prefix to validate

    Returns:
        True if ip_prefix is a valid IPv4 address with prefix length 32
        or a valid IPv6 address with prefix length 128, otherwise False
    """
    try:
        ip_prefix = ipaddress.ip_network(ip_prefix)
    except ValueError:
        return False
    # BUG FIX: the original compared ``max_prefixlen`` which is a
    # per-family constant (always 32 for IPv4, 128 for IPv6), so the
    # length check could never reject anything.  The docstring requires
    # a host prefix; check the actual ``prefixlen`` instead.
    if ip_prefix.version == 4 and ip_prefix.prefixlen != 32:
        return False
    if ip_prefix.version == 6 and ip_prefix.prefixlen != 128:
        return False
    return True
def _get_folds ( n_rows , n_folds , use_stored ) : """Get the used CV folds"""
# n _ folds = self . _ n _ folds # use _ stored = self . _ use _ stored _ folds # n _ rows = self . _ n _ rows if use_stored is not None : # path = ' ~ / concise / data - offline / lw - pombe / cv _ folds _ 5 . json ' with open ( os . path . expanduser ( use_stored ) ) as json_file : json_data = json . load...
def get_xpath_frequencydistribution ( paths ) : """Build and return a frequency distribution over xpath occurrences ."""
# " html / body / div / div / text " - > [ " html " , " body " , " div " , " div " , " text " ] splitpaths = [ p . split ( '/' ) for p in paths ] # get list of " parentpaths " by right - stripping off the last xpath - node , # effectively getting the parent path parentpaths = [ '/' . join ( p [ : - 1 ] ) for p in split...
def switch_to_frame(self, frame, timeout=settings.SMALL_TIMEOUT):
    """Sets driver control to the specified browser frame."""
    # Scale the default timeout when a multiplier is configured; an
    # explicitly-passed custom timeout is left untouched.
    using_default = (timeout == settings.SMALL_TIMEOUT)
    if self.timeout_multiplier and using_default:
        timeout = self.__get_new_timeout(timeout)
    page_actions.switch_to_frame(self.driver, frame, timeout)
def BC_Mirror ( self ) : """Mirrors qs across the boundary on either the west ( left ) or east ( right ) side , depending on the selections . This can , for example , produce a scenario in which you are observing a mountain range up to the range crest ( or , more correctly , the halfway point across the mou...
if self . BC_W == 'Mirror' : i = 0 # self . l2 [ i ] + = np . nan # self . l1 [ i ] + = np . nan self . c0 [ i ] += 0 self . r1 [ i ] += self . l1_coeff_i [ i ] self . r2 [ i ] += self . l2_coeff_i [ i ] i = 1 # self . l2 [ i ] + = np . nan self . l1 [ i ] += 0 self . c0 [ i ] +=...
def _filter_max_length(example, max_length=256):
    """Indicates whether the example's length is lower than the maximum length."""
    # example is a (source, target) pair; both sides must fit.
    source_fits = tf.size(example[0]) <= max_length
    target_fits = tf.size(example[1]) <= max_length
    return tf.logical_and(source_fits, target_fits)
def with_siblings(graph, outputs):
    """Include all missing siblings."""
    # Union of the sibling sets of every output node.
    return set().union(*(graph.siblings(node) for node in outputs))
def __argument ( self , ttype , tvalue ) : """Argument parsing method This method acts as an entry point for ' argument ' parsing . Syntax : string - list / number / tag : param ttype : current token type : param tvalue : current token value : return : False if an error is encountered , True otherwise""...
if ttype in [ "multiline" , "string" ] : return self . __curcommand . check_next_arg ( "string" , tvalue . decode ( "utf-8" ) ) if ttype in [ "number" , "tag" ] : return self . __curcommand . check_next_arg ( ttype , tvalue . decode ( "ascii" ) ) if ttype == "left_bracket" : self . __cstate = self . __strin...
def temperature(header: str, temp: Number, unit: str = 'C') -> str:
    """Format temperature details into a spoken word string"""
    # BUG FIX: the original tested ``not (temp and temp.value)``, which
    # reported a valid reading of exactly 0 degrees as "unknown"
    # because 0 is falsy.  Only a truly missing reading is unknown.
    if temp is None or temp.value is None:
        return header + ' unknown'
    if unit in SPOKEN_UNITS:
        unit = SPOKEN_UNITS[unit]
    # "1 degree" / "-1 degree"; every other value gets the plural.
    use_s = '' if temp.spoken in ('one', 'minus one') else 's'
    return ' '.join((header, temp.spoken, 'degree' + use_s, unit))
def stalk_at(self, pid, address, action=None):
    """Sets a one shot code breakpoint at the given process and address.

    If instead of an address you pass a label, the breakpoint may be
    deferred until the DLL it points to is loaded.

    @see: L{break_at}, L{dont_stalk_at}
    @type pid: int
    @return: True if the breakpoint was set, False otherwise.
    """
    return self.__set_break(pid, address, action, oneshot=True) is not None
def _nodes_to_values ( self ) : """Returns list of list of ( Node , string _ value ) tuples ."""
def is_none ( slice ) : return slice [ 0 ] == - 1 and slice [ 1 ] == - 1 def get ( slice ) : return self . string [ slice [ 0 ] : slice [ 1 ] ] return [ ( varname , get ( slice ) , slice ) for varname , slice in self . _nodes_to_regs ( ) if not is_none ( slice ) ]
def _create_date_slug ( self ) : """Prefixes the slug with the ` ` published _ on ` ` date ."""
if not self . pk : # haven ' t saved this yet , so use today ' s date d = utc_now ( ) elif self . published and self . published_on : # use the actual published on date d = self . published_on elif self . updated_on : # default to the last - updated date d = self . updated_on self . date_slug = u"{0}/{1}" ....
def _create_error(self, status_code):
    """Construct an error message in jsend format.

    :param int status_code: The status code to translate into an error message
    :return: A dictionary in jsend format with the error and the code
    :rtype: dict
    """
    message = ComodoCA.status_code[status_code]
    return jsend.error(message=message, code=status_code)
def check_configuration_string ( self , config_string , is_job = True , external_name = False ) : """Check whether the given job or task configuration string is well - formed ( if ` ` is _ bstring ` ` is ` ` True ` ` ) and it has all the required parameters . : param string config _ string : the byte string o...
if is_job : self . log ( u"Checking job configuration string" ) else : self . log ( u"Checking task configuration string" ) self . result = ValidatorResult ( ) if self . _are_safety_checks_disabled ( u"check_configuration_string" ) : return self . result if is_job : required_parameters = self . JOB_REQU...
def select_larva(self):
    """Select all larva."""
    act = sc_pb.Action()
    # SetInParent materializes the otherwise-empty select_larva field.
    act.action_ui.select_larva.SetInParent()
    return act
def age(self):
    """int, the estimated age of the person.

    A DOB object is based on a date-range and the exact date is usually
    unknown, so the middle of the range is assumed to be the real
    date-of-birth for the age calculation.
    """
    if self.date_range is None:
        return None
    dob = self.date_range.middle
    today = datetime.date.today()
    years = today.year - dob.year
    # Subtract one if this year's birthday hasn't happened yet.
    if (today.month, today.day) < (dob.month, dob.day):
        years -= 1
    return years
def abort ( self , exception = exc . ConnectError ) : """Aborts a connection and puts all pending futures into an error state . If ` ` sys . exc _ info ( ) ` ` is set ( i . e . this is being called in an exception handler ) then pending futures will have that exc info set . Otherwise the given ` ` exception `...
log . warn ( "Aborting connection to %s:%s" , self . host , self . port ) def abort_pending ( f ) : exc_info = sys . exc_info ( ) if any ( exc_info ) : f . set_exc_info ( exc_info ) else : f . set_exception ( exception ( self . host , self . port ) ) for pending in self . drain_all_pending (...
def adapt ( self ) : r"""Update the proposal using the points stored in ` ` self . samples [ - 1 ] ` ` and the parameters which can be set via : py : meth : ` . set _ adapt _ params ` . In the above referenced function ' s docstring , the algorithm is described in detail . If the resulting matrix is not a v...
last_run = self . samples [ - 1 ] accept_rate = float ( self . _last_accept_count ) / len ( last_run ) # careful with rowvar ! # in this form it is expected that each column of ` ` points ` ` # represents sampling values of a variable # this is the case if points is a list of sampled points covar_estimator = _np . cov ...
def insert_from_xmldoc ( connection , source_xmldoc , preserve_ids = False , verbose = False ) : """Insert the tables from an in - ram XML document into the database at the given connection . If preserve _ ids is False ( default ) , then row IDs are modified during the insert process to prevent collisions wit...
# enable / disable ID remapping orig_DBTable_append = dbtables . DBTable . append if not preserve_ids : try : dbtables . idmap_create ( connection ) except sqlite3 . OperationalError : # assume table already exists pass dbtables . idmap_sync ( connection ) dbtables . DBTable . append = d...
def get_dependencies(self):
    """Return dependencies, which should trigger updates of this model."""
    # pylint: disable=no-member
    extra = [Data.collection_set, Data.entity_set, Data.parents]
    return super().get_dependencies() + extra
def _get_raw_movielens_data():
    """Return the raw lines of the train and test files."""
    path = _get_movielens_path()
    # Fetch the archive on first use.
    if not os.path.isfile(path):
        _download_movielens(path)
    with zipfile.ZipFile(path) as archive:
        train = archive.read('ml-100k/ua.base').decode().split('\n')
        test = archive.read('ml-100k/ua.test').decode().split('\n')
    return (train, test)
def cli_schemata_list ( self , * args ) : """Display a list of registered schemata"""
self . log ( 'Registered schemata languages:' , "," . join ( sorted ( l10n_schemastore . keys ( ) ) ) ) self . log ( 'Registered Schemata:' , "," . join ( sorted ( schemastore . keys ( ) ) ) ) if '-c' in args or '-config' in args : self . log ( 'Registered Configuration Schemata:' , "," . join ( sorted ( configsche...
def transform_to_3d(points, normal, z=0):
    """Project points into 3d from 2d points."""
    # Rotation axis: cross product of the target normal with the z axis.
    axis = np.cross(normal, (0, 0, 1))
    rot = rotation_matrix(axis)
    return rot.dot(points.T).T + z
def parse_dereplicated_uc ( dereplicated_uc_lines ) : """Return dict of seq ID : dereplicated seq IDs from dereplicated . uc lines dereplicated _ uc _ lines : list of lines of . uc file from dereplicated seqs from usearch61 ( i . e . open file of abundance sorted . uc data )"""
dereplicated_clusters = { } seed_hit_ix = 0 seq_id_ix = 8 seed_id_ix = 9 for line in dereplicated_uc_lines : if line . startswith ( "#" ) or len ( line . strip ( ) ) == 0 : continue curr_line = line . strip ( ) . split ( '\t' ) if curr_line [ seed_hit_ix ] == "S" : dereplicated_clusters [ cu...
def cloud_percent(self):
    """Return percentage of cloud coverage."""
    xpath = (
        "n1:Quality_Indicators_Info/Image_Content_QI/"
        "CLOUDY_PIXEL_PERCENTAGE"
    )
    value = self._metadata.findtext(xpath, namespaces=self._nsmap)
    return float(value)
def repo(name: str, owner: str) -> snug.Query[dict]:
    """a repository lookup by owner and name"""
    # Yield the request path; the runner sends the response back in.
    response = yield f'/repos/{owner}/{name}'
    return json.loads(response.content)
def is_defined_by_module ( item , module , parent = None ) : """Check if item is directly defined by a module . This check may be prone to errors ."""
flag = False if isinstance ( item , types . ModuleType ) : if not hasattr ( item , '__file__' ) : try : # hack for cv2 and xfeatures2d import utool as ut name = ut . get_modname_from_modpath ( module . __file__ ) flag = name in str ( item ) except : fl...
def add ( self , num , price , aType , stuff_status , title , desc , location_state , location_city , cid , session , ** kwargs ) : '''taobao . item . add 添加一个商品 此接口用于新增一个商品 商品所属的卖家是当前会话的用户 商品的属性和sku的属性有包含的关系 , 商品的价格要位于sku的价格区间之中 ( 例如 , sku价格有5元 、 10元两种 , 那么商品的价格就需要大于等于5元 , 小于等于10元 , 否则新增商品会失败 ) 商品的类目和商品的价格 、 sku...
request = TOPRequest ( 'taobao.item.add' ) request [ 'num' ] = num request [ 'price' ] = price request [ 'type' ] = aType request [ 'stuff_status' ] = stuff_status request [ 'title' ] = title request [ 'desc' ] = desc request [ 'location.state' ] = location_state request [ 'location.city' ] = location_city request [ 'c...
def _compute_counts_from_intensity(intensity, bexpcube):
    """Make the counts map from the intensity"""
    # Geometric mean of adjacent exposure-cube layers scales the intensity.
    exposure = np.sqrt(bexpcube.data[1:] * bexpcube.data[0:-1])
    return HpxMap(intensity.data * exposure, intensity.hpx)
def resolve_attr(obj, path):
    """An iterative getattr for navigating dotted paths.

    Args:
        obj: An object for which we want to retrieve a nested attribute.
        path: A dot separated string containing zero or more attribute names.

    Returns:
        The attribute referred to by obj.a1.a2.a3...
    """
    if not path:
        return obj
    target = obj
    for attr_name in path.split('.'):
        target = getattr(target, attr_name)
    return target
def _split_audio_by_size ( self , audio_abs_path , results_abs_path , chunk_size ) : """Calculates the duration of the name . wav in order for all splits have the size of chunk _ size except possibly the last split ( which will be smaller ) and then passes the duration to ` split _ audio _ by _ duration ` Par...
sample_rate = self . _get_audio_sample_rate ( audio_abs_path ) sample_bit = self . _get_audio_sample_bit ( audio_abs_path ) channel_num = self . _get_audio_channels ( audio_abs_path ) duration = 8 * chunk_size / reduce ( lambda x , y : int ( x ) * int ( y ) , [ sample_rate , sample_bit , channel_num ] ) self . _split_a...
def copydir ( orig , dest ) : """copies directory orig to dest . Returns a list of tuples of relative filenames which were copied from orig to dest"""
copied = list ( ) makedirsp ( dest ) for root , dirs , files in walk ( orig ) : for d in dirs : # ensure directories exist makedirsp ( join ( dest , d ) ) for f in files : root_f = join ( root , f ) dest_f = join ( dest , relpath ( root_f , orig ) ) copy ( root_f , dest_f ) ...
def load ( cls , campaign_dir , ns_path = None , runner_type = 'Auto' , optimized = True , check_repo = True ) : """Load an existing simulation campaign . Note that specifying an ns - 3 installation is not compulsory when using this method : existing results will be available , but in order to run additional ...
# Convert paths to be absolute if ns_path is not None : ns_path = os . path . abspath ( ns_path ) campaign_dir = os . path . abspath ( campaign_dir ) # Read the existing configuration into the new DatabaseManager db = DatabaseManager . load ( campaign_dir ) script = db . get_script ( ) runner = None if ns_path is n...
def _master ( self ) : """Master node ' s operation . Assigning tasks to workers and collecting results from them Parameters None Returns results : list of tuple ( voxel _ id , accuracy ) the accuracy numbers of all voxels , in accuracy descending order the length of array equals the number of voxels"...
logger . info ( 'Master at rank %d starts to allocate tasks' , MPI . COMM_WORLD . Get_rank ( ) ) results = [ ] comm = MPI . COMM_WORLD size = comm . Get_size ( ) sending_voxels = self . voxel_unit if self . voxel_unit < self . num_voxels else self . num_voxels current_task = ( 0 , sending_voxels ) status = MPI . Status...
def sort_sections(self, order):
    """Sort sections according to the section names in the order list.

    All remaining sections are added to the end in their original order.

    :param order: Iterable of section names
    :return:
    """
    wanted = [name.lower() for name in order]
    reordered = OrderedDict()
    # First the requested sections, in the requested order.
    for key in wanted:
        if key in self.sections:
            reordered[key] = self.sections[key]
    # Then everything else, preserving the original order.
    for key in self.sections:
        if key not in wanted:
            reordered[key] = self.sections[key]
    assert len(self.sections) == len(reordered)
    self.sections = reordered
def delete_device ( name , safety_on = True ) : '''Deletes a device from Vistara based on DNS name or partial name . By default , delete _ device will only perform the delete if a single host is returned . Set safety _ on = False to delete all matches ( up to default API search page size ) CLI Example : . ....
config = _get_vistara_configuration ( ) if not config : return False access_token = _get_oath2_access_token ( config [ 'client_key' ] , config [ 'client_secret' ] ) if not access_token : return 'Vistara access token not available' query_string = 'dnsName:{0}' . format ( name ) devices = _search_devices ( query_...
def create_software_renderer(self, surface):
    """Create a 2D software rendering context for a surface.

    Args:
        surface (Surface): The surface where rendering is done.

    Returns:
        Renderer: A 2D software rendering context.

    Raises:
        SDLError: If there was an error creating the renderer.
    """
    renderer = object.__new__(Renderer)
    # BUG FIX: the original chained assignment
    # ``renderer._ptr = self._ptr = check_ptr_err(...)`` also
    # overwrote this factory object's own ``_ptr``.  Only the new
    # renderer should receive the SDL pointer.
    renderer._ptr = check_ptr_err(lib.SDL_CreateSoftwareRenderer(surface._ptr))
    return renderer
def handle_incoming_response ( self , call_id , payload ) : """Get a registered handler for a given response and execute it ."""
self . log . debug ( 'handle_incoming_response: in [typehint: %s, call ID: %s]' , payload [ 'typehint' ] , call_id ) # We already log the full JSON response typehint = payload [ "typehint" ] handler = self . handlers . get ( typehint ) def feature_not_supported ( m ) : msg = feedback [ "handler_not_implemented" ] ...
def IsDatabaseLink ( link ) : """Finds whether the link is a database Self Link or a database ID based link : param str link : Link to analyze : return : True or False . : rtype : boolean"""
if not link : return False # trimming the leading and trailing " / " from the input string link = TrimBeginningAndEndingSlashes ( link ) # Splitting the link ( separated by " / " ) into parts parts = link . split ( '/' ) if len ( parts ) != 2 : return False # First part should be " dbs " if not parts [ 0 ] or n...
def delete_blacklist_entry(self, blacklist_entry_id):
    """Delete an existing blacklist entry.

    Keyword arguments:
    blacklist_entry_id -- The unique identifier of the blacklist entry
                          to delete.
    """
    endpoint = Template(
        "${rest_root}/blacklist/${public_key}/${blacklist_entry_id}/delete")
    url = endpoint.substitute(
        rest_root=self._rest_root,
        public_key=self._public_key,
        blacklist_entry_id=blacklist_entry_id,
    )
    self.__post_request(url, {})
def OnFont ( self , event ) : """Check event handler"""
font_data = wx . FontData ( ) # Disable color chooser on Windows font_data . EnableEffects ( False ) if self . chosen_font : font_data . SetInitialFont ( self . chosen_font ) dlg = wx . FontDialog ( self , font_data ) if dlg . ShowModal ( ) == wx . ID_OK : font_data = dlg . GetFontData ( ) font = self . cho...
def publish ( c , sdist = True , wheel = False , index = None , sign = False , dry_run = False , directory = None , dual_wheels = False , alt_python = None , check_desc = False , ) : """Publish code to PyPI or index of choice . All parameters save ` ` dry _ run ` ` and ` ` directory ` ` honor config settings of ...
# Don ' t hide by default , this step likes to be verbose most of the time . c . config . run . hide = False # Config hooks config = c . config . get ( "packaging" , { } ) index = config . get ( "index" , index ) sign = config . get ( "sign" , sign ) dual_wheels = config . get ( "dual_wheels" , dual_wheels ) check_desc...
def sample_ruptures ( sources , srcfilter , param , monitor = Monitor ( ) ) : """: param sources : a sequence of sources of the same group : param srcfilter : SourceFilter instance used also for bounding box post filtering : param param : a dictionary of additional parameters including ses _ per _ logic...
# AccumDict of arrays with 3 elements weight , nsites , calc _ time calc_times = AccumDict ( accum = numpy . zeros ( 3 , numpy . float32 ) ) # Compute and save stochastic event sets num_ses = param [ 'ses_per_logic_tree_path' ] eff_ruptures = 0 ir_mon = monitor ( 'iter_ruptures' , measuremem = False ) # Compute the num...
def config_pp ( subs ) : """Pretty print of configuration options . Args : subs ( iterable of str ) : iterable with the list of conf sections to print ."""
print ( '(c|f): available only as CLI argument/in the config file' , end = '\n\n' ) for sub in subs : hlp_lst = [ ] for opt , meta in conf [ sub ] . defaults_ ( ) : if meta . cmd_arg ^ meta . conf_arg : opt += ' (c)' if meta . cmd_arg else ' (f)' hlp_lst . append ( ( opt , meta . hel...
def set_state(self, state, speed=None):
    """
    :param state: bool
    :param speed: a string, one of ["lowest", "low", "medium", "high",
        "auto"]; defaults to last speed
    :return: nothing
    """
    speed = speed or self.current_fan_speed()
    desired_state = {"powered": state}
    # Only include a fan mode when powering on.
    if state:
        desired_state["mode"] = speed
    response = self.api_interface.set_device_state(
        self, {"desired_state": desired_state})
    self._update_state_from_response(response)
def shuffle_models(self, start_iteration=0, end_iteration=-1):
    """Shuffle models.

    Parameters
    ----------
    start_iteration : int, optional (default=0)
        The first iteration that will be shuffled.
    end_iteration : int, optional (default=-1)
        The last iteration that will be shuffled.
        NOTE(review): exact semantics of non-positive values live in the
        C API (LGBM_BoosterShuffleModels) — confirm against its docs.

    Returns
    -------
    self
    """
    first = ctypes.c_int(start_iteration)
    last = ctypes.c_int(end_iteration)
    _safe_call(_LIB.LGBM_BoosterShuffleModels(self.handle, first, last))
    return self
def main ( * args ) : """Contains flow control"""
options , args , parser = parse_options ( args ) if options . regex and options . write_changes : print ( 'ERROR: --write-changes cannot be used together with ' '--regex' ) parser . print_help ( ) return 1 word_regex = options . regex or word_regex_def try : word_regex = re . compile ( word_regex ) exce...
def to_file(self, filename, **kwargs):
    """Dump a json representation of the nparray object to a file.

    The object should then be able to be fully restored via
    nparray.from_file.

    @parameter str filename: path to the file to be created (will
        overwrite if it already exists)
    @returns str: the filename
    """
    # Use a context manager so the handle is closed even if ``to_json``
    # or the write raises (the original leaked the handle on error).
    with open(filename, 'w') as f:
        f.write(self.to_json(**kwargs))
    return filename
def resize(self, new_data_size):
    """Resize the file and update the chunk sizes"""
    # Adjust the raw bytes on disk first (presumably shifting any
    # trailing data — confirm in resize_bytes), then record the new
    # size in the chunk headers.  Order matters: the header update
    # must describe the file as it now is.
    resize_bytes(self.__fileobj, self.data_size, new_data_size, self.data_offset)
    self._update_size(new_data_size)
def thread_values_df ( run_list , estimator_list , estimator_names , ** kwargs ) : """Calculates estimator values for the constituent threads of the input runs . Parameters run _ list : list of dicts List of nested sampling run dicts . estimator _ list : list of functions Estimators to apply to runs . ...
tqdm_kwargs = kwargs . pop ( 'tqdm_kwargs' , { 'desc' : 'thread values' } ) assert len ( estimator_list ) == len ( estimator_names ) , ( 'len(estimator_list) = {0} != len(estimator_names = {1}' . format ( len ( estimator_list ) , len ( estimator_names ) ) ) # get thread results thread_vals_arrays = pu . parallel_apply ...
def choose_tag(self: object, tokens: List[str], index: int, history: List[str]):
    """Use regular expressions for rules-based lemmatizing based on word
    endings; tokens are matched for patterns with the base kept as a
    group; a word-ending replacement is added to the (base) group.

    :rtype: str or None
    """
    token = tokens[index]
    for pattern, replacement in self._regexs:
        if not re.search(pattern, token):
            continue
        # A configured (truthy) default wins over the per-pattern value.
        return self.default if self.default else replacement
    return None
def downstream_index(dir_value, i, j, alg='taudem'):
    """find downslope coordinate for D8 direction."""
    method = alg.lower()
    assert method in FlowModelConst.d8_deltas
    # Row/column offsets for this flow-direction code.
    drow, dcol = FlowModelConst.d8_deltas.get(method)[int(dir_value)]
    return i + drow, j + dcol
def do_get ( url , params , to = 3 ) : """使用 ` ` request . get ` ` 从指定 url 获取数据 : param params : ` ` 输入参数 , 可为空 ` ` : type params : dict : param url : ` ` 接口地址 ` ` : type url : : param to : ` ` 响应超时返回时间 ` ` : type to : : return : ` ` 接口返回的数据 ` ` : rtype : dict"""
try : rs = requests . get ( url , params = params , timeout = to ) if rs . status_code == 200 : try : return rs . json ( ) except Exception as __e : # log . error ( _ _ e ) return rs . text except Exception as er : log . error ( 'get {} ({}) with err: {}' . format ( u...
def get_assessment_taken_ids_by_bank(self, bank_id):
    """Gets the list of ``AssessmentTaken`` ``Ids`` associated with a ``Bank``.

    arg:    bank_id (osid.id.Id): ``Id`` of the ``Bank``
    return: (osid.id.IdList) - list of related assessment taken ``Ids``
    raise:  NotFound - propagated from get_assessments_taken_by_bank
    """
    # Implemented from template for
    # osid.resource.ResourceBinSession.get_resource_ids_by_bin
    taken_ids = [taken.get_id()
                 for taken in self.get_assessments_taken_by_bank(bank_id)]
    return IdList(taken_ids)
def _to_edge_list ( self , G ) : """Transform NetworkX object to an edge list . Parameters G : Graph object . Returns node _ pairs : ( M , 2 ) numpy array , where M is the number of edges . node _ pairs [ i , 0 ] and node _ pairs [ i , 1 ] are the endpoints of the ith edge . w : Mx1 numpy array . w [ i ] ...
node2id = dict ( zip ( G . nodes , range ( len ( G . nodes ) ) ) ) id2node = dict ( ( v , k ) for k , v in node2id . items ( ) ) nx . relabel_nodes ( G , node2id , False ) edges = G . edges ( data = "weight" ) node_pairs = np . array ( [ [ edge [ 0 ] , edge [ 1 ] ] for edge in edges ] ) . astype ( int ) w = np . array ...
def is_endpoint_expecting(self, endpoint, *arguments):
    """Iterate over all rules and check if the endpoint expects the
    arguments provided.

    Useful e.g. when some URLs expect a language code and others do not,
    and the builder must decide whether to supply one.
    """
    self.update()
    wanted = set(arguments)
    rules = self._rules_by_endpoint[endpoint]
    return any(wanted.issubset(rule.arguments) for rule in rules)
def kms_encrypt ( kms_client , service , env , secret ) : """Encrypt string for use by a given service / environment Args : kms _ client ( boto3 kms client object ) : Instantiated kms client object . Usually created through create _ aws _ clients . service ( string ) : name of the service that the secret is b...
# Converting all periods to underscores because they are invalid in KMS alias names key_alias = '{}-{}' . format ( env , service . replace ( '.' , '_' ) ) try : response = kms_client . encrypt ( KeyId = 'alias/{}' . format ( key_alias ) , Plaintext = secret . encode ( ) ) except ClientError as error : if error ...
def precision_curve ( self , delta_tau = 0.01 ) : """Computes the relationship between probability threshold and classification precision ."""
# compute thresholds based on the sorted probabilities orig_thresh = self . threshold sorted_labels , sorted_probs = self . sorted_values scores = [ ] taus = [ ] tau = 0 for k in range ( len ( sorted_labels ) ) : # compute new accuracy self . threshold = tau scores . append ( self . precision ) taus . appen...
def _platform_name ( ) : """Returns information about the current operating system and version : return : A unicode string containing the OS name and version"""
if sys . platform == 'darwin' : version = _plat . mac_ver ( ) [ 0 ] _plat_ver_info = tuple ( map ( int , version . split ( '.' ) ) ) if _plat_ver_info < ( 10 , 12 ) : name = 'OS X' else : name = 'macOS' return '%s %s' % ( name , version ) elif sys . platform == 'win32' : _win_ver...
def put ( self , user_id ) : """Update a user object"""
self . reqparse . add_argument ( 'roles' , type = str , action = 'append' ) args = self . reqparse . parse_args ( ) auditlog ( event = 'user.create' , actor = session [ 'user' ] . username , data = args ) user = db . User . find_one ( User . user_id == user_id ) roles = db . Role . find ( Role . name . in_ ( args [ 'ro...
def plan(self):
    """Gets the associated plan for this invoice.

    To provide a consistent view of invoices, the plan is taken from the
    first invoice item that has one, rather than from the subscription
    (whose plan may have changed since this invoice was issued).
    """
    for item in self.invoiceitems.all():
        if item.plan:
            return item.plan
    # Fall back to the subscription's plan when no item carries one.
    subscription = self.subscription
    if subscription:
        return subscription.plan
    return None
def notify_task(self, task_id, **kwargs):
    """Notify PNC about a BPM task event.

    Accepts polymorphic JSON {"eventType": "string"} based on the
    "eventType" field.  Synchronous by default; supply a ``callback``
    callable in ``kwargs`` for an asynchronous HTTP request.
    """
    kwargs['_return_http_data_only'] = True
    # Both the sync and async paths delegate to the *_with_http_info
    # variant; when ``callback`` is set the underlying client runs the
    # request asynchronously and the return value is the async handle.
    return self.notify_task_with_http_info(task_id, **kwargs)
def export_to_pem ( self , private_key = False , password = False ) : """Exports keys to a data buffer suitable to be stored as a PEM file . Either the public or the private key can be exported to a PEM file . For private keys the PKCS # 8 format is used . If a password is provided the best encryption method ...
e = serialization . Encoding . PEM if private_key : if not self . has_private : raise InvalidJWKType ( "No private key available" ) f = serialization . PrivateFormat . PKCS8 if password is None : a = serialization . NoEncryption ( ) elif isinstance ( password , bytes ) : a = seri...
def _implementation ( ) : """Return a dict with the Python implementation and version . Provide both the name and the version of the Python implementation currently running . For example , on CPython 2.7.5 it will return { ' name ' : ' CPython ' , ' version ' : ' 2.7.5 ' } . This function works best on CPyt...
implementation = platform . python_implementation ( ) if implementation == 'CPython' : implementation_version = platform . python_version ( ) elif implementation == 'PyPy' : implementation_version = '%s.%s.%s' % ( sys . pypy_version_info . major , sys . pypy_version_info . minor , sys . pypy_version_info . micr...
def close ( self ) : """Close the connection ."""
if not self . _closed : if self . protocol_version >= 3 : log_debug ( "[#%04X] C: GOODBYE" , self . local_port ) self . _append ( b"\x02" , ( ) ) try : self . send ( ) except ServiceUnavailable : pass log_debug ( "[#%04X] C: <CLOSE>" , self . local_port ...
def vector_poly_data ( orig , vec ) : """Creates a vtkPolyData object composed of vectors"""
# shape , dimention checking if not isinstance ( orig , np . ndarray ) : orig = np . asarray ( orig ) if not isinstance ( vec , np . ndarray ) : vec = np . asarray ( vec ) if orig . ndim != 2 : orig = orig . reshape ( ( - 1 , 3 ) ) elif orig . shape [ 1 ] != 3 : raise Exception ( 'orig array must be 3D'...
def _set_network ( self , v , load = False ) : """Setter method for network , mapped from YANG variable / routing _ system / interface / ve / ipv6 / interface _ ospfv3 _ conf / network ( enumeration ) If this variable is read - only ( config : false ) in the source YANG file , then _ set _ network is considered...
if hasattr ( v , "_utype" ) : v = v . _utype ( v ) try : t = YANGDynClass ( v , base = RestrictedClassType ( base_type = unicode , restriction_type = "dict_key" , restriction_arg = { u'broadcast' : { 'value' : 1 } , u'point-to-point' : { 'value' : 2 } } , ) , is_leaf = True , yang_name = "network" , rest_name =...
def spin_sx(self):
    """Return the x-component of the spin of the secondary mass."""
    # The conversions helper picks the secondary object's spin based on
    # the two component masses.
    secondary_x_spin = conversions.secondary_spin(
        self.mass1, self.mass2, self.spin1x, self.spin2x)
    return secondary_x_spin
def _fix_attribute_names ( attrs , change_map ) : """Change attribute names as per values in change _ map dictionary . Parameters : param attrs : dict Dict of operator attributes : param change _ map : dict Dict of onnx attribute name to mxnet attribute names . Returns : return new _ attr : dict Converted...
new_attr = { } for k in attrs . keys ( ) : if k in change_map : new_attr [ change_map [ k ] ] = attrs [ k ] else : new_attr [ k ] = attrs [ k ] return new_attr
def build_graph ( formula ) : '''Builds the implication graph from the formula'''
graph = { } for clause in formula : for ( lit , _ ) in clause : for neg in [ False , True ] : graph [ ( lit , neg ) ] = [ ] for ( ( a_lit , a_neg ) , ( b_lit , b_neg ) ) in formula : add_edge ( graph , ( a_lit , a_neg ) , ( b_lit , not b_neg ) ) add_edge ( graph , ( b_lit , b_neg ) , ( a...
def entries(self):
    """Return a single flat list containing every cached entry.

    The values of ``self.cache`` are lists; their elements are
    concatenated in cache-iteration order.  An empty cache yields an
    empty list.
    """
    # The original implementation concatenated the lists with
    # reduce(operator.add, ...) (quadratic) and relied on a bare
    # ``except:`` to turn the empty-cache TypeError into [] — which
    # also silently swallowed every other error.  Extending one result
    # list is linear and handles the empty cache naturally.
    result = []
    for values in self.cache.values():
        result.extend(values)
    return result
def create(self, user: str, *, pwd: str, sgrp: str, comment: str = None) -> None:
    """Create new user.

    :param user: name of the account to add
    :param pwd: the user's password
    :param sgrp: secondary group memberships
    :param comment: optional free-text comment for the account
    """
    payload = {
        'action': 'add',
        'user': user,
        'pwd': pwd,
        'grp': 'users',
        'sgrp': sgrp,
    }
    # The comment field is only sent when a non-empty value is supplied.
    if comment:
        payload['comment'] = comment
    self._request('post', URL, data=payload)
def _store_generic_inference_results ( self , results_dict , all_params , all_names ) : """Store the model inference values that are common to all choice models . This includes things like index coefficients , gradients , hessians , asymptotic covariance matrices , t - values , p - values , and robust versions ...
# Store the utility coefficients self . _store_inferential_results ( results_dict [ "utility_coefs" ] , index_names = self . ind_var_names , attribute_name = "coefs" , series_name = "coefficients" ) # Store the gradient self . _store_inferential_results ( results_dict [ "final_gradient" ] , index_names = all_names , at...
def show_edge ( self , edge_id ) : """Displays edge with ce _ ratio . : param edge _ id : Edge ID for which to show the ce _ ratio . : type edge _ id : int"""
# pylint : disable = unused - variable , relative - import from mpl_toolkits . mplot3d import Axes3D from matplotlib import pyplot as plt if "faces" not in self . cells : self . create_cell_face_relationships ( ) if "edges" not in self . faces : self . create_face_edge_relationships ( ) fig = plt . figure ( ) a...
def _QA_data_stock_to_fq ( bfq_data , xdxr_data , fqtype ) : '使用数据库数据进行复权'
info = xdxr_data . query ( 'category==1' ) bfq_data = bfq_data . assign ( if_trade = 1 ) if len ( info ) > 0 : data = pd . concat ( [ bfq_data , info . loc [ bfq_data . index [ 0 ] : bfq_data . index [ - 1 ] , [ 'category' ] ] ] , axis = 1 ) data [ 'if_trade' ] . fillna ( value = 0 , inplace = True ) data =...
def reloadCollections ( self ) : "reloads the collection list ."
r = self . connection . session . get ( self . collectionsURL ) data = r . json ( ) if r . status_code == 200 : self . collections = { } for colData in data [ "result" ] : colName = colData [ 'name' ] if colData [ 'isSystem' ] : colObj = COL . SystemCollection ( self , colData ) ...
def Voevent ( stream , stream_id , role ) : """Create a new VOEvent element tree , with specified IVORN and role . Args : stream ( str ) : used to construct the IVORN like so : : ivorn = ' ivo : / / ' + stream + ' # ' + stream _ id ( N . B . ` ` stream _ id ` ` is converted to string if required . ) So , ...
parser = objectify . makeparser ( remove_blank_text = True ) v = objectify . fromstring ( voeventparse . definitions . v2_0_skeleton_str , parser = parser ) _remove_root_tag_prefix ( v ) if not isinstance ( stream_id , string_types ) : stream_id = repr ( stream_id ) v . attrib [ 'ivorn' ] = '' . join ( ( 'ivo://' ,...
def init_app ( self , app = None , blueprint = None , additional_blueprints = None ) : """Update flask application with our api : param Application app : a flask application"""
if app is not None : self . app = app if blueprint is not None : self . blueprint = blueprint for resource in self . resources : self . route ( resource [ 'resource' ] , resource [ 'view' ] , * resource [ 'urls' ] , url_rule_options = resource [ 'url_rule_options' ] ) if self . blueprint is not None : s...
def write_int(fo, datum, schema=None):
    """Write ``datum`` to ``fo`` with variable-length zig-zag coding.

    Avro int and long values share this encoding; ``schema`` is accepted
    for interface uniformity and is ignored.
    """
    # Zig-zag maps signed values onto unsigned ones so that small
    # magnitudes (positive or negative) encode into few bytes.
    encoded = (datum << 1) ^ (datum >> 63)
    # Emit 7 payload bits per byte, setting the continuation (high) bit
    # on every byte except the last.
    while (encoded & ~0x7F) != 0:
        fo.write(pack('B', (encoded & 0x7F) | 0x80))
        encoded >>= 7
    fo.write(pack('B', encoded))
def create_installer(self, rpm_py_version, **kwargs):
    """Create Installer object.

    Extra keyword arguments are forwarded to the installer unchanged.
    """
    installer = FedoraInstaller(
        rpm_py_version, self.python, self.rpm, **kwargs)
    return installer
def valid_vlan_id(vlan_id, extended=True):
    """Validates a VLAN ID.

    Args:
        vlan_id (integer): VLAN ID to validate.  If passed as ``str``,
            it will be cast to ``int`` before the range check.
        extended (bool): If the VLAN ID range should be considered
            extended for Virtual Fabrics (upper bound 8191 instead of
            4095).

    Returns:
        bool: ``True`` when the ID falls inside the allowed range.
    """
    # Standard IDs stop at 4095; Virtual Fabrics extends to 8191.
    upper_bound = 8191 if extended else 4095
    return 1 <= int(vlan_id) <= upper_bound
def property_observer(self, name):
    """Function decorator to register a property observer.

    See ``MPV.observe_property`` for details.
    """
    def register(handler):
        # Register immediately, then attach an unregister hook so the
        # caller can later detach the observer through the function
        # object itself.
        self.observe_property(name, handler)
        handler.unobserve_mpv_properties = (
            lambda: self.unobserve_property(name, handler))
        return handler
    return register
def tables_insert ( self , table_name , schema = None , query = None , friendly_name = None , description = None ) : """Issues a request to create a table or view in the specified dataset with the specified id . A schema must be provided to create a Table , or a query must be provided to create a View . Args : ...
url = Api . _ENDPOINT + ( Api . _TABLES_PATH % ( table_name . project_id , table_name . dataset_id , '' , '' ) ) data = { 'kind' : 'bigquery#table' , 'tableReference' : { 'projectId' : table_name . project_id , 'datasetId' : table_name . dataset_id , 'tableId' : table_name . table_id } } if schema : data [ 'schema'...
def layout_filename ( fallback ) : '''get location of layout file'''
global display_size global vehiclename ( dw , dh ) = display_size if 'HOME' in os . environ : dirname = os . path . join ( os . environ [ 'HOME' ] , ".mavproxy" ) if not os . path . exists ( dirname ) : try : os . mkdir ( dirname ) except Exception : pass elif 'LOCALAPPDA...
def sql_context(self, application_name):
    """Create a spark context and a SQLContext wrapping it.

    The caller is responsible for calling ``.close`` on the resulting
    spark context.

    Parameters
    ----------
    application_name : string

    Returns
    -------
    (sc, sqlContext) : tuple of SparkContext and SQLContext
    """
    spark_ctx = self.spark_context(application_name)
    # Deferred import: pyspark is only required once this method runs,
    # and is imported after the context is created, as before.
    import pyspark
    return spark_ctx, pyspark.SQLContext(spark_ctx)
async def preProcessForComparison ( results , target_size , size_tolerance_prct ) : """Process results to prepare them for future comparison and sorting ."""
# find reference ( = image most likely to match target cover ignoring factors like size and format ) reference = None for result in results : if result . source_quality is CoverSourceQuality . REFERENCE : if ( ( reference is None ) or ( CoverSourceResult . compare ( result , reference , target_size = target...
def _process_genotypes ( self , limit ) : """Add the genotype internal id to flybase mapping to the idhashmap . Also , add them as individuals to the graph . Triples created : < genotype id > a GENO : intrinsic _ genotype < genotype id > rdfs : label " < gvc > [ bkgd ] " : param limit : : return :"""
if self . test_mode : graph = self . testgraph else : graph = self . graph model = Model ( graph ) line_counter = 0 raw = '/' . join ( ( self . rawdir , 'genotype' ) ) LOG . info ( "building labels for genotypes" ) geno = Genotype ( graph ) fly_tax = self . globaltt [ 'Drosophila melanogaster' ] with open ( raw...
def _ExtractContentFromDataStream ( self , mediator , file_entry , data_stream_name ) : """Extracts content from a data stream . Args : mediator ( ParserMediator ) : mediates the interactions between parsers and other components , such as storage and abort signals . file _ entry ( dfvfs . FileEntry ) : file...
self . processing_status = definitions . STATUS_INDICATOR_EXTRACTING if self . _processing_profiler : self . _processing_profiler . StartTiming ( 'extracting' ) self . _event_extractor . ParseDataStream ( mediator , file_entry , data_stream_name ) if self . _processing_profiler : self . _processing_profiler . S...
def nested_set_dict(d, keys, value):
    """Set a value under a sequence of nested keys.

    Parameters
    ----------
    d : Mapping
    keys : Sequence[str]
    value : Any

    Raises
    ------
    ValueError
        If the final key already exists at the target level.
    """
    assert keys
    *intermediate, last = keys
    node = d
    for key in intermediate:
        # Descend, creating empty dicts along the path as needed.
        node = node.setdefault(key, {})
    if last in node:
        raise ValueError("duplicated key '{}'".format(last))
    node[last] = value
def colors():
    """Create an ad-hoc enum class mapping UI fields to color names."""
    palette = {
        'TIME_LEFT': 'red',
        'CONTEST_NAME': 'yellow',
        'HOST': 'green',
        'MISC': 'blue',
        'TIME_TO_START': 'green',
    }
    # type() builds a throwaway class whose attributes are the colors.
    return type('Enum', (), palette)
def add_tip_labels_to_axes ( self ) : """Add text offset from tips of tree with correction for orientation , and fixed _ order which is usually used in multitree plotting ."""
# get tip - coords and replace if using fixed _ order xpos = self . ttree . get_tip_coordinates ( 'x' ) ypos = self . ttree . get_tip_coordinates ( 'y' ) if self . style . orient in ( "up" , "down" ) : if self . ttree . _fixed_order : xpos = list ( range ( self . ttree . ntips ) ) ypos = ypos [ self...
def make_future_info ( first_sid , root_symbols , years , notice_date_func , expiration_date_func , start_date_func , month_codes = None , multiplier = 500 ) : """Create a DataFrame representing futures for ` root _ symbols ` during ` year ` . Generates a contract per triple of ( symbol , year , month ) supplied ...
if month_codes is None : month_codes = CMES_CODE_TO_MONTH year_strs = list ( map ( str , years ) ) years = [ pd . Timestamp ( s , tz = 'UTC' ) for s in year_strs ] # Pairs of string / date like ( ' K06 ' , 2006-05-01) contract_suffix_to_beginning_of_month = tuple ( ( month_code + year_str [ - 2 : ] , year + MonthBe...
def preprocess ( S , coloring_method = None ) : """Preprocess splitting functions . Parameters S : csr _ matrix Strength of connection matrix method : string Algorithm used to compute the vertex coloring : * ' MIS ' - Maximal Independent Set * ' JP ' - Jones - Plassmann ( parallel ) * ' LDF ' - Larg...
if not isspmatrix_csr ( S ) : raise TypeError ( 'expected csr_matrix' ) if S . shape [ 0 ] != S . shape [ 1 ] : raise ValueError ( 'expected square matrix, shape=%s' % ( S . shape , ) ) N = S . shape [ 0 ] S = csr_matrix ( ( np . ones ( S . nnz , dtype = 'int8' ) , S . indices , S . indptr ) , shape = ( N , N )...
def GetEventTagByIdentifier ( self , identifier ) : """Retrieves a specific event tag . Args : identifier ( SQLTableIdentifier ) : event tag identifier . Returns : EventTag : event tag or None if not available ."""
event_tag = self . _GetAttributeContainerByIndex ( self . _CONTAINER_TYPE_EVENT_TAG , identifier . row_identifier - 1 ) if event_tag : event_identifier = identifiers . SQLTableIdentifier ( self . _CONTAINER_TYPE_EVENT , event_tag . event_row_identifier ) event_tag . SetEventIdentifier ( event_identifier ) d...
def add_schema ( self , schema ) : """Merge in a JSON schema . This can be a ` ` dict ` ` or another ` ` SchemaBuilder ` ` : param schema : a JSON Schema . . note : : There is no schema validation . If you pass in a bad schema , you might get back a bad schema ."""
if isinstance ( schema , SchemaBuilder ) : schema_uri = schema . schema_uri schema = schema . to_schema ( ) if schema_uri is None : del schema [ '$schema' ] elif isinstance ( schema , SchemaNode ) : schema = schema . to_schema ( ) if '$schema' in schema : self . schema_uri = self . schema_ur...