idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
2,600
def total_marks ( self ) : total = 0 for answer in self . answers : for number , part in enumerate ( answer ) : if number > 0 : if part [ 2 ] > 0 : total += part [ 2 ] return total
Compute the total mark for the assessment .
2,601
def permute ( num ) : "Permutation for randomizing data order." if permute_data : return np . random . permutation ( num ) else : logging . warning ( "Warning not permuting data" ) return np . arange ( num )
Permutation for randomizing data order .
2,602
def discrete ( cats , name = 'discrete' ) : import json ks = list ( cats ) for key in ks : if isinstance ( key , bytes ) : cats [ key . decode ( 'utf-8' ) ] = cats . pop ( key ) return 'discrete(' + json . dumps ( [ cats , name ] ) + ')'
Return a class category that shows the encoding
2,603
def clear_cache ( dataset_name = None ) : dr = data_resources [ dataset_name ] if 'dirs' in dr : for dirs , files in zip ( dr [ 'dirs' ] , dr [ 'files' ] ) : for dir , file in zip ( dirs , files ) : path = os . path . join ( data_path , dataset_name , dir , file ) if os . path . exists ( path ) : logging . info ( "clea...
Remove a data set from the cache
2,604
def to_arff ( dataset , ** kwargs ) : pods_data = dataset ( ** kwargs ) vals = list ( kwargs . values ( ) ) for i , v in enumerate ( vals ) : if isinstance ( v , list ) : vals [ i ] = '|' . join ( v ) else : vals [ i ] = str ( v ) args = '_' . join ( vals ) n = dataset . __name__ if len ( args ) > 0 : n += '_' + args n...
Take a pods data set and write it as an ARFF file
2,605
def epomeo_gpx ( data_set = 'epomeo_gpx' , sample_every = 4 ) : import gpxpy import gpxpy . gpx if not data_available ( data_set ) : download_data ( data_set ) files = [ 'endomondo_1' , 'endomondo_2' , 'garmin_watch_via_endomondo' , 'viewranger_phone' , 'viewranger_tablet' ] X = [ ] for file in files : gpx_file = open ...
Data set of three GPS traces of the same movement on Mt Epomeo in Ischia . Requires gpxpy to run .
2,606
def pmlr ( volumes = 'all' , data_set = 'pmlr' ) : if not data_available ( data_set ) : download_data ( data_set ) proceedings_file = open ( os . path . join ( data_path , data_set , 'proceedings.yaml' ) , 'r' ) import yaml proceedings = yaml . load ( proceedings_file ) data_name_full = 'pmlr_volumes' data_resources [ ...
Abstracts from the Proceedings of Machine Learning Research
2,607
def lee_yeast_ChIP ( data_set = 'lee_yeast_ChIP' ) : if not data_available ( data_set ) : download_data ( data_set ) from pandas import read_csv dir_path = os . path . join ( data_path , data_set ) filename = os . path . join ( dir_path , 'binding_by_gene.tsv' ) S = read_csv ( filename , header = 1 , index_col = 0 , se...
Yeast ChIP data from Lee et al .
2,608
def osu_run1 ( data_set = 'osu_run1' , sample_every = 4 ) : path = os . path . join ( data_path , data_set ) if not data_available ( data_set ) : import zipfile download_data ( data_set ) zip = zipfile . ZipFile ( os . path . join ( data_path , data_set , 'run1TXT.ZIP' ) , 'r' ) for name in zip . namelist ( ) : zip . e...
Ohio State University's Run1 motion capture data set .
2,609
def toy_linear_1d_classification ( seed = default_seed ) : def sample_class ( f ) : p = 1. / ( 1. + np . exp ( - f ) ) c = np . random . binomial ( 1 , p ) c = np . where ( c , 1 , - 1 ) return c np . random . seed ( seed = seed ) x1 = np . random . normal ( - 3 , 5 , 20 ) x2 = np . random . normal ( 3 , 5 , 20 ) X = (...
Simple classification data in one dimension for illustrating models .
2,610
def airline_delay ( data_set = 'airline_delay' , num_train = 700000 , num_test = 100000 , seed = default_seed ) : if not data_available ( data_set ) : download_data ( data_set ) dir_path = os . path . join ( data_path , data_set ) filename = os . path . join ( dir_path , 'filtered_data.pickle' ) import pandas as pd dat...
Airline delay data used in Gaussian Processes for Big Data by Hensman Fusi and Lawrence
2,611
def olympic_sprints ( data_set = 'rogers_girolami_data' ) : X = np . zeros ( ( 0 , 2 ) ) Y = np . zeros ( ( 0 , 1 ) ) cats = { } for i , dataset in enumerate ( [ olympic_100m_men , olympic_100m_women , olympic_200m_men , olympic_200m_women , olympic_400m_men , olympic_400m_women ] ) : data = dataset ( ) year = data [ '...
All olympics sprint winning times for multiple output prediction .
2,612
def movielens100k ( data_set = 'movielens100k' ) : if not data_available ( data_set ) : import zipfile download_data ( data_set ) dir_path = os . path . join ( data_path , data_set ) zip = zipfile . ZipFile ( os . path . join ( dir_path , 'ml-100k.zip' ) , 'r' ) for name in zip . namelist ( ) : zip . extract ( name , d...
Data set of movie ratings collected by the University of Minnesota and cleaned up for use .
2,613
def ceres ( data_set = 'ceres' ) : if not data_available ( data_set ) : download_data ( data_set ) import pandas as pd data = pd . read_csv ( os . path . join ( data_path , data_set , 'ceresData.txt' ) , index_col = 'Tag' , header = None , sep = '\t' , names = [ 'Tag' , 'Mittlere Sonnenzeit' , 'Gerade Aufstig in Zeit' ...
Twenty two observations of the Dwarf planet Ceres as observed by Giuseppe Piazzi and published in the September edition of Monatlicher Correspondenz in 1801 . These were the measurements used by Gauss to fit a model of the planet's orbit through which the planet was recovered three months later .
2,614
def access_elementusers ( self , elementuser_id , access_id = None , tenant_id = None , api_version = "v2.0" ) : if tenant_id is None and self . _parent_class . tenant_id : tenant_id = self . _parent_class . tenant_id elif not tenant_id : raise TypeError ( "tenant_id is required but not set or cached." ) cur_ctlr = sel...
Get all accesses for a particular user
2,615
def logout ( self , api_version = "v2.0" ) : cur_ctlr = self . _parent_class . controller url = str ( cur_ctlr ) + "/{}/api/logout" . format ( api_version ) api_logger . debug ( "URL = %s" , url ) return self . _parent_class . rest_call ( url , "get" )
Logout current session
2,616
def use_token ( self , token = None ) : api_logger . info ( 'use_token function:' ) if not isinstance ( token , ( text_type , binary_type ) ) : api_logger . debug ( '"token" was not a text-style string: {}' . format ( text_type ( token ) ) ) return False session = self . _parent_class . expose_session ( ) session . coo...
Function to use static AUTH_TOKEN as auth for the constructor instead of full login process .
2,617
def interactive_tenant_update_vars ( self ) : api_logger . info ( 'interactive_tenant_update_vars function:' ) tenant_resp = self . _parent_class . get . tenants ( self . _parent_class . tenant_id ) status = tenant_resp . cgx_status tenant_dict = tenant_resp . cgx_content if status : api_logger . debug ( "new tenant_di...
Function to update the cloudgenix . API object with tenant login info . Run after login or client login .
2,618
def interactive_update_profile_vars ( self ) : profile = self . _parent_class . get . profile ( ) if profile . cgx_status : self . _parent_class . tenant_id = profile . cgx_content . get ( 'tenant_id' ) self . _parent_class . email = profile . cgx_content . get ( 'email' ) self . _parent_class . _user_id = profile . cg...
Function to update the cloudgenix . API object with profile info . Run after login or client login .
2,619
def quick_menu ( self , banner , list_line_format , choice_list ) : invalid = True menu_int = - 1 while invalid : print ( banner ) for item_index , item_value in enumerate ( choice_list ) : print ( list_line_format . format ( item_index + 1 , * item_value ) ) menu_choice = compat_input ( "\nChoose a Number or (Q)uit: "...
Function to display a quick menu for user input
2,620
def check_sso_login ( self , operator_email , request_id ) : data = { "email" : operator_email , "requestId" : request_id } api_logger . info ( 'check_sso_login function:' ) response = self . _parent_class . post . login ( data = data ) if not response . cgx_content . get ( 'x_auth_token' ) : return response auth_regio...
Login to the CloudGenix API and see if SAML SSO has occurred . This function is used to check and see if SAML SSO has succeeded while waiting .
2,621
def quick_confirm ( prompt , default_value ) : valid = False value = default_value . lower ( ) while not valid : input_val = compat_input ( prompt + "[{0}]: " . format ( default_value ) ) if input_val == "" : value = default_value . lower ( ) valid = True else : try : if input_val . lower ( ) in [ 'y' , 'n' ] : value =...
Function to display a quick confirmation for user input
2,622
def quick_int_input ( prompt , default_value , min_val = 1 , max_val = 30 ) : valid = False num_val = default_value while not valid : input_val = compat_input ( prompt + "[{0}]: " . format ( default_value ) ) if input_val == "" : num_val = default_value valid = True else : try : num_val = int ( input_val ) if min_val <...
Function to display a quick question for integer user input
2,623
def quick_str_input ( prompt , default_value ) : valid = False str_val = default_value while not valid : input_val = raw_input ( prompt + "[{0}]: " . format ( default_value ) ) if input_val == "" : str_val = default_value valid = True else : try : str_val = text_type ( input_val ) valid = True except ValueError : print...
Function to display a quick question for text input .
2,624
def tran_hash ( self , a , b , c , n ) : return ( ( ( TRAN [ ( a + n ) & 255 ] ^ TRAN [ b ] * ( n + n + 1 ) ) + TRAN [ ( c ) ^ TRAN [ n ] ] ) & 255 )
implementation of the tran53 hash function
2,625
def process ( self , chunk ) : self . _digest = None if isinstance ( chunk , text_type ) : chunk = chunk . encode ( 'utf-8' ) for char in chunk : self . num_char += 1 if PY3 : c = char else : c = ord ( char ) if len ( self . window ) > 1 : self . acc [ self . tran_hash ( c , self . window [ 0 ] , self . window [ 1 ] , ...
computes the hash of all of the trigrams in the chunk using a window of length 5
2,626
def from_file ( self , fname ) : f = open ( fname , "rb" ) data = f . read ( ) self . update ( data ) f . close ( )
read in a file and compute digest
2,627
def compare ( self , digest_2 , is_hex = False ) : if is_hex : digest_2 = convert_hex_to_ints ( digest_2 ) bit_diff = 0 for i in range ( len ( self . digest ) ) : bit_diff += POPC [ self . digest [ i ] ^ digest_2 [ i ] ] return 128 - bit_diff
returns the difference between the nilsimsa digest of the current object and a given digest
2,628
def tenant_forgot_password_login ( self , data , tenant_id = None , api_version = "v2.0" ) : if tenant_id is None and self . _parent_class . tenant_id : tenant_id = self . _parent_class . tenant_id elif not tenant_id : raise TypeError ( "tenant_id is required but not set or cached." ) cur_ctlr = self . _parent_class . ...
Forgot password API
2,629
def is_valid_file ( parser , arg ) : if not os . path . exists ( arg ) : parser . error ( "File %s not found" % arg ) else : return arg
verify the validity of the given file . Never trust the End - User
2,630
def getID ( code_file ) : json_path = ghostfolder + '/' + json_file if os . path . exists ( json_path ) : pass else : download_file ( 'https://ghostbin.com/languages.json' ) lang = detect_lang ( code_file ) json_data = json . load ( file ( json_path ) ) ID = '' for i in range ( len ( json_data ) ) : temp = len ( json_d...
Get the language ID of the input file language
2,631
def detect_lang ( path ) : blob = FileBlob ( path , os . getcwd ( ) ) if blob . is_text : print ( 'Programming language of the file detected: {0}' . format ( blob . language . name ) ) return blob . language . name else : print ( 'File not a text file. Exiting...' ) sys . exit ( )
Detect the language used in the given file .
2,632
def screenshot ( self , scale = None , quality = None ) : output_dir = BuiltIn ( ) . get_variable_value ( '${OUTPUTDIR}' ) ts = time . time ( ) st = datetime . datetime . fromtimestamp ( ts ) . strftime ( '%Y%m%d%H%M%S' ) screenshot_path = '%s%s%s.png' % ( output_dir , os . sep , st ) self . device . screenshot ( scree...
Take a screenshot of the device and log it in the report with a timestamp ; scale sets the screenshot size and quality sets the screenshot quality ( defaults : scale = 1.0 , quality = 100 )
2,633
def call ( self , obj , method , * args , ** selectors ) : func = getattr ( obj , method ) return func ( ** selectors )
This keyword can use object method from original python uiautomator
2,634
def merge_sims ( oldsims , newsims , clip = None ) : if oldsims is None : result = newsims or [ ] elif newsims is None : result = oldsims else : result = sorted ( oldsims + newsims , key = lambda item : - item [ 1 ] ) if clip is not None : result = result [ : clip ] return result
Merge two precomputed similarity lists truncating the result to clip most similar items .
2,635
def terminate ( self ) : try : self . id2sims . terminate ( ) except : pass import glob for fname in glob . glob ( self . fname + '*' ) : try : os . remove ( fname ) logger . info ( "deleted %s" % fname ) except Exception , e : logger . warning ( "failed to delete %s: %s" % ( fname , e ) ) for val in self . __dict__ . ...
Delete all files created by this index invalidating self . Use with care .
2,636
def update_ids ( self , docids ) : logger . info ( "updating %i id mappings" % len ( docids ) ) for docid in docids : if docid is not None : pos = self . id2pos . get ( docid , None ) if pos is not None : logger . info ( "replacing existing document %r in %s" % ( docid , self ) ) del self . pos2id [ pos ] self . id2pos...
Update id - > pos mapping with new document ids .
2,637
def vec_by_id ( self , docid ) : pos = self . id2pos [ docid ] return self . qindex . vector_by_id ( pos )
Return indexed vector corresponding to document docid .
2,638
def merge ( self , other ) : other . qindex . normalize , other . qindex . num_best = False , self . topsims logger . info ( "updating old precomputed values" ) pos , lenself = 0 , len ( self . qindex ) for chunk in self . qindex . iter_chunks ( ) : for sims in other . qindex [ chunk ] : if pos in self . pos2id : docid...
Merge documents from the other index . Update precomputed similarities in the process .
2,639
def doc2vec ( self , doc ) : bow = self . dictionary . doc2bow ( doc [ 'tokens' ] ) if self . method == 'lsi' : return self . lsi [ self . tfidf [ bow ] ] elif self . method == 'lda' : return self . lda [ bow ] elif self . method == 'lda_tfidf' : return self . lda [ self . tfidf [ bow ] ] elif self . method == 'logentr...
Convert a single SimilarityDocument to vector .
2,640
def flush ( self , save_index = False , save_model = False , clear_buffer = False ) : if save_index : if self . fresh_index is not None : self . fresh_index . save ( self . location ( 'index_fresh' ) ) if self . opt_index is not None : self . opt_index . save ( self . location ( 'index_opt' ) ) if save_model : if self ...
Commit all changes clear all caches .
2,641
def close ( self ) : try : self . payload . close ( ) except : pass try : self . model . close ( ) except : pass try : self . fresh_index . close ( ) except : pass try : self . opt_index . close ( ) except : pass try : self . fresh_docs . terminate ( ) except : pass
Explicitly close open file handles databases etc .
2,642
def train ( self , corpus = None , method = 'auto' , clear_buffer = True , params = None ) : if corpus is not None : self . flush ( clear_buffer = True ) self . buffer ( corpus ) if not self . fresh_docs : msg = "train called but no training corpus specified for %s" % self logger . error ( msg ) raise ValueError ( msg ...
Create an indexing model . Will overwrite the model if it already exists . All indexes become invalid because documents in them use a now - obsolete representation .
2,643
def index ( self , corpus = None , clear_buffer = True ) : if not self . model : msg = 'must initialize model for %s before indexing documents' % self . basename logger . error ( msg ) raise AttributeError ( msg ) if corpus is not None : self . flush ( clear_buffer = True ) self . buffer ( corpus ) if not self . fresh_...
Permanently index all documents previously added via buffer or directly index documents from corpus if specified .
2,644
def drop_index ( self , keep_model = True ) : modelstr = "" if keep_model else "and model " logger . info ( "deleting similarity index " + modelstr + "from %s" % self . basename ) for index in [ self . fresh_index , self . opt_index ] : if index is not None : index . terminate ( ) self . fresh_index , self . opt_index ...
Drop all indexed documents . If keep_model is False also drop the model .
2,645
def delete ( self , docids ) : logger . info ( "asked to drop %i documents" % len ( docids ) ) for index in [ self . opt_index , self . fresh_index ] : if index is not None : index . delete ( docids ) self . flush ( save_index = True )
Delete specified documents from the index .
2,646
def find_similar ( self , doc , min_score = 0.0 , max_results = 100 ) : logger . debug ( "received query call with %r" % doc ) if self . is_locked ( ) : msg = "cannot query while the server is being updated" logger . error ( msg ) raise RuntimeError ( msg ) sims_opt , sims_fresh = None , None for index in [ self . fres...
Find max_results most similar articles in the index each having similarity score of at least min_score . The resulting list may be shorter than max_results in case there are not enough matching documents .
2,647
def keys ( self ) : result = [ ] if self . fresh_index is not None : result += self . fresh_index . keys ( ) if self . opt_index is not None : result += self . opt_index . keys ( ) return result
Return ids of all indexed documents .
2,648
def check_session ( self ) : if self . session is None : if self . autosession : self . open_session ( ) else : msg = "must open a session before modifying %s" % self raise RuntimeError ( msg )
Make sure a session is open .
2,649
def open_session ( self ) : if self . session is not None : msg = "session already open; commit it or rollback before opening another one in %s" % self logger . error ( msg ) raise RuntimeError ( msg ) logger . info ( "opening a new session" ) logger . info ( "removing %s" % self . loc_session ) try : shutil . rmtree (...
Open a new session to modify this server .
2,650
def buffer ( self , * args , ** kwargs ) : self . check_session ( ) result = self . session . buffer ( * args , ** kwargs ) return result
Buffer documents in the current session
2,651
def index ( self , * args , ** kwargs ) : self . check_session ( ) result = self . session . index ( * args , ** kwargs ) if self . autosession : self . commit ( ) return result
Index documents in the current session
2,652
def drop_index ( self , keep_model = True ) : self . check_session ( ) result = self . session . drop_index ( keep_model ) if self . autosession : self . commit ( ) return result
Drop all indexed documents from the session . Optionally drop model too .
2,653
def delete ( self , docids ) : self . check_session ( ) result = self . session . delete ( docids ) if self . autosession : self . commit ( ) return result
Delete documents from the current session .
2,654
def optimize ( self ) : self . check_session ( ) result = self . session . optimize ( ) if self . autosession : self . commit ( ) return result
Optimize index for faster by - document - id queries .
2,655
def commit ( self ) : if self . session is not None : logger . info ( "committing transaction in %s" % self ) tmp = self . stable self . stable , self . session = self . session , None self . istable = 1 - self . istable self . write_istable ( ) tmp . close ( ) self . lock_update . release ( ) else : logger . warning (...
Commit changes made by the latest session .
2,656
def terminate ( self ) : logger . info ( "deleting entire server %s" % self ) self . close ( ) try : shutil . rmtree ( self . basedir ) logger . info ( "deleted server under %s" % self . basedir ) for val in self . __dict__ . keys ( ) : try : delattr ( self , val ) except : pass except Exception , e : logger . warning ...
Delete all files created by this server invalidating self . Use with care .
2,657
def find_similar ( self , * args , ** kwargs ) : if self . session is not None and self . autosession : self . commit ( ) return self . stable . find_similar ( * args , ** kwargs )
Find similar articles .
2,658
async def profile ( self , ctx , platform , name ) : player = await self . client . get_player ( platform , name ) solos = await player . get_solos ( ) await ctx . send ( "# of kills in solos for {}: {}" . format ( name , solos . kills . value ) )
Fetch a profile .
2,659
def generate_chunks ( data , chunk_size = DEFAULT_CHUNK_SIZE ) : iterator = iter ( repeated . getvalues ( data ) ) while True : chunk = list ( itertools . islice ( iterator , chunk_size ) ) if not chunk : return yield chunk
Yield chunk_size items from data at a time .
2,660
def reduce ( reducer , data , chunk_size = DEFAULT_CHUNK_SIZE ) : if not chunk_size : return finalize ( reducer , fold ( reducer , data ) ) chunks = generate_chunks ( data , chunk_size ) intermediate = fold ( reducer , next ( chunks ) ) for chunk in chunks : intermediate = merge ( reducer , intermediate , fold ( reduce...
Repeatedly call fold and merge on data and then finalize .
2,661
def conditions ( self ) : for idx in six . moves . range ( 1 , len ( self . children ) , 2 ) : yield ( self . children [ idx - 1 ] , self . children [ idx ] )
The if - else pairs .
2,662
def handle_noargs ( self , ** options ) : r = get_r ( ) since = datetime . utcnow ( ) - timedelta ( days = 1 ) metrics = { } categories = r . metric_slugs_by_category ( ) for category_name , slug_list in categories . items ( ) : metrics [ category_name ] = [ ] for slug in slug_list : metric_values = r . get_metric_hist...
Send Report E - mails .
2,663
def add_tasks ( self , value ) : tasks = self . _validate_entities ( value ) self . _tasks . update ( tasks ) self . _task_count = len ( self . _tasks )
Adds tasks to the existing set of tasks of the Stage
2,664
def to_dict ( self ) : stage_desc_as_dict = { 'uid' : self . _uid , 'name' : self . _name , 'state' : self . _state , 'state_history' : self . _state_history , 'parent_pipeline' : self . _p_pipeline } return stage_desc_as_dict
Convert current Stage into a dictionary
2,665
def from_dict ( self , d ) : if 'uid' in d : if d [ 'uid' ] : self . _uid = d [ 'uid' ] if 'name' in d : if d [ 'name' ] : self . _name = d [ 'name' ] if 'state' in d : if isinstance ( d [ 'state' ] , str ) or isinstance ( d [ 'state' ] , unicode ) : if d [ 'state' ] in states . _stage_state_values . keys ( ) : self . ...
Create a Stage from a dictionary . The change is made in place .
2,666
def _make_spec_file ( self ) : if issubclass ( BdistRPMCommand , object ) : spec_file = super ( BdistRPMCommand , self ) . _make_spec_file ( ) else : spec_file = bdist_rpm . _make_spec_file ( self ) if sys . version_info [ 0 ] < 3 : python_package = "python" else : python_package = "python3" description = [ ] summary =...
Generates the text of an RPM spec file .
2,667
def resolve ( self , name ) : for scope in reversed ( self . scopes ) : try : return structured . resolve ( scope , name ) except ( KeyError , AttributeError ) : continue raise AttributeError ( name )
Call IStructured . resolve across all scopes and return first hit .
2,668
def reflect ( self , name ) : result = None for scope in reversed ( self . scopes ) : try : if isinstance ( scope , type ) : result = structured . reflect_static_member ( scope , name ) else : result = structured . reflect_runtime_member ( scope , name ) if result is not None : return result except ( NotImplementedErro...
Reflect name starting with local scope all the way up to global .
2,669
def reflect_runtime_member ( self , name ) : for scope in reversed ( self . scopes ) : try : return structured . reflect_runtime_member ( scope , name ) except ( NotImplementedError , KeyError , AttributeError ) : continue return protocol . AnyType
Reflect name using ONLY runtime reflection .
2,670
def reflect_static_member ( cls , name ) : for scope in reversed ( cls . scopes ) : try : return structured . reflect_static_member ( scope , name ) except ( NotImplementedError , KeyError , AttributeError ) : continue return protocol . AnyType
Reflect name using ONLY static reflection .
2,671
def get_hostmap ( profile ) : hostmap = dict ( ) for entry in profile : if entry [ ru . EVENT ] == 'hostname' : hostmap [ entry [ ru . UID ] ] = entry [ ru . MSG ] return hostmap
We abuse the profile combination to also derive a pilot - host map which will tell us on what exact host each pilot has been running . To do so we check for the PMGR_ACTIVE advance event in agent_0 . prof and use the NTP sync info to associate a hostname .
2,672
def get_hostmap_deprecated ( profiles ) : hostmap = dict ( ) for pname , prof in profiles . iteritems ( ) : if not len ( prof ) : continue if not prof [ 0 ] [ ru . MSG ] : continue host , ip , _ , _ , _ = prof [ 0 ] [ ru . MSG ] . split ( ':' ) host_id = '%s:%s' % ( host , ip ) for row in prof : if 'agent_0.prof' in pn...
This method mangles combine_profiles and get_hostmap and is deprecated . At this point it only returns the hostmap
2,673
def categorize_metrics ( self ) : category = self . cleaned_data [ 'category_name' ] metrics = self . cleaned_data [ 'metrics' ] self . r . reset_category ( category , metrics )
Called only on a valid form this method will place the chosen metrics in the given category .
2,674
def match ( self , f , * args ) : try : match = f ( self . tokenizer , * args ) except StopIteration : return if match is None : return if not isinstance ( match , grammar . TokenMatch ) : raise TypeError ( "Invalid grammar function %r returned %r." % ( f , match ) ) self . matched = match return match
Match grammar function f against next token and set self . matched .
2,675
def reject ( self , f , * args ) : match = self . match ( f , * args ) if match : token = self . peek ( 0 ) raise errors . EfilterParseError ( query = self . tokenizer . source , token = token , message = "Was not expecting a %s here." % token . name )
Like match but throw a parse error if f matches .
2,676
def expect ( self , f , * args ) : match = self . accept ( f , * args ) if match : return match try : func_name = f . func_name except AttributeError : func_name = "<unnamed grammar function>" start , end = self . current_position ( ) raise errors . EfilterParseError ( query = self . tokenizer . source , start = start ...
Like accept but throws a parse error if f doesn t match .
2,677
def solve_var ( expr , vars ) : try : return Result ( structured . resolve ( vars , expr . value ) , ( ) ) except ( KeyError , AttributeError ) as e : raise errors . EfilterKeyError ( root = expr , key = expr . value , message = e , query = expr . source ) except ( TypeError , ValueError ) as e : if vars . locals is No...
Returns the value of the var named in the expression .
2,678
def solve_repeat ( expr , vars ) : try : result = repeated . meld ( * [ solve ( x , vars ) . value for x in expr . children ] ) return Result ( result , ( ) ) except TypeError : raise errors . EfilterTypeError ( root = expr , query = expr . source , message = "All values in a repeated value must be of the same type." )
Build a repeated value from subexpressions .
2,679
def solve_tuple ( expr , vars ) : result = tuple ( solve ( x , vars ) . value for x in expr . children ) return Result ( result , ( ) )
Build a tuple from subexpressions .
2,680
def solve_ifelse ( expr , vars ) : for condition , result in expr . conditions ( ) : if boolean . asbool ( solve ( condition , vars ) . value ) : return solve ( result , vars ) return solve ( expr . default ( ) , vars )
Evaluate conditions and return the one that matches .
2,681
def solve_map ( expr , vars ) : lhs_values , _ = __solve_for_repeated ( expr . lhs , vars ) def lazy_map ( ) : try : for lhs_value in repeated . getvalues ( lhs_values ) : yield solve ( expr . rhs , __nest_scope ( expr . lhs , vars , lhs_value ) ) . value except errors . EfilterNoneError as error : error . root = expr ...
Solves the map - form by recursively calling its RHS with new vars .
2,682
def solve_let ( expr , vars ) : lhs_value = solve ( expr . lhs , vars ) . value if not isinstance ( lhs_value , structured . IStructured ) : raise errors . EfilterTypeError ( root = expr . lhs , query = expr . original , message = "The LHS of 'let' must evaluate to an IStructured. Got %r." % ( lhs_value , ) ) return so...
Solves a let - form by calling RHS with nested scope .
2,683
def solve_filter ( expr , vars ) : lhs_values , _ = __solve_for_repeated ( expr . lhs , vars ) def lazy_filter ( ) : for lhs_value in repeated . getvalues ( lhs_values ) : if solve ( expr . rhs , __nest_scope ( expr . lhs , vars , lhs_value ) ) . value : yield lhs_value return Result ( repeated . lazy ( lazy_filter ) ,...
Filter values on the LHS by evaluating RHS with each value .
2,684
def solve_sort ( expr , vars ) : lhs_values = repeated . getvalues ( __solve_for_repeated ( expr . lhs , vars ) [ 0 ] ) sort_expression = expr . rhs def _key_func ( x ) : return solve ( sort_expression , __nest_scope ( expr . lhs , vars , x ) ) . value results = ordered . ordered ( lhs_values , key_func = _key_func ) r...
Sort values on the LHS by the value they yield when passed to RHS .
2,685
def solve_each ( expr , vars ) : lhs_values , _ = __solve_for_repeated ( expr . lhs , vars ) for lhs_value in repeated . getvalues ( lhs_values ) : result = solve ( expr . rhs , __nest_scope ( expr . lhs , vars , lhs_value ) ) if not result . value : return result . _replace ( value = False ) return Result ( True , ( )...
Return True if RHS evaluates to a true value with each state of LHS .
2,686
def solve_cast ( expr , vars ) : lhs = solve ( expr . lhs , vars ) . value t = solve ( expr . rhs , vars ) . value if t is None : raise errors . EfilterTypeError ( root = expr , query = expr . source , message = "Cannot find type named %r." % expr . rhs . value ) if not isinstance ( t , type ) : raise errors . EfilterT...
Get cast LHS to RHS .
2,687
def solve_isinstance ( expr , vars ) : lhs = solve ( expr . lhs , vars ) try : t = solve ( expr . rhs , vars ) . value except errors . EfilterKeyError : t = None if t is None : raise errors . EfilterTypeError ( root = expr . rhs , query = expr . source , message = "Cannot find type named %r." % expr . rhs . value ) if ...
Typecheck whether LHS is type on the RHS .
2,688
def set_version ( mod_root ) : try : version_base = None version_detail = None src_root = os . path . dirname ( __file__ ) if not src_root : src_root = '.' with open ( src_root + '/VERSION' , 'r' ) as f : version_base = f . readline ( ) . strip ( ) p = sp . Popen ( 'cd %s ; ' 'test -z `git rev-parse --show-prefix` || e...
mod_root : a VERSION file containing the version strings is created in mod_root during installation . That file is used at runtime to get the version information .
2,689
def isgood ( name ) : if not isbad ( name ) : if name . endswith ( '.py' ) or name . endswith ( '.json' ) or name . endswith ( '.tar' ) : return True return False
Whether name should be installed
2,690
def meld ( * values ) : values = [ x for x in values if x is not None ] if not values : return None result = repeated ( * values ) if isrepeating ( result ) : return result return getvalue ( result )
Return the repeated value or the first value if there s only one .
2,691
def getvalue ( x ) : if isrepeating ( x ) : raise TypeError ( "Ambiguous call to getvalue for %r which has more than one value." % x ) for value in getvalues ( x ) : return value
Return the single value of x or raise TypeError if more than one value .
2,692
def to_dict ( self ) : task_desc_as_dict = { 'uid' : self . _uid , 'name' : self . _name , 'state' : self . _state , 'state_history' : self . _state_history , 'pre_exec' : self . _pre_exec , 'executable' : self . _executable , 'arguments' : self . _arguments , 'post_exec' : self . _post_exec , 'cpu_reqs' : self . _cpu_...
Convert current Task into a dictionary
2,693
def keyword ( tokens , expected ) : try : token = next ( iter ( tokens ) ) except StopIteration : return if token and token . name == "symbol" and token . value . lower ( ) == expected : return TokenMatch ( None , token . value , ( token , ) )
Case - insensitive keyword match .
2,694
def multi_keyword ( tokens , keyword_parts ) : tokens = iter ( tokens ) matched_tokens = [ ] limit = len ( keyword_parts ) for idx in six . moves . range ( limit ) : try : token = next ( tokens ) except StopIteration : return if ( not token or token . name != "symbol" or token . value . lower ( ) != keyword_parts [ idx...
Match a case - insensitive keyword consisting of multiple tokens .
2,695
def prefix ( tokens , operator_table ) : operator , matched_tokens = operator_table . prefix . match ( tokens ) if operator : return TokenMatch ( operator , None , matched_tokens )
Match a prefix of an operator .
2,696
def infix ( tokens , operator_table ) : operator , matched_tokens = operator_table . infix . match ( tokens ) if operator : return TokenMatch ( operator , None , matched_tokens )
Match an infix of an operator .
2,697
def suffix ( tokens , operator_table ) : operator , matched_tokens = operator_table . suffix . match ( tokens ) if operator : return TokenMatch ( operator , None , matched_tokens )
Match a suffix of an operator .
2,698
def match_tokens ( expected_tokens ) : if isinstance ( expected_tokens , Token ) : def _grammar_func ( tokens ) : try : next_token = next ( iter ( tokens ) ) except StopIteration : return if next_token == expected_tokens : return TokenMatch ( None , next_token . value , ( next_token , ) ) elif isinstance ( expected_tok...
Generate a grammar function that will match expected_tokens only .
2,699
def expression ( self , previous_precedence = 0 ) : lhs = self . atom ( ) return self . operator ( lhs , previous_precedence )
An expression is an atom or an infix expression .