idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
1,500 | def pause ( self , unique_id , configs = None ) : pids = self . get_pid ( unique_id , configs ) if pids != constants . PROCESS_NOT_RUNNING_PID : pid_str = ' ' . join ( str ( pid ) for pid in pids ) hostname = self . processes [ unique_id ] . hostname with get_ssh_client ( hostname , username = runtime . get_username ( ... | Issues a sigstop for the specified process |
1,501 | def _send_signal ( self , unique_id , signalno , configs ) : pids = self . get_pid ( unique_id , configs ) if pids != constants . PROCESS_NOT_RUNNING_PID : pid_str = ' ' . join ( str ( pid ) for pid in pids ) hostname = self . processes [ unique_id ] . hostname msg = Deployer . _signalnames . get ( signalno , "SENDING ... | Issues a signal for the specified process |
1,502 | def resume ( self , unique_id , configs = None ) : self . _send_signal ( unique_id , signal . SIGCONT , configs ) | Issues a sigcont for the specified process |
1,503 | def terminate ( self , unique_id , configs = None ) : self . _send_signal ( unique_id , signal . SIGTERM , configs ) | Issues a kill - 15 to the specified process |
1,504 | def hangup ( self , unique_id , configs = None ) : self . _send_signal ( unique_id , signal . SIGHUP , configs ) | Issue a signal to hangup the specified process |
1,505 | def get_logs ( self , unique_id , logs , directory , pattern = constants . FILTER_NAME_ALLOW_NONE ) : self . fetch_logs ( unique_id , logs , directory , pattern ) | deprecated name for fetch_logs |
1,506 | def fetch_logs ( self , unique_id , logs , directory , pattern = constants . FILTER_NAME_ALLOW_NONE ) : hostname = self . processes [ unique_id ] . hostname install_path = self . processes [ unique_id ] . install_path self . fetch_logs_from_host ( hostname , install_path , unique_id , logs , directory , pattern ) | Copies logs from the remote host that the process is running on to the provided directory |
1,507 | def fetch_logs_from_host ( hostname , install_path , prefix , logs , directory , pattern ) : if hostname is not None : with get_sftp_client ( hostname , username = runtime . get_username ( ) , password = runtime . get_password ( ) ) as ftp : for f in logs : try : mode = ftp . stat ( f ) . st_mode except IOError , e : i... | Static method Copies logs from specified host on the specified install path |
1,508 | def get_pid ( self , unique_id , configs = None ) : RECV_BLOCK_SIZE = 16 if configs is None : configs = { } tmp = self . default_configs . copy ( ) tmp . update ( configs ) configs = tmp if unique_id in self . processes : hostname = self . processes [ unique_id ] . hostname else : return constants . PROCESS_NOT_RUNNING... | Gets the pid of the process with unique_id . If the deployer does not know of a process with unique_id then it should return a value of constants . PROCESS_NOT_RUNNING_PID |
1,509 | def get_host ( self , unique_id ) : if unique_id in self . processes : return self . processes [ unique_id ] . hostname logger . error ( "{0} not a known process" . format ( unique_id ) ) raise NameError ( "{0} not a known process" . format ( unique_id ) ) | Gets the host of the process with unique_id . If the deployer does not know of a process with unique_id then it should return a value of SOME_SENTINAL_VALUE |
1,510 | def kill_all_process ( self ) : if ( runtime . get_active_config ( "cleanup_pending_process" , False ) ) : for process in self . get_processes ( ) : self . terminate ( process . unique_id ) | Terminates all the running processes . By default it is set to false . Users can set to true in config once the method to get_pid is done deterministically either using pid_file or an accurate keyword |
1,511 | def string_to_level ( log_level ) : if ( log_level . strip ( ) . upper ( ) == "DEBUG" ) : return logging . DEBUG if ( log_level . strip ( ) . upper ( ) == "INFO" ) : return logging . INFO if ( log_level . strip ( ) . upper ( ) == "WARNING" ) : return logging . WARNING if ( log_level . strip ( ) . upper ( ) == "ERROR" )... | Converts a string to the corresponding log level |
1,512 | def execute ( self , conn , dataset , dataset_access_type , transaction = False ) : if not conn : dbsExceptionHandler ( "dbsException-failed-connect2host" , "Oracle/Dataset/UpdateType. Expects db connection from upper layer." , self . logger . exception ) binds = { "dataset" : dataset , "dataset_access_type" : dataset... | for a given file |
1,513 | def validateStringInput ( input_key , input_data , read = False ) : log = clog . error_log func = None if '*' in input_data or '%' in input_data : func = validationFunctionWildcard . get ( input_key ) if func is None : func = searchstr elif input_key == 'migration_input' : if input_data . find ( '#' ) != - 1 : func = b... | To check if a string has the required format . This is only used for POST APIs . |
1,514 | def jsonstreamer ( func ) : def wrapper ( self , * args , ** kwds ) : gen = func ( self , * args , ** kwds ) yield "[" firstItem = True for item in gen : if not firstItem : yield "," else : firstItem = False yield cjson . encode ( item ) yield "]" return wrapper | JSON streamer decorator |
1,515 | def listDatasetAccessTypes ( self , dataset_access_type = "" ) : if isinstance ( dataset_access_type , basestring ) : try : dataset_access_type = str ( dataset_access_type ) except : dbsExceptionHandler ( 'dbsException-invalid-input' , 'dataset_access_type given is not valid : %s' % dataset_access_type ) else : dbsExce... | List dataset access types |
1,516 | def block_before ( self ) : if request . path . startswith ( url_for ( 'static' , filename = '' ) ) : return ignored_extensions = ( 'ico' , 'png' , 'txt' , 'xml' ) if request . path . rsplit ( '.' , 1 ) [ - 1 ] in ignored_extensions : return ips = request . headers . getlist ( 'X-Forwarded-For' ) if not ips : return ip... | Check the current request and block it if the IP address it s coming from is blacklisted . |
1,517 | def matches_ip ( self , ip ) : if self . cache is not None : matches_ip = self . cache . get ( ip ) if matches_ip is not None : return matches_ip matches_ip = IPNetwork . matches_ip ( ip , read_preference = self . read_preference ) if self . cache is not None : self . cache [ ip ] = matches_ip return matches_ip | Return True if the given IP is blacklisted False otherwise . |
1,518 | def processDatasetBlocks ( self , url , conn , inputdataset , order_counter ) : ordered_dict = { } srcblks = self . getSrcBlocks ( url , dataset = inputdataset ) if len ( srcblks ) < 0 : e = "DBSMigration: No blocks in the required dataset %s found at source %s." % ( inputdataset , url ) dbsExceptionHandler ( 'dbsExcep... | Utility function that comapares blocks of a dataset at source and dst and returns an ordered list of blocks not already at dst for migration |
1,519 | def removeMigrationRequest ( self , migration_rqst ) : conn = self . dbi . connection ( ) try : tran = conn . begin ( ) self . mgrremove . execute ( conn , migration_rqst ) tran . commit ( ) except dbsException as he : if conn : conn . close ( ) raise except Exception as ex : if conn : conn . close ( ) raise if conn : ... | Method to remove pending or failed migration request from the queue . |
1,520 | def listMigrationBlocks ( self , migration_request_id = "" ) : conn = self . dbi . connection ( ) try : return self . mgrblklist . execute ( conn , migration_request_id = migration_request_id ) finally : if conn : conn . close ( ) | get eveything of block that is has status = 0 and migration_request_id as specified . |
1,521 | def getSrcBlocks ( self , url , dataset = "" , block = "" ) : if block : params = { 'block_name' : block , 'open_for_writing' : 0 } elif dataset : params = { 'dataset' : dataset , 'open_for_writing' : 0 } else : m = 'DBSMigration: Invalid input. Either block or dataset name has to be provided' e = 'DBSMigrate/getSrcBl... | Need to list all blocks of the dataset and its parents starting from the top For now just list the blocks from this dataset . Client type call ... |
1,522 | def executeSingle ( self , conn , daoinput , tablename , transaction = False ) : sql1 = " insert into %s%s( " % ( self . owner , tablename ) sql2 = " values(" "Now loop over all the input keys. We need to check if all the keys are valid !!!" for key in daoinput : sql1 += "%s," % key . upper ( ) sql2 += ":%s," % key . l... | build dynamic sql based on daoinput |
1,523 | def parse_requirements ( requirements_file ) : if os . path . exists ( requirements_file ) : return open ( requirements_file , 'r' ) . read ( ) . splitlines ( ) else : print ( "ERROR: requirements file " + requirements_file + " not found." ) sys . exit ( 1 ) | Create a list for the install_requires component of the setup function by parsing a requirements file |
1,524 | def execute ( self , conn , dsType = "" , dataset = "" , transaction = False ) : sql = self . sql binds = { } if not dsType and not dataset : pass elif dsType and dataset in ( "" , None , '%' ) : op = ( "=" , "like" ) [ "%" in dsType ] sql += "WHERE PDT.PRIMARY_DS_TYPE %s :primdstype" % op binds = { "primdstype" : dsTy... | Lists all primary dataset types if no user input is provided . |
1,525 | def listReleaseVersions ( self , release_version = "" , dataset = '' , logical_file_name = '' ) : if dataset and ( '%' in dataset or '*' in dataset ) : dbsExceptionHandler ( 'dbsException-invalid-input' , " DBSReleaseVersion/listReleaseVersions. No wildcards are" + " allowed in dataset.\n." ) if logical_file_name and (... | List release versions |
1,526 | def __search_ca_path ( self ) : if "X509_CERT_DIR" in os . environ : self . _ca_path = os . environ [ 'X509_CERT_DIR' ] elif os . path . exists ( '/etc/grid-security/certificates' ) : self . _ca_path = '/etc/grid-security/certificates' else : raise ClientAuthException ( "Could not find a valid CA path" ) | Get CA Path to check the validity of the server host certificate on the client side |
1,527 | def authInsert ( user , role , group , site ) : if not role : return True for k , v in user [ 'roles' ] . iteritems ( ) : for g in v [ 'group' ] : if k in role . get ( g , '' ) . split ( ':' ) : return True return False | Authorization function for general insert |
1,528 | def listDatasetParents ( self , dataset = "" ) : if ( dataset == "" ) : dbsExceptionHandler ( "dbsException-invalid-input" , "DBSDataset/listDatasetParents. Child Dataset name is required." ) conn = self . dbi . connection ( ) try : result = self . datasetparentlist . execute ( conn , dataset ) return result finally : ... | takes required dataset parameter returns only parent dataset name |
1,529 | def listDatasetChildren ( self , dataset ) : if ( dataset == "" ) : dbsExceptionHandler ( "dbsException-invalid-input" , "DBSDataset/listDatasetChildren. Parent Dataset name is required." ) conn = self . dbi . connection ( ) try : result = self . datasetchildlist . execute ( conn , dataset ) return result finally : if ... | takes required dataset parameter returns only children dataset name |
1,530 | def listDatasets ( self , dataset = "" , parent_dataset = "" , is_dataset_valid = 1 , release_version = "" , pset_hash = "" , app_name = "" , output_module_label = "" , global_tag = "" , processing_version = 0 , acquisition_era = "" , run_num = - 1 , physics_group_name = "" , logical_file_name = "" , primary_ds_name = ... | lists all datasets if dataset parameter is not given . The parameter can include % character . all other parameters are not wild card ones . |
1,531 | def execute ( self , conn , block_name , origin_site_name , transaction = False ) : if not conn : dbsExceptionHandler ( "dbsException-failed-connect2host" , "Oracle/Block/UpdateStatus. \Expects db connection from upper layer." , self . logger . exception ) binds = { "block_name" : block_name , "origin_site_name" : orig... | Update origin_site_name for a given block_name |
1,532 | def increment ( self , conn , seqName , transaction = False , incCount = 1 ) : try : seqTable = "%sS" % seqName tlock = "lock tables %s write" % seqTable self . dbi . processData ( tlock , [ ] , conn , transaction ) sql = "select ID from %s" % seqTable result = self . dbi . processData ( sql , [ ] , conn , transaction ... | increments the sequence seqName by default Incremented by one and returns its value |
1,533 | def listRuns ( self , run_num = - 1 , logical_file_name = "" , block_name = "" , dataset = "" ) : if ( '%' in logical_file_name or '%' in block_name or '%' in dataset ) : dbsExceptionHandler ( 'dbsException-invalid-input' , " DBSDatasetRun/listRuns. No wildcards are allowed in logical_file_name, block_name or dataset.\... | List run known to DBS . |
1,534 | def insertPrimaryDataset ( self ) : try : body = request . body . read ( ) indata = cjson . decode ( body ) indata = validateJSONInputNoCopy ( "primds" , indata ) indata . update ( { "creation_date" : dbsUtils ( ) . getTime ( ) , "create_by" : dbsUtils ( ) . getCreateBy ( ) } ) self . dbsPrimaryDataset . insertPrimaryD... | API to insert A primary dataset in DBS |
1,535 | def insertBlock ( self ) : try : body = request . body . read ( ) indata = cjson . decode ( body ) indata = validateJSONInputNoCopy ( "block" , indata ) self . dbsBlock . insertBlock ( indata ) except cjson . DecodeError as dc : dbsExceptionHandler ( "dbsException-invalid-input2" , "Wrong format/data from insert Block ... | API to insert a block into DBS |
1,536 | def updateFile ( self , logical_file_name = [ ] , is_file_valid = 1 , lost = 0 , dataset = '' ) : if lost in [ 1 , True , 'True' , 'true' , '1' , 'y' , 'yes' ] : lost = 1 if is_file_valid in [ 1 , True , 'True' , 'true' , '1' , 'y' , 'yes' ] : dbsExceptionHandler ( "dbsException-invalid-input2" , dbsExceptionCode [ "db... | API to update file status |
1,537 | def qs_for_ip ( cls , ip_str ) : ip = int ( netaddr . IPAddress ( ip_str ) ) if ip > 4294967295 : return cls . objects . none ( ) ip_range_query = { 'start__lte' : ip , 'stop__gte' : ip } return cls . objects . filter ( ** ip_range_query ) | Returns a queryset with matching IPNetwork objects for the given IP . |
1,538 | def matches_ip ( cls , ip_str , read_preference = None ) : qs = cls . qs_for_ip ( ip_str ) . only ( 'whitelist' ) if read_preference : qs = qs . read_preference ( read_preference ) return bool ( qs ) and not any ( obj . whitelist for obj in qs ) | Return True if provided IP exists in the blacklist and doesn t exist in the whitelist . Otherwise return False . |
1,539 | def dbsExceptionHandler ( eCode = '' , message = '' , logger = None , serverError = '' ) : if logger : if eCode == "dbsException-invalid-input" : raise HTTPError ( 400 , message ) elif eCode == "dbsException-missing-data" : logger ( time . asctime ( time . gmtime ( ) ) + " " + eCode + ": " + serverError ) raise HTTPErr... | This utility function handles all dbs exceptions . It will log raise exception based on input condition . It loggs the traceback on the server log . Send HTTPError 400 for invalid client input and HTTPError 404 for NOT FOUND required pre - existing condition . |
1,540 | def configure_proxy ( self , curl_object ) : curl_object . setopt ( curl_object . PROXY , self . _proxy_hostname ) curl_object . setopt ( curl_object . PROXYPORT , self . _proxy_port ) curl_object . setopt ( curl_object . PROXYTYPE , curl_object . PROXYTYPE_SOCKS5 ) if self . _proxy_user and self . _proxy_passwd : curl... | configure pycurl proxy settings |
1,541 | def execute ( self , conn , acquisition_era_name , end_date , transaction = False ) : if not conn : dbsExceptionHandler ( "dbsException-failed-connect2host" , "dbs/dao/Oracle/AcquisitionEra/updateEndDate expects db connection from upper layer." , self . logger . exception ) binds = { "acquisition_era_name" : acquisitio... | for a given block_id |
1,542 | def updateStatus ( self , block_name = "" , open_for_writing = 0 ) : if open_for_writing not in [ 1 , 0 , '1' , '0' ] : msg = "DBSBlock/updateStatus. open_for_writing can only be 0 or 1 : passed %s." % open_for_writing dbsExceptionHandler ( 'dbsException-invalid-input' , msg ) conn = self . dbi . connection ( ) trans =... | Used to toggle the status of a block open_for_writing = 1 open for writing open_for_writing = 0 closed |
1,543 | def updateSiteName ( self , block_name , origin_site_name ) : if not origin_site_name : dbsExceptionHandler ( 'dbsException-invalid-input' , "DBSBlock/updateSiteName. origin_site_name is mandatory." ) conn = self . dbi . connection ( ) trans = conn . begin ( ) try : self . updatesitename . execute ( conn , block_name ,... | Update the origin_site_name for a given block name |
1,544 | def listBlocks ( self , dataset = "" , block_name = "" , data_tier_name = "" , origin_site_name = "" , logical_file_name = "" , run_num = - 1 , min_cdate = 0 , max_cdate = 0 , min_ldate = 0 , max_ldate = 0 , cdate = 0 , ldate = 0 , open_for_writing = - 1 , detail = False ) : if ( not dataset ) or re . search ( "['%','*... | dataset block_name data_tier_name or logical_file_name must be passed . |
1,545 | def execute ( self , conn , site_name = "" , transaction = False ) : sql = self . sql if site_name == "" : result = self . dbi . processData ( sql , conn = conn , transaction = transaction ) else : sql += "WHERE S.SITE_NAME = :site_name" binds = { "site_name" : site_name } result = self . dbi . processData ( sql , bind... | Lists all sites types if site_name is not provided . |
1,546 | def getBlocks ( self ) : try : conn = self . dbi . connection ( ) result = self . buflistblks . execute ( conn ) return result finally : if conn : conn . close ( ) | Get the blocks that need to be migrated |
1,547 | def getBufferedFiles ( self , block_id ) : try : conn = self . dbi . connection ( ) result = self . buflist . execute ( conn , block_id ) return result finally : if conn : conn . close ( ) | Get some files from the insert buffer |
1,548 | def execute ( self , conn , data_tier_name = '' , transaction = False , cache = None ) : if cache : ret = cache . get ( "DATA_TIERS" ) if not ret == None : return ret sql = self . sql binds = { } if data_tier_name : op = ( '=' , 'like' ) [ '%' in data_tier_name ] sql += "WHERE DT.DATA_TIER_NAME %s :datatier" % op binds... | returns id for a given datatier name |
1,549 | def execute ( self , conn , migration_url = "" , migration_input = "" , create_by = "" , migration_request_id = "" , transaction = False ) : binds = { } result = self . dbi . processData ( self . sql , binds , conn , transaction ) result = self . formatDict ( result ) if len ( result ) == 0 : return [ ] if result [ 0 ]... | Lists the oldest request queued |
1,550 | def listProcessingEras ( self , processing_version = '' ) : conn = self . dbi . connection ( ) try : result = self . pelst . execute ( conn , processing_version ) return result finally : if conn : conn . close ( ) | Returns all processing eras in dbs |
1,551 | def listPhysicsGroups ( self , physics_group_name = "" ) : if not isinstance ( physics_group_name , basestring ) : dbsExceptionHandler ( 'dbsException-invalid-input' , 'physics group name given is not valid : %s' % physics_group_name ) else : try : physics_group_name = str ( physics_group_name ) except : dbsExceptionHa... | Returns all physics groups if physics group names are not passed . |
1,552 | def getServices ( self ) : try : conn = self . dbi . connection ( ) result = self . serviceslist . execute ( conn ) return result except Exception as ex : msg = ( ( "%s DBSServicesRegistry/getServices." + " %s\n. Exception trace: \n %s" ) % ( DBSEXCEPTIONS [ 'dbsException-3' ] , ex , traceback . format_exc ( ) ) ) self... | Simple method that returs list of all know DBS instances instances known to this registry |
1,553 | def addService ( self ) : conn = self . dbi . connection ( ) tran = conn . begin ( ) try : body = request . body . read ( ) service = cjson . decode ( body ) addthis = { } addthis [ 'service_id' ] = self . sm . increment ( conn , "SEQ_RS" , tran ) addthis [ 'name' ] = service . get ( 'NAME' , '' ) if addthis [ 'name' ]... | Add a service to service registry |
1,554 | def execute ( self , conn , migration_url = "" , migration_input = "" , create_by = "" , migration_request_id = "" , oldest = False , transaction = False ) : sql = self . sql binds = { } if migration_request_id : sql += " WHERE MR.MIGRATION_REQUEST_ID=:migration_request_id" binds [ 'migration_request_id' ] = migration_... | Lists all requests if pattern is not provided . |
1,555 | def listPrimaryDatasets ( self , primary_ds_name = "" , primary_ds_type = "" ) : conn = self . dbi . connection ( ) try : result = self . primdslist . execute ( conn , primary_ds_name , primary_ds_type ) if conn : conn . close ( ) return result finally : if conn : conn . close ( ) | Returns all primary dataset if primary_ds_name or primary_ds_type are not passed . |
1,556 | def listPrimaryDSTypes ( self , primary_ds_type = "" , dataset = "" ) : conn = self . dbi . connection ( ) try : result = self . primdstypeList . execute ( conn , primary_ds_type , dataset ) if conn : conn . close ( ) return result finally : if conn : conn . close ( ) | Returns all primary dataset types if dataset or primary_ds_type are not passed . |
1,557 | def execute ( self , conn , name = '' , transaction = False ) : binds = { } if name : op = ( '=' , 'like' ) [ '%' in name ] sql = self . sql + " WHERE pg.physics_group_name %s :physicsgroup" % ( op ) binds = { "physicsgroup" : name } else : sql = self . sql self . logger . debug ( sql ) result = self . dbi . processDa... | returns id for a given physics group name |
1,558 | def getHelp ( self , call = "" ) : if call : params = self . methods [ 'GET' ] [ call ] [ 'args' ] doc = self . methods [ 'GET' ] [ call ] [ 'call' ] . __doc__ return dict ( params = params , doc = doc ) else : return self . methods [ 'GET' ] . keys ( ) | API to get a list of supported REST APIs . In the case a particular API is specified the docstring of that API is displayed . |
1,559 | def listPrimaryDatasets ( self , primary_ds_name = "" , primary_ds_type = "" ) : primary_ds_name = primary_ds_name . replace ( "*" , "%" ) primary_ds_type = primary_ds_type . replace ( "*" , "%" ) try : return self . dbsPrimaryDataset . listPrimaryDatasets ( primary_ds_name , primary_ds_type ) except dbsException as de... | API to list primary datasets |
1,560 | def listDatasetArray ( self ) : ret = [ ] try : body = request . body . read ( ) if body : data = cjson . decode ( body ) data = validateJSONInputNoCopy ( "dataset" , data , read = True ) max_array_size = 1000 if ( 'dataset' in data . keys ( ) and isinstance ( data [ 'dataset' ] , list ) and len ( data [ 'dataset' ] ) ... | API to list datasets in DBS . To be called by datasetlist url with post call . |
1,561 | def listDataTiers ( self , data_tier_name = "" ) : data_tier_name = data_tier_name . replace ( "*" , "%" ) try : conn = self . dbi . connection ( ) return self . dbsDataTierListDAO . execute ( conn , data_tier_name . upper ( ) ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger... | API to list data tiers known to DBS . |
1,562 | def listBlockOrigin ( self , origin_site_name = "" , dataset = "" , block_name = "" ) : try : return self . dbsBlock . listBlocksOrigin ( origin_site_name , dataset , block_name ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . serverError ) except Exceptio... | API to list blocks first generated in origin_site_name . |
1,563 | def listBlocksParents ( self ) : try : body = request . body . read ( ) data = cjson . decode ( body ) data = validateJSONInputNoCopy ( "block" , data , read = True ) max_array_size = 1000 if ( 'block_names' in data . keys ( ) and isinstance ( data [ 'block_names' ] , list ) and len ( data [ 'block_names' ] ) > max_arr... | API to list block parents of multiple blocks . To be called by blockparents url with post call . |
1,564 | def listBlockChildren ( self , block_name = "" ) : block_name = block_name . replace ( "*" , "%" ) try : return self . dbsBlock . listBlockChildren ( block_name ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . serverError ) except Exception as ex : sError ... | API to list block children . |
1,565 | def listBlockSummaries ( self , block_name = "" , dataset = "" , detail = False ) : if bool ( dataset ) + bool ( block_name ) != 1 : dbsExceptionHandler ( "dbsException-invalid-input2" , dbsExceptionCode [ "dbsException-invalid-input2" ] , self . logger . exception , "Dataset or block_names must be specified at a time.... | API that returns summary information like total size and total number of events in a dataset or a list of blocks |
1,566 | def listDatasetParents ( self , dataset = '' ) : try : return self . dbsDataset . listDatasetParents ( dataset ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . serverError ) except Exception as ex : sError = "DBSReaderModel/listDatasetParents. %s\n. Except... | API to list A datasets parents in DBS . |
1,567 | def listOutputConfigs ( self , dataset = "" , logical_file_name = "" , release_version = "" , pset_hash = "" , app_name = "" , output_module_label = "" , block_id = 0 , global_tag = '' ) : release_version = release_version . replace ( "*" , "%" ) pset_hash = pset_hash . replace ( "*" , "%" ) app_name = app_name . repla... | API to list OutputConfigs in DBS . |
1,568 | def listFileParents ( self , logical_file_name = '' , block_id = 0 , block_name = '' ) : try : r = self . dbsFile . listFileParents ( logical_file_name , block_id , block_name ) for item in r : yield item except HTTPError as he : raise he except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , sel... | API to list file parents |
1,569 | def listFileChildren ( self , logical_file_name = '' , block_name = '' , block_id = 0 ) : if isinstance ( logical_file_name , list ) : for f in logical_file_name : if '*' in f or '%' in f : dbsExceptionHandler ( "dbsException-invalid-input2" , dbsExceptionCode [ "dbsException-invalid-input2" ] , self . logger . excepti... | API to list file children . One of the parameters in mandatory . |
1,570 | def listFileLumis ( self , logical_file_name = "" , block_name = "" , run_num = - 1 , validFileOnly = 0 ) : if ( run_num != - 1 and logical_file_name == '' ) : for r in parseRunRange ( run_num ) : if isinstance ( r , basestring ) or isinstance ( r , int ) or isinstance ( r , long ) : if r == 1 or r == '1' : dbsExceptio... | API to list Lumi for files . Either logical_file_name or block_name is required . No wild card support in this API |
1,571 | def listRuns ( self , run_num = - 1 , logical_file_name = "" , block_name = "" , dataset = "" ) : if ( run_num != - 1 and logical_file_name == '' ) : for r in parseRunRange ( run_num ) : if isinstance ( r , basestring ) or isinstance ( r , int ) or isinstance ( r , long ) : if r == 1 or r == '1' : dbsExceptionHandler (... | API to list all runs in DBS . At least one parameter is mandatory . |
1,572 | def dumpBlock ( self , block_name ) : try : return self . dbsBlock . dumpBlock ( block_name ) except HTTPError as he : raise he except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . serverError ) except Exception as ex : sError = "DBSReaderModel/dumpBlock. %s\n. ... | API the list all information related with the block_name |
1,573 | def listAcquisitionEras ( self , acquisition_era_name = '' ) : try : acquisition_era_name = acquisition_era_name . replace ( '*' , '%' ) return self . dbsAcqEra . listAcquisitionEras ( acquisition_era_name ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . s... | API to list all Acquisition Eras in DBS . |
1,574 | def listProcessingEras ( self , processing_version = 0 ) : try : return self . dbsProcEra . listProcessingEras ( processing_version ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . exception , de . serverError ) except Exception as ex : sError = "DBSReaderModel/listProcess... | API to list all Processing Eras in DBS . |
1,575 | def listReleaseVersions ( self , release_version = '' , dataset = '' , logical_file_name = '' ) : if release_version : release_version = release_version . replace ( "*" , "%" ) try : return self . dbsReleaseVersion . listReleaseVersions ( release_version , dataset , logical_file_name ) except dbsException as de : dbsEx... | API to list all release versions in DBS |
1,576 | def listDatasetAccessTypes ( self , dataset_access_type = '' ) : if dataset_access_type : dataset_access_type = dataset_access_type . replace ( "*" , "%" ) try : return self . dbsDatasetAccessType . listDatasetAccessTypes ( dataset_access_type ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . messag... | API to list dataset access types . |
1,577 | def listPhysicsGroups ( self , physics_group_name = '' ) : if physics_group_name : physics_group_name = physics_group_name . replace ( '*' , '%' ) try : return self . dbsPhysicsGroup . listPhysicsGroups ( physics_group_name ) except dbsException as de : dbsExceptionHandler ( de . eCode , de . message , self . logger . ... | API to list all physics groups . |
1,578 | def listRunSummaries ( self , dataset = "" , run_num = - 1 ) : if run_num == - 1 : dbsExceptionHandler ( "dbsException-invalid-input" , "The run_num parameter is mandatory" , self . logger . exception ) if re . search ( '[*,%]' , dataset ) : dbsExceptionHandler ( "dbsException-invalid-input" , "No wildcards are allowed... | API to list run summaries like the maximal lumisection in a run . |
1,579 | def list ( ) : entries = lambder . list_events ( ) for e in entries : click . echo ( str ( e ) ) | List all events |
1,580 | def add ( name , function_name , cron ) : lambder . add_event ( name = name , function_name = function_name , cron = cron ) | Create an event |
1,581 | def load ( file ) : with open ( file , 'r' ) as f : contents = f . read ( ) lambder . load_events ( contents ) | Load events from a json file |
1,582 | def functions ( context ) : config_file = "./lambder.json" if os . path . isfile ( config_file ) : context . obj = FunctionConfig ( config_file ) pass | Manage AWS Lambda functions |
1,583 | def list ( ) : functions = lambder . list_functions ( ) output = json . dumps ( functions , sort_keys = True , indent = 4 , separators = ( ',' , ':' ) ) click . echo ( output ) | List lambder functions |
1,584 | def new ( name , bucket , timeout , memory , description , subnet_ids , security_group_ids ) : config = { } if timeout : config [ 'timeout' ] = timeout if memory : config [ 'memory' ] = memory if description : config [ 'description' ] = description if subnet_ids : config [ 'subnet_ids' ] = subnet_ids if security_group_... | Create a new lambda project |
1,585 | def rm ( config , name , bucket ) : myname = name or config . name mybucket = bucket or config . bucket click . echo ( 'Deleting {} from {}' . format ( myname , mybucket ) ) lambder . delete_function ( myname , mybucket ) | Delete lambda function role and zipfile |
1,586 | def invoke ( config , name , input ) : myname = name or config . name click . echo ( 'Invoking ' + myname ) output = lambder . invoke_function ( myname , input ) click . echo ( output ) | Invoke function in AWS |
1,587 | def putBlock ( self , blockcontent , migration = False ) : try : self . logger . debug ( "insert configuration" ) configList = self . insertOutputModuleConfig ( blockcontent [ 'dataset_conf_list' ] , migration ) self . logger . debug ( "insert dataset" ) datasetId = self . insertDataset ( blockcontent , configList , mi... | Insert the data in sereral steps and commit when each step finishes or rollback if there is a problem . |
1,588 | def listSites ( self , block_name = "" , site_name = "" ) : try : conn = self . dbi . connection ( ) if block_name : result = self . blksitelist . execute ( conn , block_name ) else : result = self . sitelist . execute ( conn , site_name ) return result finally : if conn : conn . close ( ) | Returns sites . |
1,589 | def checkInputParameter ( method , parameters , validParameters , requiredParameters = None ) : for parameter in parameters : if parameter not in validParameters : raise dbsClientException ( "Invalid input" , "API %s does not support parameter %s. Supported parameters are %s" % ( method , parameter , validParameters ) ... | Helper function to check input before sending it to the server
1,590 | def split_calls ( func ) : def wrapper ( * args , ** kwargs ) : size_limit = 8000 encoded_url = urllib . urlencode ( kwargs ) if len ( encoded_url ) > size_limit : for key , value in kwargs . iteritems ( ) : if key in ( 'logical_file_name' , 'block_name' , 'lumi_list' , 'run_num' ) and isinstance ( value , list ) : ret... | Decorator to split up server calls for methods using url parameters due to the length limitation of the URI in Apache . By default 8190 bytes
1,591 | def __callServer ( self , method = "" , params = { } , data = { } , callmethod = 'GET' , content = 'application/json' ) : UserID = os . environ [ 'USER' ] + '@' + socket . gethostname ( ) try : UserAgent = "DBSClient/" + os . environ [ 'DBS3_CLIENT_VERSION' ] + "/" + self . userAgent except : UserAgent = "DBSClient/Unk... | A private method to make HTTP call to the DBS Server |
1,592 | def __parseForException ( self , http_error ) : data = http_error . body try : if isinstance ( data , str ) : data = cjson . decode ( data ) except : raise http_error if isinstance ( data , dict ) and 'exception' in data : raise HTTPError ( http_error . url , data [ 'exception' ] , data [ 'message' ] , http_error . hea... | An internal method should not be used by clients |
1,593 | def requestTimingInfo ( self ) : try : return tuple ( item . split ( '=' ) [ 1 ] for item in self . http_response . header . get ( 'CMS-Server-Time' ) . split ( ) ) except AttributeError : return None , None | Returns the time needed to process the request by the frontend server in microseconds and the epoch timestamp of the request in microseconds .
1,594 | def listFileParentsByLumi ( self , ** kwargs ) : validParameters = [ 'block_name' , 'logical_file_name' ] requiredParameters = { 'forced' : [ 'block_name' ] } checkInputParameter ( method = "listFileParentsByLumi" , parameters = kwargs . keys ( ) , validParameters = validParameters , requiredParameters = requiredParame... | API to list file parents using lumi section info . |
1,595 | def listBlockParents ( self , ** kwargs ) : validParameters = [ 'block_name' ] requiredParameters = { 'forced' : validParameters } checkInputParameter ( method = "listBlockParents" , parameters = kwargs . keys ( ) , validParameters = validParameters , requiredParameters = requiredParameters ) if isinstance ( kwargs [ "... | API to list block parents . |
1,596 | def listDatasetArray ( self , ** kwargs ) : validParameters = [ 'dataset' , 'dataset_access_type' , 'detail' , 'dataset_id' ] requiredParameters = { 'multiple' : [ 'dataset' , 'dataset_id' ] } checkInputParameter ( method = "listDatasetArray" , parameters = kwargs . keys ( ) , validParameters = validParameters , requir... | API to list datasets in DBS . |
1,597 | def find_devices ( ) : num_devices = api . py_aa_find_devices ( 0 , array . array ( 'H' ) ) _raise_error_if_negative ( num_devices ) if num_devices == 0 : return list ( ) ports = array . array ( 'H' , ( 0 , ) * num_devices ) unique_ids = array . array ( 'I' , ( 0 , ) * num_devices ) num_devices = api . py_aa_find_devic... | Return a list of dictionaries . Each dictionary represents one device . |
1,598 | def i2c_bitrate ( self ) : ret = api . py_aa_i2c_bitrate ( self . handle , 0 ) _raise_error_if_negative ( ret ) return ret | I2C bitrate in kHz . Not every bitrate is supported by the host adapter . Therefore the actual bitrate may be less than the value which is set . |
1,599 | def i2c_pullups ( self ) : ret = api . py_aa_i2c_pullup ( self . handle , I2C_PULLUP_QUERY ) _raise_error_if_negative ( ret ) return ret | Setting this to True will enable the I2C pullup resistors . If set to False the pullup resistors will be disabled . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.