idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
242,700
def _clear_config ( self ) : # type: () -> None for section in self . _config . sections ( ) : self . _config . remove_section ( section )
Clearout config object in memory .
38
7
242,701
def format_parameters ( self , params ) : # type: (Dict[str, str]) -> None query = urlparse ( self . url ) . query if query : self . url = self . url . partition ( '?' ) [ 0 ] existing_params = { p [ 0 ] : p [ - 1 ] for p in [ p . partition ( '=' ) for p in query . split ( '&' ) ] } params . update ( existing_params ) ...
Format parameters into a valid query string . It s assumed all parameters have already been quoted as valid URL strings .
152
22
242,702
def _format_data ( data ) : # type: (Union[str, IO]) -> Union[Tuple[None, str], Tuple[Optional[str], IO, str]] if hasattr ( data , 'read' ) : data = cast ( IO , data ) data_name = None try : if data . name [ 0 ] != '<' and data . name [ - 1 ] != '>' : data_name = os . path . basename ( data . name ) except ( AttributeE...
Format field data according to whether it is a stream or a string for a form - data request .
144
20
242,703
def add_formdata ( self , content = None ) : # type: (Optional[Dict[str, str]]) -> None if content is None : content = { } content_type = self . headers . pop ( 'Content-Type' , None ) if self . headers else None if content_type and content_type . lower ( ) == 'application/x-www-form-urlencoded' : # Do NOT use "add_con...
Add data as a multipart form - data request to the request .
173
14
242,704
def raise_with_traceback ( exception , message = "" , * args , * * kwargs ) : # type: (Callable, str, Any, Any) -> None exc_type , exc_value , exc_traceback = sys . exc_info ( ) # If not called inside a "except", exc_type will be None. Assume it will not happen exc_msg = "{}, {}: {}" . format ( message , exc_type . __n...
Raise exception with a specified traceback .
162
9
242,705
def _patch_redirect ( session ) : # type: (requests.Session) -> None def enforce_http_spec ( resp , request ) : if resp . status_code in ( 301 , 302 ) and request . method not in [ 'GET' , 'HEAD' ] : return False return True redirect_logic = session . resolve_redirects def wrapped_redirect ( resp , req , * * kwargs ) :...
Whether redirect policy should be applied based on status code .
157
11
242,706
def _init_session ( self , session ) : # type: (requests.Session) -> None _patch_redirect ( session ) # Change max_retries in current all installed adapters max_retries = self . config . retry_policy ( ) for protocol in self . _protocols : session . adapters [ protocol ] . max_retries = max_retries
Init session level configuration of requests .
82
7
242,707
def _configure_send ( self , request , * * kwargs ) : # type: (ClientRequest, Any) -> Dict[str, str] requests_kwargs = { } # type: Any session = kwargs . pop ( 'session' , self . session ) # If custom session was not create here if session is not self . session : self . _init_session ( session ) session . max_redirects...
Configure the kwargs to use with requests .
626
11
242,708
def full_restapi_key_transformer ( key , attr_desc , value ) : keys = _FLATTEN . split ( attr_desc [ 'key' ] ) return ( [ _decode_attribute_map_key ( k ) for k in keys ] , value )
A key transformer that returns the full RestAPI key path .
63
12
242,709
def last_restapi_key_transformer ( key , attr_desc , value ) : key , value = full_restapi_key_transformer ( key , attr_desc , value ) return ( key [ - 1 ] , value )
A key transformer that returns the last RestAPI key .
54
11
242,710
def _create_xml_node ( tag , prefix = None , ns = None ) : if prefix and ns : ET . register_namespace ( prefix , ns ) if ns : return ET . Element ( "{" + ns + "}" + tag ) else : return ET . Element ( tag )
Create a XML node .
62
5
242,711
def _create_xml_node ( cls ) : try : xml_map = cls . _xml_map except AttributeError : raise ValueError ( "This model has no XML definition" ) return _create_xml_node ( xml_map . get ( 'name' , cls . __name__ ) , xml_map . get ( "prefix" , None ) , xml_map . get ( "ns" , None ) )
Create XML node from _xml_map .
95
9
242,712
def validate ( self ) : validation_result = [ ] for attr_name , value in [ ( attr , getattr ( self , attr ) ) for attr in self . _attribute_map ] : attr_desc = self . _attribute_map [ attr_name ] if attr_name == "additional_properties" and attr_desc [ "key" ] == '' : # Do NOT validate additional_properties continue att...
Validate this model recursively and return a list of ValidationError .
214
16
242,713
def serialize ( self , keep_readonly = False ) : serializer = Serializer ( self . _infer_class_models ( ) ) return serializer . _serialize ( self , keep_readonly = keep_readonly )
Return the JSON that would be sent to azure from this model .
52
14
242,714
def as_dict ( self , keep_readonly = True , key_transformer = attribute_transformer ) : serializer = Serializer ( self . _infer_class_models ( ) ) return serializer . _serialize ( self , key_transformer = key_transformer , keep_readonly = keep_readonly )
Return a dict that can be JSONify using json . dump .
73
13
242,715
def deserialize ( cls , data , content_type = None ) : deserializer = Deserializer ( cls . _infer_class_models ( ) ) return deserializer ( cls . __name__ , data , content_type = content_type )
Parse a str using the RestAPI syntax and return a model .
60
14
242,716
def from_dict ( cls , data , key_extractors = None , content_type = None ) : deserializer = Deserializer ( cls . _infer_class_models ( ) ) deserializer . key_extractors = [ rest_key_case_insensitive_extractor , attribute_key_case_insensitive_extractor , last_rest_key_case_insensitive_extractor ] if key_extractors is No...
Parse a dict using given key extractor return a model .
131
13
242,717
def _classify ( cls , response , objects ) : for subtype_key in cls . __dict__ . get ( '_subtype_map' , { } ) . keys ( ) : subtype_value = None rest_api_response_key = cls . _get_rest_key_parts ( subtype_key ) [ - 1 ] subtype_value = response . pop ( rest_api_response_key , None ) or response . pop ( subtype_key , None...
Check the class _subtype_map for any child classes . We want to ignore any inherited _subtype_maps . Remove the polymorphic key from the initial data .
291
35
242,718
def body ( self , data , data_type , * * kwargs ) : if data is None : raise ValidationError ( "required" , "body" , True ) # Just in case this is a dict internal_data_type = data_type . strip ( '[]{}' ) internal_data_type = self . dependencies . get ( internal_data_type , None ) if internal_data_type and not isinstance...
Serialize data intended for a request body .
374
9
242,719
def url ( self , name , data , data_type , * * kwargs ) : if self . client_side_validation : data = self . validate ( data , name , required = True , * * kwargs ) try : output = self . serialize_data ( data , data_type , * * kwargs ) if data_type == 'bool' : output = json . dumps ( output ) if kwargs . get ( 'skip_quot...
Serialize data intended for a URL path .
157
9
242,720
def header ( self , name , data , data_type , * * kwargs ) : if self . client_side_validation : data = self . validate ( data , name , required = True , * * kwargs ) try : if data_type in [ '[str]' ] : data = [ "" if d is None else d for d in data ] output = self . serialize_data ( data , data_type , * * kwargs ) if da...
Serialize data intended for a request header .
149
9
242,721
def validate ( cls , data , name , * * kwargs ) : required = kwargs . get ( 'required' , False ) if required and data is None : raise ValidationError ( "required" , name , True ) elif data is None : return elif kwargs . get ( 'readonly' ) : return try : for key , value in kwargs . items ( ) : validator = cls . validati...
Validate that a piece of data meets certain conditions
150
10
242,722
def serialize_data ( self , data , data_type , * * kwargs ) : if data is None : raise ValueError ( "No value for given attribute" ) try : if data_type in self . basic_types . values ( ) : return self . serialize_basic ( data , data_type , * * kwargs ) elif data_type in self . serialize_type : return self . serialize_ty...
Serialize generic data according to supplied data type .
318
10
242,723
def serialize_basic ( self , data , data_type , * * kwargs ) : custom_serializer = self . _get_custom_serializers ( data_type , * * kwargs ) if custom_serializer : return custom_serializer ( data ) if data_type == 'str' : return self . serialize_unicode ( data ) return eval ( data_type ) ( data )
Serialize basic builting data type . Serializes objects to str int float or bool .
90
19
242,724
def serialize_unicode ( self , data ) : try : return data . value except AttributeError : pass try : if isinstance ( data , unicode ) : return data . encode ( encoding = 'utf-8' ) except NameError : return str ( data ) else : return str ( data )
Special handling for serializing unicode strings in Py2 . Encode to UTF - 8 if unicode otherwise handle as a str .
65
27
242,725
def serialize_iter ( self , data , iter_type , div = None , * * kwargs ) : if isinstance ( data , str ) : raise SerializationError ( "Refuse str type as a valid iter type." ) serialization_ctxt = kwargs . get ( "serialization_ctxt" , { } ) serialized = [ ] for d in data : try : serialized . append ( self . serialize_da...
Serialize iterable .
433
5
242,726
def serialize_dict ( self , attr , dict_type , * * kwargs ) : serialization_ctxt = kwargs . get ( "serialization_ctxt" , { } ) serialized = { } for key , value in attr . items ( ) : try : serialized [ self . serialize_unicode ( key ) ] = self . serialize_data ( value , dict_type , * * kwargs ) except ValueError : seria...
Serialize a dictionary of objects .
237
7
242,727
def serialize_base64 ( attr , * * kwargs ) : encoded = b64encode ( attr ) . decode ( 'ascii' ) return encoded . strip ( '=' ) . replace ( '+' , '-' ) . replace ( '/' , '_' )
Serialize str into base - 64 string .
64
9
242,728
def serialize_date ( attr , * * kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_date ( attr ) t = "{:04}-{:02}-{:02}" . format ( attr . year , attr . month , attr . day ) return t
Serialize Date object into ISO - 8601 formatted string .
76
12
242,729
def serialize_duration ( attr , * * kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_duration ( attr ) return isodate . duration_isoformat ( attr )
Serialize TimeDelta object into ISO - 8601 formatted string .
53
13
242,730
def serialize_rfc ( attr , * * kwargs ) : try : if not attr . tzinfo : _LOGGER . warning ( "Datetime with no tzinfo will be considered UTC." ) utc = attr . utctimetuple ( ) except AttributeError : raise TypeError ( "RFC1123 object must be valid Datetime object." ) return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT" . for...
Serialize Datetime object into RFC - 1123 formatted string .
178
13
242,731
def serialize_iso ( attr , * * kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_datetime ( attr ) try : if not attr . tzinfo : _LOGGER . warning ( "Datetime with no tzinfo will be considered UTC." ) utc = attr . utctimetuple ( ) if utc . tm_year > 9999 or utc . tm_year < 1 : raise OverflowError ( "Hit m...
Serialize Datetime object into ISO - 8601 formatted string .
335
13
242,732
def _deserialize ( self , target_obj , data ) : # This is already a model, go recursive just in case if hasattr ( data , "_attribute_map" ) : constants = [ name for name , config in getattr ( data , '_validation' , { } ) . items ( ) if config . get ( 'constant' ) ] try : for attr , mapconfig in data . _attribute_map . ...
Call the deserializer on a model .
669
9
242,733
def _classify_target ( self , target , data ) : if target is None : return None , None if isinstance ( target , basestring ) : try : target = self . dependencies [ target ] except KeyError : return target , target try : target = target . _classify ( data , self . dependencies ) except AttributeError : pass # Target is ...
Check to see whether the deserialization target object can be classified into a subclass . Once classification has been determined initialize object .
95
25
242,734
def _unpack_content ( raw_data , content_type = None ) : # This avoids a circular dependency. We might want to consider RawDesializer is more generic # than the pipeline concept, and put it in a toolbox, used both here and in pipeline. TBD. from . pipeline . universal import RawDeserializer # Assume this is enough to d...
Extract the correct structure for deserialization .
329
10
242,735
def _instantiate_model ( self , response , attrs , additional_properties = None ) : if callable ( response ) : subtype = getattr ( response , '_subtype_map' , { } ) try : readonly = [ k for k , v in response . _validation . items ( ) if v . get ( 'readonly' ) ] const = [ k for k , v in response . _validation . items ( ...
Instantiate a response model passing in deserialized args .
318
12
242,736
def deserialize_data ( self , data , data_type ) : if data is None : return data try : if not data_type : return data if data_type in self . basic_types . values ( ) : return self . deserialize_basic ( data , data_type ) if data_type in self . deserialize_type : if isinstance ( data , self . deserialize_expected_types ...
Process data for deserialization according to data type .
385
11
242,737
def deserialize_iter ( self , attr , iter_type ) : if attr is None : return None if isinstance ( attr , ET . Element ) : # If I receive an element here, get the children attr = list ( attr ) if not isinstance ( attr , ( list , set ) ) : raise DeserializationError ( "Cannot deserialize as [{}] an object of type {}" . fo...
Deserialize an iterable .
130
7
242,738
def deserialize_dict ( self , attr , dict_type ) : if isinstance ( attr , list ) : return { x [ 'key' ] : self . deserialize_data ( x [ 'value' ] , dict_type ) for x in attr } if isinstance ( attr , ET . Element ) : # Transform <Key>value</Key> into {"Key": "value"} attr = { el . tag : el . text for el in attr } return...
Deserialize a dictionary .
136
6
242,739
def deserialize_object ( self , attr , * * kwargs ) : if attr is None : return None if isinstance ( attr , ET . Element ) : # Do no recurse on XML, just return the tree as-is return attr if isinstance ( attr , basestring ) : return self . deserialize_basic ( attr , 'str' ) obj_type = type ( attr ) if obj_type in self ....
Deserialize a generic object . This will be handled as a dictionary .
298
15
242,740
def deserialize_basic ( self , attr , data_type ) : # If we're here, data is supposed to be a basic type. # If it's still an XML node, take the text if isinstance ( attr , ET . Element ) : attr = attr . text if not attr : if data_type == "str" : # None or '', node <a/> is empty string. return '' else : # None or '', no...
Deserialize baisc builtin data type from string . Will attempt to convert to str int float and bool . This function will also accept 1 0 true and false as valid bool values .
261
38
242,741
def deserialize_unicode ( data ) : # We might be here because we have an enum modeled as string, # and we try to deserialize a partial dict with enum inside if isinstance ( data , Enum ) : return data # Consider this is real string try : if isinstance ( data , unicode ) : return data except NameError : return str ( dat...
Preserve unicode objects in Python 2 otherwise return data as a string .
87
15
242,742
def deserialize_enum ( data , enum_obj ) : if isinstance ( data , enum_obj ) : return data if isinstance ( data , Enum ) : data = data . value if isinstance ( data , int ) : # Workaround. We might consider remove it in the future. # https://github.com/Azure/azure-rest-api-specs/issues/141 try : return list ( enum_obj ....
Deserialize string into enum object .
256
8
242,743
def deserialize_bytearray ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text return bytearray ( b64decode ( attr ) )
Deserialize string into bytearray .
48
10
242,744
def deserialize_base64 ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text padding = '=' * ( 3 - ( len ( attr ) + 3 ) % 4 ) attr = attr + padding encoded = attr . replace ( '-' , '+' ) . replace ( '_' , '/' ) return b64decode ( encoded )
Deserialize base64 encoded string into string .
90
10
242,745
def deserialize_decimal ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text try : return decimal . Decimal ( attr ) except decimal . DecimalException as err : msg = "Invalid decimal {}" . format ( attr ) raise_with_traceback ( DeserializationError , msg , err )
Deserialize string into Decimal object .
80
9
242,746
def deserialize_duration ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text try : duration = isodate . parse_duration ( attr ) except ( ValueError , OverflowError , AttributeError ) as err : msg = "Cannot deserialize duration object." raise_with_traceback ( DeserializationError , msg , err ) else : r...
Deserialize ISO - 8601 formatted string into TimeDelta object .
92
14
242,747
def deserialize_date ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text if re . search ( r"[^\W\d_]" , attr , re . I + re . U ) : raise DeserializationError ( "Date must have only digits and -. Received: %s" % attr ) # This must NOT use defaultmonth/defaultday. Using None ensure this raises an except...
Deserialize ISO - 8601 formatted string into Date object .
122
13
242,748
def deserialize_rfc ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text try : date_obj = datetime . datetime . strptime ( attr , "%a, %d %b %Y %H:%M:%S %Z" ) if not date_obj . tzinfo : date_obj = date_obj . replace ( tzinfo = TZ_UTC ) except ValueError as err : msg = "Cannot deserialize to rfc datetim...
Deserialize RFC - 1123 formatted string into Datetime object .
143
14
242,749
def deserialize_iso ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text try : attr = attr . upper ( ) match = Deserializer . valid_date . match ( attr ) if not match : raise ValueError ( "Invalid datetime string: " + attr ) check_decimal = attr . split ( '.' ) if len ( check_decimal ) > 1 : decimal_st...
Deserialize ISO - 8601 formatted string into Datetime object .
282
14
242,750
def raw ( self ) : # type: () -> ClientRawResponse raw = ClientRawResponse ( self . current_page , self . _response ) if self . _raw_headers : raw . add_headers ( self . _raw_headers ) return raw
Get current page as ClientRawResponse .
54
8
242,751
def advance_page ( self ) : # type: () -> List[Model] if self . next_link is None : raise StopIteration ( "End of paging" ) self . _current_page_iter_index = 0 self . _response = self . _get_next ( self . next_link ) self . _derserializer ( self , self . _response ) return self . current_page
Force moving the cursor to the next azure call .
89
11
242,752
def _ensureAtomicity ( fn ) : @ ensureScoopStartedProperly def wrapper ( * args , * * kwargs ) : """setConst(**kwargs) Set a constant that will be shared to every workers. This call blocks until the constant has propagated to at least one worker. :param \*\*kwargs: One or more combination(s) key=value. Key being the va...
Ensure atomicity of passed elements on the whole worker pool
434
12
242,753
def getConst ( name , timeout = 0.1 ) : from . import _control import time timeStamp = time . time ( ) while True : # Enforce retrieval of currently awaiting constants _control . execQueue . socket . pumpInfoSocket ( ) # Constants concatenation constants = dict ( reduce ( lambda x , y : x + list ( y . items ( ) ) , ele...
Get a shared constant .
138
5
242,754
def launchBootstraps ( ) : global processes worker_amount , verbosity , args = getArgs ( ) was_origin = False if verbosity >= 1 : sys . stderr . write ( "Launching {0} worker(s) using {1}.\n" . format ( worker_amount , os . environ [ 'SHELL' ] if 'SHELL' in os . environ else "an unknown shell" , ) ) sys . stderr . flus...
Launch the bootstrap instances in separate subprocesses
277
10
242,755
def resolve ( self , s ) : name = s . split ( '.' ) used = name . pop ( 0 ) try : found = self . importer ( used ) for frag in name : used += '.' + frag try : found = getattr ( found , frag ) except AttributeError : self . importer ( used ) found = getattr ( found , frag ) return found except ImportError : e , tb = sys...
Resolve strings to objects using standard import and attribute syntax .
142
12
242,756
def as_tuple ( self , value ) : if isinstance ( value , list ) : value = tuple ( value ) return value
Utility function which converts lists to tuples .
28
10
242,757
def configure_formatter ( self , config ) : if '()' in config : factory = config [ '()' ] # for use in exception handler try : result = self . configure_custom ( config ) except TypeError , te : if "'format'" not in str ( te ) : raise #Name of parameter changed from fmt to format. #Retry with old name. #This is so that...
Configure a formatter from a dictionary .
178
9
242,758
def configure_filter ( self , config ) : if '()' in config : result = self . configure_custom ( config ) else : name = config . get ( 'name' , '' ) result = logging . Filter ( name ) return result
Configure a filter from a dictionary .
51
8
242,759
def configure_logger ( self , name , config , incremental = False ) : logger = logging . getLogger ( name ) self . common_logger_config ( logger , config , incremental ) propagate = config . get ( 'propagate' , None ) if propagate is not None : logger . propagate = propagate
Configure a non - root logger from a dictionary .
66
11
242,760
def configure_root ( self , config , incremental = False ) : root = logging . getLogger ( ) self . common_logger_config ( root , config , incremental )
Configure a root logger from a dictionary .
38
9
242,761
def sliceImage ( image , divWidth , divHeight ) : w , h = image . size tiles = [ ] for y in range ( 0 , h - 1 , h / divHeight ) : my = min ( y + h / divHeight , h ) for x in range ( 0 , w - 1 , w / divWidth ) : mx = min ( x + w / divWidth , w ) tiles . append ( image . crop ( ( x , y , mx , my ) ) ) return tiles
Divide the received image in multiple tiles
107
8
242,762
def resizeTile ( index , size ) : resized = tiles [ index ] . resize ( size , Image . ANTIALIAS ) return sImage ( resized . tostring ( ) , resized . size , resized . mode )
Apply Antialiasing resizing to tile
50
9
242,763
def initLogging ( verbosity = 0 , name = "SCOOP" ) : global loggingConfig verbose_levels = { - 2 : "CRITICAL" , - 1 : "ERROR" , 0 : "WARNING" , 1 : "INFO" , 2 : "DEBUG" , 3 : "DEBUG" , 4 : "NOSET" , } log_handlers = { "console" : { "class" : "logging.StreamHandler" , "formatter" : "{name}Formatter" . format ( name = na...
Creates a logger .
307
5
242,764
def externalHostname ( hosts ) : hostname = hosts [ 0 ] [ 0 ] if hostname in localHostnames and len ( hosts ) > 1 : hostname = socket . getfqdn ( ) . split ( "." ) [ 0 ] try : socket . getaddrinfo ( hostname , None ) except socket . gaierror : raise Exception ( "\nThe first host (containing a broker) is not" " routable...
Ensure external hostname is routable .
106
9
242,765
def getHosts ( filename = None , hostlist = None ) : if filename : return getHostsFromFile ( filename ) elif hostlist : return getHostsFromList ( hostlist ) elif getEnv ( ) == "SLURM" : return getHostsFromSLURM ( ) elif getEnv ( ) == "PBS" : return getHostsFromPBS ( ) elif getEnv ( ) == "SGE" : return getHostsFromSGE (...
Return a list of hosts depending on the environment
119
9
242,766
def getHostsFromFile ( filename ) : valid_hostname = r"^[^ /\t=\n]+" workers = r"\d+" hostname_re = re . compile ( valid_hostname ) worker_re = re . compile ( workers ) hosts = [ ] with open ( filename ) as f : for line in f : # check to see if it is a SLURM grouping instead of a # regular list of hosts if re . search ...
Parse a file to return a list of hosts .
212
11
242,767
def getHostsFromList ( hostlist ) : # check to see if it is a SLURM grouping instead of a # regular list of hosts if any ( re . search ( '[\[\]]' , x ) for x in hostlist ) : return parseSLURM ( str ( hostlist ) ) # Counter would be more efficient but: # 1. Won't be Python 2.6 compatible # 2. Won't be ordered hostlist =...
Return the hosts from the command line
138
7
242,768
def parseSLURM ( string ) : # Use scontrol utility to get the hosts list import subprocess , os hostsstr = subprocess . check_output ( [ "scontrol" , "show" , "hostnames" , string ] ) if sys . version_info . major > 2 : hostsstr = hostsstr . decode ( ) # Split using endline hosts = hostsstr . split ( os . linesep ) # T...
Return a host list from a SLURM string
131
10
242,769
def getHostsFromPBS ( ) : # See above comment about Counter with open ( os . environ [ "PBS_NODEFILE" ] , 'r' ) as hosts : hostlist = groupTogether ( hosts . read ( ) . split ( ) ) retVal = [ ] for key , group in groupby ( hostlist ) : retVal . append ( ( key , len ( list ( group ) ) ) ) return retVal
Return a host list in a PBS environment
94
8
242,770
def getHostsFromSGE ( ) : with open ( os . environ [ "PE_HOSTFILE" ] , 'r' ) as hosts : return [ ( host . split ( ) [ 0 ] , int ( host . split ( ) [ 1 ] ) ) for host in hosts ]
Return a host list in a SGE environment
63
9
242,771
def getWorkerQte ( hosts ) : if "SLURM_NTASKS" in os . environ : return int ( os . environ [ "SLURM_NTASKS" ] ) elif "PBS_NP" in os . environ : return int ( os . environ [ "PBS_NP" ] ) elif "NSLOTS" in os . environ : return int ( os . environ [ "NSLOTS" ] ) else : return sum ( host [ 1 ] for host in hosts )
Return the number of workers to launch depending on the environment
117
11
242,772
def functionFactory ( in_code , name , defaults , globals_ , imports ) : def generatedFunction ( ) : pass generatedFunction . __code__ = marshal . loads ( in_code ) generatedFunction . __name__ = name generatedFunction . __defaults = defaults generatedFunction . __globals__ . update ( pickle . loads ( globals_ ) ) for ...
Creates a function at runtime using binary compiled inCode
143
11
242,773
def makeLambdaPicklable ( lambda_function ) : if isinstance ( lambda_function , type ( lambda : None ) ) and lambda_function . __name__ == '<lambda>' : def __reduce_ex__ ( proto ) : # TODO: argdefs, closure return unpickleLambda , ( marshal . dumps ( lambda_function . __code__ ) , ) lambda_function . __reduce_ex__ = __...
Take input lambda function l and makes it picklable .
108
12
242,774
def addConnector ( self , wire1 , wire2 ) : if wire1 == wire2 : return if wire1 > wire2 : wire1 , wire2 = wire2 , wire1 try : last_level = self [ - 1 ] except IndexError : # Empty network, create new level and connector self . append ( [ ( wire1 , wire2 ) ] ) return for wires in last_level : if wires [ 1 ] >= wire1 and...
Add a connector between wire1 and wire2 in the network .
131
13
242,775
def sort ( self , values ) : for level in self : for wire1 , wire2 in level : if values [ wire1 ] > values [ wire2 ] : values [ wire1 ] , values [ wire2 ] = values [ wire2 ] , values [ wire1 ]
Sort the values in - place based on the connectors in the network .
58
14
242,776
def draw ( self ) : str_wires = [ [ "-" ] * 7 * self . depth ] str_wires [ 0 ] [ 0 ] = "0" str_wires [ 0 ] [ 1 ] = " o" str_spaces = [ ] for i in range ( 1 , self . dimension ) : str_wires . append ( [ "-" ] * 7 * self . depth ) str_spaces . append ( [ " " ] * 7 * self . depth ) str_wires [ i ] [ 0 ] = str ( i ) str_wi...
Return an ASCII representation of the network .
341
8
242,777
def getWorkersName ( data ) : names = [ fichier for fichier in data . keys ( ) ] names . sort ( ) try : names . remove ( "broker" ) except ValueError : pass return names
Returns the list of the names of the workers sorted alphabetically
49
12
242,778
def importData ( directory ) : dataTask = OrderedDict ( ) dataQueue = OrderedDict ( ) for fichier in sorted ( os . listdir ( directory ) ) : try : with open ( "{directory}/{fichier}" . format ( * * locals ( ) ) , 'rb' ) as f : fileName , fileType = fichier . rsplit ( '-' , 1 ) if fileType == "QUEUE" : dataQueue [ fileN...
Parse the input files and return two dictionnaries
144
11
242,779
def getTimes ( dataTasks ) : global begin_time start_time , end_time = float ( 'inf' ) , 0 for fichier , vals in dataTask . items ( ) : try : if hasattr ( vals , 'values' ) : tmp_start_time = min ( [ a [ 'start_time' ] for a in vals . values ( ) ] ) [ 0 ] if tmp_start_time < start_time : start_time = tmp_start_time tmp...
Get the start time and the end time of data in milliseconds
186
12
242,780
def WorkersDensity ( dataTasks ) : start_time , end_time = getTimes ( dataTasks ) graphdata = [ ] for name in getWorkersName ( dataTasks ) : vals = dataTasks [ name ] if hasattr ( vals , 'values' ) : # Data from worker workerdata = [ ] print ( "Plotting density map for {}" . format ( name ) ) # We only have 800 pixels ...
Return the worker density data for the graph .
371
9
242,781
def plotDensity ( dataTask , filename ) : #def format_worker(x, pos=None): # """Formats the worker name""" # #workers = filter (lambda a: a[:6] != "broker", dataTask.keys()) # workers = [a for a in dataTask.keys() if a[:6] != "broker"] # return workers[x] def format_time ( x , pos = None ) : """Formats the time""" star...
Plot the worker density graph
602
5
242,782
def plotBrokerQueue ( dataTask , filename ) : print ( "Plotting broker queue length for {0}." . format ( filename ) ) plt . figure ( ) # Queue length plt . subplot ( 211 ) for fichier , vals in dataTask . items ( ) : if type ( vals ) == list : timestamps = list ( map ( datetime . fromtimestamp , map ( int , list ( zip ...
Generates the broker queue length graphic .
351
8
242,783
def getWorkerInfo ( dataTask ) : workertime = [ ] workertasks = [ ] for fichier , vals in dataTask . items ( ) : if hasattr ( vals , 'values' ) : #workers_names.append(fichier) # Data from worker totaltime = sum ( [ a [ 'executionTime' ] for a in vals . values ( ) ] ) totaltasks = sum ( [ 1 for a in vals . values ( ) ]...
Returns the total execution time and task quantity by worker
135
10
242,784
def timelines ( fig , y , xstart , xstop , color = 'b' ) : fig . hlines ( y , xstart , xstop , color , lw = 4 ) fig . vlines ( xstart , y + 0.03 , y - 0.03 , color , lw = 2 ) fig . vlines ( xstop , y + 0.03 , y - 0.03 , color , lw = 2 )
Plot timelines at y from xstart to xstop with given color .
94
14
242,785
def plotTimeline ( dataTask , filename ) : fig = plt . figure ( ) ax = fig . gca ( ) worker_names = [ x for x in dataTask . keys ( ) if "broker" not in x ] min_time = getMinimumTime ( dataTask ) ystep = 1. / ( len ( worker_names ) + 1 ) y = 0 for worker , vals in dataTask . items ( ) : if "broker" in worker : continue ...
Build a timeline
336
3
242,786
def setWorker ( self , * args , * * kwargs ) : try : la = self . LAUNCHING_ARGUMENTS ( * args , * * kwargs ) except TypeError as e : scoop . logger . error ( ( "addWorker failed to convert args %s and kwargs %s " "to namedtuple (requires %s arguments (names %s)" ) % ( args , kwargs , len ( self . LAUNCHING_ARGUMENTS . ...
Add a worker assignation Arguments and order to pass are defined in LAUNCHING_ARGUMENTS Using named args is advised .
134
28
242,787
def _WorkerCommand_environment ( self ) : worker = self . workersArguments c = [ ] if worker . prolog : c . extend ( [ "source" , worker . prolog , "&&" , ] ) if worker . pythonPath and not self . isLocal ( ) : # Tried to make it compliant to all shell variants. c . extend ( [ "env" , "PYTHONPATH={0}:$PYTHONPATH" . for...
Return list of shell commands to prepare the environment for bootstrap .
189
13
242,788
def _WorkerCommand_launcher ( self ) : return [ self . workersArguments . pythonExecutable , '-m' , 'scoop.launch.__main__' , str ( self . workerAmount ) , str ( self . workersArguments . verbose ) , ]
Return list commands to start the bootstrap process
62
9
242,789
def _WorkerCommand_options ( self ) : worker = self . workersArguments c = [ ] # If broker is on localhost if self . hostname == worker . brokerHostname : broker = "127.0.0.1" else : broker = worker . brokerHostname if worker . nice is not None : c . extend ( [ '--nice' , str ( worker . nice ) ] ) c . extend ( [ '--siz...
Return list of options for bootstrap
345
7
242,790
def _WorkerCommand_executable ( self ) : worker = self . workersArguments c = [ ] if worker . executable : c . append ( worker . executable ) # This trick is used to parse correctly quotes # (ie. myScript.py 'arg1 "arg2" arg3') # Because it is parsed by a shell, every quote gets re-interpreted. # It replaces simple quo...
Return executable and any options to be executed by bootstrap
196
11
242,791
def _getWorkerCommandList ( self ) : c = [ ] c . extend ( self . _WorkerCommand_environment ( ) ) c . extend ( self . _WorkerCommand_launcher ( ) ) c . extend ( self . _WorkerCommand_options ( ) ) c . extend ( self . _WorkerCommand_executable ( ) ) return c
Generate the workerCommand as list
79
7
242,792
def launch ( self , tunnelPorts = None ) : if self . isLocal ( ) : # Launching local workers c = self . _getWorkerCommandList ( ) self . subprocesses . append ( subprocess . Popen ( c ) ) else : # Launching remotely BASE_SSH [ 0 ] = self . ssh_executable sshCmd = BASE_SSH if not self . rsh else BASE_RSH if tunnelPorts ...
Launch every worker assigned on this host .
226
8
242,793
def _switch(self, future):
    """Record this future as the current one, then resume its greenlet.

    *future* is forwarded to the greenlet and becomes the value returned
    at its last switch-out point. Raises AssertionError (when asserts
    are enabled) if no greenlet is attached to this future.
    """
    scoop._control.current = self
    # Message is only built when the assertion actually fires.
    assert self.greenlet is not None, \
        "No greenlet to switch to:\n{0}".format(self.__dict__)
    return self.greenlet.switch(future)
Switch greenlet .
60
4
242,794
def cancel(self):
    """Attempt to cancel this future.

    A future that is already executing or has been sent for remote
    execution cannot be cancelled. Returns True when the future was
    still movable and has been removed from the queue, False otherwise.
    """
    queue = scoop._control.execQueue
    # Only futures still waiting in the movable queue can be cancelled.
    if self not in queue.movable:
        return False
    self.exceptionValue = CancelledError()
    scoop._control.futureDict[self.id]._delete()
    queue.remove(self)
    return True
If the call is currently being executed or has been sent for remote execution, it cannot be cancelled and the method will return False; otherwise the call will be cancelled and the method will return True.
64
36
242,795
def done ( self ) : # Flush the current future in the local buffer (potential deadlock # otherwise) try : scoop . _control . execQueue . remove ( self ) scoop . _control . execQueue . socket . sendFuture ( self ) except ValueError as e : # Future was not in the local queue, everything is fine pass # Process buffers sco...
Returns True if the call was successfully cancelled or finished running, False otherwise. This function updates the executionQueue so it receives all the awaiting messages.
94
28
242,796
def add_done_callback ( self , callable_ , inCallbackType = CallbackType . standard , inCallbackGroup = None ) : self . callback . append ( callbackEntry ( callable_ , inCallbackType , inCallbackGroup ) ) # If already completed or cancelled, execute it immediately if self . _ended ( ) : self . callback [ - 1 ] . func (...
Attach a callable to the future that will be called when the future is cancelled or finishes running . Callable will be called with the future as its only argument .
82
33
242,797
def append ( self , future ) : if future . _ended ( ) and future . index is None : self . inprogress . add ( future ) elif future . _ended ( ) and future . index is not None : self . ready . append ( future ) elif future . greenlet is not None : self . inprogress . add ( future ) else : self . movable . append ( future...
Append a future to the queue .
208
8
242,798
def askForPreviousFutures ( self ) : # Don't request it too often (otherwise it ping-pongs because) # the broker answer triggers the _poll of pop() if time . time ( ) < self . lastStatus + POLLING_TIME / 1000 : return self . lastStatus = time . time ( ) for future in scoop . _control . futureDict . values ( ) : # Skip ...
Request a status for every future to the broker .
132
10
242,799
def pop ( self ) : self . updateQueue ( ) # If our buffer is underflowing, request more Futures if self . timelen ( self ) < self . lowwatermark : self . requestFuture ( ) # If an unmovable Future is ready to be executed, return it if len ( self . ready ) != 0 : return self . ready . popleft ( ) # Then, use Futures in ...
Pop the next future from the queue ; in progress futures have priority over those that have not yet started ; higher level futures have priority over lower level ones ;
221
31