idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
243,700
def _create_xml_node ( cls ) : try : xml_map = cls . _xml_map except AttributeError : raise ValueError ( "This model has no XML definition" ) return _create_xml_node ( xml_map . get ( 'name' , cls . __name__ ) , xml_map . get ( "prefix" , None ) , xml_map . get ( "ns" , None ) )
Create XML node from _xml_map .
243,701
def validate ( self ) : validation_result = [ ] for attr_name , value in [ ( attr , getattr ( self , attr ) ) for attr in self . _attribute_map ] : attr_desc = self . _attribute_map [ attr_name ] if attr_name == "additional_properties" and attr_desc [ "key" ] == '' : continue attr_type = attr_desc [ 'type' ] try : debu...
Validate this model recursively and return a list of ValidationError .
243,702
def serialize ( self , keep_readonly = False ) : serializer = Serializer ( self . _infer_class_models ( ) ) return serializer . _serialize ( self , keep_readonly = keep_readonly )
Return the JSON that would be sent to azure from this model .
243,703
def as_dict ( self , keep_readonly = True , key_transformer = attribute_transformer ) : serializer = Serializer ( self . _infer_class_models ( ) ) return serializer . _serialize ( self , key_transformer = key_transformer , keep_readonly = keep_readonly )
Return a dict that can be JSONify using json . dump .
243,704
def deserialize ( cls , data , content_type = None ) : deserializer = Deserializer ( cls . _infer_class_models ( ) ) return deserializer ( cls . __name__ , data , content_type = content_type )
Parse a str using the RestAPI syntax and return a model .
243,705
def from_dict ( cls , data , key_extractors = None , content_type = None ) : deserializer = Deserializer ( cls . _infer_class_models ( ) ) deserializer . key_extractors = [ rest_key_case_insensitive_extractor , attribute_key_case_insensitive_extractor , last_rest_key_case_insensitive_extractor ] if key_extractors is No...
Parse a dict using given key extractor return a model .
243,706
def _classify ( cls , response , objects ) : for subtype_key in cls . __dict__ . get ( '_subtype_map' , { } ) . keys ( ) : subtype_value = None rest_api_response_key = cls . _get_rest_key_parts ( subtype_key ) [ - 1 ] subtype_value = response . pop ( rest_api_response_key , None ) or response . pop ( subtype_key , None...
Check the class _subtype_map for any child classes . We want to ignore any inherited _subtype_maps . Remove the polymorphic key from the initial data .
243,707
def body ( self , data , data_type , ** kwargs ) : if data is None : raise ValidationError ( "required" , "body" , True ) internal_data_type = data_type . strip ( '[]{}' ) internal_data_type = self . dependencies . get ( internal_data_type , None ) if internal_data_type and not isinstance ( internal_data_type , Enum ) ...
Serialize data intended for a request body .
243,708
def url ( self , name , data , data_type , ** kwargs ) : if self . client_side_validation : data = self . validate ( data , name , required = True , ** kwargs ) try : output = self . serialize_data ( data , data_type , ** kwargs ) if data_type == 'bool' : output = json . dumps ( output ) if kwargs . get ( 'skip_quote' ...
Serialize data intended for a URL path .
243,709
def header ( self , name , data , data_type , ** kwargs ) : if self . client_side_validation : data = self . validate ( data , name , required = True , ** kwargs ) try : if data_type in [ '[str]' ] : data = [ "" if d is None else d for d in data ] output = self . serialize_data ( data , data_type , ** kwargs ) if data_...
Serialize data intended for a request header .
243,710
def validate ( cls , data , name , ** kwargs ) : required = kwargs . get ( 'required' , False ) if required and data is None : raise ValidationError ( "required" , name , True ) elif data is None : return elif kwargs . get ( 'readonly' ) : return try : for key , value in kwargs . items ( ) : validator = cls . validatio...
Validate that a piece of data meets certain conditions
243,711
def serialize_data ( self , data , data_type , ** kwargs ) : if data is None : raise ValueError ( "No value for given attribute" ) try : if data_type in self . basic_types . values ( ) : return self . serialize_basic ( data , data_type , ** kwargs ) elif data_type in self . serialize_type : return self . serialize_type...
Serialize generic data according to supplied data type .
243,712
def serialize_basic ( self , data , data_type , ** kwargs ) : custom_serializer = self . _get_custom_serializers ( data_type , ** kwargs ) if custom_serializer : return custom_serializer ( data ) if data_type == 'str' : return self . serialize_unicode ( data ) return eval ( data_type ) ( data )
Serialize basic builting data type . Serializes objects to str int float or bool .
243,713
def serialize_unicode ( self , data ) : try : return data . value except AttributeError : pass try : if isinstance ( data , unicode ) : return data . encode ( encoding = 'utf-8' ) except NameError : return str ( data ) else : return str ( data )
Special handling for serializing unicode strings in Py2 . Encode to UTF - 8 if unicode otherwise handle as a str .
243,714
def serialize_iter ( self , data , iter_type , div = None , ** kwargs ) : if isinstance ( data , str ) : raise SerializationError ( "Refuse str type as a valid iter type." ) serialization_ctxt = kwargs . get ( "serialization_ctxt" , { } ) serialized = [ ] for d in data : try : serialized . append ( self . serialize_dat...
Serialize iterable .
243,715
def serialize_dict ( self , attr , dict_type , ** kwargs ) : serialization_ctxt = kwargs . get ( "serialization_ctxt" , { } ) serialized = { } for key , value in attr . items ( ) : try : serialized [ self . serialize_unicode ( key ) ] = self . serialize_data ( value , dict_type , ** kwargs ) except ValueError : seriali...
Serialize a dictionary of objects .
243,716
def serialize_base64 ( attr , ** kwargs ) : encoded = b64encode ( attr ) . decode ( 'ascii' ) return encoded . strip ( '=' ) . replace ( '+' , '-' ) . replace ( '/' , '_' )
Serialize str into base - 64 string .
243,717
def serialize_date ( attr , ** kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_date ( attr ) t = "{:04}-{:02}-{:02}" . format ( attr . year , attr . month , attr . day ) return t
Serialize Date object into ISO - 8601 formatted string .
243,718
def serialize_duration ( attr , ** kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_duration ( attr ) return isodate . duration_isoformat ( attr )
Serialize TimeDelta object into ISO - 8601 formatted string .
243,719
def serialize_rfc ( attr , ** kwargs ) : try : if not attr . tzinfo : _LOGGER . warning ( "Datetime with no tzinfo will be considered UTC." ) utc = attr . utctimetuple ( ) except AttributeError : raise TypeError ( "RFC1123 object must be valid Datetime object." ) return "{}, {:02} {} {:04} {:02}:{:02}:{:02} GMT" . form...
Serialize Datetime object into RFC - 1123 formatted string .
243,720
def serialize_iso ( attr , ** kwargs ) : if isinstance ( attr , str ) : attr = isodate . parse_datetime ( attr ) try : if not attr . tzinfo : _LOGGER . warning ( "Datetime with no tzinfo will be considered UTC." ) utc = attr . utctimetuple ( ) if utc . tm_year > 9999 or utc . tm_year < 1 : raise OverflowError ( "Hit ma...
Serialize Datetime object into ISO - 8601 formatted string .
243,721
def _deserialize ( self , target_obj , data ) : if hasattr ( data , "_attribute_map" ) : constants = [ name for name , config in getattr ( data , '_validation' , { } ) . items ( ) if config . get ( 'constant' ) ] try : for attr , mapconfig in data . _attribute_map . items ( ) : if attr in constants : continue value = g...
Call the deserializer on a model .
243,722
def _classify_target ( self , target , data ) : if target is None : return None , None if isinstance ( target , basestring ) : try : target = self . dependencies [ target ] except KeyError : return target , target try : target = target . _classify ( data , self . dependencies ) except AttributeError : pass return targe...
Check to see whether the deserialization target object can be classified into a subclass . Once classification has been determined initialize object .
243,723
def _unpack_content ( raw_data , content_type = None ) : from . pipeline . universal import RawDeserializer context = getattr ( raw_data , "context" , { } ) if context : if RawDeserializer . CONTEXT_NAME in context : return context [ RawDeserializer . CONTEXT_NAME ] raise ValueError ( "This pipeline didn't have the Raw...
Extract the correct structure for deserialization .
243,724
def _instantiate_model ( self , response , attrs , additional_properties = None ) : if callable ( response ) : subtype = getattr ( response , '_subtype_map' , { } ) try : readonly = [ k for k , v in response . _validation . items ( ) if v . get ( 'readonly' ) ] const = [ k for k , v in response . _validation . items ( ...
Instantiate a response model passing in deserialized args .
243,725
def deserialize_data ( self , data , data_type ) : if data is None : return data try : if not data_type : return data if data_type in self . basic_types . values ( ) : return self . deserialize_basic ( data , data_type ) if data_type in self . deserialize_type : if isinstance ( data , self . deserialize_expected_types ...
Process data for deserialization according to data type .
243,726
def deserialize_iter ( self , attr , iter_type ) : if attr is None : return None if isinstance ( attr , ET . Element ) : attr = list ( attr ) if not isinstance ( attr , ( list , set ) ) : raise DeserializationError ( "Cannot deserialize as [{}] an object of type {}" . format ( iter_type , type ( attr ) ) ) return [ sel...
Deserialize an iterable .
243,727
def deserialize_dict ( self , attr , dict_type ) : if isinstance ( attr , list ) : return { x [ 'key' ] : self . deserialize_data ( x [ 'value' ] , dict_type ) for x in attr } if isinstance ( attr , ET . Element ) : attr = { el . tag : el . text for el in attr } return { k : self . deserialize_data ( v , dict_type ) fo...
Deserialize a dictionary .
243,728
def deserialize_object ( self , attr , ** kwargs ) : if attr is None : return None if isinstance ( attr , ET . Element ) : return attr if isinstance ( attr , basestring ) : return self . deserialize_basic ( attr , 'str' ) obj_type = type ( attr ) if obj_type in self . basic_types : return self . deserialize_basic ( att...
Deserialize a generic object . This will be handled as a dictionary .
243,729
def deserialize_basic ( self , attr , data_type ) : if isinstance ( attr , ET . Element ) : attr = attr . text if not attr : if data_type == "str" : return '' else : return None if data_type == 'bool' : if attr in [ True , False , 1 , 0 ] : return bool ( attr ) elif isinstance ( attr , basestring ) : if attr . lower ( ...
Deserialize baisc builtin data type from string . Will attempt to convert to str int float and bool . This function will also accept 1 0 true and false as valid bool values .
243,730
def deserialize_unicode ( data ) : if isinstance ( data , Enum ) : return data try : if isinstance ( data , unicode ) : return data except NameError : return str ( data ) else : return str ( data )
Preserve unicode objects in Python 2 otherwise return data as a string .
243,731
def deserialize_enum ( data , enum_obj ) : if isinstance ( data , enum_obj ) : return data if isinstance ( data , Enum ) : data = data . value if isinstance ( data , int ) : try : return list ( enum_obj . __members__ . values ( ) ) [ data ] except IndexError : error = "{!r} is not a valid index for enum {!r}" raise Des...
Deserialize string into enum object .
243,732
def deserialize_bytearray ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text return bytearray ( b64decode ( attr ) )
Deserialize string into bytearray .
243,733
def deserialize_base64 ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text padding = '=' * ( 3 - ( len ( attr ) + 3 ) % 4 ) attr = attr + padding encoded = attr . replace ( '-' , '+' ) . replace ( '_' , '/' ) return b64decode ( encoded )
Deserialize base64 encoded string into string .
243,734
def deserialize_decimal ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text try : return decimal . Decimal ( attr ) except decimal . DecimalException as err : msg = "Invalid decimal {}" . format ( attr ) raise_with_traceback ( DeserializationError , msg , err )
Deserialize string into Decimal object .
243,735
def deserialize_duration ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text try : duration = isodate . parse_duration ( attr ) except ( ValueError , OverflowError , AttributeError ) as err : msg = "Cannot deserialize duration object." raise_with_traceback ( DeserializationError , msg , err ) else : r...
Deserialize ISO - 8601 formatted string into TimeDelta object .
243,736
def deserialize_date ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text if re . search ( r"[^\W\d_]" , attr , re . I + re . U ) : raise DeserializationError ( "Date must have only digits and -. Received: %s" % attr ) return isodate . parse_date ( attr , defaultmonth = None , defaultday = None )
Deserialize ISO - 8601 formatted string into Date object .
243,737
def deserialize_rfc ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text try : date_obj = datetime . datetime . strptime ( attr , "%a, %d %b %Y %H:%M:%S %Z" ) if not date_obj . tzinfo : date_obj = date_obj . replace ( tzinfo = TZ_UTC ) except ValueError as err : msg = "Cannot deserialize to rfc datetim...
Deserialize RFC - 1123 formatted string into Datetime object .
243,738
def deserialize_iso ( attr ) : if isinstance ( attr , ET . Element ) : attr = attr . text try : attr = attr . upper ( ) match = Deserializer . valid_date . match ( attr ) if not match : raise ValueError ( "Invalid datetime string: " + attr ) check_decimal = attr . split ( '.' ) if len ( check_decimal ) > 1 : decimal_st...
Deserialize ISO - 8601 formatted string into Datetime object .
243,739
def raw ( self ) : raw = ClientRawResponse ( self . current_page , self . _response ) if self . _raw_headers : raw . add_headers ( self . _raw_headers ) return raw
Get current page as ClientRawResponse .
243,740
def advance_page ( self ) : if self . next_link is None : raise StopIteration ( "End of paging" ) self . _current_page_iter_index = 0 self . _response = self . _get_next ( self . next_link ) self . _derserializer ( self , self . _response ) return self . current_page
Force moving the cursor to the next azure call .
243,741
def _ensureAtomicity ( fn ) : @ ensureScoopStartedProperly def wrapper ( * args , ** kwargs ) : from . import _control _control . execQueue . socket . pumpInfoSocket ( ) for key , value in kwargs . items ( ) : if key in itertools . chain ( * ( elem . keys ( ) for elem in elements . values ( ) ) ) : raise TypeError ( "T...
Ensure atomicity of passed elements on the whole worker pool
243,742
def getConst ( name , timeout = 0.1 ) : from . import _control import time timeStamp = time . time ( ) while True : _control . execQueue . socket . pumpInfoSocket ( ) constants = dict ( reduce ( lambda x , y : x + list ( y . items ( ) ) , elements . values ( ) , [ ] ) ) timeoutHappened = time . time ( ) - timeStamp > t...
Get a shared constant .
243,743
def launchBootstraps ( ) : global processes worker_amount , verbosity , args = getArgs ( ) was_origin = False if verbosity >= 1 : sys . stderr . write ( "Launching {0} worker(s) using {1}.\n" . format ( worker_amount , os . environ [ 'SHELL' ] if 'SHELL' in os . environ else "an unknown shell" , ) ) sys . stderr . flus...
Launch the bootstrap instances in separate subprocesses
243,744
def resolve ( self , s ) : name = s . split ( '.' ) used = name . pop ( 0 ) try : found = self . importer ( used ) for frag in name : used += '.' + frag try : found = getattr ( found , frag ) except AttributeError : self . importer ( used ) found = getattr ( found , frag ) return found except ImportError : e , tb = sys...
Resolve strings to objects using standard import and attribute syntax .
243,745
def as_tuple ( self , value ) : if isinstance ( value , list ) : value = tuple ( value ) return value
Utility function which converts lists to tuples .
243,746
def configure_formatter ( self , config ) : if '()' in config : factory = config [ '()' ] try : result = self . configure_custom ( config ) except TypeError , te : if "'format'" not in str ( te ) : raise config [ 'fmt' ] = config . pop ( 'format' ) config [ '()' ] = factory result = self . configure_custom ( config ) e...
Configure a formatter from a dictionary .
243,747
def configure_filter ( self , config ) : if '()' in config : result = self . configure_custom ( config ) else : name = config . get ( 'name' , '' ) result = logging . Filter ( name ) return result
Configure a filter from a dictionary .
243,748
def configure_logger ( self , name , config , incremental = False ) : logger = logging . getLogger ( name ) self . common_logger_config ( logger , config , incremental ) propagate = config . get ( 'propagate' , None ) if propagate is not None : logger . propagate = propagate
Configure a non - root logger from a dictionary .
243,749
def configure_root ( self , config , incremental = False ) : root = logging . getLogger ( ) self . common_logger_config ( root , config , incremental )
Configure a root logger from a dictionary .
243,750
def sliceImage ( image , divWidth , divHeight ) : w , h = image . size tiles = [ ] for y in range ( 0 , h - 1 , h / divHeight ) : my = min ( y + h / divHeight , h ) for x in range ( 0 , w - 1 , w / divWidth ) : mx = min ( x + w / divWidth , w ) tiles . append ( image . crop ( ( x , y , mx , my ) ) ) return tiles
Divide the received image in multiple tiles
243,751
def resizeTile ( index , size ) : resized = tiles [ index ] . resize ( size , Image . ANTIALIAS ) return sImage ( resized . tostring ( ) , resized . size , resized . mode )
Apply Antialiasing resizing to tile
243,752
def initLogging ( verbosity = 0 , name = "SCOOP" ) : global loggingConfig verbose_levels = { - 2 : "CRITICAL" , - 1 : "ERROR" , 0 : "WARNING" , 1 : "INFO" , 2 : "DEBUG" , 3 : "DEBUG" , 4 : "NOSET" , } log_handlers = { "console" : { "class" : "logging.StreamHandler" , "formatter" : "{name}Formatter" . format ( name = na...
Creates a logger .
243,753
def externalHostname ( hosts ) : hostname = hosts [ 0 ] [ 0 ] if hostname in localHostnames and len ( hosts ) > 1 : hostname = socket . getfqdn ( ) . split ( "." ) [ 0 ] try : socket . getaddrinfo ( hostname , None ) except socket . gaierror : raise Exception ( "\nThe first host (containing a broker) is not" " routable...
Ensure external hostname is routable .
243,754
def getHosts ( filename = None , hostlist = None ) : if filename : return getHostsFromFile ( filename ) elif hostlist : return getHostsFromList ( hostlist ) elif getEnv ( ) == "SLURM" : return getHostsFromSLURM ( ) elif getEnv ( ) == "PBS" : return getHostsFromPBS ( ) elif getEnv ( ) == "SGE" : return getHostsFromSGE (...
Return a list of hosts depending on the environment
243,755
def getHostsFromFile ( filename ) : valid_hostname = r"^[^ /\t=\n]+" workers = r"\d+" hostname_re = re . compile ( valid_hostname ) worker_re = re . compile ( workers ) hosts = [ ] with open ( filename ) as f : for line in f : if re . search ( '[\[\]]' , line ) : hosts = hosts + parseSLURM ( line . strip ( ) ) else : h...
Parse a file to return a list of hosts .
243,756
def getHostsFromList ( hostlist ) : if any ( re . search ( '[\[\]]' , x ) for x in hostlist ) : return parseSLURM ( str ( hostlist ) ) hostlist = groupTogether ( hostlist ) retVal = [ ] for key , group in groupby ( hostlist ) : retVal . append ( ( key , len ( list ( group ) ) ) ) return retVal
Return the hosts from the command line
243,757
def parseSLURM ( string ) : import subprocess , os hostsstr = subprocess . check_output ( [ "scontrol" , "show" , "hostnames" , string ] ) if sys . version_info . major > 2 : hostsstr = hostsstr . decode ( ) hosts = hostsstr . split ( os . linesep ) hosts = filter ( None , hosts ) hosts = [ ( host , 1 ) for host in hos...
Return a host list from a SLURM string
243,758
def getHostsFromPBS ( ) : with open ( os . environ [ "PBS_NODEFILE" ] , 'r' ) as hosts : hostlist = groupTogether ( hosts . read ( ) . split ( ) ) retVal = [ ] for key , group in groupby ( hostlist ) : retVal . append ( ( key , len ( list ( group ) ) ) ) return retVal
Return a host list in a PBS environment
243,759
def getHostsFromSGE ( ) : with open ( os . environ [ "PE_HOSTFILE" ] , 'r' ) as hosts : return [ ( host . split ( ) [ 0 ] , int ( host . split ( ) [ 1 ] ) ) for host in hosts ]
Return a host list in a SGE environment
243,760
def getWorkerQte ( hosts ) : if "SLURM_NTASKS" in os . environ : return int ( os . environ [ "SLURM_NTASKS" ] ) elif "PBS_NP" in os . environ : return int ( os . environ [ "PBS_NP" ] ) elif "NSLOTS" in os . environ : return int ( os . environ [ "NSLOTS" ] ) else : return sum ( host [ 1 ] for host in hosts )
Return the number of workers to launch depending on the environment
243,761
def functionFactory ( in_code , name , defaults , globals_ , imports ) : def generatedFunction ( ) : pass generatedFunction . __code__ = marshal . loads ( in_code ) generatedFunction . __name__ = name generatedFunction . __defaults = defaults generatedFunction . __globals__ . update ( pickle . loads ( globals_ ) ) for ...
Creates a function at runtime using binary compiled inCode
243,762
def makeLambdaPicklable ( lambda_function ) : if isinstance ( lambda_function , type ( lambda : None ) ) and lambda_function . __name__ == '<lambda>' : def __reduce_ex__ ( proto ) : return unpickleLambda , ( marshal . dumps ( lambda_function . __code__ ) , ) lambda_function . __reduce_ex__ = __reduce_ex__ return lambda...
Take input lambda function l and makes it picklable .
243,763
def addConnector ( self , wire1 , wire2 ) : if wire1 == wire2 : return if wire1 > wire2 : wire1 , wire2 = wire2 , wire1 try : last_level = self [ - 1 ] except IndexError : self . append ( [ ( wire1 , wire2 ) ] ) return for wires in last_level : if wires [ 1 ] >= wire1 and wires [ 0 ] <= wire2 : self . append ( [ ( wire...
Add a connector between wire1 and wire2 in the network .
243,764
def sort ( self , values ) : for level in self : for wire1 , wire2 in level : if values [ wire1 ] > values [ wire2 ] : values [ wire1 ] , values [ wire2 ] = values [ wire2 ] , values [ wire1 ]
Sort the values in - place based on the connectors in the network .
243,765
def draw ( self ) : str_wires = [ [ "-" ] * 7 * self . depth ] str_wires [ 0 ] [ 0 ] = "0" str_wires [ 0 ] [ 1 ] = " o" str_spaces = [ ] for i in range ( 1 , self . dimension ) : str_wires . append ( [ "-" ] * 7 * self . depth ) str_spaces . append ( [ " " ] * 7 * self . depth ) str_wires [ i ] [ 0 ] = str ( i ) str_wi...
Return an ASCII representation of the network .
243,766
def getWorkersName ( data ) : names = [ fichier for fichier in data . keys ( ) ] names . sort ( ) try : names . remove ( "broker" ) except ValueError : pass return names
Returns the list of the names of the workers sorted alphabetically
243,767
def importData ( directory ) : dataTask = OrderedDict ( ) dataQueue = OrderedDict ( ) for fichier in sorted ( os . listdir ( directory ) ) : try : with open ( "{directory}/{fichier}" . format ( ** locals ( ) ) , 'rb' ) as f : fileName , fileType = fichier . rsplit ( '-' , 1 ) if fileType == "QUEUE" : dataQueue [ fileNa...
Parse the input files and return two dictionnaries
243,768
def getTimes ( dataTasks ) : global begin_time start_time , end_time = float ( 'inf' ) , 0 for fichier , vals in dataTask . items ( ) : try : if hasattr ( vals , 'values' ) : tmp_start_time = min ( [ a [ 'start_time' ] for a in vals . values ( ) ] ) [ 0 ] if tmp_start_time < start_time : start_time = tmp_start_time tmp...
Get the start time and the end time of data in milliseconds
243,769
def WorkersDensity ( dataTasks ) : start_time , end_time = getTimes ( dataTasks ) graphdata = [ ] for name in getWorkersName ( dataTasks ) : vals = dataTasks [ name ] if hasattr ( vals , 'values' ) : workerdata = [ ] print ( "Plotting density map for {}" . format ( name ) ) try : for graphtime in timeRange ( start_time...
Return the worker density data for the graph .
243,770
def plotDensity ( dataTask , filename ) : def format_time ( x , pos = None ) : start_time , end_time = [ ( a - begin_time ) / 1000 for a in getTimes ( dataTask ) ] return int ( end_time * x / DENSITY_MAP_TIME_AXIS_LENGTH ) graphdata = WorkersDensity ( dataTask ) if len ( graphdata ) : fig = plt . figure ( ) ax = fig . ...
Plot the worker density graph
243,771
def plotBrokerQueue ( dataTask , filename ) : print ( "Plotting broker queue length for {0}." . format ( filename ) ) plt . figure ( ) plt . subplot ( 211 ) for fichier , vals in dataTask . items ( ) : if type ( vals ) == list : timestamps = list ( map ( datetime . fromtimestamp , map ( int , list ( zip ( * vals ) ) [ ...
Generates the broker queue length graphic .
243,772
def getWorkerInfo ( dataTask ) : workertime = [ ] workertasks = [ ] for fichier , vals in dataTask . items ( ) : if hasattr ( vals , 'values' ) : totaltime = sum ( [ a [ 'executionTime' ] for a in vals . values ( ) ] ) totaltasks = sum ( [ 1 for a in vals . values ( ) ] ) workertime . append ( totaltime ) workertasks ....
Returns the total execution time and task quantity by worker
243,773
def timelines ( fig , y , xstart , xstop , color = 'b' ) : fig . hlines ( y , xstart , xstop , color , lw = 4 ) fig . vlines ( xstart , y + 0.03 , y - 0.03 , color , lw = 2 ) fig . vlines ( xstop , y + 0.03 , y - 0.03 , color , lw = 2 )
Plot timelines at y from xstart to xstop with given color .
243,774
def plotTimeline ( dataTask , filename ) : fig = plt . figure ( ) ax = fig . gca ( ) worker_names = [ x for x in dataTask . keys ( ) if "broker" not in x ] min_time = getMinimumTime ( dataTask ) ystep = 1. / ( len ( worker_names ) + 1 ) y = 0 for worker , vals in dataTask . items ( ) : if "broker" in worker : continue ...
Build a timeline
243,775
def setWorker ( self , * args , ** kwargs ) : try : la = self . LAUNCHING_ARGUMENTS ( * args , ** kwargs ) except TypeError as e : scoop . logger . error ( ( "addWorker failed to convert args %s and kwargs %s " "to namedtuple (requires %s arguments (names %s)" ) % ( args , kwargs , len ( self . LAUNCHING_ARGUMENTS . _f...
Add a worker assignation Arguments and order to pass are defined in LAUNCHING_ARGUMENTS Using named args is advised .
243,776
def _WorkerCommand_environment ( self ) : worker = self . workersArguments c = [ ] if worker . prolog : c . extend ( [ "source" , worker . prolog , "&&" , ] ) if worker . pythonPath and not self . isLocal ( ) : c . extend ( [ "env" , "PYTHONPATH={0}:$PYTHONPATH" . format ( worker . pythonPath ) , ] ) elif worker . pyth...
Return list of shell commands to prepare the environment for bootstrap .
243,777
def _WorkerCommand_launcher ( self ) : return [ self . workersArguments . pythonExecutable , '-m' , 'scoop.launch.__main__' , str ( self . workerAmount ) , str ( self . workersArguments . verbose ) , ]
Return list commands to start the bootstrap process
243,778
def _WorkerCommand_options ( self ) : worker = self . workersArguments c = [ ] if self . hostname == worker . brokerHostname : broker = "127.0.0.1" else : broker = worker . brokerHostname if worker . nice is not None : c . extend ( [ '--nice' , str ( worker . nice ) ] ) c . extend ( [ '--size' , str ( worker . size ) ]...
Return list of options for bootstrap
243,779
def _WorkerCommand_executable ( self ) : worker = self . workersArguments c = [ ] if worker . executable : c . append ( worker . executable ) if worker . args : if self . isLocal ( ) : c . extend ( [ '{0}' . format ( a ) for a in worker . args ] ) else : c . extend ( [ '"{0}"' . format ( a . replace ( '"' , '\\\"' ) ) ...
Return executable and any options to be executed by bootstrap
243,780
def _getWorkerCommandList ( self ) : c = [ ] c . extend ( self . _WorkerCommand_environment ( ) ) c . extend ( self . _WorkerCommand_launcher ( ) ) c . extend ( self . _WorkerCommand_options ( ) ) c . extend ( self . _WorkerCommand_executable ( ) ) return c
Generate the workerCommand as list
243,781
def launch ( self , tunnelPorts = None ) : if self . isLocal ( ) : c = self . _getWorkerCommandList ( ) self . subprocesses . append ( subprocess . Popen ( c ) ) else : BASE_SSH [ 0 ] = self . ssh_executable sshCmd = BASE_SSH if not self . rsh else BASE_RSH if tunnelPorts is not None : sshCmd += [ '-R {0}:127.0.0.1:{0}...
Launch every worker assigned on this host .
243,782
def _switch ( self , future ) : scoop . _control . current = self assert self . greenlet is not None , ( "No greenlet to switch to:" "\n{0}" . format ( self . __dict__ ) ) return self . greenlet . switch ( future )
Switch greenlet .
243,783
def cancel ( self ) : if self in scoop . _control . execQueue . movable : self . exceptionValue = CancelledError ( ) scoop . _control . futureDict [ self . id ] . _delete ( ) scoop . _control . execQueue . remove ( self ) return True return False
If the call is currently being executed or sent for remote execution then it cannot be cancelled and the method will return False otherwise the call will be cancelled and the method will return True .
243,784
def done ( self ) : try : scoop . _control . execQueue . remove ( self ) scoop . _control . execQueue . socket . sendFuture ( self ) except ValueError as e : pass scoop . _control . execQueue . updateQueue ( ) return self . _ended ( )
Returns True if the call was successfully cancelled or finished running False otherwise . This function updates the executionQueue so it receives all the awaiting message .
243,785
def add_done_callback ( self , callable_ , inCallbackType = CallbackType . standard , inCallbackGroup = None ) : self . callback . append ( callbackEntry ( callable_ , inCallbackType , inCallbackGroup ) ) if self . _ended ( ) : self . callback [ - 1 ] . func ( self )
Attach a callable to the future that will be called when the future is cancelled or finishes running . Callable will be called with the future as its only argument .
243,786
def append ( self , future ) : if future . _ended ( ) and future . index is None : self . inprogress . add ( future ) elif future . _ended ( ) and future . index is not None : self . ready . append ( future ) elif future . greenlet is not None : self . inprogress . add ( future ) else : self . movable . append ( future...
Append a future to the queue .
243,787
def askForPreviousFutures ( self ) : if time . time ( ) < self . lastStatus + POLLING_TIME / 1000 : return self . lastStatus = time . time ( ) for future in scoop . _control . futureDict . values ( ) : if scoop . IS_ORIGIN and future . id == ( scoop . worker , 0 ) : continue if future not in self . inprogress : self . ...
Request a status for every future to the broker .
243,788
def pop ( self ) : self . updateQueue ( ) if self . timelen ( self ) < self . lowwatermark : self . requestFuture ( ) if len ( self . ready ) != 0 : return self . ready . popleft ( ) elif len ( self . movable ) != 0 : return self . movable . popleft ( ) else : self . lastStatus = time . time ( ) while len ( self ) == 0...
Pop the next future from the queue ; in progress futures have priority over those that have not yet started ; higher level futures have priority over lower level ones ;
243,789
def flush ( self ) : for elem in self : if elem . id [ 0 ] != scoop . worker : elem . _delete ( ) self . socket . sendFuture ( elem ) self . ready . clear ( ) self . movable . clear ( )
Empty the local queue and send its elements to be executed remotely .
243,790
def updateQueue ( self ) : for future in self . socket . recvFuture ( ) : if future . _ended ( ) : try : thisFuture = scoop . _control . futureDict [ future . id ] except KeyError : scoop . logger . warn ( '{0}: Received an unexpected future: ' '{1}' . format ( scoop . worker , future . id ) ) continue thisFuture . res...
Process inbound communication buffer . Updates the local queue with elements from the broker .
243,791
def sendResult ( self , future ) : future . greenlet = None assert future . _ended ( ) , "The results are not valid" self . socket . sendResult ( future )
Send back results to broker for distribution to parent task .
243,792
def shutdown ( self ) : self . socket . shutdown ( ) if scoop : if scoop . DEBUG : from scoop import _debug _debug . writeWorkerDebug ( scoop . _control . debug_stats , scoop . _control . QueueLength , )
Shutdown the ressources used by the queue
243,793
def redirectSTDOUTtoDebugFile ( ) : import sys kwargs = { } if sys . version_info >= ( 3 , ) : kwargs [ "encoding" ] = "utf8" sys . stdout = open ( os . path . join ( getDebugDirectory ( ) , "{0}.stdout" . format ( getDebugIdentifier ( ) ) , ) , "w" , 1 , ** kwargs ) sys . stderr = open ( os . path . join ( getDebugDir...
Redirects the stdout and stderr of the current process to a file .
243,794
def writeWorkerDebug ( debugStats , queueLength , path_suffix = "" ) : createDirectory ( path_suffix ) origin_prefix = "origin-" if scoop . IS_ORIGIN else "" statsFilename = os . path . join ( getDebugDirectory ( ) , path_suffix , "{1}worker-{0}-STATS" . format ( getDebugIdentifier ( ) , origin_prefix ) ) lengthFilenam...
Serialize the execution data using pickle and writes it into the debug directory .
243,795
def main ( ) : parser = makeParser ( ) args = parser . parse_args ( ) hosts = utils . getHosts ( args . hostfile , args . hosts ) if args . n : n = args . n else : n = utils . getWorkerQte ( hosts ) assert n >= 0 , ( "Scoop couldn't determine the number of worker to start.\n" "Use the '-n' flag to set it manually." ) i...
Execution of the SCOOP module . Parses its command - line arguments and launch needed resources .
243,796
def initLogging ( self ) : verbose_levels = { 0 : logging . WARNING , 1 : logging . INFO , 2 : logging . DEBUG , } logging . basicConfig ( level = verbose_levels [ self . verbose ] , format = "[%(asctime)-15s] %(module)-9s %(levelname)-7s %(message)s" ) return logging . getLogger ( self . __class__ . __name__ )
Configures the logger .
243,797
def divideHosts ( self , hosts , qty ) : maximumWorkers = sum ( host [ 1 ] for host in hosts ) if qty > maximumWorkers : index = 0 while qty > maximumWorkers : hosts [ index ] = ( hosts [ index ] [ 0 ] , hosts [ index ] [ 1 ] + 1 ) index = ( index + 1 ) % len ( hosts ) maximumWorkers += 1 elif qty < maximumWorkers : wh...
Divide processes among hosts .
243,798
def showHostDivision ( self , headless ) : scoop . logger . info ( 'Worker d--istribution: ' ) for worker , number in self . worker_hosts : first_worker = ( worker == self . worker_hosts [ 0 ] [ 0 ] ) scoop . logger . info ( ' {0}:\t{1} {2}' . format ( worker , number - 1 if first_worker or headless else str ( number...
Show the worker distribution over the hosts .
243,799
def setWorkerInfo ( self , hostname , workerAmount , origin ) : scoop . logger . debug ( 'Initialising {0}{1} worker {2} [{3}].' . format ( "local" if hostname in utils . localHostnames else "remote" , " origin" if origin else "" , self . workersLeft , hostname , ) ) add_args , add_kwargs = self . _setWorker_args ( ori...
Sets the worker information for the current host .