idx: int64, values 0 – 63k
question: string, lengths 61 – 4.03k
target: string, lengths 6 – 1.23k
300
def write_utf(self, s):
    utfstr = s.encode('utf-8')
    length = len(utfstr)
    if length > 64:
        raise NamePartTooLongException
    self.write_byte(length)
    self.write_string(utfstr, length)
Writes a UTF-8 string of a given length to the packet
301
def write_name(self, name):
    try:
        index = self.names[name]
    except KeyError:
        self.names[name] = self.size
        parts = name.split('.')
        if parts[-1] == '':
            parts = parts[:-1]
        for part in parts:
            self.write_utf(part)
        self.write_byte(0)
        return
    self.write_byte((index >> 8) | 0xC0)
    self.write_byte(index)
Writes a domain name to the packet
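The two trailing write_byte calls emit a standard DNS compression pointer: two bytes whose top bits are 11, followed by the 14-bit offset at which the name was first written. A minimal, self-contained sketch of that encoding (the offset value is invented for illustration):

    # Hypothetical offset of a previously written name within the packet.
    index = 0x02A5

    # High byte: OR-ing with 0xC0 sets the two marker bits of a pointer.
    high = (index >> 8) | 0xC0
    # Low byte: only the lowest 8 bits of the offset.
    low = index & 0xFF

    assert (high, low) == (0xC2, 0xA5)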
302
def write_question(self, question):
    self.write_name(question.name)
    self.write_short(question.type)
    self.write_short(question.clazz)
Writes a question to the packet
303
def packet(self):
    if not self.finished:
        self.finished = 1
        for question in self.questions:
            self.write_question(question)
        for answer, time in self.answers:
            self.write_record(answer, time)
        for authority in self.authorities:
            self.write_record(authority, 0)
        for additional in self.additionals:
            self.write_record(additional, 0)
        self.insert_short(0, len(self.additionals))
        self.insert_short(0, len(self.authorities))
        self.insert_short(0, len(self.answers))
        self.insert_short(0, len(self.questions))
        self.insert_short(0, self.flags)
        if self.multicast:
            self.insert_short(0, 0)
        else:
            self.insert_short(0, self.id)
    return b''.join(self.data)
Returns a string containing the packet's bytes
304
def add(self, entry):
    if self.get(entry) is not None:
        return
    try:
        list = self.cache[entry.key]
    except KeyError:
        list = self.cache[entry.key] = []
    list.append(entry)
Adds an entry
305
def sign(self, entry, signer=None):
    if self.get(entry) is not None:
        return
    if (entry.rrsig is None) and (self.private is not None):
        entry.rrsig = DNSSignatureS(entry.name, _TYPE_RRSIG, _CLASS_IN,
                                    entry, self.private, signer)
    self.add(entry)
    if self.private is not None:
        self.add(entry.rrsig)
Adds and signs an entry
306
def remove(self, entry):
    try:
        list = self.cache[entry.key]
        list.remove(entry)
    except (KeyError, ValueError):
        pass
Removes an entry
307
def get(self, entry):
    try:
        list = self.cache[entry.key]
        return list[list.index(entry)]
    except (KeyError, ValueError):
        return None
Gets an entry by key. Will return None if there is no matching entry.
308
def get_by_details(self, name, type, clazz):
    entry = DNSEntry(name, type, clazz)
    return self.get(entry)
Gets an entry by details. Will return None if there is no matching entry.
309
def entries(self):
    def add(x, y):
        return x + y
    try:
        return reduce(add, list(self.cache.values()))
    except TypeError:
        return []
Returns a list of all entries
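On Python 3, reduce lives in functools, so this method needs an import to run; the same flattening can also be done without the quadratic list concatenation. A minimal, runnable equivalent (the cache contents are invented for illustration):

    from functools import reduce
    import itertools

    cache = {'a.local.': [1, 2], 'b.local.': [3]}

    flat = reduce(lambda x, y: x + y, list(cache.values()), [])
    assert flat == [1, 2, 3]

    # chain.from_iterable avoids repeatedly copying intermediate lists:
    assert list(itertools.chain.from_iterable(cache.values())) == flat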
310
def update_record(self, zeroconf, now, record):
    if record.type == _TYPE_PTR and record.name == self.type:
        expired = record.is_expired(now)
        try:
            oldrecord = self.services[record.alias.lower()]
            if not expired:
                oldrecord.reset_ttl(record)
            else:
                del self.services[record.alias.lower()]
                callback = lambda x: self.listener.remove_service(x, self.type, record.alias)
                self.list.append(callback)
                return
        except KeyError:
            if not expired:
                self.services[record.alias.lower()] = record
                callback = lambda x: self.listener.add_service(x, self.type, record.alias)
                self.list.append(callback)
        expires = record.get_expiration_time(75)
        if expires < self.next_time:
            self.next_time = expires
Callback invoked by Zeroconf when new information arrives.
311
def set_properties(self, properties):
    if isinstance(properties, dict):
        self.properties = properties
        self.sync_properties()
    else:
        self.text = properties
Sets properties and text of this info from a dictionary
312
def set_text(self, text):
    self.text = text
    try:
        self.properties = text_to_dict(text)
    except Exception:
        traceback.print_exc()
        self.properties = None
Sets properties and text given a text field
313
def update_record(self, zeroconf, now, record):
    if record is not None and not record.is_expired(now):
        if record.type == _TYPE_A:
            if record.name == self.name:
                if record.address not in self.address:
                    self.address.append(record.address)
        elif record.type == _TYPE_SRV:
            if record.name == self.name:
                self.server = record.server
                self.port = record.port
                self.weight = record.weight
                self.priority = record.priority
                self.address = []
                self.update_record(
                    zeroconf, now,
                    zeroconf.cache.get_by_details(self.server, _TYPE_A, _CLASS_IN))
        elif record.type == _TYPE_TXT:
            if record.name == self.name:
                self.set_text(record.text)
Updates service information from a DNS record
314
def request(self, zeroconf, timeout):
    now = current_time_millis()
    delay = _LISTENER_TIME
    next = now + delay
    last = now + timeout
    result = 0
    try:
        zeroconf.add_listener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN))
        while self.server is None or len(self.address) == 0 or self.text is None:
            if last <= now:
                return 0
            if next <= now:
                out = DNSOutgoing(_FLAGS_QR_QUERY)
                out.add_question(DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN))
                out.add_answer_at_time(
                    zeroconf.cache.get_by_details(self.name, _TYPE_SRV, _CLASS_IN), now)
                out.add_question(DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN))
                out.add_answer_at_time(
                    zeroconf.cache.get_by_details(self.name, _TYPE_TXT, _CLASS_IN), now)
                if self.server is not None:
                    out.add_question(DNSQuestion(self.server, _TYPE_A, _CLASS_IN))
                    out.add_answer_at_time(
                        zeroconf.cache.get_by_details(self.server, _TYPE_A, _CLASS_IN), now)
                zeroconf.send(out)
                next = now + delay
                delay = delay * 2
            zeroconf.wait(min(next, last) - now)
            now = current_time_millis()
        result = 1
    finally:
        zeroconf.remove_listener(self)
    return result
Returns true if the service could be discovered on the network and updates this object with the details discovered.
315
def wait(self, timeout):
    self.condition.acquire()
    self.condition.wait(timeout // 1000)
    self.condition.release()
Calling thread waits for a given number of milliseconds or until notified.
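Note that timeout // 1000 floor-divides, so any sub-second timeout collapses to wait(0). A minimal arithmetic check of the difference (no Zeroconf involved):

    timeout_ms = 750

    # Floor division drops the fractional second entirely:
    assert timeout_ms // 1000 == 0

    # True division keeps it, which is what Condition.wait expects in seconds:
    assert timeout_ms / 1000 == 0.75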
316
def notify_all(self):
    self.condition.acquire()
    try:
        self.condition.notify_all()
    except AttributeError:
        # Fall back to the pre-2.6 camelCase name.
        self.condition.notifyAll()
    self.condition.release()
Notifies all waiting threads
317
def get_service_info(self, type, name, timeout=3000):
    info = ServiceInfo(type, name)
    if info.request(self, timeout):
        return info
    return None
Returns the network's service information for a particular name and type, or None if no service matches by the timeout, which defaults to 3 seconds.
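A minimal usage sketch, assuming this method lives on the usual Zeroconf class from this module and that a matching HTTP service is being advertised on the local network (the service name below is invented):

    zc = Zeroconf()
    info = zc.get_service_info('_http._tcp.local.',
                               'My Web Server._http._tcp.local.',
                               timeout=3000)
    if info is not None:
        print(info.server, info.port, info.properties)
    zc.close()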
318
def add_serviceListener(self, type, listener):
    self.remove_service_listener(listener)
    self.browsers.append(ServiceBrowser(self, type, listener))
Adds a listener for a particular service type. This object will then have its update_record method called when information arrives for that type.
319
def remove_service_listener(self, listener):
    for browser in self.browsers:
        if browser.listener == listener:
            browser.cancel()
            del browser
Removes a listener from the set that is currently listening.
320
def register_service(self, info):
    self.check_service(info)
    self.services[info.name.lower()] = info
    self.transfer_zone(info.type)
    self.announce_service(info.name)
Registers service information to the network with a default TTL of 60 seconds. Zeroconf will then respond to requests for information for that service. The name of the service may be changed if needed to make it unique on the network.
321
def unregister_service(self, info):
    try:
        del self.services[info.name.lower()]
    except KeyError:
        pass
    now = current_time_millis()
    next_time = now
    i = 0
    while i < 3:
        if now < next_time:
            self.wait(next_time - now)
            now = current_time_millis()
            continue
        out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
        out.add_answer_at_time(
            DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0)
        out.add_answer_at_time(
            DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0,
                       info.priority, info.weight, info.port, info.name), 0)
        out.add_answer_at_time(
            DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0)
        for k in info.address:
            out.add_answer_at_time(
                DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, k), 0)
        self.send(out)
        i += 1
        next_time += _UNREGISTER_TIME
Unregisters a service.
322
def check_service(self, info):
    now = current_time_millis()
    next_time = now
    i = 0
    while i < 3:
        for record in self.cache.entries_with_name(info.type):
            if record.type == _TYPE_PTR and not record.is_expired(now) and record.alias == info.name:
                if info.name.find('.') < 0:
                    info.name = info.name + ".[" + info.address + ":" + info.port + "]." + info.type
                    self.check_service(info)
                    return
                raise NonUniqueNameException
        if now < next_time:
            self.wait(next_time - now)
            now = current_time_millis()
            continue
        out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
        self.debug = out
        out.add_question(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
        out.add_authorative_answer(
            DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, info.ttl, info.name))
        self.send(out)
        i += 1
        next_time += _CHECK_TIME
Checks the network for a unique service name, modifying the ServiceInfo passed in if it is not unique.
323
def add_listener(self, listener, question):
    now = current_time_millis()
    self.listeners.append(listener)
    if question is not None:
        for record in self.cache.entries_with_name(question.name):
            if question.answered_by(record) and not record.is_expired(now):
                listener.update_record(self, now, record)
    self.notify_all()
Adds a listener for a given question. The listener will have its update_record method called when information is available to answer the question.
324
def update_record(self, now, rec):
    for listener in self.listeners:
        listener.update_record(self, now, rec)
    self.notify_all()
Used to notify listeners of new information that has updated a record.
325
def handle_response(self, msg, address):
    now = current_time_millis()
    sigs = []
    precache = []
    for record in msg.answers:
        if isinstance(record, DNSSignature):
            sigs.append(record)
        else:
            precache.append(record)
    for e in precache:
        for s in sigs:
            if self.verify(e, s):
                if self.adaptive and e.type == _TYPE_A:
                    if e.address == '\x00\x00\x00\x00':
                        e.address = socket.inet_aton(address)
                if e in self.cache.entries():
                    if e.is_expired(now):
                        for i in self.hooks:
                            try:
                                i.remove(e)
                            except Exception:
                                pass
                        self.cache.remove(e)
                        self.cache.remove(s)
                    else:
                        entry = self.cache.get(e)
                        sig = self.cache.get(s)
                        if (entry is not None) and (sig is not None):
                            for i in self.hooks:
                                try:
                                    i.update(e)
                                except Exception:
                                    pass
                            entry.reset_ttl(e)
                            sig.reset_ttl(s)
                else:
                    e.rrsig = s
                    self.cache.add(e)
                    self.cache.add(s)
                    for i in self.hooks:
                        try:
                            i.add(e)
                        except Exception:
                            pass
                precache.remove(e)
                sigs.remove(s)
                self.update_record(now, record)
    if self.bypass:
        for e in precache:
            if e in self.cache.entries():
                if e.is_expired(now):
                    for i in self.hooks:
                        try:
                            i.remove(e)
                        except Exception:
                            pass
                    self.cache.remove(e)
                else:
                    entry = self.cache.get(e)
                    if entry is not None:
                        for i in self.hooks:
                            try:
                                i.update(e)
                            except Exception:
                                pass
                        entry.reset_ttl(e)
            else:
                self.cache.add(e)
                for i in self.hooks:
                    try:
                        i.add(e)
                    except Exception:
                        pass
            self.update_record(now, record)
Deals with incoming response packets. All answers are held in the cache, and listeners are notified.
326
def send(self, out, addr=_MDNS_ADDR, port=_MDNS_PORT):
    for i in self.intf.values():
        try:
            return i.sendto(out.packet(), 0, (addr, port))
        except Exception:
            traceback.print_exc()
            return -1
Sends an outgoing packet.
327
def close(self):
    if globals()['_GLOBAL_DONE'] == 0:
        globals()['_GLOBAL_DONE'] = 1
        self.notify_all()
        self.engine.notify()
        self.unregister_all_services()
        for i in self.intf.values():
            try:
                i.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP,
                             socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
            except Exception:
                pass
            i.close()
Ends the background threads and prevents this instance from servicing further queries.
328
def execute(self, identity_records: 'RDD', old_state_rdd: Optional['RDD'] = None) -> 'RDD':
    identity_records_with_state = identity_records
    if old_state_rdd:
        identity_records_with_state = identity_records.fullOuterJoin(old_state_rdd)
    return identity_records_with_state.map(lambda x: self._execute_per_identity_records(x))
Executes the Blurr BTS with the given records. old_state_rdd can be provided to load an older state from a previous run.
329
def get_record_rdd_from_json_files(self,
                                   json_files: List[str],
                                   data_processor: DataProcessor = SimpleJsonDataProcessor(),
                                   spark_session: Optional['SparkSession'] = None) -> 'RDD':
    spark_context = get_spark_session(spark_session).sparkContext
    raw_records: 'RDD' = spark_context.union(
        [spark_context.textFile(file) for file in json_files])
    return raw_records.mapPartitions(
        lambda x: self.get_per_identity_records(x, data_processor)
    ).groupByKey().mapValues(list)
Reads the data from the given json_files path and converts it into the Record format for processing. data_processor is used to process the per-event data in those files to convert it into Record.
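A minimal end-to-end sketch, assuming the surrounding class also exposes the execute and print_output methods shown in this section (the class name and file paths below are invented):

    runner = SparkBTSRunner(bts_spec)   # hypothetical constructor
    records = runner.get_record_rdd_from_json_files(
        ['s3://bucket/events/day1.json', 's3://bucket/events/day2.json'])
    result = runner.execute(records)
    runner.print_output(result)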
330
def get_record_rdd_from_rdd(self,
                            rdd: 'RDD',
                            data_processor: DataProcessor = SimpleDictionaryDataProcessor()) -> 'RDD':
    return rdd.mapPartitions(
        lambda x: self.get_per_identity_records(x, data_processor)
    ).groupByKey().mapValues(list)
Converts an RDD of raw events into the Record format for processing. data_processor is used to process the per-row data to convert it into Record.
331
def write_output_file(self,
                      path: str,
                      per_identity_data: 'RDD',
                      spark_session: Optional['SparkSession'] = None) -> None:
    _spark_session_ = get_spark_session(spark_session)
    if not self._window_bts:
        per_identity_data.flatMap(
            lambda x: [json.dumps(data, cls=BlurrJSONEncoder)
                       for data in x[1][0].items()]).saveAsTextFile(path)
    else:
        _spark_session_.createDataFrame(
            per_identity_data.flatMap(lambda x: x[1][1])).write.csv(path, header=True)
Basic helper function to persist data to disk.
332
def print_output(self, per_identity_data: 'RDD') -> None:
    if not self._window_bts:
        data = per_identity_data.flatMap(
            lambda x: [json.dumps(data, cls=BlurrJSONEncoder) for data in x[1][0].items()])
    else:
        data = per_identity_data.map(
            lambda x: json.dumps((x[0], x[1][1]), cls=BlurrJSONEncoder))
    for row in data.collect():
        print(row)
Basic helper function to write data to stdout. If a window BTS was provided, the window BTS output is written; otherwise the streaming BTS output is written to stdout.
333
def find_executable(executable, path=None):
    if sys.platform != 'win32':
        return distutils.spawn.find_executable(executable, path)
    if path is None:
        path = os.environ['PATH']
    paths = path.split(os.pathsep)
    extensions = os.environ.get('PATHEXT', '.exe').split(os.pathsep)
    base, ext = os.path.splitext(executable)
    if not os.path.isfile(executable):
        for p in paths:
            for ext in extensions:
                f = os.path.join(p, base + ext)
                if os.path.isfile(f):
                    return f
        return None
    else:
        return executable
As distutils.spawn.find_executable, but on Windows looks up every extension declared in PATHEXT instead of just .exe
334
def create_environment_dict(overrides):
    result = os.environ.copy()
    result.update(overrides or {})
    return result
Create and return a copy of os.environ with the specified overrides
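A minimal, runnable demonstration (the function is copied from above; the override variable is invented):

    import os

    def create_environment_dict(overrides):
        result = os.environ.copy()
        result.update(overrides or {})
        return result

    env = create_environment_dict({'DOCKER_HOST': 'tcp://127.0.0.1:2376'})
    assert env['DOCKER_HOST'] == 'tcp://127.0.0.1:2376'
    # Passing None leaves the copy identical to the process environment:
    assert create_environment_dict(None).keys() == os.environ.keys()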
335
def get(self, server):
    if not isinstance(server, six.binary_type):
        server = server.encode('utf-8')
    data = self._execute('get', server)
    result = json.loads(data.decode('utf-8'))
    if result['Username'] == '' and result['Secret'] == '':
        raise errors.CredentialsNotFound(
            'No matching credentials in {}'.format(self.program))
    return result
Retrieve credentials for server. If no credentials are found, a StoreError will be raised.
336
def store(self, server, username, secret):
    data_input = json.dumps({
        'ServerURL': server,
        'Username': username,
        'Secret': secret,
    }).encode('utf-8')
    return self._execute('store', data_input)
Store credentials for server. Raises a StoreError if an error occurs.
337
def erase(self, server):
    if not isinstance(server, six.binary_type):
        server = server.encode('utf-8')
    self._execute('erase', server)
Erase credentials for server. Raises a StoreError if an error occurs.
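A minimal round-trip sketch, assuming these three methods live on a credential Store class wrapping an installed docker credential helper (the helper name, URL, and secret below are invented):

    store = Store('docker-credential-secretservice')  # hypothetical helper binary
    store.store('https://index.docker.io/v1/', username='alice', secret='s3cret')
    creds = store.get('https://index.docker.io/v1/')
    print(creds['Username'], creds['Secret'])
    store.erase('https://index.docker.io/v1/')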
338
def extend_schema_spec(self) -> None:
    super().extend_schema_spec()
    identity_field = {
        'Name': '_identity',
        'Type': BtsType.STRING,
        'Value': 'identity',
        ATTRIBUTE_INTERNAL: True,
    }
    if self.ATTRIBUTE_FIELDS in self._spec:
        self._spec[self.ATTRIBUTE_FIELDS].insert(0, identity_field)
        self.schema_loader.add_schema_spec(identity_field, self.fully_qualified_name)
Injects the identity field
339
def _persist(self) -> None:
    if self._store:
        self._store.save(self._key, self._snapshot)
Persists the current data group
340
def add_errors(self, *errors: Union[BaseSchemaError, SchemaErrorCollection]) -> None:
    for error in errors:
        self._error_cache.add(error)
Adds errors to the error store for the schema
341
def global_add(self, key: str, value: Any) -> None:
    self.global_context[key] = value
Adds a key and value to the global dictionary
342
def _copy_files(source, target):
    source_files = listdir(source)
    if not exists(target):
        makedirs(target)
    for filename in source_files:
        full_filename = join(source, filename)
        if isfile(full_filename):
            shutil.copy(full_filename, target)
Copy all the files in the source directory to target.
343
def create_copy(self):
    if self.executable is None or not isfile(self.executable):
        raise FileNotFoundError(
            "Could not find MAGICC{} executable: {}".format(self.version, self.executable))
    if self.is_temp:
        assert self.root_dir is None, \
            "A temp copy for this instance has already been created"
        self.root_dir = mkdtemp(prefix="pymagicc-")
    if exists(self.run_dir):
        raise Exception("A copy of MAGICC has already been created.")
    if not exists(self.root_dir):
        makedirs(self.root_dir)
    exec_dir = basename(self.original_dir)
    dirs_to_copy = [".", "bin", "run"]
    assert exec_dir in dirs_to_copy, "binary must be in bin/ or run/ directory"
    for d in dirs_to_copy:
        source_dir = abspath(join(self.original_dir, "..", d))
        if exists(source_dir):
            _copy_files(source_dir, join(self.root_dir, d))
    makedirs(join(self.root_dir, "out"))
    self.set_years()
    self.set_config()
Initialises a temporary directory structure and a copy of the MAGICC configuration files and binary.
344
def run(self, scenario=None, only=None, **kwargs):
    if not exists(self.root_dir):
        raise FileNotFoundError(self.root_dir)
    if self.executable is None:
        raise ValueError(
            "MAGICC executable not found, try setting an environment variable "
            "`MAGICC_EXECUTABLE_{}=/path/to/binary`".format(self.version))
    if scenario is not None:
        kwargs = self.set_emission_scenario_setup(scenario, kwargs)
    yr_config = {}
    if "startyear" in kwargs:
        yr_config["startyear"] = kwargs.pop("startyear")
    if "endyear" in kwargs:
        yr_config["endyear"] = kwargs.pop("endyear")
    if yr_config:
        self.set_years(**yr_config)
    kwargs.setdefault("rundate", get_date_time_string())
    self.update_config(**kwargs)
    self.check_config()
    exec_dir = basename(self.original_dir)
    command = [join(self.root_dir, exec_dir, self.binary_name)]
    if not IS_WINDOWS and self.binary_name.endswith(".exe"):
        command.insert(0, "wine")
    subprocess.check_call(command, cwd=self.run_dir, shell=IS_WINDOWS)
    outfiles = self._get_output_filenames()
    read_cols = {"climate_model": ["MAGICC{}".format(self.version)]}
    if scenario is not None:
        read_cols["model"] = scenario["model"].unique().tolist()
        read_cols["scenario"] = scenario["scenario"].unique().tolist()
    else:
        read_cols.setdefault("model", ["unspecified"])
        read_cols.setdefault("scenario", ["unspecified"])
    mdata = None
    for filepath in outfiles:
        try:
            openscm_var = _get_openscm_var_from_filepath(filepath)
            if only is None or openscm_var in only:
                tempdata = MAGICCData(join(self.out_dir, filepath),
                                      columns=deepcopy(read_cols))
                mdata = mdata.append(tempdata) if mdata is not None else tempdata
        except (NoReaderWriterError, InvalidTemporalResError):
            continue
    if mdata is None:
        error_msg = "No output found for only={}".format(only)
        raise ValueError(error_msg)
    try:
        run_paras = self.read_parameters()
        self.config = run_paras
        mdata.metadata["parameters"] = run_paras
    except FileNotFoundError:
        pass
    return mdata
Run MAGICC and parse the output.
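A minimal usage sketch, assuming pymagicc's MAGICC6 wrapper supports the context-manager protocol and that `scen` is a scenario in pymagicc's MAGICCData format (the variable selection is just an example):

    from pymagicc.core import MAGICC6

    with MAGICC6() as magicc:   # creates the temporary copy, cleans up on exit
        results = magicc.run(scenario=scen, only=["Surface Temperature"])
    print(results.metadata.get("parameters", {}))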
345
def check_config(self):
    cfg_error_msg = (
        "PYMAGICC is not the only tuning model that will be used by "
        "`MAGCFG_USER.CFG`: your run is likely to fail/do odd things")
    emisscen_error_msg = (
        "You have more than one `FILE_EMISSCEN_X` flag set. Using more than "
        "one emissions scenario is hard to debug and unnecessary with "
        "Pymagicc's dataframe scenario input. Please combine all your "
        "scenarios into one dataframe with Pymagicc and pandas, then feed "
        "this single Dataframe into Pymagicc's run API.")
    nml_to_check = "nml_allcfgs"
    usr_cfg = read_cfg_file(join(self.run_dir, "MAGCFG_USER.CFG"))
    for k in usr_cfg[nml_to_check]:
        if k.startswith("file_tuningmodel"):
            first_tuningmodel = k in ["file_tuningmodel", "file_tuningmodel_1"]
            if first_tuningmodel:
                if usr_cfg[nml_to_check][k] != "PYMAGICC":
                    raise ValueError(cfg_error_msg)
            elif usr_cfg[nml_to_check][k] not in ["USER", ""]:
                raise ValueError(cfg_error_msg)
        elif k.startswith("file_emisscen_"):
            if usr_cfg[nml_to_check][k] not in ["NONE", ""]:
                raise ValueError(emisscen_error_msg)
Check that our MAGICC .CFG files are set to safely work with PYMAGICC
346
def write(self, mdata, name):
    mdata.write(join(self.run_dir, name), self.version)
Write an input file to disk
347
def read_parameters(self):
    param_fname = join(self.out_dir, "PARAMETERS.OUT")
    if not exists(param_fname):
        raise FileNotFoundError("No PARAMETERS.OUT found")
    with open(param_fname) as nml_file:
        parameters = dict(f90nml.read(nml_file))
        for group in ["nml_years", "nml_allcfgs", "nml_outputcfgs"]:
            parameters[group] = dict(parameters[group])
            for k, v in parameters[group].items():
                parameters[group][k] = _clean_value(v)
            parameters[group.replace("nml_", "")] = parameters.pop(group)
        self.config = parameters
    return parameters
Read a PARAMETERS.OUT file
348
def remove_temp_copy(self):
    if self.is_temp and self.root_dir is not None:
        shutil.rmtree(self.root_dir)
        self.root_dir = None
Removes a temporary copy of the MAGICC version shipped with Pymagicc.
349
def set_config(self, filename="MAGTUNE_PYMAGICC.CFG",
               top_level_key="nml_allcfgs", **kwargs):
    kwargs = self._format_config(kwargs)
    fname = join(self.run_dir, filename)
    conf = {top_level_key: kwargs}
    f90nml.write(conf, fname, force=True)
    return conf
Create a configuration file for MAGICC.
350
def update_config(self, filename="MAGTUNE_PYMAGICC.CFG",
                  top_level_key="nml_allcfgs", **kwargs):
    kwargs = self._format_config(kwargs)
    fname = join(self.run_dir, filename)
    if exists(fname):
        conf = f90nml.read(fname)
    else:
        conf = {top_level_key: {}}
    conf[top_level_key].update(kwargs)
    f90nml.write(conf, fname, force=True)
    return conf
Updates a configuration file for MAGICC
351
def set_zero_config(self):
    zero_emissions.write(join(self.run_dir, self._scen_file_name), self.version)
    time = zero_emissions.filter(variable="Emissions|CH4", region="World")["time"].values
    no_timesteps = len(time)
    ch4_conc_pi = 722
    ch4_conc = ch4_conc_pi * np.ones(no_timesteps)
    ch4_conc_df = pd.DataFrame({
        "time": time,
        "scenario": "idealised",
        "model": "unspecified",
        "climate_model": "unspecified",
        "variable": "Atmospheric Concentrations|CH4",
        "unit": "ppb",
        "todo": "SET",
        "region": "World",
        "value": ch4_conc,
    })
    ch4_conc_writer = MAGICCData(ch4_conc_df)
    ch4_conc_filename = "HIST_CONSTANT_CH4_CONC.IN"
    ch4_conc_writer.metadata = {"header": "Constant pre-industrial CH4 concentrations"}
    ch4_conc_writer.write(join(self.run_dir, ch4_conc_filename), self.version)
    fgas_conc_pi = 0
    fgas_conc = fgas_conc_pi * np.ones(no_timesteps)
    varname = "FGAS_CONC"
    fgas_conc_df = pd.DataFrame({
        "time": time,
        "scenario": "idealised",
        "model": "unspecified",
        "climate_model": "unspecified",
        "variable": varname,
        "unit": "ppt",
        "todo": "SET",
        "region": "World",
        "value": fgas_conc,
    })
    fgas_conc_writer = MAGICCData(fgas_conc_df)
    fgas_conc_filename = "HIST_ZERO_{}.IN".format(varname)
    fgas_conc_writer.metadata = {"header": "Zero concentrations"}
    fgas_conc_writer.write(join(self.run_dir, fgas_conc_filename), self.version)
    emis_config = self._fix_any_backwards_emissions_scen_key_in_config(
        {"file_emissionscenario": self._scen_file_name})
    self.set_config(
        **emis_config,
        rf_initialization_method="ZEROSTARTSHIFT",
        rf_total_constantafteryr=10000,
        file_co2i_emis="", file_co2b_emis="", co2_switchfromconc2emis_year=1750,
        file_ch4i_emis="", file_ch4b_emis="", file_ch4n_emis="",
        file_ch4_conc=ch4_conc_filename, ch4_switchfromconc2emis_year=10000,
        file_n2oi_emis="", file_n2ob_emis="", file_n2on_emis="",
        file_n2o_conc="", n2o_switchfromconc2emis_year=1750,
        file_noxi_emis="", file_noxb_emis="", file_noxi_ot="", file_noxb_ot="",
        file_noxt_rf="", file_soxnb_ot="", file_soxi_ot="", file_soxt_rf="",
        file_soxi_emis="", file_soxb_emis="", file_soxn_emis="",
        file_oci_emis="", file_ocb_emis="", file_oci_ot="", file_ocb_ot="",
        file_oci_rf="", file_ocb_rf="",
        file_bci_emis="", file_bcb_emis="", file_bci_ot="", file_bcb_ot="",
        file_bci_rf="", file_bcb_rf="", bcoc_switchfromrf2emis_year=1750,
        file_nh3i_emis="", file_nh3b_emis="",
        file_nmvoci_emis="", file_nmvocb_emis="",
        file_coi_emis="", file_cob_emis="",
        file_mineraldust_rf="", file_landuse_rf="", file_bcsnow_rf="",
        file_fgas_conc=[fgas_conc_filename] * 12,
        fgas_switchfromconc2emis_year=10000,
        rf_mhalosum_scale=0, mhalo_switch_conc2emis_yr=1750,
        stratoz_o3scale=0, rf_volcanic_scale=0, rf_solar_scale=0,
    )
Set config such that radiative forcing and temperature output will be zero
352
def set_years(self, startyear=1765, endyear=2100):
    return self.set_config("MAGCFG_NMLYEARS.CFG", "nml_years",
                           endyear=endyear, startyear=startyear, stepsperyear=12)
Set the start and end dates of the simulations.
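A short usage sketch of the configuration helpers above, assuming a writable MAGICC copy already exists (the years and flag are just examples):

    magicc.set_years(startyear=1900, endyear=2050)
    magicc.update_config(out_temperature=1)  # toggle a single output flag in place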
353
def set_output_variables(self, write_ascii=True, write_binary=False, **kwargs):
    assert write_ascii or write_binary, \
        "write_binary and/or write_ascii must be configured"
    if write_binary and write_ascii:
        ascii_binary = "BOTH"
    elif write_ascii:
        ascii_binary = "ASCII"
    else:
        ascii_binary = "BINARY"
    outconfig = {
        "out_emissions": 0,
        "out_gwpemissions": 0,
        "out_sum_gwpemissions": 0,
        "out_concentrations": 0,
        "out_carboncycle": 0,
        "out_forcing": 0,
        "out_surfaceforcing": 0,
        "out_permafrost": 0,
        "out_temperature": 0,
        "out_sealevel": 0,
        "out_parameters": 0,
        "out_misc": 0,
        "out_timeseriesmix": 0,
        "out_rcpdata": 0,
        "out_summaryidx": 0,
        "out_inverseemis": 0,
        "out_tempoceanlayers": 0,
        "out_heatuptake": 0,
        "out_ascii_binary": ascii_binary,
        "out_warnings": 0,
        "out_precipinput": 0,
        "out_aogcmtuning": 0,
        "out_ccycletuning": 0,
        "out_observationaltuning": 0,
        "out_keydata_1": 0,
        "out_keydata_2": 0,
    }
    if self.version == 7:
        outconfig["out_oceanarea"] = 0
        outconfig["out_lifetimes"] = 0
    for kw in kwargs:
        val = 1 if kwargs[kw] else 0
        outconfig["out_" + kw.lower()] = val
    self.update_config(**outconfig)
Set the output configuration, minimising output as much as possible
354
def diagnose_tcr_ecs(self, **kwargs):
    if self.version == 7:
        raise NotImplementedError("MAGICC7 cannot yet diagnose ECS and TCR")
    self._diagnose_tcr_ecs_config_setup(**kwargs)
    timeseries = self.run(scenario=None, only=[
        "Atmospheric Concentrations|CO2",
        "Radiative Forcing",
        "Surface Temperature",
    ])
    tcr, ecs = self._get_tcr_ecs_from_diagnosis_results(timeseries)
    return {"tcr": tcr, "ecs": ecs, "timeseries": timeseries}
Diagnose TCR and ECS
355
def set_emission_scenario_setup(self, scenario, config_dict):
    self.write(scenario, self._scen_file_name)
    config_dict["file_emissionscenario"] = self._scen_file_name
    config_dict = self._fix_any_backwards_emissions_scen_key_in_config(config_dict)
    return config_dict
Set the emissions flags correctly.
356
def contains(value: Union[str, 'Type']) -> bool:
    if isinstance(value, str):
        return any(value.lower() == i.value for i in Type)
    return any(value == i for i in Type)
Checks if a type is defined
357
def _calcidxs(func):
    timegrids = hydpy.pub.get('timegrids')
    if timegrids is None:
        raise RuntimeError(
            'An Indexer object has been asked for an %s array. Such an '
            'array has neither been determined yet nor can it be '
            'determined automatically at the moment. Either define an '
            '%s array manually and pass it to the Indexer object, or make '
            'a proper Timegrids object available within the pub module. '
            'In usual HydPy applications, the latter is done '
            'automatically.' % (func.__name__, func.__name__))
    idxs = numpy.empty(len(timegrids.init), dtype=int)
    for jdx, date in enumerate(hydpy.pub.timegrids.init):
        idxs[jdx] = func(date)
    return idxs
Return the required indexes based on the given lambda function and the |Timegrids| object handled by module |pub|. Raise a |RuntimeError| if the latter is not available.
358
def getter_(self, fget) -> 'BaseProperty':
    self.fget = fget
    self.set_doc(fget.__doc__)
    return self
Add the given getter function and its docstring to the property and return it.
359
def isready(self, obj) -> bool:
    return vars(obj).get(self.name, False)
Return |True| or |False| to indicate if the protected property is ready for the given object. If the object is unknown, |ProtectedProperty| returns |False|.
360
def allready(self, obj) -> bool:
    for prop in self.__properties:
        if not prop.isready(obj):
            return False
    return True
Return |True| or |False| to indicate whether all protected properties are ready or not.
361
def call_fget(self, obj) -> Any:
    custom = vars(obj).get(self.name)
    if custom is None:
        return self.fget(obj)
    return custom
Return the predefined custom value when available, otherwise the value defined by the getter function.
362
def call_fset(self, obj, value) -> None:
    vars(obj)[self.name] = self.fset(obj, value)
Store the given custom value and call the setter function.
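Together, the call_f* methods above implement a caching property: the setter's return value is stashed in the instance's __dict__ under the property's name, and the getter prefers that cached value over the computed default. A minimal, self-contained sketch of the same idea (class and attribute names invented):

    class CachedCelsius:
        """Descriptor that caches a validated temperature in the instance dict."""

        name = '_celsius'

        def __set__(self, obj, value):
            # fset role: validate, then cache the result.
            vars(obj)[self.name] = float(value)

        def __get__(self, obj, objtype=None):
            custom = vars(obj).get(self.name)
            if custom is None:
                return 20.0  # fget role: fall back to a default
            return custom

    class Room:
        temperature = CachedCelsius()

    room = Room()
    assert room.temperature == 20.0
    room.temperature = '25'
    assert room.temperature == 25.0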
363
def call_fdel(self, obj) -> None:
    self.fdel(obj)
    try:
        del vars(obj)[self.name]
    except KeyError:
        pass
Remove the predefined custom value and call the delete function.
364
def _add_lines(specification, module):
    caption = _all_spec2capt.get(specification, 'dummy')
    if caption.split()[-1] in ('parameters', 'sequences', 'Masks'):
        exists_collectionclass = True
        name_collectionclass = caption.title().replace(' ', '')
    else:
        exists_collectionclass = False
    lines = []
    if specification == 'model':
        lines += [f'',
                  f'.. autoclass:: {module.__name__}.Model',
                  f'    :members:',
                  f'    :show-inheritance:',
                  f'    :exclude-members: {", ".join(EXCLUDE_MEMBERS)}']
    elif exists_collectionclass:
        lines += [f'',
                  f'.. autoclass:: {module.__name__}.{name_collectionclass}',
                  f'    :members:',
                  f'    :show-inheritance:',
                  f'    :exclude-members: {", ".join(EXCLUDE_MEMBERS)}']
    lines += ['',
              '.. automodule:: ' + module.__name__,
              '    :members:',
              '    :show-inheritance:']
    if specification == 'model':
        lines += ['    :exclude-members: Model']
    elif exists_collectionclass:
        lines += ['    :exclude-members: ' + name_collectionclass]
    return lines
Return autodoc commands for a basemodel's docstring.
365
def autodoc_basemodel(module):
    autodoc_tuple2doc(module)
    namespace = module.__dict__
    doc = namespace.get('__doc__')
    if doc is None:
        doc = ''
    basemodulename = namespace['__name__'].split('.')[-1]
    modules = {key: value for key, value in namespace.items()
               if (isinstance(value, types.ModuleType) and
                   key.startswith(basemodulename + '_'))}
    substituter = Substituter(hydpy.substituter)
    lines = []
    specification = 'model'
    modulename = basemodulename + '_' + specification
    if modulename in modules:
        module = modules[modulename]
        lines += _add_title('Model features', '-')
        lines += _add_lines(specification, module)
        substituter.add_module(module)
    for (title, spec2capt) in (('Parameter features', _PAR_SPEC2CAPT),
                               ('Sequence features', _SEQ_SPEC2CAPT),
                               ('Auxiliary features', _AUX_SPEC2CAPT)):
        found_module = False
        new_lines = _add_title(title, '-')
        for (specification, caption) in spec2capt.items():
            modulename = basemodulename + '_' + specification
            module = modules.get(modulename)
            if module:
                found_module = True
                new_lines += _add_title(caption, '.')
                new_lines += _add_lines(specification, module)
                substituter.add_module(module)
        if found_module:
            lines += new_lines
    doc += '\n'.join(lines)
    namespace['__doc__'] = doc
    basemodule = importlib.import_module(namespace['__name__'])
    substituter.add_module(basemodule)
    substituter.update_masters()
    namespace['substituter'] = substituter
Add an exhaustive docstring to the given module of a basemodel.
366
def autodoc_applicationmodel(module):
    autodoc_tuple2doc(module)
    name_applicationmodel = module.__name__
    name_basemodel = name_applicationmodel.split('_')[0]
    module_basemodel = importlib.import_module(name_basemodel)
    substituter = Substituter(module_basemodel.substituter)
    substituter.add_module(module)
    substituter.update_masters()
    module.substituter = substituter
Improves the docstrings of application models when called at the bottom of the respective module.
367
def _number_of_line(member_tuple):
    member = member_tuple[1]
    try:
        return member.__code__.co_firstlineno
    except AttributeError:
        pass
    try:
        return inspect.findsource(member)[1]
    except BaseException:
        pass
    for value in vars(member).values():
        try:
            return value.__code__.co_firstlineno
        except AttributeError:
            pass
    return 0
Try to return the number of the first line of the definition of a member of a module.
368
def autodoc_module(module):
    doc = getattr(module, '__doc__')
    if doc is None:
        doc = ''
    members = []
    for name, member in inspect.getmembers(module):
        if ((not name.startswith('_')) and
                (inspect.getmodule(member) is module)):
            members.append((name, member))
    members = sorted(members, key=_number_of_line)
    if members:
        lines = ['\n\nModule :mod:`~%s` implements the following members:\n'
                 % module.__name__]
        for (name, member) in members:
            if inspect.isfunction(member):
                type_ = 'func'
            elif inspect.isclass(member):
                type_ = 'class'
            else:
                type_ = 'obj'
            lines.append('      * :%s:`~%s` %s'
                         % (type_, name, objecttools.description(member)))
        doc = doc + '\n\n' + '\n'.join(lines) + '\n\n' + 80 * '_'
        module.__doc__ = doc
Add a short summary of all implemented members to a module's docstring.
369
def autodoc_tuple2doc(module):
    modulename = module.__name__
    for membername, member in inspect.getmembers(module):
        for tuplename, descr in _name2descr.items():
            tuple_ = getattr(member, tuplename, None)
            if tuple_:
                logstring = f'{modulename}.{membername}.{tuplename}'
                if logstring not in _loggedtuples:
                    _loggedtuples.add(logstring)
                    lst = [f'\n\n\n    {descr}:']
                    if tuplename == 'CLASSES':
                        type_ = 'func'
                    else:
                        type_ = 'class'
                    for cls in tuple_:
                        lst.append(f'      * '
                                   f':{type_}:`{cls.__module__}.{cls.__name__}`'
                                   f' {objecttools.description(cls)}')
                    doc = getattr(member, '__doc__')
                    if doc is None:
                        doc = ''
                    member.__doc__ = doc + '\n'.join(l for l in lst)
Include tuples, such as CLASSES of ControlParameters and RUN_METHODS of Models, in the respective docstring.
370
def consider_member(name_member, member, module, class_=None):
    if name_member.startswith('_'):
        return False
    if inspect.ismodule(member):
        return False
    real_module = getattr(member, '__module__', None)
    if not real_module:
        return True
    if real_module != module.__name__:
        if class_ and hasattr(member, '__get__'):
            return True
        if 'hydpy' in real_module:
            return False
        if module.__name__ not in real_module:
            return False
    return True
Return |True| if the given member should be added to the substitutions. If not, return |False|.
371
def get_role(member, cython=False):
    if inspect.isroutine(member) or isinstance(member, numpy.ufunc):
        return 'func'
    elif inspect.isclass(member):
        return 'class'
    elif cython:
        return 'func'
    return 'const'
Return the reStructuredText role (func, class, or const) best describing the given member.
372
def add_substitution(self, short, medium, long, module):
    name = module.__name__
    if 'builtin' in name:
        self._short2long[short] = long.split('~')[0] + long.split('.')[-1]
    else:
        if ('hydpy' in name) and (short not in self._blacklist):
            if short in self._short2long:
                if self._short2long[short] != long:
                    self._blacklist.add(short)
                    del self._short2long[short]
            else:
                self._short2long[short] = long
        self._medium2long[medium] = long
Add the given substitutions both as short2long and medium2long mappings.
373
def add_module(self, module, cython=False):
    name_module = module.__name__.split('.')[-1]
    short = '|%s|' % name_module
    long = ':mod:`~%s`' % module.__name__
    self._short2long[short] = long
    for (name_member, member) in vars(module).items():
        if self.consider_member(name_member, member, module):
            role = self.get_role(member, cython)
            short = '|%s|' % name_member
            medium = '|%s.%s|' % (name_module, name_member)
            long = ':%s:`~%s.%s`' % (role, module.__name__, name_member)
            self.add_substitution(short, medium, long, module)
            if inspect.isclass(member):
                for name_submember, submember in vars(member).items():
                    if self.consider_member(name_submember, submember, module, member):
                        role = self.get_role(submember, cython)
                        short = '|%s.%s|' % (name_member, name_submember)
                        medium = '|%s.%s.%s|' % (name_module, name_member, name_submember)
                        long = ':%s:`~%s.%s.%s`' % (role, module.__name__,
                                                    name_member, name_submember)
                        self.add_substitution(short, medium, long, module)
Add the given module, its members, and their submembers.
374
def add_modules(self, package):
    for name in os.listdir(package.__path__[0]):
        if name.startswith('_'):
            continue
        name = name.split('.')[0]
        short = '|%s|' % name
        long = ':mod:`~%s.%s`' % (package.__package__, name)
        self._short2long[short] = long
Add the modules of the given package without their members.
375
def update_masters(self):
    if self.master is not None:
        self.master._medium2long.update(self._medium2long)
        self.master.update_masters()
Update all master |Substituter| objects.
376
def update_slaves(self):
    for slave in self.slaves:
        slave._medium2long.update(self._medium2long)
        slave.update_slaves()
Update all slave |Substituter| objects.
377
def get_commands(self, source=None):
    commands = []
    for key, value in self:
        if (source is None) or (key in source):
            commands.append('.. %s replace:: %s' % (key, value))
    return '\n'.join(commands)
Return a string containing multiple reStructuredText replacements with the substitutions currently defined.
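For orientation, each generated line is a standard reStructuredText replace directive; with an illustrative mapping from |Node| to its full class path, one emitted command would look like:

    .. |Node| replace:: :class:`~hydpy.core.devicetools.Node`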
378
def find(self, text):
    for key, value in self:
        if (text in key) or (text in value):
            print(key, value)
Print all substitutions that include the given text string.
379
def print_progress(wrapped, _=None, args=None, kwargs=None):
    global _printprogress_indentation
    _printprogress_indentation += 4
    try:
        if hydpy.pub.options.printprogress:
            blanks = ' ' * _printprogress_indentation
            name = wrapped.__name__
            time_ = time.strftime('%H:%M:%S')
            with PrintStyle(color=34, font=1):
                print(f'{blanks}method {name} started at {time_}')
            seconds = time.perf_counter()
            sys.stdout.flush()
            wrapped(*args, **kwargs)
            blanks = ' ' * (_printprogress_indentation + 4)
            seconds = time.perf_counter() - seconds
            with PrintStyle(color=34, font=1):
                print(f'{blanks}seconds elapsed: {seconds}')
            sys.stdout.flush()
        else:
            wrapped(*args, **kwargs)
    finally:
        _printprogress_indentation -= 4
Add print commands to the given function, informing about its execution time.
380
def progressbar(iterable, length=23):
    if hydpy.pub.options.printprogress and (len(iterable) > 1):
        temp_name = os.path.join(tempfile.gettempdir(), 'HydPy_progressbar_stdout')
        temp_stdout = open(temp_name, 'w')
        real_stdout = sys.stdout
        try:
            sys.stdout = temp_stdout
            nmbstars = min(len(iterable), length)
            nmbcounts = len(iterable) / nmbstars
            indentation = ' ' * max(_printprogress_indentation, 0)
            with PrintStyle(color=36, font=1, file=real_stdout):
                print('    %s|%s|\n%s    ' % (indentation,
                                              '-' * (nmbstars - 2),
                                              indentation),
                      end='', file=real_stdout)
                counts = 1.
                for next_ in iterable:
                    counts += 1.
                    if counts >= nmbcounts:
                        print(end='*', file=real_stdout)
                        counts -= nmbcounts
                    yield next_
        finally:
            try:
                temp_stdout.close()
            except BaseException:
                pass
            sys.stdout = real_stdout
            print()
            with open(temp_name, 'r') as temp_stdout:
                sys.stdout.write(temp_stdout.read())
            sys.stdout.flush()
    else:
        for next_ in iterable:
            yield next_
Print a simple progress bar while processing the given iterable.
381
def GET_parameteritemtypes(self) -> None:
    for item in state.parameteritems:
        self._outputs[item.name] = self._get_itemtype(item)
Get the types of all current exchange items supposed to change the values of |Parameter| objects.
382
def GET_conditionitemtypes(self) -> None:
    for item in state.conditionitems:
        self._outputs[item.name] = self._get_itemtype(item)
Get the types of all current exchange items supposed to change the values of |StateSequence| or |LogSequence| objects.
383
def GET_getitemtypes(self) -> None:
    for item in state.getitems:
        type_ = self._get_itemtype(item)
        for name, _ in item.yield_name2value():
            self._outputs[name] = type_
Get the types of all current exchange items supposed to return the values of |Parameter| or |Sequence| objects or the time series of |IOSequence| objects.
384
def POST_timegrid(self) -> None:
    init = hydpy.pub.timegrids.init
    sim = hydpy.pub.timegrids.sim
    sim.firstdate = self._inputs['firstdate']
    sim.lastdate = self._inputs['lastdate']
    state.idx1 = init[sim.firstdate]
    state.idx2 = init[sim.lastdate]
Change the current simulation |Timegrid|.
385
def GET_parameteritemvalues(self) -> None:
    for item in state.parameteritems:
        self._outputs[item.name] = item.value
Get the values of all |ChangeItem| objects handling |Parameter| objects.
386
def GET_conditionitemvalues(self) -> None:
    for item in state.conditionitems:
        self._outputs[item.name] = item.value
Get the values of all |ChangeItem| objects handling |StateSequence| or |LogSequence| objects.
387
def GET_getitemvalues(self) -> None:
    for item in state.getitems:
        for name, value in item.yield_name2value(state.idx1, state.idx2):
            self._outputs[name] = value
Get the values of all |Variable| objects observed by the current |GetItem| objects.
388
def GET_load_conditionvalues(self) -> None:
    try:
        state.hp.conditions = state.conditions[self._id][state.idx1]
    except KeyError:
        if state.idx1:
            self._statuscode = 500
            raise RuntimeError(
                f'Conditions for ID `{self._id}` and time point '
                f'`{hydpy.pub.timegrids.sim.firstdate}` are required, '
                f'but have not been calculated so far.')
        else:
            state.hp.conditions = state.init_conditions
Assign the |StateSequence| or |LogSequence| object values available for the current simulation start point to the current |HydPy| instance.
389
def GET_save_conditionvalues(self) -> None:
    state.conditions[self._id] = state.conditions.get(self._id, {})
    state.conditions[self._id][state.idx2] = state.hp.conditions
Save the |StateSequence| and |LogSequence| object values of the current |HydPy| instance for the current simulation endpoint.
390
def GET_save_parameteritemvalues(self) -> None:
    for item in state.parameteritems:
        state.parameteritemvalues[self._id][item.name] = item.value.copy()
Save the values of those |ChangeItem| objects which are handling |Parameter| objects.
391
def GET_savedparameteritemvalues(self) -> None:
    dict_ = state.parameteritemvalues.get(self._id)
    if dict_ is None:
        self.GET_parameteritemvalues()
    else:
        for name, value in dict_.items():
            self._outputs[name] = value
Get the previously saved values of those |ChangeItem| objects which are handling |Parameter| objects.
392
def GET_save_getitemvalues(self) -> None:
    for item in state.getitems:
        for name, value in item.yield_name2value(state.idx1, state.idx2):
            state.getitemvalues[self._id][name] = value
Save the values of all current |GetItem| objects.
393
def GET_savedgetitemvalues(self) -> None:
    dict_ = state.getitemvalues.get(self._id)
    if dict_ is None:
        self.GET_getitemvalues()
    else:
        for name, value in dict_.items():
            self._outputs[name] = value
Get the previously saved values of all |GetItem| objects.
394
def GET_save_timegrid(self) -> None:
    state.timegrids[self._id] = copy.deepcopy(hydpy.pub.timegrids.sim)
Save the current simulation period.
395
def GET_savedtimegrid(self) -> None:
    try:
        self._write_timegrid(state.timegrids[self._id])
    except KeyError:
        self._write_timegrid(hydpy.pub.timegrids.init)
Get the previously saved simulation period.
396
def _compare_variables_function_generator(method_string, aggregation_func):
    def comparison_function(self, other):
        if self is other:
            return method_string in ('__eq__', '__le__', '__ge__')
        method = getattr(self.value, method_string)
        try:
            if hasattr(type(other), '__hydpy__get_value__'):
                other = other.__hydpy__get_value__()
            result = method(other)
            if result is NotImplemented:
                return result
            return aggregation_func(result)
        except BaseException:
            objecttools.augment_excmessage(
                f'While trying to compare variable '
                f'{objecttools.elementphrase(self)} with object '
                f'`{other}` of type `{objecttools.classname(other)}`')
    return comparison_function
Return a function usable as a comparison method for class |Variable|.
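A minimal, self-contained sketch of the same factory pattern, with the HydPy-specific error handling stripped out (the class and helper names are invented):

    import numpy

    def _make_comparison(method_string, aggregation_func):
        def comparison(self, other):
            if self is other:
                return method_string in ('__eq__', '__le__', '__ge__')
            # Delegate to the element-wise numpy comparison, then aggregate.
            result = getattr(numpy.asarray(self.values), method_string)(other)
            if result is NotImplemented:
                return result
            return aggregation_func(result)
        return comparison

    class Series:
        def __init__(self, values):
            self.values = values
        # Element-wise comparisons aggregated into a single truth value:
        __eq__ = _make_comparison('__eq__', numpy.all)
        __lt__ = _make_comparison('__lt__', numpy.all)

    s = Series([1, 2, 3])
    assert s == [1, 2, 3]       # all elements equal
    assert not (s < [1, 2, 3])  # no element strictly smaller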
397
def to_repr(self: Variable, values, brackets1d: Optional[bool] = False) -> str:
    prefix = f'{self.name}('
    if isinstance(values, str):
        string = f'{self.name}({values})'
    elif self.NDIM == 0:
        string = f'{self.name}({objecttools.repr_(values)})'
    elif self.NDIM == 1:
        if brackets1d:
            string = objecttools.assignrepr_list(values, prefix, 72) + ')'
        else:
            string = objecttools.assignrepr_values(values, prefix, 72) + ')'
    else:
        string = objecttools.assignrepr_list2(values, prefix, 72) + ')'
    return '\n'.join(self.commentrepr + [string])
Return a valid string representation for the given |Variable| object.
398
def verify(self) -> None:
    nmbnan: int = numpy.sum(numpy.isnan(numpy.array(self.value)[self.mask]))
    if nmbnan:
        if nmbnan == 1:
            text = 'value has'
        else:
            text = 'values have'
        raise RuntimeError(
            f'For variable {objecttools.devicephrase(self)}, '
            f'{nmbnan} required {text} not been set yet.')
Raises a |RuntimeError| if at least one of the required values of a |Variable| object is |None| or |numpy.nan|. The descriptor mask defines which values are considered to be necessary.
399
def average_values(self, *args, **kwargs) -> float:
    try:
        if not self.NDIM:
            return self.value
        mask = self.get_submask(*args, **kwargs)
        if numpy.any(mask):
            weights = self.refweights[mask]
            return numpy.sum(weights * self[mask]) / numpy.sum(weights)
        return numpy.nan
    except BaseException:
        objecttools.augment_excmessage(
            f'While trying to calculate the mean value of variable '
            f'{objecttools.devicephrase(self)}')
Average the actual values of the |Variable| object.
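The masked branch computes a plain weighted mean. A minimal numeric check of that formula (the values and weights are invented):

    import numpy

    values = numpy.array([2.0, 4.0, 6.0])
    weights = numpy.array([1.0, 1.0, 2.0])
    mean = numpy.sum(weights * values) / numpy.sum(weights)
    assert mean == 4.5  # (2 + 4 + 12) / 4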