Dataset columns: idx (int64, 0–63k), question (string, 61–4.03k characters), target (string, 6–1.23k characters).
600
def print_textandtime ( text : str ) -> None : timestring = datetime . datetime . now ( ) . strftime ( '%Y-%m-%d %H:%M:%S.%f' ) print ( f'{text} ({timestring}).' )
Print the given string and the current date and time with high precision for logging purposes .
601
def write ( self , string : str ) -> None : self . logfile . write ( '\n' . join ( f'{self._string}{substring}' if substring else '' for substring in string . split ( '\n' ) ) )
Write the given string as explained in the main documentation on class |LogFileInterface| .
602
def solve_dv_dt_v1 ( self ) : der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess old = self . sequences . states . fastaccess_old new = self . sequences . states . fastaccess_new aid = self . sequences . aides . fastaccess flu . qa = 0. aid . v = old . v for _ in range ( der . nmbsubsteps ) : self . calc_vq ( ) self . interp_qa ( ) self . calc_v_qa ( ) flu . qa += aid . qa flu . qa /= der . nmbsubsteps new . v = aid . v
Solve the differential equation of HydPy-L.
603
def calc_vq_v1 ( self ) : der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess aid = self . sequences . aides . fastaccess aid . vq = 2. * aid . v + der . seconds / der . nmbsubsteps * flu . qz
Calculate the auxiliary term .
604
def interp_qa_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess aid = self . sequences . aides . fastaccess idx = der . toy [ self . idx_sim ] for jdx in range ( 1 , con . n ) : if der . vq [ idx , jdx ] >= aid . vq : break aid . qa = ( ( aid . vq - der . vq [ idx , jdx - 1 ] ) * ( con . q [ idx , jdx ] - con . q [ idx , jdx - 1 ] ) / ( der . vq [ idx , jdx ] - der . vq [ idx , jdx - 1 ] ) + con . q [ idx , jdx - 1 ] ) aid . qa = max ( aid . qa , 0. )
Calculate the lake outflow based on linear interpolation .
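For readers unfamiliar with the lookup-table approach, the linear interpolation performed by interp_qa_v1 can be sketched in plain Python. The arrays vq_nodes and q_nodes below are hypothetical stand-ins for the derived sequence vq and the control parameter q of a single time of year; this is an illustrative re-implementation, not the HydPy code path itself.

import numpy

def interpolate_outflow(vq_value, vq_nodes, q_nodes):
    """Piecewise linear interpolation analogous to interp_qa_v1.

    vq_nodes and q_nodes are assumed to hold at least two monotonically
    increasing values; results beyond the last node are extrapolated from
    the final segment, and negative results are clipped to zero.
    """
    jdx = 1
    for jdx in range(1, len(vq_nodes)):
        if vq_nodes[jdx] >= vq_value:
            break
    qa = ((vq_value - vq_nodes[jdx - 1])
          * (q_nodes[jdx] - q_nodes[jdx - 1])
          / (vq_nodes[jdx] - vq_nodes[jdx - 1])
          + q_nodes[jdx - 1])
    return max(qa, 0.0)

# A made-up three-point table: vq = 1.5 lies halfway between the last two nodes.
print(interpolate_outflow(1.5, numpy.array([0.0, 1.0, 2.0]),
                          numpy.array([0.0, 2.0, 6.0])))  # 4.0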
605
def calc_v_qa_v1 ( self ) : der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess aid = self . sequences . aides . fastaccess aid . qa = min ( aid . qa , flu . qz + der . nmbsubsteps / der . seconds * aid . v ) aid . v = max ( aid . v + der . seconds / der . nmbsubsteps * ( flu . qz - aid . qa ) , 0. )
Update the stored water volume based on the equation of continuity .
606
def interp_w_v1 ( self ) : con = self . parameters . control . fastaccess new = self . sequences . states . fastaccess_new for jdx in range ( 1 , con . n ) : if con . v [ jdx ] >= new . v : break new . w = ( ( new . v - con . v [ jdx - 1 ] ) * ( con . w [ jdx ] - con . w [ jdx - 1 ] ) / ( con . v [ jdx ] - con . v [ jdx - 1 ] ) + con . w [ jdx - 1 ] )
Calculate the actual water stage based on linear interpolation .
607
def corr_dw_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess old = self . sequences . states . fastaccess_old new = self . sequences . states . fastaccess_new idx = der . toy [ self . idx_sim ] if ( con . maxdw [ idx ] > 0. ) and ( ( old . w - new . w ) > con . maxdw [ idx ] ) : new . w = old . w - con . maxdw [ idx ] self . interp_v ( ) flu . qa = flu . qz + ( old . v - new . v ) / der . seconds
Adjust the water stage drop to the highest value allowed and correct the associated fluxes .
608
def modify_qa_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess idx = der . toy [ self . idx_sim ] flu . qa = max ( flu . qa - con . verzw [ idx ] , 0. )
Add water to or remove water from the calculated lake outflow .
609
def thresholds ( self ) : return numpy . array ( sorted ( self . _key2float ( key ) for key in self . _coefs ) , dtype = float )
Threshold values of the response functions .
610
def prepare_arrays ( sim = None , obs = None , node = None , skip_nan = False ) : if node : if sim is not None : raise ValueError ( 'Values are passed to both arguments `sim` and `node`, ' 'which is not allowed.' ) if obs is not None : raise ValueError ( 'Values are passed to both arguments `obs` and `node`, ' 'which is not allowed.' ) sim = node . sequences . sim . series obs = node . sequences . obs . series elif ( sim is not None ) and ( obs is None ) : raise ValueError ( 'A value is passed to argument `sim` ' 'but no value is passed to argument `obs`.' ) elif ( obs is not None ) and ( sim is None ) : raise ValueError ( 'A value is passed to argument `obs` ' 'but no value is passed to argument `sim`.' ) elif ( sim is None ) and ( obs is None ) : raise ValueError ( 'Neither a `Node` object is passed to argument `node` nor ' 'are arrays passed to arguments `sim` and `obs`.' ) sim = numpy . asarray ( sim ) obs = numpy . asarray ( obs ) if skip_nan : idxs = ~ numpy . isnan ( sim ) * ~ numpy . isnan ( obs ) sim = sim [ idxs ] obs = obs [ idxs ] return sim , obs
Prepare and return two |numpy| arrays based on the given arguments .
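As a minimal illustration of the skip_nan option, the following standalone snippet applies the same masking expression that prepare_arrays uses when plain arrays are passed; the data values are invented for the example.

import numpy

# Hypothetical series with one missing observation (second entry).
sim = numpy.array([1.0, 2.0, 3.0, 4.0])
obs = numpy.array([1.2, numpy.nan, 2.9, 4.1])

# The masking applied by prepare_arrays when skip_nan=True:
idxs = ~numpy.isnan(sim) * ~numpy.isnan(obs)
print(sim[idxs])  # [1. 3. 4.]
print(obs[idxs])  # [1.2 2.9 4.1]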
611
def nse ( sim = None , obs = None , node = None , skip_nan = False ) : sim , obs = prepare_arrays ( sim , obs , node , skip_nan ) return 1. - numpy . sum ( ( sim - obs ) ** 2 ) / numpy . sum ( ( obs - numpy . mean ( obs ) ) ** 2 )
Calculate the efficiency criterion after Nash & Sutcliffe.
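A self-contained toy computation of the Nash-Sutcliffe efficiency using the same formula as the function above; the two input series are made up for illustration.

import numpy

def nse_example(sim, obs):
    # 1 minus the sum of squared errors divided by the total squared
    # deviation of the observations from their mean.
    sim, obs = numpy.asarray(sim), numpy.asarray(obs)
    return 1. - numpy.sum((sim - obs) ** 2) / numpy.sum((obs - numpy.mean(obs)) ** 2)

print(nse_example([2.0, 3.0, 4.0], [2.0, 3.0, 4.0]))  # 1.0 (perfect agreement)
print(nse_example([3.0, 3.0, 3.0], [2.0, 3.0, 4.0]))  # 0.0 (no better than the observed mean)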
612
def bias_abs ( sim = None , obs = None , node = None , skip_nan = False ) : sim , obs = prepare_arrays ( sim , obs , node , skip_nan ) return numpy . mean ( sim - obs )
Calculate the absolute difference between the means of the simulated and the observed values .
613
def std_ratio ( sim = None , obs = None , node = None , skip_nan = False ) : sim , obs = prepare_arrays ( sim , obs , node , skip_nan ) return numpy . std ( sim ) / numpy . std ( obs ) - 1.
Calculate the ratio between the standard deviations of the simulated and the observed values, reduced by one (so that equal variability yields zero).
614
def corr ( sim = None , obs = None , node = None , skip_nan = False ) : sim , obs = prepare_arrays ( sim , obs , node , skip_nan ) return numpy . corrcoef ( sim , obs ) [ 0 , 1 ]
Calculate the product-moment correlation coefficient after Pearson.
615
def hsepd_pdf ( sigma1 , sigma2 , xi , beta , sim = None , obs = None , node = None , skip_nan = False ) : sim , obs = prepare_arrays ( sim , obs , node , skip_nan ) sigmas = _pars_h ( sigma1 , sigma2 , sim ) mu_xi , sigma_xi , w_beta , c_beta = _pars_sepd ( xi , beta ) x , mu = obs , sim a = ( x - mu ) / sigmas a_xi = numpy . empty ( a . shape ) idxs = mu_xi + sigma_xi * a < 0. a_xi [ idxs ] = numpy . absolute ( xi * ( mu_xi + sigma_xi * a [ idxs ] ) ) a_xi [ ~ idxs ] = numpy . absolute ( 1. / xi * ( mu_xi + sigma_xi * a [ ~ idxs ] ) ) ps = ( 2. * sigma_xi / ( xi + 1. / xi ) * w_beta * numpy . exp ( - c_beta * a_xi ** ( 2. / ( 1. + beta ) ) ) ) / sigmas return ps
Calculate the probability densities based on the heteroskedastic skewed exponential power distribution .
616
def calc_mean_time ( timepoints , weights ) : timepoints = numpy . array ( timepoints ) weights = numpy . array ( weights ) validtools . test_equal_shape ( timepoints = timepoints , weights = weights ) validtools . test_non_negative ( weights = weights ) return numpy . dot ( timepoints , weights ) / numpy . sum ( weights )
Return the weighted mean of the given timepoints .
617
def calc_mean_time_deviation ( timepoints , weights , mean_time = None ) : timepoints = numpy . array ( timepoints ) weights = numpy . array ( weights ) validtools . test_equal_shape ( timepoints = timepoints , weights = weights ) validtools . test_non_negative ( weights = weights ) if mean_time is None : mean_time = calc_mean_time ( timepoints , weights ) return ( numpy . sqrt ( numpy . dot ( weights , ( timepoints - mean_time ) ** 2 ) / numpy . sum ( weights ) ) )
Return the weighted deviation of the given timepoints from their mean time .
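Both weighted statistics are easy to check by hand; the following standalone example uses invented time points and weights.

import numpy

timepoints = numpy.array([1.0, 2.0, 3.0])  # e.g. hours
weights = numpy.array([1.0, 2.0, 1.0])     # e.g. hydrograph ordinates

mean_time = numpy.dot(timepoints, weights) / numpy.sum(weights)
deviation = numpy.sqrt(
    numpy.dot(weights, (timepoints - mean_time) ** 2) / numpy.sum(weights))
print(mean_time)  # 2.0
print(deviation)  # approximately 0.707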
618
def evaluationtable ( nodes , criteria , nodenames = None , critnames = None , skip_nan = False ) : if nodenames : if len ( nodes ) != len ( nodenames ) : raise ValueError ( '%d node objects are given which does not match with ' 'number of given alternative names beeing %s.' % ( len ( nodes ) , len ( nodenames ) ) ) else : nodenames = [ node . name for node in nodes ] if critnames : if len ( criteria ) != len ( critnames ) : raise ValueError ( '%d criteria functions are given which does not match ' 'with number of given alternative names beeing %s.' % ( len ( criteria ) , len ( critnames ) ) ) else : critnames = [ crit . __name__ for crit in criteria ] data = numpy . empty ( ( len ( nodes ) , len ( criteria ) ) , dtype = float ) for idx , node in enumerate ( nodes ) : sim , obs = prepare_arrays ( None , None , node , skip_nan ) for jdx , criterion in enumerate ( criteria ) : data [ idx , jdx ] = criterion ( sim , obs ) table = pandas . DataFrame ( data = data , index = nodenames , columns = critnames ) return table
Return a table containing the results of the given evaluation criteria for the given |Node| objects .
619
def set_primary_parameters ( self , ** kwargs ) : given = sorted ( kwargs . keys ( ) ) required = sorted ( self . _PRIMARY_PARAMETERS ) if given == required : for ( key , value ) in kwargs . items ( ) : setattr ( self , key , value ) else : raise ValueError ( 'When passing primary parameter values as initialization ' 'arguments of the instantaneous unit hydrograph class `%s`, ' 'or when using method `set_primary_parameters, one has to ' 'to define all values at once via keyword arguments. ' 'But instead of the primary parameter names `%s` the ' 'following keywords were given: %s.' % ( objecttools . classname ( self ) , ', ' . join ( required ) , ', ' . join ( given ) ) )
Set all primary parameters at once .
620
def update ( self ) : del self . ma . coefs del self . arma . ma_coefs del self . arma . ar_coefs if self . primary_parameters_complete : self . calc_secondary_parameters ( ) else : for secpar in self . _SECONDARY_PARAMETERS . values ( ) : secpar . __delete__ ( self )
Delete the coefficients of the pure MA model and also all MA and AR coefficients of the ARMA model . Also calculate or delete the values of all secondary iuh parameters depending on the completeness of the values of the primary parameters .
621
def delay_response_series ( self ) : delays = [ ] responses = [ ] sum_responses = 0. for t in itertools . count ( self . dt_response / 2. , self . dt_response ) : delays . append ( t ) response = self ( t ) responses . append ( response ) sum_responses += self . dt_response * response if ( sum_responses > .9 ) and ( response < self . smallest_response ) : break return numpy . array ( delays ) , numpy . array ( responses )
A tuple of two numpy arrays which hold the time delays and the associated iuh values respectively .
622
def plot ( self , threshold = None , ** kwargs ) : delays , responses = self . delay_response_series pyplot . plot ( delays , responses , ** kwargs ) pyplot . xlabel ( 'time' ) pyplot . ylabel ( 'response' ) if threshold is not None : threshold = numpy . clip ( threshold , 0. , 1. ) cumsum = numpy . cumsum ( responses ) idx = numpy . where ( cumsum >= threshold * cumsum [ - 1 ] ) [ 0 ] [ 0 ] pyplot . xlim ( 0. , delays [ idx ] )
Plot the instantaneous unit hydrograph.
623
def moment1 ( self ) : delays , response = self . delay_response_series return statstools . calc_mean_time ( delays , response )
The first time delay weighted statistical moment of the instantaneous unit hydrograph .
624
def moment2 ( self ) : moment1 = self . moment1 delays , response = self . delay_response_series return statstools . calc_mean_time_deviation ( delays , response , moment1 )
The second time delay weighted statistical moment of the instantaneous unit hydrograph.
625
def calc_secondary_parameters ( self ) : self . a = self . x / ( 2. * self . d ** .5 ) self . b = self . u / ( 2. * self . d ** .5 )
Determine the values of the secondary parameters a and b .
626
def calc_secondary_parameters ( self ) : self . c = 1. / ( self . k * special . gamma ( self . n ) )
Determine the value of the secondary parameter c .
627
def post ( self , request , pk ) : location = self . get_object ( ) present_prefixes = [ x . split ( '-' ) [ 0 ] for x in request . POST . keys ( ) ] day_forms = OrderedDict ( ) for day_no , day_name in WEEKDAYS : for slot_no in ( 1 , 2 ) : prefix = self . form_prefix ( day_no , slot_no ) if prefix not in present_prefixes : continue day_forms [ prefix ] = ( day_no , Slot ( request . POST , prefix = prefix ) ) if all ( [ day_form [ 1 ] . is_valid ( ) for pre , day_form in day_forms . items ( ) ] ) : OpeningHours . objects . filter ( company = location ) . delete ( ) for prefix , day_form in day_forms . items ( ) : day , form = day_form opens , shuts = [ str_to_time ( form . cleaned_data [ x ] ) for x in ( 'opens' , 'shuts' ) ] if opens != shuts : OpeningHours ( from_hour = opens , to_hour = shuts , company = location , weekday = day ) . save ( ) return redirect ( request . path_info )
Clean the data and save opening hours in the database . Old opening hours are purged before new ones are saved .
628
def get ( self , request , pk ) : location = self . get_object ( ) two_sets = False closed = None opening_hours = { } for o in OpeningHours . objects . filter ( company = location ) : opening_hours . setdefault ( o . weekday , [ ] ) . append ( o ) days = [ ] for day_no , day_name in WEEKDAYS : if day_no not in opening_hours . keys ( ) : if opening_hours : closed = True ini1 , ini2 = [ None , None ] else : closed = False ini = [ { 'opens' : time_to_str ( oh . from_hour ) , 'shuts' : time_to_str ( oh . to_hour ) } for oh in opening_hours [ day_no ] ] ini += [ None ] * ( 2 - len ( ini [ : 2 ] ) ) ini1 , ini2 = ini [ : 2 ] if ini2 : two_sets = True days . append ( { 'name' : day_name , 'number' : day_no , 'slot1' : Slot ( prefix = self . form_prefix ( day_no , 1 ) , initial = ini1 ) , 'slot2' : Slot ( prefix = self . form_prefix ( day_no , 2 ) , initial = ini2 ) , 'closed' : closed } ) return render ( request , self . template_name , { 'days' : days , 'two_sets' : two_sets , 'location' : location , } )
Initialize the editing form.
629
def calc_qjoints_v1 ( self ) : der = self . parameters . derived . fastaccess new = self . sequences . states . fastaccess_new old = self . sequences . states . fastaccess_old for j in range ( der . nmbsegments ) : new . qjoints [ j + 1 ] = ( der . c1 * new . qjoints [ j ] + der . c2 * old . qjoints [ j ] + der . c3 * old . qjoints [ j + 1 ] )
Apply the routing equation .
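The loop above applies a working equation of the Muskingum type; whether the coefficients stem from a Muskingum parameterisation is an assumption here and not stated by the snippet itself. A standalone sketch with hypothetical coefficients that sum to one (as required for mass conservation):

def route_segment(q_new_upper, q_old_upper, q_old_lower, c1=0.25, c2=0.25, c3=0.5):
    # One application of the working equation used in calc_qjoints_v1.
    return c1 * q_new_upper + c2 * q_old_upper + c3 * q_old_lower

print(route_segment(2.0, 1.0, 1.0))  # 1.25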
630
def pick_q_v1 ( self ) : inl = self . sequences . inlets . fastaccess new = self . sequences . states . fastaccess_new new . qjoints [ 0 ] = 0. for idx in range ( inl . len_q ) : new . qjoints [ 0 ] += inl . q [ idx ] [ 0 ]
Assign the actual value of the inlet sequence to the upper joint of the subreach upstream .
631
def pass_q_v1 ( self ) : der = self . parameters . derived . fastaccess new = self . sequences . states . fastaccess_new out = self . sequences . outlets . fastaccess out . q [ 0 ] += new . qjoints [ der . nmbsegments ]
Assign the actual value of the lower joint of the subreach downstream to the outlet sequence.
632
def _detect_encoding ( data = None ) : import locale enc_list = [ 'utf-8' , 'latin-1' , 'iso8859-1' , 'iso8859-2' , 'utf-16' , 'cp720' ] code = locale . getpreferredencoding ( False ) if data is None : return code if code . lower ( ) not in enc_list : enc_list . insert ( 0 , code . lower ( ) ) for c in enc_list : try : for line in data : line . decode ( c ) except ( UnicodeDecodeError , UnicodeError , AttributeError ) : continue return c print ( "Encoding not detected. Please pass encoding value manually" )
Return the default system encoding. If data is passed, try to decode the data with the default system encoding or with one of a short list of encodings to test.
633
def parameterstep ( timestep = None ) : if timestep is not None : parametertools . Parameter . parameterstep ( timestep ) namespace = inspect . currentframe ( ) . f_back . f_locals model = namespace . get ( 'model' ) if model is None : model = namespace [ 'Model' ] ( ) namespace [ 'model' ] = model if hydpy . pub . options . usecython and 'cythonizer' in namespace : cythonizer = namespace [ 'cythonizer' ] namespace [ 'cythonmodule' ] = cythonizer . cymodule model . cymodel = cythonizer . cymodule . Model ( ) namespace [ 'cymodel' ] = model . cymodel model . cymodel . parameters = cythonizer . cymodule . Parameters ( ) model . cymodel . sequences = cythonizer . cymodule . Sequences ( ) for numpars_name in ( 'NumConsts' , 'NumVars' ) : if hasattr ( cythonizer . cymodule , numpars_name ) : numpars_new = getattr ( cythonizer . cymodule , numpars_name ) ( ) numpars_old = getattr ( model , numpars_name . lower ( ) ) for ( name_numpar , numpar ) in vars ( numpars_old ) . items ( ) : setattr ( numpars_new , name_numpar , numpar ) setattr ( model . cymodel , numpars_name . lower ( ) , numpars_new ) for name in dir ( model . cymodel ) : if ( not name . startswith ( '_' ) ) and hasattr ( model , name ) : setattr ( model , name , getattr ( model . cymodel , name ) ) if 'Parameters' not in namespace : namespace [ 'Parameters' ] = parametertools . Parameters model . parameters = namespace [ 'Parameters' ] ( namespace ) if 'Sequences' not in namespace : namespace [ 'Sequences' ] = sequencetools . Sequences model . sequences = namespace [ 'Sequences' ] ( ** namespace ) namespace [ 'parameters' ] = model . parameters for pars in model . parameters : namespace [ pars . name ] = pars namespace [ 'sequences' ] = model . sequences for seqs in model . sequences : namespace [ seqs . name ] = seqs if 'Masks' in namespace : model . masks = namespace [ 'Masks' ] ( model ) namespace [ 'masks' ] = model . masks try : namespace . update ( namespace [ 'CONSTANTS' ] ) except KeyError : pass focus = namespace . get ( 'focus' ) for par in model . parameters . control : try : if ( focus is None ) or ( par is focus ) : namespace [ par . name ] = par else : namespace [ par . name ] = lambda * args , ** kwargs : None except AttributeError : pass
Define a parameter time step size within a parameter control file .
634
def reverse_model_wildcard_import ( ) : namespace = inspect . currentframe ( ) . f_back . f_locals model = namespace . get ( 'model' ) if model is not None : for subpars in model . parameters : for par in subpars : namespace . pop ( par . name , None ) namespace . pop ( objecttools . classname ( par ) , None ) namespace . pop ( subpars . name , None ) namespace . pop ( objecttools . classname ( subpars ) , None ) for subseqs in model . sequences : for seq in subseqs : namespace . pop ( seq . name , None ) namespace . pop ( objecttools . classname ( seq ) , None ) namespace . pop ( subseqs . name , None ) namespace . pop ( objecttools . classname ( subseqs ) , None ) for name in ( 'parameters' , 'sequences' , 'masks' , 'model' , 'Parameters' , 'Sequences' , 'Masks' , 'Model' , 'cythonizer' , 'cymodel' , 'cythonmodule' ) : namespace . pop ( name , None ) for key in list ( namespace . keys ( ) ) : try : if namespace [ key ] . __module__ == model . __module__ : del namespace [ key ] except AttributeError : pass
Clear the local namespace from a model wildcard import .
635
def prepare_model ( module : Union [ types . ModuleType , str ] , timestep : PeriodABC . ConstrArg = None ) : if timestep is not None : parametertools . Parameter . parameterstep ( timetools . Period ( timestep ) ) try : model = module . Model ( ) except AttributeError : module = importlib . import_module ( f'hydpy.models.{module}' ) model = module . Model ( ) if hydpy . pub . options . usecython and hasattr ( module , 'cythonizer' ) : cymodule = module . cythonizer . cymodule cymodel = cymodule . Model ( ) cymodel . parameters = cymodule . Parameters ( ) cymodel . sequences = cymodule . Sequences ( ) model . cymodel = cymodel for numpars_name in ( 'NumConsts' , 'NumVars' ) : if hasattr ( cymodule , numpars_name ) : numpars_new = getattr ( cymodule , numpars_name ) ( ) numpars_old = getattr ( model , numpars_name . lower ( ) ) for ( name_numpar , numpar ) in vars ( numpars_old ) . items ( ) : setattr ( numpars_new , name_numpar , numpar ) setattr ( cymodel , numpars_name . lower ( ) , numpars_new ) for name in dir ( cymodel ) : if ( not name . startswith ( '_' ) ) and hasattr ( model , name ) : setattr ( model , name , getattr ( cymodel , name ) ) dict_ = { 'cythonmodule' : cymodule , 'cymodel' : cymodel } else : dict_ = { } dict_ . update ( vars ( module ) ) dict_ [ 'model' ] = model if hasattr ( module , 'Parameters' ) : model . parameters = module . Parameters ( dict_ ) else : model . parameters = parametertools . Parameters ( dict_ ) if hasattr ( module , 'Sequences' ) : model . sequences = module . Sequences ( ** dict_ ) else : model . sequences = sequencetools . Sequences ( ** dict_ ) if hasattr ( module , 'Masks' ) : model . masks = module . Masks ( model ) return model
Prepare and return the model of the given module .
636
def simulationstep ( timestep ) : if hydpy . pub . options . warnsimulationstep : warnings . warn ( 'Note that the applied function `simulationstep` is intended for ' 'testing purposes only. When doing a HydPy simulation, parameter ' 'values are initialised based on the actual simulation time step ' 'as defined under `pub.timegrids.stepsize` and the value given ' 'to `simulationstep` is ignored.' ) parametertools . Parameter . simulationstep ( timestep )
Define a simulation time step size for testing purposes within a parameter control file .
637
def controlcheck ( controldir = 'default' , projectdir = None , controlfile = None ) : namespace = inspect . currentframe ( ) . f_back . f_locals model = namespace . get ( 'model' ) if model is None : if not controlfile : controlfile = os . path . split ( namespace [ '__file__' ] ) [ - 1 ] if projectdir is None : projectdir = ( os . path . split ( os . path . split ( os . path . split ( os . getcwd ( ) ) [ 0 ] ) [ 0 ] ) [ - 1 ] ) dirpath = os . path . abspath ( os . path . join ( '..' , '..' , '..' , projectdir , 'control' , controldir ) ) class CM ( filetools . ControlManager ) : currentpath = dirpath model = CM ( ) . load_file ( filename = controlfile ) [ 'model' ] model . parameters . update ( ) namespace [ 'model' ] = model for name in ( 'states' , 'logs' ) : subseqs = getattr ( model . sequences , name , None ) if subseqs is not None : for seq in subseqs : namespace [ seq . name ] = seq
Define the corresponding control file within a condition file .
638
def update ( self ) : con = self . subpars . pars . control temp = con . zonearea . values . copy ( ) temp [ con . zonetype . values == GLACIER ] = 0. temp [ con . zonetype . values == ILAKE ] = 0. self ( numpy . sum ( temp ) / con . area )
Update |RelSoilArea| based on |Area|, |ZoneArea|, and |ZoneType|.
639
def update ( self ) : maxbaz = self . subpars . pars . control . maxbaz . value quh = self . subpars . pars . model . sequences . logs . quh if maxbaz <= 1. : self . shape = 1 self ( 1. ) quh . shape = 1 else : full = maxbaz if ( full % 1. ) < 1e-4 : full //= 1. full_f = int ( numpy . floor ( full ) ) full_c = int ( numpy . ceil ( full ) ) half = full / 2. half_f = int ( numpy . floor ( half ) ) half_c = int ( numpy . ceil ( half ) ) full_2 = full ** 2. self . shape = full_c uh = self . values quh . shape = full_c points = numpy . arange ( 1 , half_f + 1 ) uh [ : half_f ] = ( 2. * points - 1. ) / ( 2. * full_2 ) if numpy . mod ( half , 1. ) != 0. : uh [ half_f ] = ( ( half_c - half ) / full + ( 2 * half ** 2. - half_f ** 2. - half_c ** 2. ) / ( 2. * full_2 ) ) points = numpy . arange ( half_c + 1. , full_f + 1. ) uh [ half_c : full_f ] = 1. / full - ( 2. * points - 1. ) / ( 2. * full_2 ) if numpy . mod ( full , 1. ) != 0. : uh [ full_f ] = ( ( full - full_f ) / full - ( full_2 - full_f ** 2. ) / ( 2. * full_2 ) ) self ( uh / numpy . sum ( uh ) )
Update |UH| based on |MaxBaz| .
640
def update ( self ) : self ( self . subpars . pars . control . area * 1000. / self . subpars . qfactor . simulationstep . seconds )
Update |QFactor| based on |Area| and the current simulation step size .
641
def nmb_neurons ( self ) -> Tuple [ int , ... ] : return tuple ( numpy . asarray ( self . _cann . nmb_neurons ) )
Number of neurons of the hidden layers .
642
def shape_weights_hidden ( self ) -> Tuple [ int , int , int ] : if self . nmb_layers > 1 : nmb_neurons = self . nmb_neurons return ( self . nmb_layers - 1 , max ( nmb_neurons [ : - 1 ] ) , max ( nmb_neurons [ 1 : ] ) ) return 0 , 0 , 0
Shape of the array containing the weights between the hidden layers.
643
def nmb_weights_hidden ( self ) -> int : nmb = 0 for idx_layer in range ( self . nmb_layers - 1 ) : nmb += self . nmb_neurons [ idx_layer ] * self . nmb_neurons [ idx_layer + 1 ] return nmb
Number of hidden weights .
644
def verify ( self ) -> None : if not self . __protectedproperties . allready ( self ) : raise RuntimeError ( 'The shape of the the artificial neural network ' 'parameter %s has not been defined so far.' % objecttools . elementphrase ( self ) )
Raise a |RuntimeError| if the network's shape is not completely defined.
645
def assignrepr ( self , prefix ) -> str : prefix = '%s%s(' % ( prefix , self . name ) blanks = len ( prefix ) * ' ' lines = [ objecttools . assignrepr_value ( self . nmb_inputs , '%snmb_inputs=' % prefix ) + ',' , objecttools . assignrepr_tuple ( self . nmb_neurons , '%snmb_neurons=' % blanks ) + ',' , objecttools . assignrepr_value ( self . nmb_outputs , '%snmb_outputs=' % blanks ) + ',' , objecttools . assignrepr_list2 ( self . weights_input , '%sweights_input=' % blanks ) + ',' ] if self . nmb_layers > 1 : lines . append ( objecttools . assignrepr_list3 ( self . weights_hidden , '%sweights_hidden=' % blanks ) + ',' ) lines . append ( objecttools . assignrepr_list2 ( self . weights_output , '%sweights_output=' % blanks ) + ',' ) lines . append ( objecttools . assignrepr_list2 ( self . intercepts_hidden , '%sintercepts_hidden=' % blanks ) + ',' ) lines . append ( objecttools . assignrepr_list ( self . intercepts_output , '%sintercepts_output=' % blanks ) + ')' ) return '\n' . join ( lines )
Return a string representation of the actual |anntools . ANN| object that is prefixed with the given string .
646
def refresh ( self ) -> None : if self . _do_refresh : if self . anns : self . __sann = annutils . SeasonalANN ( self . anns ) setattr ( self . fastaccess , self . name , self . _sann ) self . _set_shape ( ( None , self . _sann . nmb_anns ) ) if self . _sann . nmb_anns > 1 : self . _interp ( ) else : self . _sann . ratios [ : , 0 ] = 1. self . verify ( ) else : self . __sann = None
Prepare the actual |anntools . SeasonalANN| object for calculations .
647
def verify ( self ) -> None : if not self . anns : self . _toy2ann . clear ( ) raise RuntimeError ( 'Seasonal artificial neural network collections need ' 'to handle at least one "normal" single neural network, ' 'but for the seasonal neural network `%s` of element ' '`%s` none has been defined so far.' % ( self . name , objecttools . devicename ( self ) ) ) for toy , ann_ in self : ann_ . verify ( ) if ( ( self . nmb_inputs != ann_ . nmb_inputs ) or ( self . nmb_outputs != ann_ . nmb_outputs ) ) : self . _toy2ann . clear ( ) raise RuntimeError ( 'The number of input and output values of all neural ' 'networks contained by a seasonal neural network ' 'collection must be identical and be known by the ' 'containing object. But the seasonal neural ' 'network collection `%s` of element `%s` assumes ' '`%d` input and `%d` output values, while the network ' 'corresponding to the time of year `%s` requires ' '`%d` input and `%d` output values.' % ( self . name , objecttools . devicename ( self ) , self . nmb_inputs , self . nmb_outputs , toy , ann_ . nmb_inputs , ann_ . nmb_outputs ) )
Raise a |RuntimeError| and remove all handled neural networks if they are defined inconsistently.
648
def shape ( self ) -> Tuple [ int , ... ] : return tuple ( int ( sub ) for sub in self . ratios . shape )
The shape of array |anntools . SeasonalANN . ratios| .
649
def _set_shape ( self , shape ) : try : shape = ( int ( shape ) , ) except TypeError : pass shp = list ( shape ) shp [ 0 ] = timetools . Period ( '366d' ) / self . simulationstep shp [ 0 ] = int ( numpy . ceil ( round ( shp [ 0 ] , 10 ) ) ) getattr ( self . fastaccess , self . name ) . ratios = numpy . zeros ( shp , dtype = float )
Private on purpose .
650
def toys ( self ) -> Tuple [ timetools . TOY , ... ] : return tuple ( toy for ( toy , _ ) in self )
A sorted |tuple| of all contained |TOY| objects .
651
def plot ( self , xmin , xmax , idx_input = 0 , idx_output = 0 , points = 100 , ** kwargs ) -> None : for toy , ann_ in self : ann_ . plot ( xmin , xmax , idx_input = idx_input , idx_output = idx_output , points = points , label = str ( toy ) , ** kwargs ) pyplot . legend ( )
Call method |anntools . ANN . plot| of all |anntools . ANN| objects handled by the actual |anntools . SeasonalANN| object .
652
def specstring ( self ) : if self . subgroup is None : variable = self . variable else : variable = f'{self.subgroup}.{self.variable}' if self . series : variable = f'{variable}.series' return variable
The string corresponding to the current values of subgroup, variable, and series.
653
def collect_variables ( self , selections ) -> None : self . insert_variables ( self . device2target , self . targetspecs , selections )
Apply method |ExchangeItem . insert_variables| to collect the relevant target variables handled by the devices of the given |Selections| object .
654
def update_variables ( self ) -> None : value = self . value for variable in self . device2target . values ( ) : self . update_variable ( variable , value )
Assign the current object's |ChangeItem.value| to the values of the target variables.
655
def collect_variables ( self , selections ) -> None : super ( ) . collect_variables ( selections ) self . insert_variables ( self . device2base , self . basespecs , selections )
Apply method |ChangeItem . collect_variables| of the base class |ChangeItem| and also apply method |ExchangeItem . insert_variables| of class |ExchangeItem| to collect the relevant base variables handled by the devices of the given |Selections| object .
656
def update_variables ( self ) -> None : value = self . value for device , target in self . device2target . items ( ) : base = self . device2base [ device ] try : result = base . value + value except BaseException : raise objecttools . augment_excmessage ( f'When trying to add the value(s) `{value}` of ' f'AddItem `{self.name}` and the value(s) `{base.value}` ' f'of variable {objecttools.devicephrase(base)}' ) self . update_variable ( target , result )
Add the general |ChangeItem.value| to the |Device|-specific base variable and assign the result to the respective target variable.
657
def collect_variables ( self , selections ) -> None : super ( ) . collect_variables ( selections ) for device in sorted ( self . device2target . keys ( ) , key = lambda x : x . name ) : self . _device2name [ device ] = f'{device.name}_{self.target}' for target in self . device2target . values ( ) : self . ndim = target . NDIM if self . targetspecs . series : self . ndim += 1 break
Apply method |ExchangeItem . collect_variables| of the base class |ExchangeItem| and determine the ndim attribute of the current |ChangeItem| object afterwards .
658
def yield_name2value ( self , idx1 = None , idx2 = None ) -> Iterator [ Tuple [ str , str ] ] : for device , name in self . _device2name . items ( ) : target = self . device2target [ device ] if self . targetspecs . series : values = target . series [ idx1 : idx2 ] else : values = target . values if self . ndim == 0 : values = objecttools . repr_ ( float ( values ) ) else : values = objecttools . repr_list ( values . tolist ( ) ) yield name , values
Sequentially return name-value pairs describing the current state of the target variables.
659
def iso_day_to_weekday ( d ) : if int ( d ) == utils . get_now ( ) . isoweekday ( ) : return _ ( "today" ) for w in WEEKDAYS : if w [ 0 ] == int ( d ) : return w [ 1 ]
Returns the weekday's name given an ISO weekday number; "today" if today is the same weekday.
660
def is_open ( location = None , attr = None ) : obj = utils . is_open ( location ) if obj is False : return False if attr is not None : return getattr ( obj , attr ) return obj
Returns False if the location is currently closed, otherwise the OpeningHours object (or the requested attribute of it) showing that the location is currently open.
661
def opening_hours ( location = None , concise = False ) : template_name = 'openinghours/opening_hours_list.html' days = [ ] if location : ohrs = OpeningHours . objects . filter ( company = location ) else : try : Location = utils . get_premises_model ( ) ohrs = Location . objects . first ( ) . openinghours_set . all ( ) except AttributeError : raise Exception ( "You must define some opening hours" " to use the opening hours tags." ) ohrs . order_by ( 'weekday' , 'from_hour' ) for o in ohrs : days . append ( { 'day_number' : o . weekday , 'name' : o . get_weekday_display ( ) , 'from_hour' : o . from_hour , 'to_hour' : o . to_hour , 'hours' : '%s%s to %s%s' % ( o . from_hour . strftime ( '%I:%M' ) . lstrip ( '0' ) , o . from_hour . strftime ( '%p' ) . lower ( ) , o . to_hour . strftime ( '%I:%M' ) . lstrip ( '0' ) , o . to_hour . strftime ( '%p' ) . lower ( ) ) } ) open_days = [ o . weekday for o in ohrs ] for day_number , day_name in WEEKDAYS : if day_number not in open_days : days . append ( { 'day_number' : day_number , 'name' : day_name , 'hours' : 'Closed' } ) days = sorted ( days , key = lambda k : k [ 'day_number' ] ) if concise : template_name = 'openinghours/opening_hours_list_concise.html' concise_days = [ ] current_set = { } for day in days : if 'hours' not in current_set . keys ( ) : current_set = { 'day_names' : [ day [ 'name' ] ] , 'hours' : day [ 'hours' ] } elif day [ 'hours' ] != current_set [ 'hours' ] : concise_days . append ( current_set ) current_set = { 'day_names' : [ day [ 'name' ] ] , 'hours' : day [ 'hours' ] } else : current_set [ 'day_names' ] . append ( day [ 'name' ] ) concise_days . append ( current_set ) for day_set in concise_days : if len ( day_set [ 'day_names' ] ) > 2 : day_set [ 'day_names' ] = '%s to %s' % ( day_set [ 'day_names' ] [ 0 ] , day_set [ 'day_names' ] [ - 1 ] ) elif len ( day_set [ 'day_names' ] ) > 1 : day_set [ 'day_names' ] = '%s and %s' % ( day_set [ 'day_names' ] [ 0 ] , day_set [ 'day_names' ] [ - 1 ] ) else : day_set [ 'day_names' ] = '%s' % day_set [ 'day_names' ] [ 0 ] days = concise_days template = get_template ( template_name ) return template . render ( { 'days' : days } )
Creates a rendered listing of hours .
662
def prepare_everything ( self ) : self . prepare_network ( ) self . init_models ( ) self . load_conditions ( ) with hydpy . pub . options . warnmissingobsfile ( False ) : self . prepare_nodeseries ( ) self . prepare_modelseries ( ) self . load_inputseries ( )
Convenience method to make the actual |HydPy| instance runnable.
663
def save_controls ( self , parameterstep = None , simulationstep = None , auxfiler = None ) : self . elements . save_controls ( parameterstep = parameterstep , simulationstep = simulationstep , auxfiler = auxfiler )
Call method |Elements . save_controls| of the |Elements| object currently handled by the |HydPy| object .
664
def networkproperties ( self ) : print ( 'Number of nodes: %d' % len ( self . nodes ) ) print ( 'Number of elements: %d' % len ( self . elements ) ) print ( 'Number of end nodes: %d' % len ( self . endnodes ) ) print ( 'Number of distinct networks: %d' % len ( self . numberofnetworks ) ) print ( 'Applied node variables: %s' % ', ' . join ( self . variables ) )
Print out some properties of the network defined by the |Node| and |Element| objects currently handled by the |HydPy| object .
665
def numberofnetworks ( self ) : sels1 = selectiontools . Selections ( ) sels2 = selectiontools . Selections ( ) complete = selectiontools . Selection ( 'complete' , self . nodes , self . elements ) for node in self . endnodes : sel = complete . copy ( node . name ) . select_upstream ( node ) sels1 += sel sels2 += sel . copy ( node . name ) for sel1 in sels1 : for sel2 in sels2 : if sel1 . name != sel2 . name : sel1 -= sel2 for name in list ( sels1 . names ) : if not sels1 [ name ] . elements : del sels1 [ name ] return sels1
The number of distinct networks defined by the |Node| and |Element| objects currently handled by the |HydPy| object.
666
def endnodes ( self ) : endnodes = devicetools . Nodes ( ) for node in self . nodes : for element in node . exits : if ( ( element in self . elements ) and ( node not in element . receivers ) ) : break else : endnodes += node return endnodes
|Nodes| object containing all |Node| objects currently handled by the |HydPy| object which define a downstream end point of a network .
667
def variables ( self ) : variables = set ( [ ] ) for node in self . nodes : variables . add ( node . variable ) return sorted ( variables )
Sorted list of strings summarizing all variables handled by the |Node| objects.
668
def simindices ( self ) : return ( hydpy . pub . timegrids . init [ hydpy . pub . timegrids . sim . firstdate ] , hydpy . pub . timegrids . init [ hydpy . pub . timegrids . sim . lastdate ] )
Tuple containing the start and end index of the simulation period regarding the initialization period defined by the |Timegrids| object stored in module |pub| .
669
def open_files ( self , idx = 0 ) : self . elements . open_files ( idx = idx ) self . nodes . open_files ( idx = idx )
Call method |Devices . open_files| of the |Nodes| and |Elements| objects currently handled by the |HydPy| object .
670
def update_devices ( self , selection = None ) : if selection is not None : self . nodes = selection . nodes self . elements = selection . elements self . _update_deviceorder ( )
Determines the order in which the |Node| and |Element| objects currently handled by the |HydPy| objects need to be processed during a simulation time step . Optionally a |Selection| object for defining new |Node| and |Element| objects can be passed .
671
def methodorder ( self ) : funcs = [ ] for node in self . nodes : if node . deploymode == 'oldsim' : funcs . append ( node . sequences . fastaccess . load_simdata ) elif node . deploymode == 'obs' : funcs . append ( node . sequences . fastaccess . load_obsdata ) for node in self . nodes : if node . deploymode != 'oldsim' : funcs . append ( node . reset ) for device in self . deviceorder : if isinstance ( device , devicetools . Element ) : funcs . append ( device . model . doit ) for element in self . elements : if element . senders : funcs . append ( element . model . update_senders ) for element in self . elements : if element . receivers : funcs . append ( element . model . update_receivers ) for element in self . elements : funcs . append ( element . model . save_data ) for node in self . nodes : if node . deploymode != 'oldsim' : funcs . append ( node . sequences . fastaccess . save_simdata ) return funcs
A list containing all methods of all |Node| and |Element| objects that need to be processed during a simulation time step in the order they must be called .
672
def doit ( self ) : idx_start , idx_end = self . simindices self . open_files ( idx_start ) methodorder = self . methodorder for idx in printtools . progressbar ( range ( idx_start , idx_end ) ) : for func in methodorder : func ( idx ) self . close_files ( )
Perform a simulation run over the actual simulation time period defined by the |Timegrids| object stored in module |pub| .
673
def pic_inflow_v1 ( self ) : flu = self . sequences . fluxes . fastaccess inl = self . sequences . inlets . fastaccess flu . inflow = inl . q [ 0 ]
Update the inlet link sequence .
674
def pic_inflow_v2 ( self ) : flu = self . sequences . fluxes . fastaccess inl = self . sequences . inlets . fastaccess flu . inflow = inl . q [ 0 ] + inl . s [ 0 ] + inl . r [ 0 ]
Update the inlet link sequences .
675
def calc_waterlevel_v1 ( self ) : con = self . parameters . control . fastaccess new = self . sequences . states . fastaccess_new aid = self . sequences . aides . fastaccess con . watervolume2waterlevel . inputs [ 0 ] = new . watervolume con . watervolume2waterlevel . process_actual_input ( ) aid . waterlevel = con . watervolume2waterlevel . outputs [ 0 ]
Determine the water level based on an artificial neural network describing the relationship between water volume and water level.
676
def calc_allowedremoterelieve_v2 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess aid = self . sequences . aides . fastaccess toy = der . toy [ self . idx_sim ] flu . allowedremoterelieve = ( con . highestremoterelieve [ toy ] * smoothutils . smooth_logistic1 ( con . waterlevelrelievethreshold [ toy ] - aid . waterlevel , der . waterlevelrelievesmoothpar [ toy ] ) )
Calculate the allowed maximum relieve another location is allowed to discharge into the dam .
677
def calc_requiredremotesupply_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess aid = self . sequences . aides . fastaccess toy = der . toy [ self . idx_sim ] flu . requiredremotesupply = ( con . highestremotesupply [ toy ] * smoothutils . smooth_logistic1 ( con . waterlevelsupplythreshold [ toy ] - aid . waterlevel , der . waterlevelsupplysmoothpar [ toy ] ) )
Calculate the required maximum supply from another location that can be discharged into the dam .
678
def calc_naturalremotedischarge_v1 ( self ) : con = self . parameters . control . fastaccess flu = self . sequences . fluxes . fastaccess log = self . sequences . logs . fastaccess flu . naturalremotedischarge = 0. for idx in range ( con . nmblogentries ) : flu . naturalremotedischarge += ( log . loggedtotalremotedischarge [ idx ] - log . loggedoutflow [ idx ] ) if flu . naturalremotedischarge > 0. : flu . naturalremotedischarge /= con . nmblogentries else : flu . naturalremotedischarge = 0.
Try to estimate the natural discharge of a cross section far downstream based on the last few simulation steps .
679
def calc_remotedemand_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess flu . remotedemand = max ( con . remotedischargeminimum [ der . toy [ self . idx_sim ] ] - flu . naturalremotedischarge , 0. )
Estimate the discharge demand of a cross section far downstream .
680
def calc_remotefailure_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess log = self . sequences . logs . fastaccess flu . remotefailure = 0 for idx in range ( con . nmblogentries ) : flu . remotefailure -= log . loggedtotalremotedischarge [ idx ] flu . remotefailure /= con . nmblogentries flu . remotefailure += con . remotedischargeminimum [ der . toy [ self . idx_sim ] ]
Estimate the shortfall of actual discharge under the required discharge of a cross section far downstream .
681
def calc_requiredremoterelease_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess flu . requiredremoterelease = ( flu . remotedemand + con . remotedischargesafety [ der . toy [ self . idx_sim ] ] * smoothutils . smooth_logistic1 ( flu . remotefailure , der . remotedischargesmoothpar [ der . toy [ self . idx_sim ] ] ) )
Guess the required release necessary to not fall below the threshold value at a cross section far downstream with a certain level of certainty .
682
def calc_requiredremoterelease_v2 ( self ) : flu = self . sequences . fluxes . fastaccess log = self . sequences . logs . fastaccess flu . requiredremoterelease = log . loggedrequiredremoterelease [ 0 ]
Get the required remote release of the last simulation step .
683
def calc_allowedremoterelieve_v1 ( self ) : flu = self . sequences . fluxes . fastaccess log = self . sequences . logs . fastaccess flu . allowedremoterelieve = log . loggedallowedremoterelieve [ 0 ]
Get the allowed remote relieve of the last simulation step .
684
def calc_possibleremoterelieve_v1 ( self ) : con = self . parameters . control . fastaccess flu = self . sequences . fluxes . fastaccess aid = self . sequences . aides . fastaccess con . waterlevel2possibleremoterelieve . inputs [ 0 ] = aid . waterlevel con . waterlevel2possibleremoterelieve . process_actual_input ( ) flu . possibleremoterelieve = con . waterlevel2possibleremoterelieve . outputs [ 0 ]
Calculate the highest possible water release that can be routed to a remote location based on an artificial neural network describing the relationship between possible release and water stage .
685
def calc_actualremoterelieve_v1 ( self ) : con = self . parameters . control . fastaccess flu = self . sequences . fluxes . fastaccess d_smoothpar = con . remoterelievetolerance * flu . allowedremoterelieve flu . actualremoterelieve = smoothutils . smooth_min1 ( flu . possibleremoterelieve , flu . allowedremoterelieve , d_smoothpar ) for dummy in range ( 5 ) : d_smoothpar /= 5. flu . actualremoterelieve = smoothutils . smooth_max1 ( flu . actualremoterelieve , 0. , d_smoothpar ) d_smoothpar /= 5. flu . actualremoterelieve = smoothutils . smooth_min1 ( flu . actualremoterelieve , flu . possibleremoterelieve , d_smoothpar ) flu . actualremoterelieve = min ( flu . actualremoterelieve , flu . possibleremoterelieve ) flu . actualremoterelieve = min ( flu . actualremoterelieve , flu . allowedremoterelieve ) flu . actualremoterelieve = max ( flu . actualremoterelieve , 0. )
Calculate the actual amount of water released to a remote location to relieve the dam during high flow conditions .
686
def calc_targetedrelease_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess if con . restricttargetedrelease : flu . targetedrelease = smoothutils . smooth_logistic1 ( flu . inflow - con . neardischargeminimumthreshold [ der . toy [ self . idx_sim ] ] , der . neardischargeminimumsmoothpar1 [ der . toy [ self . idx_sim ] ] ) flu . targetedrelease = ( flu . targetedrelease * flu . requiredrelease + ( 1. - flu . targetedrelease ) * flu . inflow ) else : flu . targetedrelease = flu . requiredrelease
Calculate the targeted water release for reducing drought events taking into account both the required water release and the actual inflow into the dam .
687
def calc_actualrelease_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess aid = self . sequences . aides . fastaccess flu . actualrelease = ( flu . targetedrelease * smoothutils . smooth_logistic1 ( aid . waterlevel - con . waterlevelminimumthreshold , der . waterlevelminimumsmoothpar ) )
Calculate the actual water release that can be supplied by the dam considering the targeted release and the given water level .
688
def calc_missingremoterelease_v1 ( self ) : flu = self . sequences . fluxes . fastaccess flu . missingremoterelease = max ( flu . requiredremoterelease - flu . actualrelease , 0. )
Calculate the portion of the required remote demand that could not be met by the actual discharge release .
689
def calc_actualremoterelease_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess aid = self . sequences . aides . fastaccess flu . actualremoterelease = ( flu . requiredremoterelease * smoothutils . smooth_logistic1 ( aid . waterlevel - con . waterlevelminimumremotethreshold , der . waterlevelminimumremotesmoothpar ) )
Calculate the actual remote water release that can be supplied by the dam considering the required remote release and the given water level .
690
def update_actualremoterelieve_v1 ( self ) : con = self . parameters . control . fastaccess der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess d_smooth = der . highestremotesmoothpar d_highest = con . highestremotedischarge d_value = smoothutils . smooth_min1 ( flu . actualremoterelieve , d_highest , d_smooth ) for dummy in range ( 5 ) : d_smooth /= 5. d_value = smoothutils . smooth_max1 ( d_value , 0. , d_smooth ) d_smooth /= 5. d_value = smoothutils . smooth_min1 ( d_value , d_highest , d_smooth ) d_value = min ( d_value , flu . actualremoterelieve ) d_value = min ( d_value , d_highest ) flu . actualremoterelieve = max ( d_value , 0. )
Constrain the actual relieve discharge to a remote location .
691
def calc_outflow_v1 ( self ) : flu = self . sequences . fluxes . fastaccess flu . outflow = max ( flu . actualrelease + flu . flooddischarge , 0. )
Calculate the total outflow of the dam .
692
def update_watervolume_v1 ( self ) : der = self . parameters . derived . fastaccess flu = self . sequences . fluxes . fastaccess old = self . sequences . states . fastaccess_old new = self . sequences . states . fastaccess_new new . watervolume = ( old . watervolume + der . seconds * ( flu . inflow - flu . outflow ) / 1e6 )
Update the actual water volume .
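With made-up numbers, the continuity update reads as follows; the division by 1e6 suggests that the water volume state is kept in million m³ while the fluxes are in m³/s, which is an assumption of this illustration.

seconds = 86400.0            # a one-day simulation step (hypothetical)
old_volume = 10.0            # million m³ (assumed unit)
inflow, outflow = 5.0, 3.0   # m³/s

new_volume = old_volume + seconds * (inflow - outflow) / 1e6
print(new_volume)  # 10.1728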
693
def pass_outflow_v1 ( self ) : flu = self . sequences . fluxes . fastaccess out = self . sequences . outlets . fastaccess out . q [ 0 ] += flu . outflow
Update the outlet link sequence |dam_outlets . Q| .
694
def pass_missingremoterelease_v1 ( self ) : flu = self . sequences . fluxes . fastaccess sen = self . sequences . senders . fastaccess sen . d [ 0 ] += flu . missingremoterelease
Update the outlet link sequence |dam_senders . D| .
695
def moments ( self ) : moment1 = statstools . calc_mean_time ( self . delays , self . coefs ) moment2 = statstools . calc_mean_time_deviation ( self . delays , self . coefs , moment1 ) return numpy . array ( [ moment1 , moment2 ] )
The first two time delay weighted statistical moments of the MA coefficients .
696
def effective_max_ar_order ( self ) : return min ( self . max_ar_order , self . ma . order - self . ma . turningpoint [ 0 ] - 1 )
The maximum number of AR coefficients that shall or can be determined .
697
def update_ar_coefs ( self ) : del self . ar_coefs for ar_order in range ( 1 , self . effective_max_ar_order + 1 ) : self . calc_all_ar_coefs ( ar_order , self . ma ) if self . _rel_rmse < self . max_rel_rmse : break else : with hydpy . pub . options . reprdigits ( 12 ) : raise RuntimeError ( f'Method `update_ar_coefs` is not able to determine ' f'the AR coefficients of the ARMA model with the desired ' f'accuracy. You can either set the tolerance value ' f'`max_rel_rmse` to a higher value or increase the ' f'allowed `max_ar_order`. An accuracy of `' f'{objecttools.repr_(self._rel_rmse)}` has been reached ' f'using `{self.effective_max_ar_order}` coefficients.' )
Determine the AR coefficients .
698
def dev_moments ( self ) : return numpy . sum ( numpy . abs ( self . moments - self . ma . moments ) )
Sum of the absolute deviations between the central moments of the instantaneous unit hydrograph and the ARMA approximation .
699
def norm_coefs ( self ) : sum_coefs = self . sum_coefs self . ar_coefs /= sum_coefs self . ma_coefs /= sum_coefs
Multiply all coefficients by the same factor so that their sum becomes one .
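A standalone sketch of this normalisation, assuming that sum_coefs is the combined sum of the AR and MA coefficients (an assumption derived from the docstring, not verified against the HydPy source):

import numpy

ar_coefs = numpy.array([0.25, 0.25])   # hypothetical AR coefficients
ma_coefs = numpy.array([1.0, 0.5])     # hypothetical MA coefficients

sum_coefs = ar_coefs.sum() + ma_coefs.sum()
ar_coefs /= sum_coefs
ma_coefs /= sum_coefs
print(ar_coefs.sum() + ma_coefs.sum())  # 1.0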