idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
247,900
def migrate_app ( sender , * args , ** kwargs ) : from . registration import registry if 'app_config' not in kwargs : return app_config = kwargs [ 'app_config' ] app_name = app_config . label fields = [ fld for fld in list ( registry . _field_registry . keys ( ) ) if fld . startswith ( app_name ) ] sid = transaction . ...
Migrate all models of this app registered
247,901
def get_absolute_url ( self ) : from django . urls import NoReverseMatch if self . alternate_url : return self . alternate_url try : prefix = reverse ( 'categories_tree_list' ) except NoReverseMatch : prefix = '/' ancestors = list ( self . get_ancestors ( ) ) + [ self , ] return prefix + '/' . join ( [ force_text ( i ....
Return a path
247,902
def get_content_type ( self , content_type ) : qs = self . get_queryset ( ) return qs . filter ( content_type__name = content_type )
Get all the items of the given content type related to this item .
247,903
def get_relation_type ( self , relation_type ) : qs = self . get_queryset ( ) return qs . filter ( relation_type = relation_type )
Get all the items of the given relationship type related to this item .
247,904
def handle_class_prepared ( sender , ** kwargs ) : from . settings import M2M_REGISTRY , FK_REGISTRY from . registration import registry sender_app = sender . _meta . app_label sender_name = sender . _meta . model_name for key , val in list ( FK_REGISTRY . items ( ) ) : app_name , model_name = key . split ( '.' ) if ap...
See if this class needs registering of fields
247,905
def get_queryset ( self , request ) : qs = self . model . _default_manager . get_queryset ( ) qs . __class__ = TreeEditorQuerySet return qs
Returns a QuerySet of all model instances that can be edited by the admin site . This is used by changelist_view .
247,906
def deactivate ( self , request , queryset ) : selected_cats = self . model . objects . filter ( pk__in = [ int ( x ) for x in request . POST . getlist ( '_selected_action' ) ] ) for item in selected_cats : if item . active : item . active = False item . save ( ) item . children . all ( ) . update ( active = False )
Set active to False for selected items
247,907
def get_indent ( self , string ) : indent_amt = 0 if string [ 0 ] == '\t' : return '\t' for char in string : if char == ' ' : indent_amt += 1 else : return ' ' * indent_amt
Look through the string and count the spaces
247,908
def make_category ( self , string , parent = None , order = 1 ) : cat = Category ( name = string . strip ( ) , slug = slugify ( SLUG_TRANSLITERATOR ( string . strip ( ) ) ) [ : 49 ] , order = order ) cat . _tree_manager . insert_node ( cat , parent , 'last-child' , True ) cat . save ( ) if parent : parent . rght = cat ...
Make and save a category object from a string
247,909
def parse_lines ( self , lines ) : indent = '' level = 0 if lines [ 0 ] [ 0 ] == ' ' or lines [ 0 ] [ 0 ] == '\t' : raise CommandError ( "The first line in the file cannot start with a space or tab." ) current_parents = { 0 : None } for line in lines : if len ( line ) == 0 : continue if line [ 0 ] == ' ' or line [ 0 ] ...
Do the work of parsing each line
247,910
def handle ( self , * file_paths , ** options ) : import os for file_path in file_paths : if not os . path . isfile ( file_path ) : print ( "File %s not found." % file_path ) continue f = open ( file_path , 'r' ) data = f . readlines ( ) f . close ( ) self . parse_lines ( data )
Handle the basic import
247,911
def get_cat_model ( model ) : try : if isinstance ( model , string_types ) : model_class = apps . get_model ( * model . split ( "." ) ) elif issubclass ( model , CategoryBase ) : model_class = model if model_class is None : raise TypeError except TypeError : raise TemplateSyntaxError ( "Unknown model submitted: %s" % m...
Return a class from a string or class
247,912
def get_category ( category_string , model = Category ) : model_class = get_cat_model ( model ) category = str ( category_string ) . strip ( "'\"" ) category = category . strip ( '/' ) cat_list = category . split ( '/' ) if len ( cat_list ) == 0 : return None try : categories = model_class . objects . filter ( name = c...
Convert a string including a path and return the Category object
247,913
def get_category_drilldown ( parser , token ) : bits = token . split_contents ( ) error_str = '%(tagname)s tag should be in the format {%% %(tagname)s ' '"category name" [using "app.Model"] as varname %%} or ' '{%% %(tagname)s category_obj as varname %%}.' if len ( bits ) == 4 : if bits [ 2 ] != 'as' : raise template ....
Retrieves the specified category its ancestors and its immediate children as an iterable .
247,914
def get_top_level_categories ( parser , token ) : bits = token . split_contents ( ) usage = 'Usage: {%% %s [using "app.Model"] as <variable> %%}' % bits [ 0 ] if len ( bits ) == 3 : if bits [ 1 ] != 'as' : raise template . TemplateSyntaxError ( usage ) varname = bits [ 2 ] model = "categories.category" elif len ( bits ...
Retrieves an alphabetical list of all the categories that have no parents .
247,915
def tree_queryset ( value ) : from django . db . models . query import QuerySet from copy import deepcopy if not isinstance ( value , QuerySet ) : return value qs = value qs2 = deepcopy ( qs ) is_filtered = bool ( qs . query . where . children ) if is_filtered : include_pages = set ( ) for p in qs2 . order_by ( 'rght' ...
Converts a normal queryset from an MPTT model to include all the ancestors so a filtered subset of items can be formatted correctly
247,916
def convolve ( data , h , res_g = None , sub_blocks = None ) : if not len ( data . shape ) in [ 1 , 2 , 3 ] : raise ValueError ( "dim = %s not supported" % ( len ( data . shape ) ) ) if len ( data . shape ) != len ( h . shape ) : raise ValueError ( "dimemnsion of data (%s) and h (%s) are different" % ( len ( data . sha...
convolves 1d - 3d data with kernel h
247,917
def _convolve3_old ( data , h , dev = None ) : if dev is None : dev = get_device ( ) if dev is None : raise ValueError ( "no OpenCLDevice found..." ) dtype = data . dtype . type dtypes_options = { np . float32 : "" , np . uint16 : "-D SHORTTYPE" } if not dtype in dtypes_options : raise TypeError ( "data type %s not sup...
convolves 3d data with kernel h on the GPU Device dev boundary conditions are clamping to edge . h is converted to float32
247,918
def _scale_shape ( dshape , scale = ( 1 , 1 , 1 ) ) : nshape = np . round ( np . array ( dshape ) * np . array ( scale ) ) return tuple ( nshape . astype ( np . int ) )
returns the shape after scaling ( should be the same as ndimage . zoom
247,919
def fftshift ( arr_obj , axes = None , res_g = None , return_buffer = False ) : if axes is None : axes = list ( range ( arr_obj . ndim ) ) if isinstance ( arr_obj , OCLArray ) : if not arr_obj . dtype . type in DTYPE_KERNEL_NAMES : raise NotImplementedError ( "only works for float32 or complex64" ) elif isinstance ( ar...
gpu version of fftshift for numpy arrays or OCLArrays
247,920
def _fftshift_single ( d_g , res_g , ax = 0 ) : dtype_kernel_name = { np . float32 : "fftshift_1_f" , np . complex64 : "fftshift_1_c" } N = d_g . shape [ ax ] N1 = 1 if ax == 0 else np . prod ( d_g . shape [ : ax ] ) N2 = 1 if ax == len ( d_g . shape ) - 1 else np . prod ( d_g . shape [ ax + 1 : ] ) dtype = d_g . dtype...
basic fftshift of an OCLArray
247,921
def fft_convolve ( data , h , res_g = None , plan = None , inplace = False , kernel_is_fft = False , kernel_is_fftshifted = False ) : if isinstance ( data , np . ndarray ) : return _fft_convolve_numpy ( data , h , plan = plan , kernel_is_fft = kernel_is_fft , kernel_is_fftshifted = kernel_is_fftshifted ) elif isinstanc...
convolves data with kernel h via FFTs
247,922
def _fft_convolve_numpy ( data , h , plan = None , kernel_is_fft = False , kernel_is_fftshifted = False ) : if data . shape != h . shape : raise ValueError ( "data and kernel must have same size! %s vs %s " % ( str ( data . shape ) , str ( h . shape ) ) ) data_g = OCLArray . from_array ( data . astype ( np . complex64 ...
convolving via opencl fft for numpy arrays
247,923
def _fft_convolve_gpu ( data_g , h_g , res_g = None , plan = None , inplace = False , kernel_is_fft = False ) : assert_bufs_type ( np . complex64 , data_g , h_g ) if data_g . shape != h_g . shape : raise ValueError ( "data and kernel must have same size! %s vs %s " % ( str ( data_g . shape ) , str ( h_g . shape ) ) ) i...
fft convolve for gpu buffer
247,924
def median_filter ( data , size = 3 , cval = 0 , res_g = None , sub_blocks = None ) : if data . ndim == 2 : _filt = make_filter ( _median_filter_gpu_2d ( ) ) elif data . ndim == 3 : _filt = make_filter ( _median_filter_gpu_3d ( ) ) else : raise ValueError ( "currently only 2 or 3 dimensional data is supported" ) return...
median filter of given size
247,925
def rotate ( data , axis = ( 1. , 0 , 0 ) , angle = 0. , center = None , mode = "constant" , interpolation = "linear" ) : if center is None : center = tuple ( [ s // 2 for s in data . shape ] ) cx , cy , cz = center m = np . dot ( mat4_translate ( cx , cy , cz ) , np . dot ( mat4_rotate ( angle , * axis ) , mat4_transl...
rotates data around axis by a given angle
247,926
def map_coordinates ( data , coordinates , interpolation = "linear" , mode = 'constant' ) : if not ( isinstance ( data , np . ndarray ) and data . ndim in ( 2 , 3 ) ) : raise ValueError ( "input data has to be a 2d or 3d array!" ) coordinates = np . asarray ( coordinates , np . int32 ) if not ( coordinates . shape [ 0 ...
Map data to new coordinates by interpolation . The array of coordinates is used to find for each point in the output the corresponding coordinates in the input .
247,927
def pad_to_shape ( d , dshape , mode = "constant" ) : if d . shape == dshape : return d diff = np . array ( dshape ) - np . array ( d . shape ) slices = tuple ( slice ( - x // 2 , x // 2 ) if x < 0 else slice ( None , None ) for x in diff ) res = d [ slices ] return np . pad ( res , [ ( int ( np . ceil ( d / 2. ) ) , d...
pad array d to shape dshape
247,928
def pad_to_power2 ( data , axis = None , mode = "constant" ) : if axis is None : axis = list ( range ( data . ndim ) ) if np . all ( [ _is_power2 ( n ) for i , n in enumerate ( data . shape ) if i in axis ] ) : return data else : return pad_to_shape ( data , [ ( _next_power_of_2 ( n ) if i in axis else n ) for i , n in...
pad data to a shape of power 2 if axis == None all axis are padded
247,929
def max_filter ( data , size = 7 , res_g = None , sub_blocks = ( 1 , 1 , 1 ) ) : if data . ndim == 2 : _filt = make_filter ( _generic_filter_gpu_2d ( FUNC = "(val>res?val:res)" , DEFAULT = "-INFINITY" ) ) elif data . ndim == 3 : _filt = make_filter ( _generic_filter_gpu_3d ( FUNC = "(val>res?val:res)" , DEFAULT = "-INF...
maximum filter of given size
247,930
def min_filter ( data , size = 7 , res_g = None , sub_blocks = ( 1 , 1 , 1 ) ) : if data . ndim == 2 : _filt = make_filter ( _generic_filter_gpu_2d ( FUNC = "(val<res?val:res)" , DEFAULT = "INFINITY" ) ) elif data . ndim == 3 : _filt = make_filter ( _generic_filter_gpu_3d ( FUNC = "(val<res?val:res)" , DEFAULT = "INFIN...
minimum filter of given size
247,931
def uniform_filter ( data , size = 7 , res_g = None , sub_blocks = ( 1 , 1 , 1 ) , normalized = True ) : if normalized : if np . isscalar ( size ) : norm = size else : norm = np . int32 ( np . prod ( size ) ) ** ( 1. / len ( size ) ) FUNC = "res+val/%s" % norm else : FUNC = "res+val" if data . ndim == 2 : _filt = make_...
mean filter of given size
247,932
def _gauss_filter ( data , sigma = 4 , res_g = None , sub_blocks = ( 1 , 1 , 1 ) ) : truncate = 4. radius = tuple ( int ( truncate * s + 0.5 ) for s in sigma ) size = tuple ( 2 * r + 1 for r in radius ) s = sigma [ 0 ] if data . ndim == 2 : _filt = make_filter ( _generic_filter_gpu_2d ( FUNC = "res+(val*native_exp((flo...
gaussian filter of given size
247,933
def _separable_series2 ( h , N = 1 ) : if min ( h . shape ) < N : raise ValueError ( "smallest dimension of h is smaller than approximation order! (%s < %s)" % ( min ( h . shape ) , N ) ) U , S , V = linalg . svd ( h ) hx = [ - U [ : , n ] * np . sqrt ( S [ n ] ) for n in range ( N ) ] hy = [ - V [ n , : ] * np . sqrt ...
finds separable approximations to the 2d function 2d h
247,934
def _separable_approx2 ( h , N = 1 ) : return np . cumsum ( [ np . outer ( fy , fx ) for fy , fx in _separable_series2 ( h , N ) ] , 0 )
returns the N first approximations to the 2d function h whose sum should be h
247,935
def _separable_approx3 ( h , N = 1 ) : return np . cumsum ( [ np . einsum ( "i,j,k" , fz , fy , fx ) for fz , fy , fx in _separable_series3 ( h , N ) ] , 0 )
returns the N first approximations to the 3d function h
247,936
def separable_approx ( h , N = 1 ) : if h . ndim == 2 : return _separable_approx2 ( h , N ) elif h . ndim == 3 : return _separable_approx3 ( h , N ) else : raise ValueError ( "unsupported array dimension: %s (only 2d or 3d) " % h . ndim )
finds the k - th rank approximation to h where k = 1 .. N
247,937
def tables ( self ) : _tables = set ( ) for attr in six . itervalues ( self . __dict__ ) : if isinstance ( attr , list ) : for item in attr : if isinstance ( item , Node ) : _tables |= item . tables ( ) elif isinstance ( attr , Node ) : _tables |= attr . tables ( ) return _tables
Generic method that does a depth - first search on the node attributes .
247,938
def fix_identities ( self , uniq = None ) : if not hasattr ( self , 'children' ) : return self uniq = list ( set ( self . flat ( ) ) ) if uniq is None else uniq for i , child in enumerate ( self . children ) : if not hasattr ( child , 'children' ) : assert child in uniq self . children [ i ] = uniq [ uniq . index ( chi...
Make pattern - tree tips point to same object if they are equal .
247,939
def find_version ( fname ) : version = "" with open ( fname , "r" ) as fp : reg = re . compile ( r'__version__ = [\'"]([^\'"]*)[\'"]' ) for line in fp : m = reg . match ( line ) if m : version = m . group ( 1 ) break if not version : raise RuntimeError ( "Cannot find version information" ) return version
Attempts to find the version number in the file names fname . Raises RuntimeError if not found .
247,940
def format_context ( context : Context , formatter : typing . Union [ str , Formatter ] = "full" ) -> str : if not context : return "" if callable ( formatter ) : formatter_func = formatter else : if formatter in CONTEXT_FORMATTERS : formatter_func = CONTEXT_FORMATTERS [ formatter ] else : raise ValueError ( f'Invalid ...
Output the a context dictionary as a string .
247,941
def make_banner ( text : typing . Optional [ str ] = None , context : typing . Optional [ Context ] = None , banner_template : typing . Optional [ str ] = None , context_format : ContextFormat = "full" , ) -> str : banner_text = text or speak ( ) banner_template = banner_template or BANNER_TEMPLATE ctx = format_context...
Generates a full banner with version info the given text and a formatted list of context variables .
247,942
def config ( config_dict : typing . Mapping ) -> Config : logger . debug ( f"Updating with {config_dict}" ) _cfg . update ( config_dict ) return _cfg
Configures the konch shell . This function should be called in a . konchrc file .
247,943
def named_config ( name : str , config_dict : typing . Mapping ) -> None : names = ( name if isinstance ( name , Iterable ) and not isinstance ( name , ( str , bytes ) ) else [ name ] ) for each in names : _config_registry [ each ] = Config ( ** config_dict )
Adds a named config to the config registry . The first argument may either be a string or a collection of strings .
247,944
def __ensure_directory_in_path ( filename : Path ) -> None : directory = Path ( filename ) . parent . resolve ( ) if directory not in sys . path : logger . debug ( f"Adding {directory} to sys.path" ) sys . path . insert ( 0 , str ( directory ) )
Ensures that a file s directory is in the Python path .
247,945
def use_file ( filename : typing . Union [ Path , str , None ] , trust : bool = False ) -> typing . Union [ types . ModuleType , None ] : config_file = filename or resolve_path ( CONFIG_FILE ) def preview_unauthorized ( ) -> None : if not config_file : return None print ( SEPARATOR , file = sys . stderr ) with Path ( c...
Load filename as a python file . Import filename and return it as a module .
247,946
def resolve_path ( filename : Path ) -> typing . Union [ Path , None ] : current = Path . cwd ( ) sentinel_dir = Path . home ( ) . parent . resolve ( ) while current != sentinel_dir : target = Path ( current ) / Path ( filename ) if target . exists ( ) : return target . resolve ( ) else : current = current . parent . r...
Find a file by walking up parent directories until the file is found . Return the absolute path of the file .
247,947
def parse_args ( argv : typing . Optional [ typing . Sequence ] = None ) -> typing . Dict [ str , str ] : return docopt ( __doc__ , argv = argv , version = __version__ )
Exposes the docopt command - line arguments parser . Return a dictionary of arguments .
247,948
def main ( argv : typing . Optional [ typing . Sequence ] = None ) -> typing . NoReturn : args = parse_args ( argv ) if args [ "--debug" ] : logging . basicConfig ( format = "%(levelname)s %(filename)s: %(message)s" , level = logging . DEBUG ) logger . debug ( args ) config_file : typing . Union [ Path , None ] if args...
Main entry point for the konch CLI .
247,949
def init_autoreload ( mode : int ) -> None : from IPython . extensions import autoreload ip = get_ipython ( ) autoreload . load_ipython_extension ( ip ) ip . magics_manager . magics [ "line" ] [ "autoreload" ] ( str ( mode ) )
Load and initialize the IPython autoreload extension .
247,950
def read_tabular ( table_file , sheetname = 'Sheet1' ) : if isinstance ( table_file , str ) : extension = table_file . split ( '.' ) [ - 1 ] if extension in [ 'xls' , 'xlsx' ] : table = pd . read_excel ( table_file , sheetname = sheetname ) elif extension == 'csv' : table = pd . read_csv ( table_file , encoding = 'UTF-...
Reads a vensim syntax model which has been formatted as a table .
247,951
def read_xmile ( xmile_file ) : from . import py_backend from . py_backend . xmile . xmile2py import translate_xmile py_model_file = translate_xmile ( xmile_file ) model = load ( py_model_file ) model . xmile_file = xmile_file return model
Construct a model object from . xmile file .
247,952
def read_vensim ( mdl_file ) : from . py_backend . vensim . vensim2py import translate_vensim from . py_backend import functions py_model_file = translate_vensim ( mdl_file ) model = functions . Model ( py_model_file ) model . mdl_file = mdl_file return model
Construct a model from Vensim . mdl file .
247,953
def cache ( horizon ) : def cache_step ( func ) : @ wraps ( func ) def cached ( * args ) : try : data = func . __globals__ [ '__data' ] assert cached . cache_t == data [ 'time' ] ( ) assert hasattr ( cached , 'cache_val' ) assert cached . cache_val is not None except ( AssertionError , AttributeError ) : cached . cache...
Put a wrapper around a model function
247,954
def ramp ( time , slope , start , finish = 0 ) : t = time ( ) if t < start : return 0 else : if finish <= 0 : return slope * ( t - start ) elif t > finish : return slope * ( finish - start ) else : return slope * ( t - start )
Implements vensim s and xmile s RAMP function
247,955
def pulse ( time , start , duration ) : t = time ( ) return 1 if start <= t < start + duration else 0
Implements vensim s PULSE function
247,956
def pulse_train ( time , start , duration , repeat_time , end ) : t = time ( ) if start <= t < end : return 1 if ( t - start ) % repeat_time < duration else 0 else : return 0
Implements vensim s PULSE TRAIN function
247,957
def lookup_extrapolation ( x , xs , ys ) : length = len ( xs ) if x < xs [ 0 ] : dx = xs [ 1 ] - xs [ 0 ] dy = ys [ 1 ] - ys [ 0 ] k = dy / dx return ys [ 0 ] + ( x - xs [ 0 ] ) * k if x > xs [ length - 1 ] : dx = xs [ length - 1 ] - xs [ length - 2 ] dy = ys [ length - 1 ] - ys [ length - 2 ] k = dy / dx return ys [ l...
Intermediate values are calculated with linear interpolation between the intermediate points . Out - of - range values are calculated with linear extrapolation from the last two values at either end .
247,958
def xidz ( numerator , denominator , value_if_denom_is_zero ) : small = 1e-6 if abs ( denominator ) < small : return value_if_denom_is_zero else : return numerator * 1.0 / denominator
Implements Vensim s XIDZ function . This function executes a division robust to denominator being zero . In the case of zero denominator the final argument is returned .
247,959
def initialize ( self , initialization_order = None ) : if self . time is None : if self . time_initialization is None : self . time = Time ( ) else : self . time = self . time_initialization ( ) self . components . _init_outer_references ( { 'scope' : self , 'time' : self . time } ) remaining = set ( self . _stateful_...
This function tries to initialize the stateful objects .
247,960
def set_components ( self , params ) : for key , value in params . items ( ) : if isinstance ( value , pd . Series ) : new_function = self . _timeseries_component ( value ) elif callable ( value ) : new_function = value else : new_function = self . _constant_component ( value ) func_name = utils . get_value_by_insensit...
Set the value of exogenous model elements . Element values can be passed as keyword = value pairs in the function call . Values can be numeric type or pandas Series . Series will be interpolated by integrator .
247,961
def _timeseries_component ( self , series ) : return lambda : np . interp ( self . time ( ) , series . index , series . values )
Internal function for creating a timeseries model element
247,962
def set_state ( self , t , state ) : self . time . update ( t ) for key , value in state . items ( ) : component_name = utils . get_value_by_insensitive_key_or_value ( key , self . components . _namespace ) if component_name is not None : stateful_name = '_integ_%s' % component_name else : component_name = key stateful...
Set the system state .
247,963
def clear_caches ( self ) : for element_name in dir ( self . components ) : element = getattr ( self . components , element_name ) if hasattr ( element , 'cache_val' ) : delattr ( element , 'cache_val' )
Clears the Caches for all model elements
247,964
def doc ( self ) : collector = [ ] for name , varname in self . components . _namespace . items ( ) : try : docstring = getattr ( self . components , varname ) . __doc__ lines = docstring . split ( '\n' ) collector . append ( { 'Real Name' : name , 'Py Name' : varname , 'Eqn' : lines [ 2 ] . replace ( "Original Eqn:" ,...
Formats a table of documentation strings to help users remember variable names and understand how they are translated into python safe names .
247,965
def initialize ( self ) : self . time . update ( self . components . initial_time ( ) ) self . time . stage = 'Initialization' super ( Model , self ) . initialize ( )
Initializes the simulation model
247,966
def _format_return_timestamps ( self , return_timestamps = None ) : if return_timestamps is None : return_timestamps_array = np . arange ( self . components . initial_time ( ) , self . components . final_time ( ) + self . components . saveper ( ) , self . components . saveper ( ) , dtype = np . float64 ) elif inspect ....
Format the passed in return timestamps value as a numpy array . If no value is passed build up array of timestamps based upon model start and end times and the saveper value .
247,967
def run ( self , params = None , return_columns = None , return_timestamps = None , initial_condition = 'original' , reload = False ) : if reload : self . reload ( ) if params : self . set_components ( params ) self . set_initial_condition ( initial_condition ) return_timestamps = self . _format_return_timestamps ( ret...
Simulate the model s behavior over time . Return a pandas dataframe with timestamps as rows model elements as columns .
247,968
def _default_return_columns ( self ) : return_columns = [ ] parsed_expr = [ ] for key , value in self . components . _namespace . items ( ) : if hasattr ( self . components , value ) : sig = signature ( getattr ( self . components , value ) ) if len ( set ( sig . parameters ) - { 'args' } ) == 0 : expr = self . compone...
Return a list of the model elements that does not include lookup functions or other functions that take parameters .
247,969
def set_initial_condition ( self , initial_condition ) : if isinstance ( initial_condition , tuple ) : self . set_state ( * initial_condition ) elif isinstance ( initial_condition , str ) : if initial_condition . lower ( ) in [ 'original' , 'o' ] : self . initialize ( ) elif initial_condition . lower ( ) in [ 'current'...
Set the initial conditions of the integration .
247,970
def _euler_step ( self , dt ) : self . state = self . state + self . ddt ( ) * dt
Performs a single step in the euler integration updating stateful components
247,971
def _integrate ( self , time_steps , capture_elements , return_timestamps ) : outputs = [ ] for t2 in time_steps [ 1 : ] : if self . time ( ) in return_timestamps : outputs . append ( { key : getattr ( self . components , key ) ( ) for key in capture_elements } ) self . _euler_step ( t2 - self . time ( ) ) self . time ...
Performs euler integration
247,972
def merge_partial_elements ( element_list ) : outs = dict ( ) for element in element_list : if element [ 'py_expr' ] != "None" : name = element [ 'py_name' ] if name not in outs : eqn = element [ 'expr' ] if 'expr' in element else element [ 'eqn' ] outs [ name ] = { 'py_name' : element [ 'py_name' ] , 'real_name' : ele...
merges model elements which collectively all define the model component mostly for multidimensional subscripts
247,973
def add_n_delay ( delay_input , delay_time , initial_value , order , subs , subscript_dict ) : stateful = { 'py_name' : utils . make_python_identifier ( '_delay_%s_%s_%s_%s' % ( delay_input , delay_time , initial_value , order ) ) [ 0 ] , 'real_name' : 'Delay of %s' % delay_input , 'doc' : 'Delay time: %s \n Delay init...
Creates code to instantiate a stateful Delay object and provides reference to that object s output .
247,974
def add_n_smooth ( smooth_input , smooth_time , initial_value , order , subs , subscript_dict ) : stateful = { 'py_name' : utils . make_python_identifier ( '_smooth_%s_%s_%s_%s' % ( smooth_input , smooth_time , initial_value , order ) ) [ 0 ] , 'real_name' : 'Smooth of %s' % smooth_input , 'doc' : 'Smooth time: %s \n S...
Constructs stock and flow chains that implement the calculation of a smoothing function .
247,975
def add_initial ( initial_input ) : stateful = { 'py_name' : utils . make_python_identifier ( '_initial_%s' % initial_input ) [ 0 ] , 'real_name' : 'Smooth of %s' % initial_input , 'doc' : 'Returns the value taken on during the initialization phase' , 'py_expr' : 'functions.Initial(lambda: %s)' % ( initial_input ) , 'u...
Constructs a stateful object for handling vensim s Initial functionality
247,976
def add_macro ( macro_name , filename , arg_names , arg_vals ) : func_args = '{ %s }' % ', ' . join ( [ "'%s': lambda: %s" % ( key , val ) for key , val in zip ( arg_names , arg_vals ) ] ) stateful = { 'py_name' : '_macro_' + macro_name + '_' + '_' . join ( [ utils . make_python_identifier ( f ) [ 0 ] for f in arg_vals...
Constructs a stateful object instantiating a Macro
247,977
def add_incomplete ( var_name , dependencies ) : warnings . warn ( '%s has no equation specified' % var_name , SyntaxWarning , stacklevel = 2 ) return "functions.incomplete(%s)" % ', ' . join ( dependencies [ 1 : ] ) , [ ]
Incomplete functions don t really need to be builders as they add no new real structure but it s helpful to have a function in which we can raise a warning about the incomplete equation at translate time .
247,978
def get_model_elements ( model_str ) : model_structure_grammar = _include_common_grammar ( r ) parser = parsimonious . Grammar ( model_structure_grammar ) tree = parser . parse ( model_str ) class ModelParser ( parsimonious . NodeVisitor ) : def __init__ ( self , ast ) : self . entries = [ ] self . visit ( ast ) def vi...
Takes in a string representing model text and splits it into elements
247,979
def get_equation_components ( equation_str ) : component_structure_grammar = _include_common_grammar ( r ) equation_str = equation_str . replace ( '\\t' , ' ' ) equation_str = re . sub ( r"\s+" , ' ' , equation_str ) parser = parsimonious . Grammar ( component_structure_grammar ) tree = parser . parse ( equation_str ) ...
Breaks down a string representing only the equation part of a model element . Recognizes the various types of model elements that may exist and identifies them .
247,980
def parse_units ( units_str ) : if not len ( units_str ) : return units_str , ( None , None ) if units_str [ - 1 ] == ']' : units , lims = units_str . rsplit ( '[' ) else : units = units_str lims = '?, ?]' lims = tuple ( [ float ( x ) if x . strip ( ) != '?' else None for x in lims . strip ( ']' ) . split ( ',' ) ] ) r...
Extract and parse the units Extract the bounds over which the expression is assumed to apply .
247,981
def parse_lookup_expression ( element ) : lookup_grammar = r parser = parsimonious . Grammar ( lookup_grammar ) tree = parser . parse ( element [ 'expr' ] ) class LookupParser ( parsimonious . NodeVisitor ) : def __init__ ( self , ast ) : self . translation = "" self . new_structure = [ ] self . visit ( ast ) def visit...
This syntax parses lookups that are defined with their own element
247,982
def dict_find ( in_dict , value ) : return list ( in_dict . keys ( ) ) [ list ( in_dict . values ( ) ) . index ( value ) ]
Helper function for looking up directory keys by their values . This isn t robust to repeated values
247,983
def find_subscript_name ( subscript_dict , element ) : if element in subscript_dict . keys ( ) : return element for name , elements in subscript_dict . items ( ) : if element in elements : return name
Given a subscript dictionary and a member of a subscript family return the first key of which the member is within the value list . If element is already a subscript name return that
247,984
def make_coord_dict ( subs , subscript_dict , terse = True ) : sub_elems_list = [ y for x in subscript_dict . values ( ) for y in x ] coordinates = { } for sub in subs : if sub in sub_elems_list : name = find_subscript_name ( subscript_dict , sub ) coordinates [ name ] = [ sub ] elif not terse : coordinates [ sub ] = s...
This is for assisting with the lookup of a particular element such that the output of this function would take the place of %s in this expression
247,985
def make_python_identifier ( string , namespace = None , reserved_words = None , convert = 'drop' , handle = 'force' ) : if namespace is None : namespace = dict ( ) if reserved_words is None : reserved_words = list ( ) if string in namespace : return namespace [ string ] , namespace s = string . lower ( ) s = s . strip...
Takes an arbitrary string and creates a valid Python identifier .
247,986
def make_flat_df ( frames , return_addresses ) : visited = list ( map ( lambda x : visit_addresses ( x , return_addresses ) , frames ) ) return pd . DataFrame ( visited )
Takes a list of dictionaries each representing what is returned from the model at a particular time and creates a dataframe whose columns correspond to the keys of return addresses
247,987
def visit_addresses ( frame , return_addresses ) : outdict = dict ( ) for real_name , ( pyname , address ) in return_addresses . items ( ) : if address : xrval = frame [ pyname ] . loc [ address ] if xrval . size > 1 : outdict [ real_name ] = xrval else : outdict [ real_name ] = float ( np . squeeze ( xrval . values ) ...
Visits all of the addresses returns a new dict which contains just the addressed elements
247,988
def validate_request ( request ) : if getattr ( settings , 'BASICAUTH_DISABLE' , False ) : return True if 'HTTP_AUTHORIZATION' not in request . META : return False authorization_header = request . META [ 'HTTP_AUTHORIZATION' ] ret = extract_basicauth ( authorization_header ) if not ret : return False username , passwor...
Check an incoming request .
247,989
def _find_address_range ( addresses ) : first = last = addresses [ 0 ] last_index = 0 for ip in addresses [ 1 : ] : if ip . _ip == last . _ip + 1 : last = ip last_index += 1 else : break return ( first , last , last_index )
Find a sequence of addresses .
247,990
def _prefix_from_prefix_int ( self , prefixlen ) : if not isinstance ( prefixlen , ( int , long ) ) : raise NetmaskValueError ( '%r is not an integer' % prefixlen ) prefixlen = int ( prefixlen ) if not ( 0 <= prefixlen <= self . _max_prefixlen ) : raise NetmaskValueError ( '%d is not a valid prefix length' % prefixlen ...
Validate and return a prefix length integer .
247,991
def output_colored ( code , text , is_bold = False ) : if is_bold : code = '1;%s' % code return '\033[%sm%s\033[0m' % ( code , text )
Output text wrapped in an ANSI color escape sequence, optionally bold.
247,992
def _set_asset_paths ( self , app ) : webpack_stats = app . config [ 'WEBPACK_MANIFEST_PATH' ] try : with app . open_resource ( webpack_stats , 'r' ) as stats_json : stats = json . load ( stats_json ) if app . config [ 'WEBPACK_ASSETS_URL' ] : self . assets_url = app . config [ 'WEBPACK_ASSETS_URL' ] else : self . asse...
Read in the manifest json file which acts as a manifest for assets . This allows us to get the asset path as well as hashed names .
247,993
def javascript_tag ( self , * args ) : tags = [ ] for arg in args : asset_path = self . asset_url_for ( '{0}.js' . format ( arg ) ) if asset_path : tags . append ( '<script src="{0}"></script>' . format ( asset_path ) ) return '\n' . join ( tags )
Convenience tag to output 1 or more javascript tags .
247,994
def asset_url_for ( self , asset ) : if '//' in asset : return asset if asset not in self . assets : return None return '{0}{1}' . format ( self . assets_url , self . assets [ asset ] )
Look up the hashed asset path of a file name; if it starts with something that resembles a web address, take it as is.
247,995
def pre_change_receiver ( self , instance : Model , action : Action ) : if action == Action . CREATE : group_names = set ( ) else : group_names = set ( self . group_names ( instance ) ) if not hasattr ( instance , '__instance_groups' ) : instance . __instance_groups = threading . local ( ) instance . __instance_groups ...
Entry point for triggering the old_binding from save signals .
247,996
def post_change_receiver ( self , instance : Model , action : Action , ** kwargs ) : try : old_group_names = instance . __instance_groups . observers [ self ] except ( ValueError , KeyError ) : old_group_names = set ( ) if action == Action . DELETE : new_group_names = set ( ) else : new_group_names = set ( self . group...
Triggers the old_binding to possibly send to its group .
247,997
def get_queryset ( self , ** kwargs ) -> QuerySet : assert self . queryset is not None , ( "'%s' should either include a `queryset` attribute, " "or override the `get_queryset()` method." % self . __class__ . __name__ ) queryset = self . queryset if isinstance ( queryset , QuerySet ) : queryset = queryset . all ( ) ret...
Get the list of items for this view . This must be an iterable and may be a queryset . Defaults to using self . queryset .
247,998
def get_serializer_class ( self , ** kwargs ) -> Type [ Serializer ] : assert self . serializer_class is not None , ( "'%s' should either include a `serializer_class` attribute, " "or override the `get_serializer_class()` method." % self . __class__ . __name__ ) return self . serializer_class
Return the class to use for the serializer . Defaults to using self . serializer_class .
247,999
def view_as_consumer ( wrapped_view : typing . Callable [ [ HttpRequest ] , HttpResponse ] , mapped_actions : typing . Optional [ typing . Dict [ str , str ] ] = None ) -> Type [ AsyncConsumer ] : if mapped_actions is None : mapped_actions = { 'create' : 'PUT' , 'update' : 'PATCH' , 'list' : 'GET' , 'retrieve' : 'GET' ...
Wrap a django View so that it will be triggered by actions over this json websocket consumer .