idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
56,100
def quasi_newton_uniform_blocks ( points , cells , * args , ** kwargs ) : def get_new_points ( mesh ) : x = mesh . node_coords . copy ( ) x += update ( mesh ) x [ ghosted_mesh . is_ghost_point ] = ghosted_mesh . reflect_ghost ( x [ ghosted_mesh . mirrors ] ) return x ghosted_mesh = GhostedMesh ( points , cells ) runner...
Lloyd's algorithm can be thought of as a diagonal-only Hessian; this method incorporates the diagonal blocks too.
56,101
def new ( filename : str , * , file_attrs : Optional [ Dict [ str , str ] ] = None ) -> LoomConnection : if filename . startswith ( "~/" ) : filename = os . path . expanduser ( filename ) if file_attrs is None : file_attrs = { } f = h5py . File ( name = filename , mode = 'w' ) f . create_group ( '/layers' ) f . create_...
Create an empty Loom file and return it as a context manager .
56,102
def create ( filename : str , layers : Union [ np . ndarray , Dict [ str , np . ndarray ] , loompy . LayerManager ] , row_attrs : Union [ loompy . AttributeManager , Dict [ str , np . ndarray ] ] , col_attrs : Union [ loompy . AttributeManager , Dict [ str , np . ndarray ] ] , * , file_attrs : Dict [ str , str ] = None...
Create a new Loom file from the given data .
56,103
def connect(filename: str, mode: str = 'r+', *, validate: bool = True, spec_version: str = "2.0.1") -> LoomConnection:
    """Establish a connection to a .loom file.

    Args:
        filename: Path of the .loom file to open.
        mode: HDF5 file mode ('r+' for read/write, 'r' for read-only).
        validate: Whether to validate the file against the Loom spec on open.
        spec_version: Loom specification version to validate against.

    Returns:
        An open ``LoomConnection`` to the file.
    """
    connection = LoomConnection(filename, mode, validate=validate, spec_version=spec_version)
    return connection
Establish a connection to a . loom file .
56,104
def last_modified(self) -> str:
    """Return an ISO8601 timestamp of when the file was last modified.

    If the attribute is missing and the file is open for writing ('r+'),
    a fresh timestamp is first stored on the file; otherwise the current
    time is returned without touching the file.
    """
    if "last_modified" not in self.attrs:
        if self.mode != "r+":
            # Read-only and no stored value: fall back to "now".
            return timestamp()
        self.attrs["last_modified"] = timestamp()
    return self.attrs["last_modified"]
Return an ISO8601 timestamp indicating when the file was last modified
56,105
def get_changes_since ( self , timestamp : str ) -> Dict [ str , List ] : rg = [ ] cg = [ ] ra = [ ] ca = [ ] layers = [ ] if self . last_modified ( ) > timestamp : if self . row_graphs . last_modified ( ) > timestamp : for name in self . row_graphs . keys ( ) : if self . row_graphs . last_modified ( name ) > timestamp...
Get a summary of the parts of the file that changed since the given time
56,106
def sparse(self, rows: np.ndarray = None, cols: np.ndarray = None, layer: str = None) -> scipy.sparse.coo_matrix:
    """Return the main matrix (or the named layer) as a scipy.sparse.coo_matrix.

    The dense matrix is never materialized in RAM; ``layer=None`` selects
    the main matrix (the layer named "").
    """
    selected = self.layers["" if layer is None else layer]
    return selected.sparse(rows=rows, cols=cols)
Return the main matrix or specified layer as a scipy . sparse . coo_matrix without loading dense matrix in RAM
56,107
def close ( self , suppress_warning : bool = False ) -> None : if self . _file is None : if not suppress_warning : logging . warn ( "Connection to %s is already closed" , self . filename ) else : self . _file . close ( ) self . _file = None self . layers = None self . ra = None self . row_attrs = None self . ca = None ...
Close the connection . After this the connection object becomes invalid . Warns user if called after closing .
56,108
def permute ( self , ordering : np . ndarray , axis : int ) -> None : if self . _file . __contains__ ( "tiles" ) : del self . _file [ 'tiles' ] ordering = list ( np . array ( ordering ) . flatten ( ) ) self . layers . _permute ( ordering , axis = axis ) if axis == 0 : self . row_attrs . _permute ( ordering ) self . row...
Permute the dataset along the indicated axis .
56,109
def aggregate ( self , out_file : str = None , select : np . ndarray = None , group_by : Union [ str , np . ndarray ] = "Clusters" , aggr_by : str = "mean" , aggr_ca_by : Dict [ str , str ] = None ) -> np . ndarray : ca = { } if select is not None : raise ValueError ( "The 'select' argument is deprecated" ) if isinstan...
Aggregate the Loom file by applying aggregation functions to the main matrix as well as to the column attributes
56,110
def get(self, name: str, default: Any = None) -> np.ndarray:
    """Return the value for the named attribute, or ``default`` if absent.

    Mirrors ``dict.get``: when ``default`` is not given it defaults to
    None, so this method never raises KeyError.
    """
    return self[name] if name in self else default
Return the value for a named attribute if it exists else default . If default is not given it defaults to None so that this method never raises a KeyError .
56,111
def cat_colors ( N : int = 1 , * , hue : str = None , luminosity : str = None , bgvalue : int = None , loop : bool = False , seed : str = "cat" ) -> Union [ List [ Any ] , colors . LinearSegmentedColormap ] : c : List [ str ] = [ ] if N <= 25 and hue is None and luminosity is None : c = _color_alphabet [ : N ] elif not...
Return a colormap suitable for N categorical values optimized to be both aesthetically pleasing and perceptually distinct .
56,112
def _renumber ( a : np . ndarray , keys : np . ndarray , values : np . ndarray ) -> np . ndarray : ordering = np . argsort ( keys ) keys = keys [ ordering ] values = keys [ ordering ] index = np . digitize ( a . ravel ( ) , keys , right = True ) return ( values [ index ] . reshape ( a . shape ) )
Renumber a by replacing any occurrence of keys by the corresponding values
56,113
def validate ( self , path : str , strictness : str = "speconly" ) -> bool : valid1 = True with h5py . File ( path , mode = "r" ) as f : valid1 = self . validate_spec ( f ) if not valid1 : self . errors . append ( "For help, see http://linnarssonlab.org/loompy/format/" ) valid2 = True if strictness == "conventions" : w...
Validate a file for conformance to the Loom specification
56,114
def _permute ( self , ordering : np . ndarray ) -> None : for key in self . keys ( ) : self [ key ] = self [ key ] [ ordering ]
Permute all the attributes in the collection
56,115
def get ( self , name : str , default : np . ndarray ) -> np . ndarray : if name in self : return self [ name ] else : if not isinstance ( default , np . ndarray ) : raise ValueError ( f"Default must be an np.ndarray with exactly {self.ds.shape[self.axis]} values" ) if default . shape [ 0 ] != self . ds . shape [ self ...
Return the value for a named attribute if it exists else default . Default has to be a numpy array of correct size .
56,116
def normalize_attr_array ( a : Any ) -> np . ndarray : if type ( a ) is np . ndarray : return a elif type ( a ) is np . matrix : if a . shape [ 0 ] == 1 : return np . array ( a ) [ 0 , : ] elif a . shape [ 1 ] == 1 : return np . array ( a ) [ : , 0 ] else : raise ValueError ( "Attribute values must be 1-dimensional." )...
Take all kinds of array - like inputs and normalize to a one - dimensional np . ndarray
56,117
def to_html ( ds : Any ) -> str : rm = min ( 10 , ds . shape [ 0 ] ) cm = min ( 10 , ds . shape [ 1 ] ) html = "<p>" if ds . attrs . __contains__ ( "title" ) : html += "<strong>" + ds . attrs [ "title" ] + "</strong> " html += f"{ds.shape[0]} rows, {ds.shape[1]} columns, {len(ds.layers)} layer{'s' if len(ds.layers) > 1...
Return an HTML representation of the loom file or view showing the upper - left 10x10 corner .
56,118
def permute ( self , ordering : np . ndarray , * , axis : int ) -> None : if axis not in ( 0 , 1 ) : raise ValueError ( "Axis must be 0 (rows) or 1 (columns)" ) for layer in self . layers . values ( ) : layer . _permute ( ordering , axis = axis ) if axis == 0 : if self . row_graphs is not None : for g in self . row_gra...
Permute the view by permuting its layers attributes and graphs
56,119
def permute(self, ordering: np.ndarray, *, axis: int) -> None:
    """Permute the layer's values in place along an axis (0 = rows, 1 = columns).

    Raises:
        ValueError: if ``axis`` is neither 0 nor 1.
    """
    if axis not in (0, 1):
        raise ValueError("axis must be 0 or 1")
    self.values = self.values[ordering, :] if axis == 0 else self.values[:, ordering]
Permute the layer along an axis
56,120
def _resize(self, size: Tuple[int, int], axis: int = None) -> None:
    """Resize the backing HDF5 dataset, optionally along a single axis.

    The main matrix (name == "") lives at /matrix; named layers live
    under /layers/<name>.
    """
    path = '/matrix' if self.name == "" else '/layers/' + self.name
    self.ds._file[path].resize(size, axis)
Resize the dataset or the specified axis .
56,121
def is_datafile_valid(datafile):
    """Given a datafile (JSON string), determine whether it is valid.

    Returns:
        True if the datafile parses as JSON and conforms to the project
        JSON schema, False otherwise.
    """
    try:
        datafile_json = json.loads(datafile)
    except Exception:
        # Was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; Exception is broad enough here
        # (json.loads raises ValueError/TypeError on bad input).
        return False
    try:
        jsonschema.Draft4Validator(constants.JSON_SCHEMA).validate(datafile_json)
    except Exception:
        return False
    return True
Given a datafile determine if it is valid or not .
56,122
def is_user_profile_valid ( user_profile ) : if not user_profile : return False if not type ( user_profile ) is dict : return False if UserProfile . USER_ID_KEY not in user_profile : return False if UserProfile . EXPERIMENT_BUCKET_MAP_KEY not in user_profile : return False experiment_bucket_map = user_profile . get ( U...
Determine if provided user profile is valid or not .
56,123
def is_attribute_valid ( attribute_key , attribute_value ) : if not isinstance ( attribute_key , string_types ) : return False if isinstance ( attribute_value , ( string_types , bool ) ) : return True if isinstance ( attribute_value , ( numbers . Integral , float ) ) : return is_finite_number ( attribute_value ) return...
Determine if given attribute is valid .
56,124
def is_finite_number(value):
    """Validate that `value` is a finite, non-boolean number with |value| <= 2^53."""
    if not isinstance(value, (numbers.Integral, float)):
        return False
    if isinstance(value, bool):
        # bool is a subclass of int — reject it explicitly.
        return False
    if isinstance(value, float) and (math.isnan(value) or math.isinf(value)):
        return False
    if abs(value) > 2 ** 53:
        return False
    return True
Validates that the given value is a number, enforces an absolute limit of 2^53, and rejects NaN, INF and -INF.
56,125
def are_values_same_type ( first_val , second_val ) : first_val_type = type ( first_val ) second_val_type = type ( second_val ) if isinstance ( first_val , string_types ) and isinstance ( second_val , string_types ) : return True if isinstance ( first_val , bool ) or isinstance ( second_val , bool ) : return first_val_...
Method to verify that both values belong to same type . Float and integer are considered as same type .
56,126
def reset_logger(name, level=None, handler=None):
    """Configure a standard python logger with the default formatter and handler.

    NOTE(review): as captured here the function configures the logger in
    place and returns None — confirm against upstream whether it should
    return the logger.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO if level is None else level)
    chosen_handler = handler or logging.StreamHandler()
    chosen_handler.setFormatter(logging.Formatter(_DEFAULT_LOG_FORMAT))
    logger.handlers = [chosen_handler]
Make a standard python logger object with default formatter handler etc .
56,127
def adapt_logger(logger):
    """Adapt a custom BaseLogger object into a standard logging.Logger.

    Standard loggers pass through unchanged; SimpleLogger/NoOpLogger
    wrappers are unwrapped to their inner ``.logger``; anything else is
    returned as-is.
    """
    if isinstance(logger, logging.Logger):
        return logger
    return logger.logger if isinstance(logger, (SimpleLogger, NoOpLogger)) else logger
Adapt our custom logger . BaseLogger object into a standard logging . Logger object .
56,128
def get_variation_for_experiment(self, experiment_id):
    """Return the variation ID stored for the given experiment, or None."""
    bucket_entry = self.experiment_bucket_map.get(experiment_id, {self.VARIATION_ID_KEY: None})
    return bucket_entry.get(self.VARIATION_ID_KEY)
Helper method to retrieve variation ID for given experiment .
56,129
def get_numeric_value ( event_tags , logger = None ) : logger_message_debug = None numeric_metric_value = None if event_tags is None : logger_message_debug = 'Event tags is undefined.' elif not isinstance ( event_tags , dict ) : logger_message_debug = 'Event tags is not a dictionary.' elif NUMERIC_METRIC_TYPE not in ev...
A smart getter of the numeric value from the event tags .
56,130
def hash ( key , seed = 0x0 ) : key = bytearray ( xencode ( key ) ) def fmix ( h ) : h ^= h >> 16 h = ( h * 0x85ebca6b ) & 0xFFFFFFFF h ^= h >> 13 h = ( h * 0xc2b2ae35 ) & 0xFFFFFFFF h ^= h >> 16 return h length = len ( key ) nblocks = int ( length / 4 ) h1 = seed c1 = 0xcc9e2d51 c2 = 0x1b873593 for block_start in xran...
Implements 32bit murmur3 hash .
56,131
def hash64 ( key , seed = 0x0 , x64arch = True ) : hash_128 = hash128 ( key , seed , x64arch ) unsigned_val1 = hash_128 & 0xFFFFFFFFFFFFFFFF if unsigned_val1 & 0x8000000000000000 == 0 : signed_val1 = unsigned_val1 else : signed_val1 = - ( ( unsigned_val1 ^ 0xFFFFFFFFFFFFFFFF ) + 1 ) unsigned_val2 = ( hash_128 >> 64 ) &...
Implements 64bit murmur3 hash . Returns a tuple .
56,132
def hash_bytes(key, seed=0x0, x64arch=True):
    """Implements the 128-bit murmur3 hash and returns the digest as a
    16-character byte string, least-significant byte first.
    """
    hash_128 = hash128(key, seed, x64arch)
    chars = []
    # `range` instead of `xrange(0, 16, 1)`: xrange is Python-2-only and
    # raises NameError on Python 3; iteration results are identical.
    for _ in range(16):
        chars.append(chr(hash_128 & 0xFF))
        hash_128 >>= 8
    # ''.join avoids the quadratic repeated-concatenation of the original.
    return ''.join(chars)
Implements 128bit murmur3 hash . Returns a byte string .
56,133
def _generate_bucket_value(self, bucketing_id):
    """Map a bucketing ID into the half-open interval [0, MAX_TRAFFIC_VALUE)."""
    hash_code = float(self._generate_unsigned_hash_code_32_bit(bucketing_id))
    ratio = hash_code / MAX_HASH_VALUE
    return math.floor(ratio * MAX_TRAFFIC_VALUE)
Helper function to generate bucket value in half - closed interval [ 0 MAX_TRAFFIC_VALUE ) .
56,134
def find_bucket ( self , bucketing_id , parent_id , traffic_allocations ) : bucketing_key = BUCKETING_ID_TEMPLATE . format ( bucketing_id = bucketing_id , parent_id = parent_id ) bucketing_number = self . _generate_bucket_value ( bucketing_key ) self . config . logger . debug ( 'Assigned bucket %s to user with bucketin...
Determine entity based on bucket value and traffic allocations .
56,135
def bucket ( self , experiment , user_id , bucketing_id ) : if not experiment : return None if experiment . groupPolicy in GROUP_POLICIES : group = self . config . get_group ( experiment . groupId ) if not group : return None user_experiment_id = self . find_bucket ( bucketing_id , experiment . groupId , group . traffi...
For a given experiment and bucketing ID determines variation to be shown to user .
56,136
def _generate_key_map ( entity_list , key , entity_class ) : key_map = { } for obj in entity_list : key_map [ obj [ key ] ] = entity_class ( ** obj ) return key_map
Helper method to generate map from key to entity object for given list of dicts .
56,137
def _deserialize_audience(audience_map):
    """Populate every audience in the map with its parsed condition data.

    Each audience's serialized ``conditions`` string is parsed via
    ``condition_helper.loads`` and the resulting structure/list are
    attached as ``conditionStructure`` / ``conditionList``.
    """
    for audience in audience_map.values():
        structure, condition_list = condition_helper.loads(audience.conditions)
        audience.__dict__.update({
            'conditionStructure': structure,
            'conditionList': condition_list,
        })
    return audience_map
Helper method to de - serialize and populate audience map with the condition list and structure .
56,138
def get_typecast_value(self, value, type):
    """Cast a raw (string) feature-variable value according to its declared type.

    NOTE: `type` shadows the builtin, but the name is part of the
    original signature and is kept for compatibility.
    """
    if type == entities.Variable.Type.BOOLEAN:
        return value == 'true'
    if type == entities.Variable.Type.INTEGER:
        return int(value)
    if type == entities.Variable.Type.DOUBLE:
        return float(value)
    return value
Helper method to determine actual value based on type of feature variable .
56,139
def get_experiment_from_key ( self , experiment_key ) : experiment = self . experiment_key_map . get ( experiment_key ) if experiment : return experiment self . logger . error ( 'Experiment key "%s" is not in datafile.' % experiment_key ) self . error_handler . handle_error ( exceptions . InvalidExperimentException ( e...
Get experiment for the provided experiment key .
56,140
def get_experiment_from_id ( self , experiment_id ) : experiment = self . experiment_id_map . get ( experiment_id ) if experiment : return experiment self . logger . error ( 'Experiment ID "%s" is not in datafile.' % experiment_id ) self . error_handler . handle_error ( exceptions . InvalidExperimentException ( enums ....
Get experiment for the provided experiment ID .
56,141
def get_group(self, group_id):
    """Return the group for the given ID, or None (error logged and handled)."""
    group = self.group_id_map.get(group_id)
    if not group:
        self.logger.error('Group ID "%s" is not in datafile.' % group_id)
        self.error_handler.handle_error(
            exceptions.InvalidGroupException(enums.Errors.INVALID_GROUP_ID_ERROR)
        )
        return None
    return group
Get group for the provided group ID .
56,142
def get_audience ( self , audience_id ) : audience = self . audience_id_map . get ( audience_id ) if audience : return audience self . logger . error ( 'Audience ID "%s" is not in datafile.' % audience_id ) self . error_handler . handle_error ( exceptions . InvalidAudienceException ( ( enums . Errors . INVALID_AUDIENCE...
Get audience object for the provided audience ID .
56,143
def get_variation_from_key ( self , experiment_key , variation_key ) : variation_map = self . variation_key_map . get ( experiment_key ) if variation_map : variation = variation_map . get ( variation_key ) if variation : return variation else : self . logger . error ( 'Variation key "%s" is not in datafile.' % variatio...
Get variation given experiment and variation key .
56,144
def get_variation_from_id ( self , experiment_key , variation_id ) : variation_map = self . variation_id_map . get ( experiment_key ) if variation_map : variation = variation_map . get ( variation_id ) if variation : return variation else : self . logger . error ( 'Variation ID "%s" is not in datafile.' % variation_id ...
Get variation given experiment and variation ID .
56,145
def get_event(self, event_key):
    """Return the event for the given key, or None (error logged and handled)."""
    event = self.event_key_map.get(event_key)
    if not event:
        self.logger.error('Event "%s" is not in datafile.' % event_key)
        self.error_handler.handle_error(
            exceptions.InvalidEventException(enums.Errors.INVALID_EVENT_KEY_ERROR)
        )
        return None
    return event
Get event for the provided event key .
56,146
def get_attribute_id ( self , attribute_key ) : attribute = self . attribute_key_map . get ( attribute_key ) has_reserved_prefix = attribute_key . startswith ( RESERVED_ATTRIBUTE_PREFIX ) if attribute : if has_reserved_prefix : self . logger . warning ( ( 'Attribute %s unexpectedly has reserved prefix %s; using attribu...
Get attribute ID for the provided attribute key .
56,147
def get_feature_from_key(self, feature_key):
    """Return the feature flag for the given key, logging an error if absent."""
    feature = self.feature_key_map.get(feature_key)
    if not feature:
        self.logger.error('Feature "%s" is not in datafile.' % feature_key)
        return None
    return feature
Get feature for the provided feature key .
56,148
def get_rollout_from_id(self, rollout_id):
    """Return the rollout (layer) for the given ID, logging an error if absent."""
    layer = self.rollout_id_map.get(rollout_id)
    if not layer:
        self.logger.error('Rollout with ID "%s" is not in datafile.' % rollout_id)
        return None
    return layer
Get rollout for the provided ID .
56,149
def get_variable_value_for_variation ( self , variable , variation ) : if not variable or not variation : return None if variation . id not in self . variation_variable_usage_map : self . logger . error ( 'Variation with ID "%s" is not in the datafile.' % variation . id ) return None variable_usages = self . variation_...
Get the variable value for the given variation .
56,150
def get_variable_for_feature ( self , feature_key , variable_key ) : feature = self . feature_key_map . get ( feature_key ) if not feature : self . logger . error ( 'Feature with key "%s" not found in the datafile.' % feature_key ) return None if variable_key not in feature . variables : self . logger . error ( 'Variab...
Get the variable with the given variable key for the given feature .
56,151
def set_forced_variation ( self , experiment_key , user_id , variation_key ) : experiment = self . get_experiment_from_key ( experiment_key ) if not experiment : return False experiment_id = experiment . id if variation_key is None : if user_id in self . forced_variation_map : experiment_to_variation_map = self . force...
Sets users to a map of experiments to forced variations .
56,152
def get_forced_variation ( self , experiment_key , user_id ) : if user_id not in self . forced_variation_map : self . logger . debug ( 'User "%s" is not in the forced variation map.' % user_id ) return None experiment = self . get_experiment_from_key ( experiment_key ) if not experiment : return None experiment_to_vari...
Gets the forced variation key for the given user and experiment .
56,153
def dispatch_event ( event ) : try : if event . http_verb == enums . HTTPVerbs . GET : requests . get ( event . url , params = event . params , timeout = REQUEST_TIMEOUT ) . raise_for_status ( ) elif event . http_verb == enums . HTTPVerbs . POST : requests . post ( event . url , data = json . dumps ( event . params ) ,...
Dispatch the event being represented by the Event object .
56,154
def _validate_instantiation_options ( self , datafile , skip_json_validation ) : if not skip_json_validation and not validator . is_datafile_valid ( datafile ) : raise exceptions . InvalidInputException ( enums . Errors . INVALID_INPUT_ERROR . format ( 'datafile' ) ) if not validator . is_event_dispatcher_valid ( self ...
Helper method to validate all instantiation parameters .
56,155
def _validate_user_inputs ( self , attributes = None , event_tags = None ) : if attributes and not validator . are_attributes_valid ( attributes ) : self . logger . error ( 'Provided attributes are in an invalid format.' ) self . error_handler . handle_error ( exceptions . InvalidAttributeException ( enums . Errors . I...
Helper method to validate user inputs .
56,156
def _send_impression_event ( self , experiment , variation , user_id , attributes ) : impression_event = self . event_builder . create_impression_event ( experiment , variation . id , user_id , attributes ) self . logger . debug ( 'Dispatching impression event to URL %s with params %s.' % ( impression_event . url , imp...
Helper method to send impression event .
56,157
def _get_feature_variable_for_type ( self , feature_key , variable_key , variable_type , user_id , attributes ) : if not validator . is_non_empty_string ( feature_key ) : self . logger . error ( enums . Errors . INVALID_INPUT_ERROR . format ( 'feature_key' ) ) return None if not validator . is_non_empty_string ( variab...
Helper method to determine value for a certain variable attached to a feature flag based on type of variable .
56,158
def activate ( self , experiment_key , user_id , attributes = None ) : if not self . is_valid : self . logger . error ( enums . Errors . INVALID_DATAFILE . format ( 'activate' ) ) return None if not validator . is_non_empty_string ( experiment_key ) : self . logger . error ( enums . Errors . INVALID_INPUT_ERROR . forma...
Buckets visitor and sends impression event to Optimizely .
56,159
def track ( self , event_key , user_id , attributes = None , event_tags = None ) : if not self . is_valid : self . logger . error ( enums . Errors . INVALID_DATAFILE . format ( 'track' ) ) return if not validator . is_non_empty_string ( event_key ) : self . logger . error ( enums . Errors . INVALID_INPUT_ERROR . format...
Send conversion event to Optimizely .
56,160
def get_variation ( self , experiment_key , user_id , attributes = None ) : if not self . is_valid : self . logger . error ( enums . Errors . INVALID_DATAFILE . format ( 'get_variation' ) ) return None if not validator . is_non_empty_string ( experiment_key ) : self . logger . error ( enums . Errors . INVALID_INPUT_ERR...
Gets variation where user will be bucketed .
56,161
def is_feature_enabled ( self , feature_key , user_id , attributes = None ) : if not self . is_valid : self . logger . error ( enums . Errors . INVALID_DATAFILE . format ( 'is_feature_enabled' ) ) return False if not validator . is_non_empty_string ( feature_key ) : self . logger . error ( enums . Errors . INVALID_INPU...
Returns true if the feature is enabled for the given user .
56,162
def get_enabled_features ( self , user_id , attributes = None ) : enabled_features = [ ] if not self . is_valid : self . logger . error ( enums . Errors . INVALID_DATAFILE . format ( 'get_enabled_features' ) ) return enabled_features if not isinstance ( user_id , string_types ) : self . logger . error ( enums . Errors ...
Returns the list of features that are enabled for the user .
56,163
def get_feature_variable_boolean(self, feature_key, variable_key, user_id, attributes=None):
    """Return the value of a boolean variable attached to a feature flag."""
    return self._get_feature_variable_for_type(
        feature_key, variable_key, entities.Variable.Type.BOOLEAN, user_id, attributes
    )
Returns value for a certain boolean variable attached to a feature flag .
56,164
def get_feature_variable_double(self, feature_key, variable_key, user_id, attributes=None):
    """Return the value of a double variable attached to a feature flag."""
    return self._get_feature_variable_for_type(
        feature_key, variable_key, entities.Variable.Type.DOUBLE, user_id, attributes
    )
Returns value for a certain double variable attached to a feature flag .
56,165
def get_feature_variable_integer(self, feature_key, variable_key, user_id, attributes=None):
    """Return the value of an integer variable attached to a feature flag."""
    return self._get_feature_variable_for_type(
        feature_key, variable_key, entities.Variable.Type.INTEGER, user_id, attributes
    )
Returns value for a certain integer variable attached to a feature flag .
56,166
def get_feature_variable_string(self, feature_key, variable_key, user_id, attributes=None):
    """Return the value of a string variable attached to a feature flag."""
    return self._get_feature_variable_for_type(
        feature_key, variable_key, entities.Variable.Type.STRING, user_id, attributes
    )
Returns value for a certain string variable attached to a feature .
56,167
def set_forced_variation ( self , experiment_key , user_id , variation_key ) : if not self . is_valid : self . logger . error ( enums . Errors . INVALID_DATAFILE . format ( 'set_forced_variation' ) ) return False if not validator . is_non_empty_string ( experiment_key ) : self . logger . error ( enums . Errors . INVALI...
Force a user into a variation for a given experiment .
56,168
def get_forced_variation ( self , experiment_key , user_id ) : if not self . is_valid : self . logger . error ( enums . Errors . INVALID_DATAFILE . format ( 'get_forced_variation' ) ) return None if not validator . is_non_empty_string ( experiment_key ) : self . logger . error ( enums . Errors . INVALID_INPUT_ERROR . f...
Gets the forced variation for a given user and experiment .
56,169
def is_user_in_experiment ( config , experiment , attributes , logger ) : audience_conditions = experiment . getAudienceConditionsOrIds ( ) logger . debug ( audience_logs . EVALUATING_AUDIENCES_COMBINED . format ( experiment . key , json . dumps ( audience_conditions ) ) ) if audience_conditions is None or audience_con...
Determine for given experiment if user satisfies the audiences for the experiment .
56,170
def _get_common_params ( self , user_id , attributes ) : commonParams = { } commonParams [ self . EventParams . PROJECT_ID ] = self . _get_project_id ( ) commonParams [ self . EventParams . ACCOUNT_ID ] = self . _get_account_id ( ) visitor = { } visitor [ self . EventParams . END_USER_ID ] = user_id visitor [ self . Ev...
Get params which are used same in both conversion and impression events .
56,171
def _get_required_params_for_impression ( self , experiment , variation_id ) : snapshot = { } snapshot [ self . EventParams . DECISIONS ] = [ { self . EventParams . EXPERIMENT_ID : experiment . id , self . EventParams . VARIATION_ID : variation_id , self . EventParams . CAMPAIGN_ID : experiment . layerId } ] snapshot [...
Get parameters that are required for the impression event to register .
56,172
def _get_required_params_for_conversion ( self , event_key , event_tags ) : snapshot = { } event_dict = { self . EventParams . EVENT_ID : self . config . get_event ( event_key ) . id , self . EventParams . TIME : self . _get_time ( ) , self . EventParams . KEY : event_key , self . EventParams . UUID : str ( uuid . uuid...
Get parameters that are required for the conversion event to register .
56,173
def create_impression_event ( self , experiment , variation_id , user_id , attributes ) : params = self . _get_common_params ( user_id , attributes ) impression_params = self . _get_required_params_for_impression ( experiment , variation_id ) params [ self . EventParams . USERS ] [ 0 ] [ self . EventParams . SNAPSHOTS ...
Create impression Event to be sent to the logging endpoint .
56,174
def create_conversion_event ( self , event_key , user_id , attributes , event_tags ) : params = self . _get_common_params ( user_id , attributes ) conversion_params = self . _get_required_params_for_conversion ( event_key , event_tags ) params [ self . EventParams . USERS ] [ 0 ] [ self . EventParams . SNAPSHOTS ] . ap...
Create conversion Event to be sent to the logging endpoint .
56,175
def _audience_condition_deserializer ( obj_dict ) : return [ obj_dict . get ( 'name' ) , obj_dict . get ( 'value' ) , obj_dict . get ( 'type' ) , obj_dict . get ( 'match' ) ]
Deserializer defining how dict objects need to be decoded for audience conditions .
56,176
def _get_condition_json ( self , index ) : condition = self . condition_data [ index ] condition_log = { 'name' : condition [ 0 ] , 'value' : condition [ 1 ] , 'type' : condition [ 2 ] , 'match' : condition [ 3 ] } return json . dumps ( condition_log )
Method to generate json for logging audience condition .
56,177
def is_value_type_valid_for_exact_conditions(self, value):
    """Return True if `value` (a string or a number) can be used in an
    exact-match evaluation.

    Note: booleans pass this check too (bool is a subclass of int),
    matching the original behavior.
    """
    return isinstance(value, (string_types, numbers.Integral, float))
Method to validate if the value is valid for exact match type evaluation .
56,178
def exists_evaluator(self, index):
    """Return True if the user supplied a non-None value for the condition's attribute."""
    attribute_name = self.condition_data[index][0]
    user_value = self.attributes.get(attribute_name)
    return user_value is not None
Evaluate the given exists match condition for the user attributes .
56,179
def greater_than_evaluator ( self , index ) : condition_name = self . condition_data [ index ] [ 0 ] condition_value = self . condition_data [ index ] [ 1 ] user_value = self . attributes . get ( condition_name ) if not validator . is_finite_number ( condition_value ) : self . logger . warning ( audience_logs . UNKNOWN...
Evaluate the given greater than match condition for the user attributes .
56,180
def substring_evaluator ( self , index ) : condition_name = self . condition_data [ index ] [ 0 ] condition_value = self . condition_data [ index ] [ 1 ] user_value = self . attributes . get ( condition_name ) if not isinstance ( condition_value , string_types ) : self . logger . warning ( audience_logs . UNKNOWN_CONDI...
Evaluate the given substring match condition for the given user attributes .
56,181
def evaluate ( self , index ) : if self . condition_data [ index ] [ 2 ] != self . CUSTOM_ATTRIBUTE_CONDITION_TYPE : self . logger . warning ( audience_logs . UNKNOWN_CONDITION_TYPE . format ( self . _get_condition_json ( index ) ) ) return None condition_match = self . condition_data [ index ] [ 3 ] if condition_match...
Given a custom attribute audience condition and user attributes evaluate the condition against the attributes .
56,182
def object_hook(self, object_dict):
    """JSONDecoder hook: decode each dict via `self.decoder`, append the
    resulting condition object to `self.condition_list`, and return its
    (incremented) index so the dict is replaced by an integer in the
    decoded structure.
    """
    self.condition_list.append(self.decoder(object_dict))
    self.index += 1
    return self.index
Hook which when passed into a json . JSONDecoder will replace each dict in a json string with its index and convert the dict to an object as defined by the passed in condition_decoder . The newly created condition object is appended to the conditions_list .
56,183
def _get_bucketing_id ( self , user_id , attributes ) : attributes = attributes or { } bucketing_id = attributes . get ( enums . ControlAttributes . BUCKETING_ID ) if bucketing_id is not None : if isinstance ( bucketing_id , string_types ) : return bucketing_id self . logger . warning ( 'Bucketing ID attribute is not a...
Helper method to determine bucketing ID for the user .
56,184
def get_forced_variation ( self , experiment , user_id ) : forced_variations = experiment . forcedVariations if forced_variations and user_id in forced_variations : variation_key = forced_variations . get ( user_id ) variation = self . config . get_variation_from_key ( experiment . key , variation_key ) if variation : ...
Determine if a user is forced into a variation for the given experiment and return that variation .
56,185
def get_stored_variation ( self , experiment , user_profile ) : user_id = user_profile . user_id variation_id = user_profile . get_variation_for_experiment ( experiment . id ) if variation_id : variation = self . config . get_variation_from_id ( experiment . key , variation_id ) if variation : self . logger . info ( 'F...
Determine if the user has a stored variation available for the given experiment and return that .
56,186
def get_variation ( self , experiment , user_id , attributes , ignore_user_profile = False ) : if not experiment_helper . is_experiment_running ( experiment ) : self . logger . info ( 'Experiment "%s" is not running.' % experiment . key ) return None variation = self . config . get_forced_variation ( experiment . key ,...
Top - level function to help determine variation user should be put in .
56,187
def get_experiment_in_group ( self , group , bucketing_id ) : experiment_id = self . bucketer . find_bucket ( bucketing_id , group . id , group . trafficAllocation ) if experiment_id : experiment = self . config . get_experiment_from_id ( experiment_id ) if experiment : self . logger . info ( 'User with bucketing ID "%...
Determine which experiment in the group the user is bucketed into .
56,188
def add_notification_listener ( self , notification_type , notification_callback ) : if notification_type not in self . notifications : self . notifications [ notification_type ] = [ ( self . notification_id , notification_callback ) ] else : if reduce ( lambda a , b : a + 1 , filter ( lambda tup : tup [ 1 ] == notific...
Add a notification callback to the notification center .
56,189
def remove_notification_listener(self, notification_id):
    """Remove a previously added notification callback.

    Scans every notification type's listener list for an entry whose id
    matches *notification_id* and removes the first one found.

    Returns:
      True if a listener was found and removed, False otherwise.
    """
    for listeners in self.notifications.values():
        for entry in listeners:
            if entry[0] == notification_id:
                listeners.remove(entry)
                return True
    return False
Remove a previously added notification callback .
56,190
def send_notifications(self, notification_type, *args):
    """Fires off the notification for the specific event.

    Uses varargs to pass an arbitrary list of parameters according to
    which notification type was fired.

    Args:
      notification_type: The type of notification whose listeners to invoke.
      args: Variable argument list forwarded to each listener callback.
    """
    if notification_type in self.notifications:
        for notification_id, callback in self.notifications[notification_type]:
            try:
                callback(*args)
            # A bare "except:" would also swallow SystemExit and
            # KeyboardInterrupt; catch only real errors so a failing
            # listener is logged without breaking the other listeners.
            except Exception:
                self.logger.exception('Problem calling notify callback!')
Fires off the notification for the specific event. Uses varargs to pass in an arbitrary list of parameters according to which notification type was fired.
56,191
def and_evaluator(conditions, leaf_evaluator):
    """Evaluate a list of conditions with the results AND-ed together.

    Returns:
      False as soon as any condition evaluates to False; otherwise None
      if any condition evaluated to None (unknown), else True.
    """
    found_null = False
    for entry in conditions:
        outcome = evaluate(entry, leaf_evaluator)
        if outcome is False:
            return False
        if outcome is None:
            found_null = True
    return True if not found_null else None
Evaluates a list of conditions as if the evaluator had been applied to each entry and the results AND - ed together .
56,192
def not_evaluator(conditions, leaf_evaluator):
    """Evaluate the first condition and apply NOT to the result.

    Returns:
      None for an empty list or when the inner result is None (unknown);
      otherwise the boolean negation of the first condition's result.
    """
    if len(conditions) == 0:
        return None
    outcome = evaluate(conditions[0], leaf_evaluator)
    if outcome is None:
        return None
    return not outcome
Evaluates a list of conditions as if the evaluator had been applied to a single entry and NOT was applied to the result .
56,193
def evaluate ( conditions , leaf_evaluator ) : if isinstance ( conditions , list ) : if conditions [ 0 ] in list ( EVALUATORS_BY_OPERATOR_TYPE . keys ( ) ) : return EVALUATORS_BY_OPERATOR_TYPE [ conditions [ 0 ] ] ( conditions [ 1 : ] , leaf_evaluator ) else : return EVALUATORS_BY_OPERATOR_TYPE [ ConditionOperatorTypes...
Top level method to evaluate conditions .
56,194
def data_objet_class ( data_mode = 'value' , time_mode = 'framewise' ) : classes_table = { ( 'value' , 'global' ) : GlobalValueObject , ( 'value' , 'event' ) : EventValueObject , ( 'value' , 'segment' ) : SegmentValueObject , ( 'value' , 'framewise' ) : FrameValueObject , ( 'label' , 'global' ) : GlobalLabelObject , ( ...
Factory function for Analyzer result
56,195
def JSON_NumpyArrayEncoder(obj):
    """Specialized JSON ``default`` hook serializing numpy values.

    Arrays are encoded as ``{'numpyArray': <nested list>, 'dtype': <dtype
    string>}``; numpy scalars are converted to the equivalent native
    Python scalar.

    Raises:
      TypeError: if *obj* is neither a numpy array nor a numpy scalar.
    """
    if isinstance(obj, np.ndarray):
        return {'numpyArray': obj.tolist(), 'dtype': str(obj.dtype)}
    elif isinstance(obj, np.generic):
        # np.asscalar was deprecated and removed (NumPy 1.23);
        # .item() is the supported equivalent.
        return obj.item()
    else:
        # Dropped the stray Python-2 "print type(obj)" debug statement;
        # the type is already carried by repr() in the error message.
        raise TypeError(repr(obj) + " is not JSON serializable")
Define a specialized JSON encoder hook for numpy arrays and scalars.
56,196
def render(self):
    """Render a matplotlib figure from the analyzer result."""
    figure, axes = plt.subplots()
    self.data_object._render_plot(axes)
    return figure
Render a matplotlib figure from the analyzer result
56,197
def new_result ( self , data_mode = 'value' , time_mode = 'framewise' ) : from datetime import datetime result = AnalyzerResult ( data_mode = data_mode , time_mode = time_mode ) result . id_metadata . date = datetime . now ( ) . replace ( microsecond = 0 ) . isoformat ( ' ' ) result . id_metadata . version = timeside ....
Create a new result
56,198
def downmix_to_mono(process_func):
    """Pre-processing decorator that downmixes frames from
    multi-channel to mono before calling the decorated processor.

    The wrapped function receives the mono frames, while the original
    (possibly multi-channel) frames are passed through in the return
    value together with the end-of-data flag.
    """
    import functools

    @functools.wraps(process_func)
    def wrapper(analyzer, frames, eod):
        # Average over the last axis only when more than one channel
        # is present; mono input is forwarded untouched.
        mono = frames.mean(axis=-1) if frames.ndim > 1 else frames
        process_func(analyzer, mono, eod)
        return frames, eod

    return wrapper
Pre - processing decorator that downmixes frames from multi - channel to mono
56,199
def frames_adapter ( process_func ) : import functools import numpy as np class framesBuffer ( object ) : def __init__ ( self , blocksize , stepsize ) : self . blocksize = blocksize self . stepsize = stepsize self . buffer = None def frames ( self , frames , eod ) : if self . buffer is not None : stack = np . concatena...
Pre - processing decorator that adapt frames to match input_blocksize and input_stepsize of the decorated analyzer