def refresh(self):
    client = self._get_client()
    endpoint = self._endpoint.format(
        resource_id=self.resource_id or "",
        parent_id=self.parent_id or "",
        grandparent_id=self.grandparent_id or "")
    response = client.get_resource(endpoint)
    self._reset_model(response)
def commit(self, if_match=None, wait=True, timeout=None):
    if not self._changes:
        LOG.debug("No changes available for %s: %s",
                  self.__class__.__name__, self.resource_id)
        return

    LOG.debug("Apply all the changes on the current %s: %s",
              self.__class__.__name__, self.resource_id)
    client = self._get_client()
    endpoint = self._endpoint.format(
        resource_id=self.resource_id or "",
        parent_id=self.parent_id or "",
        grandparent_id=self.grandparent_id or "")
    request_body = self.dump(include_read_only=False)
    response = client.update_resource(endpoint, data=request_body,
                                      if_match=if_match)

    elapsed_time = 0
    while wait:
        self.refresh()
        if self.is_ready():
            break
        elapsed_time += CONFIG.HNV.retry_interval
        if timeout and elapsed_time > timeout:
            raise exception.TimeOut("The request timed out.")
        time.sleep(CONFIG.HNV.retry_interval)
    else:
        # The ``else`` branch runs only when the loop exits without a
        # ``break``, i.e. when ``wait`` is falsy and the body never ran.
        self._reset_model(response)

    return self
def _set_fields(self, fields):
    super(_BaseHNVModel, self)._set_fields(fields)
    if not self.resource_ref:
        endpoint = self._endpoint.format(
            resource_id=self.resource_id,
            parent_id=self.parent_id,
            grandparent_id=self.grandparent_id)
        self.resource_ref = re.sub("(/networking/v[0-9]+)", "", endpoint)
def get_resource(self):
    references = {"resource_id": None, "parent_id": None,
                  "grandparent_id": None}
    for model_cls, regexp in self._regexp.iteritems():
        match = regexp.search(self.resource_ref)
        if match is not None:
            references.update(match.groupdict())
            return model_cls.get(**references)
    raise exception.NotFound("No model available for %(resource_ref)r",
                             resource_ref=self.resource_ref)
def _get_nr_bins(count):
    if count <= 30:
        # square-root rule for small sample sizes
        k = np.ceil(np.sqrt(count))
    else:
        # Sturges' rule for larger samples
        k = np.ceil(np.log2(count)) + 1
    return int(k)
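# A quick sanity check of the two regimes above (a minimal sketch; the
# cutover at 30 samples comes straight from _get_nr_bins):
import numpy as np

# 25 samples: square-root rule -> ceil(sqrt(25)) = 5 bins
assert _get_nr_bins(25) == 5
# 1000 samples: Sturges' rule -> ceil(log2(1000)) + 1 = 11 bins
assert _get_nr_bins(1000) == 11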
def plot_histograms(ertobj, keys, **kwargs):
    if isinstance(ertobj, pd.DataFrame):
        df = ertobj
    else:
        df = ertobj.data

    if df.shape[0] == 0:
        raise Exception('No data present, cannot plot')

    if isinstance(keys, str):
        keys = [keys, ]

    figures = {}
    merge_figs = kwargs.get('merge', True)
    if merge_figs:
        nr_x = 2
        nr_y = len(keys)
        size_x = 15 / 2.54
        size_y = 5 * nr_y / 2.54
        fig, axes_all = plt.subplots(nr_y, nr_x, figsize=(size_x, size_y))
        axes_all = np.atleast_2d(axes_all)

    for row_nr, key in enumerate(keys):
        print('Generating histogram plot for key: {0}'.format(key))
        subdata_raw = df[key].values
        subdata = subdata_raw[~np.isnan(subdata_raw)]
        subdata = subdata[np.isfinite(subdata)]

        subdata_log10_with_nan = np.log10(subdata[subdata > 0])
        subdata_log10 = subdata_log10_with_nan[
            ~np.isnan(subdata_log10_with_nan)]
        subdata_log10 = subdata_log10[np.isfinite(subdata_log10)]

        if merge_figs:
            axes = axes_all[row_nr].squeeze()
        else:
            fig, axes = plt.subplots(1, 2, figsize=(10 / 2.54, 5 / 2.54))

        ax = axes[0]
        ax.hist(subdata, _get_nr_bins(subdata.size))
        ax.set_xlabel(units.get_label(key))
        ax.set_ylabel('count')
        ax.xaxis.set_major_locator(mpl.ticker.MaxNLocator(5))
        ax.tick_params(axis='both', which='major', labelsize=6)
        ax.tick_params(axis='both', which='minor', labelsize=6)

        if subdata_log10.size > 0:
            ax = axes[1]
            ax.hist(subdata_log10, _get_nr_bins(subdata.size))
            ax.set_xlabel(r'$log_{10}($' + units.get_label(key) + ')')
            ax.set_ylabel('count')
            ax.xaxis.set_major_locator(mpl.ticker.MaxNLocator(5))

        fig.tight_layout()
        if not merge_figs:
            figures[key] = fig

    if merge_figs:
        figures['all'] = fig
    return figures
def plot_histograms_extra_dims(dataobj, keys, **kwargs):
    if isinstance(dataobj, pd.DataFrame):
        df_raw = dataobj
    else:
        df_raw = dataobj.data

    if kwargs.get('subquery', False):
        df = df_raw.query(kwargs.get('subquery'))
    else:
        df = df_raw

    split_timestamps = True
    if split_timestamps:
        group_timestamps = df.groupby('timestep')
        N_ts = len(group_timestamps.groups.keys())
    else:
        group_timestamps = ('all', df)
        N_ts = 1

    columns = keys
    N_c = len(columns)

    plot_log10 = kwargs.get('log10plot', False)
    if plot_log10:
        transformers = ['lin', 'log10']
        N_log10 = 2
    else:
        transformers = ['lin', ]
        N_log10 = 1

    Nx_max = kwargs.get('Nx', 4)
    N = N_ts * N_c * N_log10
    Nx = min(Nx_max, N)
    Ny = int(np.ceil(N / Nx))

    size_x = 5 * Nx / 2.54
    size_y = 5 * Ny / 2.54
    fig, axes = plt.subplots(Ny, Nx, figsize=(size_x, size_y),
                             sharex=True, sharey=True)
    axes = np.atleast_2d(axes)

    index = 0
    for ts_name, tgroup in group_timestamps:
        for column in columns:
            for transformer in transformers:
                subdata_raw = tgroup[column].values
                subdata = subdata_raw[~np.isnan(subdata_raw)]
                subdata = subdata[np.isfinite(subdata)]

                if transformer == 'log10':
                    subdata_log10_with_nan = np.log10(subdata[subdata > 0])
                    subdata_log10 = subdata_log10_with_nan[
                        ~np.isnan(subdata_log10_with_nan)]
                    subdata_log10 = subdata_log10[
                        np.isfinite(subdata_log10)]
                    subdata = subdata_log10

                ax = axes.flat[index]
                ax.hist(subdata, _get_nr_bins(subdata.size))
                ax.set_xlabel(units.get_label(column))
                ax.set_ylabel('count')
                ax.xaxis.set_major_locator(mpl.ticker.MaxNLocator(3))
                ax.tick_params(axis='both', which='major', labelsize=6)
                ax.tick_params(axis='both', which='minor', labelsize=6)
                ax.set_title("timestep: %d" % ts_name)
                index += 1

    # keep axis labels only on the outer row/column of the shared grid
    for ax in axes[:, 1:].flat:
        ax.set_ylabel('')
    for ax in axes[:-1, :].flat:
        ax.set_xlabel('')

    fig.tight_layout()
    return fig
def parse_substring(allele, pred, max_len=None):
    result = ""
    pos = 0
    if max_len is None:
        max_len = len(allele)
    else:
        max_len = min(max_len, len(allele))
    while pos < max_len and pred(allele[pos]):
        result += allele[pos]
        pos += 1
    return result, allele[pos:]
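# A minimal usage sketch (the allele string is illustrative):
# parse_substring consumes the longest prefix satisfying the predicate
# and returns it along with the remainder.
prefix, rest = parse_substring("HLA-A*02:01", str.isalpha)
# prefix == "HLA", rest == "-A*02:01"

digits, rest = parse_substring("02:01", str.isdigit, max_len=2)
# digits == "02", rest == ":01"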
def fetch(self):
    if not self.local_path:
        self.make_local_path()
    fetcher = BookFetcher(self)
    fetcher.fetch()

def make(self):
    logger.debug("preparing to add all git files")
    num_added = self.local_repo.add_all_files()
    if num_added:
        self.local_repo.commit("Initial import from Project Gutenberg")

    file_handler = NewFilesHandler(self)
    file_handler.add_new_files()

    num_added = self.local_repo.add_all_files()
    if num_added:
        self.local_repo.commit(
            "Updates Readme, contributing, license files, cover, metadata.")

def push(self):
    self.github_repo.create_and_push()
    self._repo = self.github_repo.repo
    return self._repo

def tag(self, version='bump', message=''):
    self.clone_from_github()
    self.github_repo.tag(version, message=message)
def format_title(self):
    def asciify(_title):
        _title = unicodedata.normalize('NFD', unicode(_title))
        ascii = True
        out = []
        ok = u"1234567890qwertyuiopasdfghjklzxcvbnmQWERTYUIOPASDFGHJKLZXCVBNM- ',"
        for ch in _title:
            if ch in ok:
                out.append(ch)
            elif unicodedata.category(ch)[0] == 'L':
                # keep non-latin letters as their hex codepoint
                out.append(hex(ord(ch)))
                ascii = False
            elif ch in u'\r\n\t':
                out.append(u'-')
        return (ascii, sub("[ ',-]+", '-', "".join(out)))

    (ascii, _title) = asciify(self.meta.title)
    if not ascii and self.meta.alternative_title:
        (ascii, _title2) = asciify(self.meta.alternative_title)
        if ascii:
            _title = _title2

    title_length = 99 - len(str(self.book_id)) - 1
    if len(_title) > title_length:
        repo_title = "{0}__{1}".format(_title[:title_length], self.book_id)
    else:
        repo_title = "{0}_{1}".format(_title[:title_length], self.book_id)
    logger.debug("%s %s" % (len(repo_title), repo_title))
    self.meta.metadata['_repo'] = repo_title
    return repo_title
def _request(self, path, method, body=None):
    url = '/'.join([_SERVER, path])
    (resp, content) = _HTTP.request(url, method, headers=self._headers,
                                    body=body)
    content_type = resp.get('content-type')
    if content_type and content_type.startswith('application/json'):
        content = json.loads(content.decode('UTF-8'))
    return (resp, content)

def put(self, path, payload):
    body = json.dumps(payload)
    return self._request(path, 'PUT', body)

def post(self, path, payload):
    body = json.dumps(payload)
    return self._request(path, 'POST', body)
def create_child(self, modules):
    binder = self._binder.create_child()
    return Injector(modules, binder=binder, stage=self._stage)
def validate(self, message, schema_name):
    err = None
    try:
        jsonschema.validate(message, self.schemas[schema_name])
    except KeyError:
        msg = (f'Schema "{schema_name}" was not found (available: '
               f'{", ".join(self.schemas.keys())})')
        err = {'msg': msg}
    except jsonschema.ValidationError as e:
        msg = (f'Given message was not valid against the schema '
               f'"{schema_name}": {e.message}')
        err = {'msg': msg}
    if err:
        logging.error(**err)
        raise exceptions.InvalidMessageError(err['msg'])
def compose(*functions):
    def inner(func1, func2):
        return lambda *x, **y: func1(func2(*x, **y))
    return functools.reduce(inner, functions)
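# Composition is right-to-left: the last function receives the arguments
# and each preceding one wraps the result. A minimal sketch:
import functools

double = lambda x: x * 2
increment = lambda x: x + 1

# compose(double, increment)(3) == double(increment(3)) == 8
assert compose(double, increment)(3) == 8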
def validate_instance(instance):
    excludes = settings.AUTOMATED_LOGGING['exclude']['model']
    for excluded in excludes:
        if (excluded in [instance._meta.app_label.lower(),
                         instance.__class__.__name__.lower()]
                or instance.__module__.lower().startswith(excluded)):
            return False
    return True

def get_current_user():
    thread_local = AutomatedLoggingMiddleware.thread_local
    if hasattr(thread_local, 'current_user'):
        user = thread_local.current_user
        if isinstance(user, AnonymousUser):
            user = None
    else:
        user = None
    return user

def get_current_environ():
    thread_local = AutomatedLoggingMiddleware.thread_local
    if hasattr(thread_local, 'request_uri'):
        request_uri = thread_local.request_uri
    else:
        request_uri = None

    if hasattr(thread_local, 'application'):
        application = thread_local.application
        application = Application.objects.get_or_create(name=application)[0]
    else:
        application = None

    if hasattr(thread_local, 'method'):
        method = thread_local.method
    else:
        method = None

    if hasattr(thread_local, 'status'):
        status = thread_local.status
    else:
        status = None

    return request_uri, application, method, status

def processor(status, sender, instance, updated=None, addition=''):
    logger = logging.getLogger(__name__)
    if validate_instance(instance):
        user = get_current_user()
        application = instance._meta.app_label
        model_name = instance.__class__.__name__
        level = settings.AUTOMATED_LOGGING['loglevel']['model']

        if status == 'change':
            corrected = 'changed'
        elif status == 'add':
            corrected = 'added'
        elif status == 'delete':
            corrected = 'deleted'
        else:
            # fall back to the raw status so an unexpected value cannot
            # raise a NameError below
            corrected = status

        logger.log(
            level,
            ('%s %s %s(%s) in %s%s'
             % (user, corrected, instance, model_name,
                application, addition)).replace("  ", " "),
            extra={
                'action': 'model',
                'data': {
                    'status': status,
                    'user': user,
                    'sender': sender,
                    'instance': instance,
                    'update_fields': updated,
                }
            })
def parents(self):
    parents = []
    if self.parent is None:
        return []
    category = self
    while category.parent is not None:
        parents.append(category.parent)
        category = category.parent
    # return ancestors ordered from root to immediate parent
    return parents[::-1]

def root_parent(self, category=None):
    return next(filter(lambda c: c.is_root, self.hierarchy()))
def active(self) -> bool:
    states = self._client.get_state(self._state_url)['states']
    for state in states:
        state = state['State']
        if int(state['Id']) == self._state_id:
            # ZoneMinder reports IsActive as the string "1"
            return state['IsActive'] == "1"
    return False
def to_reasonable_unit(value, units, round_digits=2):
    def to_unit(unit):
        return float(value) / unit[1]

    exponents = [abs(Decimal(to_unit(u)).adjusted() - 1) for u in units]
    best = min(enumerate(exponents), key=itemgetter(1))[0]
    return dict(val=round(to_unit(units[best]), round_digits),
                label=units[best][0],
                multiplier=units[best][1])
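# A minimal usage sketch, assuming `units` is a list of
# (label, multiplier) pairs; the byte units here are illustrative. The
# function picks the unit whose leading digit lands closest to the tens
# place.
from decimal import Decimal
from operator import itemgetter

units = [('B', 1), ('KB', 1024), ('MB', 1024 ** 2)]
result = to_reasonable_unit(1536000, units)
# result == {'val': 1.46, 'label': 'MB', 'multiplier': 1048576}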
def get_text(self):
    done_units = to_reasonable_unit(self.done, self.units)
    current = round(self.current / done_units['multiplier'], 2)
    percent = int(self.current * 100 / self.done)
    return '{0:.2f} of {1:.2f} {2} ({3}%)'.format(
        current, done_units['val'], done_units['label'], percent)

def add_progress(self, delta, done=None):
    if done is not None:
        self.done = done
    self.bar.current = max(min(self.done, self.current + delta), 0)
    self.rate_display.set_text(self.rate_text)
    self.remaining_time_display.set_text(self.remaining_time_text)
    return self.current == self.done
async def valid_token_set(self):
    is_valid = False
    if self._auth_client.token:
        now = datetime.datetime.utcnow()
        # treat the token as invalid 60s before its actual expiry
        skew = datetime.timedelta(seconds=60)
        if self._auth_client.expiry > (now + skew):
            is_valid = True
    return is_valid
async def request(self, method, url, params=None, headers=None, data=None,
                  json=None, token_refresh_attempts=2, **kwargs):
    if all([data, json]):
        msg = ('"data" and "json" request parameters can not be used '
               'at the same time')
        logging.warn(msg)
        raise exceptions.GCPHTTPError(msg)

    req_headers = headers or {}
    req_headers.update(_utils.DEFAULT_REQUEST_HEADERS)
    req_kwargs = {
        'params': params,
        'headers': req_headers,
    }
    if data:
        req_kwargs['data'] = data
    if json:
        req_kwargs['json'] = json

    if token_refresh_attempts:
        if not await self.valid_token_set():
            await self._auth_client.refresh_token()
            token_refresh_attempts -= 1
    req_headers.update(
        {'Authorization': f'Bearer {self._auth_client.token}'})

    request_id = kwargs.get('request_id', uuid.uuid4())
    logging.debug(_utils.REQ_LOG_FMT.format(
        request_id=request_id, method=method.upper(), url=url,
        kwargs=req_kwargs))
    try:
        async with self._session.request(method, url, **req_kwargs) as resp:
            log_kw = {
                'request_id': request_id,
                'method': method.upper(),
                'url': resp.url,
                'status': resp.status,
                'reason': resp.reason,
            }
            logging.debug(_utils.RESP_LOG_FMT.format(**log_kw))

            if resp.status in REFRESH_STATUS_CODES:
                logging.warning(
                    f'[{request_id}] HTTP Status Code {resp.status}'
                    f' returned requesting {resp.url}: {resp.reason}')
                if token_refresh_attempts:
                    logging.info(
                        f'[{request_id}] Attempting request to {resp.url} '
                        'again.')
                    return await self.request(
                        method, url,
                        token_refresh_attempts=token_refresh_attempts,
                        request_id=request_id,
                        **req_kwargs)
                logging.warning(
                    f'[{request_id}] Max attempts refreshing auth token '
                    f'exhausted while requesting {resp.url}')

            resp.raise_for_status()
            return await resp.text()
    except aiohttp.ClientResponseError as e:
        msg = f'[{request_id}] HTTP error response from {resp.url}: {e}'
        logging.error(msg, exc_info=e)
        raise exceptions.GCPHTTPResponseError(msg, resp.status)
    except exceptions.GCPHTTPResponseError as e:
        raise e
    except Exception as e:
        msg = f'[{request_id}] Request call failed: {e}'
        logging.error(msg, exc_info=e)
        raise exceptions.GCPHTTPError(msg)
async def get_json(self, url, json_callback=None, **kwargs):
    if not json_callback:
        json_callback = json.loads
    response = await self.request(method='get', url=url, **kwargs)
    return json_callback(response)

async def get_all(self, url, params=None):
    if not params:
        params = {}
    items = []
    next_page_token = None
    while True:
        if next_page_token:
            params['pageToken'] = next_page_token
        response = await self.get_json(url, params=params)
        items.append(response)
        next_page_token = response.get('nextPageToken')
        if not next_page_token:
            break
    return items
def check_config():
    configfile = ConfigFile()
    global data
    if len(data.keys()) > 0:
        print("gitberg config file exists")
        print("\twould you like to edit your gitberg config file?")
    else:
        print("No config found")
        print("\twould you like to create a gitberg config file?")
    answer = input()
    if not answer:
        answer = 'Y'
    if answer in 'Yy':
        print("Running gitberg config generator ...")
        config_gen = ConfigGenerator(current=data)
        config_gen.ask()
        data = config_gen.answers
        configfile.write()
        print("Config written to {}".format(configfile.file_path))
async def main():
    async with aiohttp.ClientSession() as session:
        data = Luftdaten(SENSOR_ID, loop, session)
        await data.get_data()

        if not await data.validate_sensor():
            print("Station is not available:", data.sensor_id)
            return

        if data.values and data.meta:
            print("Sensor values:", data.values)
            print("Location:", data.meta['latitude'],
                  data.meta['longitude'])
async def list_instances(self, project, page_size=100,
                         instance_filter=None):
    url = (f'{self.BASE_URL}{self.api_version}/projects/{project}'
           '/aggregated/instances')
    params = {'maxResults': page_size}
    if instance_filter:
        params['filter'] = instance_filter
    responses = await self.list_all(url, params)
    instances = self._parse_rsps_for_instances(responses)
    return instances
def infer_alpha_chain(beta):
    if beta.gene.startswith("DRB"):
        return AlleleName(species="HLA", gene="DRA1",
                          allele_family="01", allele_code="01")
    elif beta.gene.startswith("DPB"):
        return AlleleName(species="HLA", gene="DPA1",
                          allele_family="01", allele_code="03")
    elif beta.gene.startswith("DQB"):
        return AlleleName(species="HLA", gene="DQA1",
                          allele_family="01", allele_code="02")
    return None
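# A minimal sketch of the pairing, assuming AlleleName is the
# namedtuple-style record used above with species, gene, allele_family
# and allele_code fields:
beta = AlleleName(species="HLA", gene="DRB1",
                  allele_family="01", allele_code="01")
alpha = infer_alpha_chain(beta)
# a DRB* beta chain is paired with the near-invariant DRA1*01:01:
# alpha == AlleleName(species="HLA", gene="DRA1",
#                     allele_family="01", allele_code="01")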
def create_ticket(subject, tags, ticket_body, requester_email=None,
                  custom_fields=None):
    payload = {
        'ticket': {
            'subject': subject,
            'comment': {'body': ticket_body},
            'group_id': settings.ZENDESK_GROUP_ID,
            'tags': tags,
            'custom_fields': custom_fields or [],
        }
    }
    if requester_email:
        payload['ticket']['requester'] = {
            'name': 'Sender: %s' % requester_email.split('@')[0],
            'email': requester_email,
        }
    else:
        payload['ticket']['requester_id'] = settings.ZENDESK_REQUESTER_ID
    requests.post(get_ticket_endpoint(),
                  data=json.dumps(payload),
                  auth=zendesk_auth(),
                  headers={'content-type': 'application/json'}
                  ).raise_for_status()
def message(message, title=''):
    return backend_api.opendialog("message",
                                  dict(message=message, title=title))

def ask_file(message='Select file for open.', default='', title='',
             save=False):
    return backend_api.opendialog("ask_file",
                                  dict(message=message, default=default,
                                       title=title, save=save))

def ask_folder(message='Select folder.', default='', title=''):
    return backend_api.opendialog("ask_folder",
                                  dict(message=message, default=default,
                                       title=title))

def ask_ok_cancel(message='', default=0, title=''):
    return backend_api.opendialog("ask_ok_cancel",
                                  dict(message=message, default=default,
                                       title=title))

def ask_yes_no(message='', default=0, title=''):
    return backend_api.opendialog("ask_yes_no",
                                  dict(message=message, default=default,
                                       title=title))
def register(self, receiver_id, receiver):
    assert receiver_id not in self.receivers
    self.receivers[receiver_id] = receiver(receiver_id)
def get(self, sched_rule_id):
    path = '/'.join(['schedulerule', sched_rule_id])
    return self.rachio.get(path)

def parse(self, message, schema):
    func = {
        'audit-log': self._parse_audit_log_msg,
        'event': self._parse_event_msg,
    }[schema]
    return func(message)

def start(self, zone_id, duration):
    path = 'zone/start'
    payload = {'id': zone_id, 'duration': duration}
    return self.rachio.put(path, payload)

def startMultiple(self, zones):
    path = 'zone/start_multiple'
    payload = {'zones': zones}
    return self.rachio.put(path, payload)

def get(self, zone_id):
    path = '/'.join(['zone', zone_id])
    return self.rachio.get(path)

def start(self):
    zones = [{"id": data[0], "duration": data[1], "sortOrder": count}
             for (count, data) in enumerate(self._zones, 1)]
    self._api.startMultiple(zones)
def clean_translation(self):
    translation = self.cleaned_data['translation']
    if self.instance and self.instance.content_object:
        obj = self.instance.content_object
        field = obj._meta.get_field(self.instance.field)
        max_length = field.max_length
        if max_length and len(translation) > max_length:
            raise forms.ValidationError(
                _('The entered translation is too long. You entered '
                  '%(entered)s chars, max length is %(maxlength)s') % {
                    'entered': len(translation),
                    'maxlength': max_length,
                })
    else:
        raise forms.ValidationError(
            _('Can not store translation. First create all translations'
              ' for this object'))
    return translation
def _get_merge_rules(properties, path=None):
    if path is None:
        path = ()

    for key, value in properties.items():
        new_path = path + (key,)
        types = _get_types(value)

        if value.get('omitWhenMerged') or value.get('mergeStrategy') == 'ocdsOmit':
            yield (new_path, {'omitWhenMerged'})
        elif 'array' in types and (value.get('wholeListMerge')
                                   or value.get('mergeStrategy') == 'ocdsVersion'):
            yield (new_path, {'wholeListMerge'})
        elif 'object' in types and 'properties' in value:
            yield from _get_merge_rules(value['properties'], path=new_path)
        elif 'array' in types and 'items' in value:
            item_types = _get_types(value['items'])
            # an array of non-objects is merged as a whole list
            if any(item_type != 'object' for item_type in item_types):
                yield (new_path, {'wholeListMerge'})
            elif 'object' in item_types and 'properties' in value['items']:
                # arrays of objects without an "id" cannot be merged
                # item-by-item, so the whole list is replaced
                if 'id' not in value['items']['properties']:
                    yield (new_path, {'wholeListMerge'})
                else:
                    yield from _get_merge_rules(value['items']['properties'],
                                                path=new_path)
def get_merge_rules(schema=None):
    schema = schema or get_release_schema_url(get_tags()[-1])
    if isinstance(schema, dict):
        deref_schema = jsonref.JsonRef.replace_refs(schema)
    else:
        deref_schema = _get_merge_rules_from_url_or_path(schema)
    return dict(_get_merge_rules(deref_schema['properties']))
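# A minimal sketch of the rules these two functions derive, using a toy
# release schema (field names are illustrative, not the real OCDS
# schema; _get_types is assumed to return the schema's "type" values as
# a list):
schema = {
    'properties': {
        'tag': {
            'type': 'array',
            'omitWhenMerged': True,
            'items': {'type': 'string'},
        },
        'parties': {
            'type': 'array',
            'items': {
                'type': 'object',
                'properties': {'name': {'type': 'string'}},
            },
        },
    }
}
# 'tag' is dropped when merging; 'parties' items carry no 'id', so the
# whole list is replaced on each merge:
# get_merge_rules(schema) ==
#     {('tag',): {'omitWhenMerged'}, ('parties',): {'wholeListMerge'}}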
def unflatten(processed, merge_rules):
    unflattened = OrderedDict()
    for key in processed:
        current_node = unflattened
        for end, part in enumerate(key, 1):
            # identifier parts address items in arrays of objects
            if isinstance(part, IdValue):
                for node in current_node:
                    if (isinstance(node, IdDict)
                            and node.identifier == part.identifier):
                        current_node = node
                        break
                else:
                    new_node = IdDict()
                    new_node.identifier = part.identifier
                    if part.original_value is not None:
                        new_node['id'] = part.original_value
                    current_node.append(new_node)
                    current_node = new_node
                continue

            node = current_node.get(part)
            if node is not None:
                current_node = node
                continue

            # at the end of the key path, store the value itself
            if len(key) == end:
                if processed[key] is not None:
                    current_node[part] = processed[key]
                continue

            # otherwise create the intermediate container: a list if the
            # next part is an identifier, a dict otherwise
            if isinstance(key[end], IdValue):
                new_node = []
            else:
                new_node = OrderedDict()
            current_node[part] = new_node
            current_node = new_node
    return unflattened
def merge(releases, schema=None, merge_rules=None):
    if not merge_rules:
        merge_rules = get_merge_rules(schema)

    merged = OrderedDict({('tag',): ['compiled']})
    for release in sorted(releases, key=lambda release: release['date']):
        release = release.copy()

        ocid = release['ocid']
        date = release['date']
        release.pop('tag', None)

        flat = flatten(release, merge_rules)
        processed = process_flattened(flat)

        merged[('id',)] = '{}-{}'.format(ocid, date)
        merged[('date',)] = date
        merged[('ocid',)] = ocid
        merged.update(processed)
    return unflatten(merged, merge_rules)

def merge_versioned(releases, schema=None, merge_rules=None):
    if not merge_rules:
        merge_rules = get_merge_rules(schema)

    merged = OrderedDict()
    for release in sorted(releases, key=lambda release: release['date']):
        release = release.copy()

        ocid = release.pop('ocid')
        merged[('ocid',)] = ocid

        releaseID = release['id']
        date = release['date']
        tag = release.pop('tag', None)

        flat = flatten(release, merge_rules)
        processed = process_flattened(flat)

        for key, value in processed.items():
            # skip values unchanged since the previous release
            if key in merged and value == merged[key][-1]['value']:
                continue
            if key not in merged:
                merged[key] = []
            merged[key].append(OrderedDict([
                ('releaseID', releaseID),
                ('releaseDate', date),
                ('releaseTag', tag),
                ('value', value),
            ]))
    return unflatten(merged, merge_rules)
def chunks(items, size):
    return [items[i:i + size] for i in range(0, len(items), size)]
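# The last chunk simply keeps whatever remains, so it may be shorter
# than `size`:
assert chunks([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]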
def login(self):
    _LOGGER.debug("Attempting to login to ZoneMinder")
    login_post = {'view': 'console', 'action': 'login'}
    if self._username:
        login_post['username'] = self._username
    if self._password:
        login_post['password'] = self._password

    req = requests.post(urljoin(self._server_url, 'index.php'),
                        data=login_post,
                        verify=self._verify_ssl)
    self._cookies = req.cookies

    # a follow-up API call verifies that the login actually succeeded
    req = requests.get(urljoin(self._server_url,
                               'api/host/getVersion.json'),
                       cookies=self._cookies,
                       timeout=ZoneMinder.DEFAULT_TIMEOUT,
                       verify=self._verify_ssl)
    if not req.ok:
        _LOGGER.error("Connection error logging into ZoneMinder")
        return False
    return True
def _zm_request(self, method, api_url, data=None,
                timeout=DEFAULT_TIMEOUT) -> dict:
    try:
        # retry up to LOGIN_RETRIES times, re-authenticating after each
        # failed attempt
        for _ in range(ZoneMinder.LOGIN_RETRIES):
            req = requests.request(method,
                                   urljoin(self._server_url, api_url),
                                   data=data,
                                   cookies=self._cookies,
                                   timeout=timeout,
                                   verify=self._verify_ssl)
            if not req.ok:
                self.login()
            else:
                break
        else:
            _LOGGER.error('Unable to get API response from ZoneMinder')

        try:
            return req.json()
        except ValueError:
            _LOGGER.exception('JSON decode exception caught while '
                              'attempting to decode "%s"', req.text)
            return {}
    except requests.exceptions.ConnectionError:
        _LOGGER.exception('Unable to connect to ZoneMinder')
        return {}
def get_monitors(self) -> List[Monitor]:
    raw_monitors = self._zm_request('get', ZoneMinder.MONITOR_URL)
    if not raw_monitors:
        _LOGGER.warning("Could not fetch monitors from ZoneMinder")
        return []

    monitors = []
    for raw_result in raw_monitors['monitors']:
        _LOGGER.debug("Initializing camera %s",
                      raw_result['Monitor']['Id'])
        monitors.append(Monitor(self, raw_result))
    return monitors

def get_run_states(self) -> List[RunState]:
    raw_states = self.get_state('api/states.json')
    if not raw_states:
        _LOGGER.warning("Could not fetch runstates from ZoneMinder")
        return []

    run_states = []
    for i in raw_states['states']:
        raw_state = i['State']
        _LOGGER.info("Initializing runstate %s", raw_state['Id'])
        run_states.append(RunState(self, raw_state))
    return run_states

def get_active_state(self) -> Optional[str]:
    for state in self.get_run_states():
        if state.active:
            return state.name
    return None
def set_active_state(self, state_name):
    _LOGGER.info('Setting ZoneMinder run state to state %s', state_name)
    return self._zm_request('GET',
                            'api/states/change/{}.json'.format(state_name),
                            timeout=120)

def is_available(self) -> bool:
    status_response = self.get_state('api/host/daemonCheck.json')
    if not status_response:
        return False
    return status_response.get('result') == 1

def _build_server_url(server_host, server_path) -> str:
    server_url = urljoin(server_host, server_path)
    if server_url[-1] == '/':
        return server_url
    return '{}/'.format(server_url)
def get(self, flex_sched_rule_id):
    path = '/'.join(['flexschedulerule', flex_sched_rule_id])
    return self.rachio.get(path)
def upload_all_books(book_id_start, book_id_end, rdf_library=None):
    logger.info("starting a gitberg mass upload: {0} -> {1}".format(
        book_id_start, book_id_end))
    cache = {}
    errors = 0
    for book_id in range(int(book_id_start), int(book_id_end) + 1):
        try:
            if int(book_id) in missing_pgid:
                print(u'missing\t{}'.format(book_id))
                continue
            upload_book(book_id, rdf_library=rdf_library, cache=cache)
        except Exception as e:
            print(u'error\t{}'.format(book_id))
            logger.error(u"Error processing: {}\r{}".format(book_id, e))
            errors += 1
            if errors > 10:
                print('error limit reached!')
                break

def upload_list(book_id_list, rdf_library=None):
    with open(book_id_list, 'r') as f:
        cache = {}
        for book_id in f:
            book_id = book_id.strip()
            try:
                if int(book_id) in missing_pgid:
                    print(u'missing\t{}'.format(book_id))
                    continue
                upload_book(book_id, rdf_library=rdf_library, cache=cache)
            except Exception as e:
                print(u'error\t{}'.format(book_id))
                logger.error(u"Error processing: {}\r{}".format(book_id, e))
def translate(self):
    translations = []
    for lang in settings.LANGUAGES:
        if lang[0] == self._get_default_language():
            continue
        if self.translatable_slug is not None:
            if self.translatable_slug not in self.translatable_fields:
                self.translatable_fields = self.translatable_fields + (
                    self.translatable_slug,)
        for field in self.translatable_fields:
            trans, created = Translation.objects.get_or_create(
                object_id=self.id,
                content_type=ContentType.objects.get_for_model(self),
                field=field,
                lang=lang[0],
            )
            translations.append(trans)
    return translations

def translations_objects(self, lang):
    return Translation.objects.filter(
        object_id=self.id,
        content_type=ContentType.objects.get_for_model(self),
        lang=lang)

def translations(self, lang):
    key = self._get_translations_cache_key(lang)
    trans_dict = cache.get(key, {})
    if self.translatable_slug is not None:
        if self.translatable_slug not in self.translatable_fields:
            self.translatable_fields = self.translatable_fields + (
                self.translatable_slug,)
    if not trans_dict:
        for field in self.translatable_fields:
            trans_dict[field] = self.get_translation(lang, field)
        cache.set(key, trans_dict)
    return trans_dict
def get_translation_obj(self, lang, field, create=False):
    trans = None
    try:
        trans = Translation.objects.get(
            object_id=self.id,
            content_type=ContentType.objects.get_for_model(self),
            lang=lang,
            field=field,
        )
    except Translation.DoesNotExist:
        if create:
            trans = Translation.objects.create(
                object_id=self.id,
                content_type=ContentType.objects.get_for_model(self),
                lang=lang,
                field=field,
            )
    return trans

def get_translation(self, lang, field):
    key = self._get_translation_cache_key(lang, field)
    trans = cache.get(key, '')
    if not trans:
        trans_obj = self.get_translation_obj(lang, field)
        trans = getattr(trans_obj, 'translation', '')
        if not trans:
            # fall back to the untranslated model field
            trans = getattr(self, field, '')
        cache.set(key, trans)
    return trans

def set_translation(self, lang, field, text):
    auto_slug_obj = None
    if lang == self._get_default_language():
        raise CanNotTranslate(
            _('You are not supposed to translate the default language. '
              'Use the model fields for translations in the default '
              'language'))

    trans_obj = self.get_translation_obj(lang, field, create=True)
    trans_obj.translation = text
    trans_obj.save()

    if INSTALLED_AUTOSLUG:
        if self.translatable_slug:
            try:
                auto_slug_obj = self._meta.get_field(
                    self.translatable_slug).populate_from
            except AttributeError:
                pass
        if auto_slug_obj:
            tobj = self.get_translation_obj(lang, self.translatable_slug,
                                            create=True)
            translation = self.get_translation(lang, auto_slug_obj)
            tobj.translation = slugify(translation)
            tobj.save()

    key = self._get_translation_cache_key(lang, field)
    cache.set(key, text)
    cache.delete(self._get_translations_cache_key(lang))
    return trans_obj
def translations_link(self):
    translation_type = ContentType.objects.get_for_model(Translation)
    link = urlresolvers.reverse(
        'admin:%s_%s_changelist' % (translation_type.app_label,
                                    translation_type.model),
    )
    object_type = ContentType.objects.get_for_model(self)
    link += '?content_type__id__exact=%s&object_id=%s' % (object_type.id,
                                                          self.id)
    return '<a href="%s">translate</a>' % link
def comparison_callback(sender, instance, **kwargs):
    if validate_instance(instance) and settings.AUTOMATED_LOGGING['to_database']:
        try:
            old = sender.objects.get(pk=instance.pk)
        except Exception:
            return None

        try:
            mdl = ContentType.objects.get_for_model(instance)
            cur, ins = old.__dict__, instance.__dict__
            old, new = {}, {}
            for k in cur.keys():
                # skip private/internal attributes
                if re.match('(_)(.*?)', k):
                    continue

                changed = False
                if k in ins.keys():
                    if cur[k] != ins[k]:
                        changed = True
                        new[k] = ModelObject()
                        new[k].value = str(ins[k])
                        new[k].save()
                        try:
                            new[k].type = ContentType.objects.get_for_model(
                                ins[k])
                        except Exception:
                            logger = logging.getLogger(__name__)
                            logger.debug('Could not determine the content '
                                         'type of the field')
                        new[k].field = Field.objects.get_or_create(
                            name=k, model=mdl)[0]
                        new[k].save()
                else:
                    changed = True

                if changed:
                    old[k] = ModelObject()
                    old[k].value = str(cur[k])
                    old[k].save()
                    try:
                        old[k].type = ContentType.objects.get_for_model(
                            cur[k])
                    except Exception:
                        logger = logging.getLogger(__name__)
                        logger.debug('Could not determine the content '
                                     'type of the field')
                    old[k].field = Field.objects.get_or_create(
                        name=k, model=mdl)[0]
                    old[k].save()

            if old or new:
                changelog = ModelChangelog()
                changelog.save()
                changelog.modification = ModelModification()
                changelog.modification.save()
                changelog.modification.previously.add(*old.values())
                changelog.modification.currently.add(*new.values())
                changelog.information = ModelObject()
                changelog.information.save()
                changelog.information.value = repr(instance)
                changelog.information.type = ContentType.objects.get_for_model(
                    instance)
                changelog.information.save()
                changelog.save()

                instance.al_chl = changelog
            return instance
        except Exception as e:
            print(e)
            logger = logging.getLogger(__name__)
            logger.warning('automated_logging recorded an exception that '
                           'should not have happened')
def save_callback(sender, instance, created, update_fields, **kwargs):
    if validate_instance(instance):
        status = 'add' if created is True else 'change'
        change = ''
        if status == 'change' and 'al_chl' in instance.__dict__.keys():
            changelog = instance.al_chl.modification
            change = ' with the following changes: {}'.format(changelog)
        processor(status, sender, instance, update_fields, addition=change)
def requires_refcount(cls, func):
    @functools.wraps(func)
    def requires_active_handle(*args, **kwargs):
        if cls.refcount() == 0:
            raise NoHandleException()
        return func(*args, **kwargs)
    return requires_active_handle

def auto(cls, func):
    @functools.wraps(func)
    def auto_claim_handle(*args, **kwargs):
        with cls():
            return func(*args, **kwargs)
    return auto_claim_handle
def get_gpubsub_publisher(config, metrics, changes_channel, **kw):
    builder = gpubsub_publisher.GPubsubPublisherBuilder(
        config, metrics, changes_channel, **kw)
    return builder.build_publisher()

def get_reconciler(config, metrics, rrset_channel, changes_channel, **kw):
    builder = reconciler.GDNSReconcilerBuilder(
        config, metrics, rrset_channel, changes_channel, **kw)
    return builder.build_reconciler()

def get_authority(config, metrics, rrset_channel, **kwargs):
    builder = authority.GCEAuthorityBuilder(
        config, metrics, rrset_channel, **kwargs)
    return builder.build_authority()
async def refresh_token(self):
    url, headers, body = self._setup_token_request()
    request_id = uuid.uuid4()
    logging.debug(_utils.REQ_LOG_FMT.format(
        request_id=request_id, method='POST', url=url, kwargs=None))

    async with self._session.post(url, headers=headers, data=body) as resp:
        log_kw = {
            'request_id': request_id,
            'method': 'POST',
            'url': resp.url,
            'status': resp.status,
            'reason': resp.reason,
        }
        logging.debug(_utils.RESP_LOG_FMT.format(**log_kw))
        try:
            resp.raise_for_status()
        except aiohttp.ClientResponseError as e:
            msg = f'[{request_id}] Issue connecting to {resp.url}: {e}'
            logging.error(msg, exc_info=e)
            raise exceptions.GCPHTTPResponseError(msg, resp.status)

        response = await resp.json()

    try:
        self.token = response['access_token']
    except KeyError:
        msg = f'[{request_id}] No access token in response.'
        logging.error(msg)
        raise exceptions.GCPAuthError(msg)

    self.expiry = _client._parse_expiry(response)
def get(self, dev_id):
    path = '/'.join(['device', dev_id])
    return self.rachio.get(path)

def getEvent(self, dev_id, starttime, endtime):
    path = 'device/%s/event?startTime=%s&endTime=%s' % (
        dev_id, starttime, endtime)
    return self.rachio.get(path)

def getForecast(self, dev_id, units):
    assert units in ['US', 'METRIC'], 'units must be either US or METRIC'
    path = 'device/%s/forecast?units=%s' % (dev_id, units)
    return self.rachio.get(path)

def stopWater(self, dev_id):
    path = 'device/stop_water'
    payload = {'id': dev_id}
    return self.rachio.put(path, payload)

def rainDelay(self, dev_id, duration):
    path = 'device/rain_delay'
    payload = {'id': dev_id, 'duration': duration}
    return self.rachio.put(path, payload)

def on(self, dev_id):
    path = 'device/on'
    payload = {'id': dev_id}
    return self.rachio.put(path, payload)

def off(self, dev_id):
    path = 'device/off'
    payload = {'id': dev_id}
    return self.rachio.put(path, payload)
def create_wallet(self, master_secret=b""):
    master_secret = deserialize.bytes_str(master_secret)
    bip32node = control.create_wallet(self.testnet,
                                      master_secret=master_secret)
    return bip32node.hwif(as_private=True)

def create_key(self, master_secret=b""):
    master_secret = deserialize.bytes_str(master_secret)
    bip32node = control.create_wallet(self.testnet,
                                      master_secret=master_secret)
    return bip32node.wif()

def confirms(self, txid):
    txid = deserialize.txid(txid)
    return self.service.confirms(txid)
def get_time_period(value):
    for time_period in TimePeriod:
        if time_period.period == value:
            return time_period
    raise ValueError('{} is not a valid TimePeriod'.format(value))
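# A minimal usage sketch, assuming TimePeriod is an enum whose members
# carry a `period` string (e.g. a hypothetical MONTH member with
# period == 'month'):
period = get_time_period('month')   # returns TimePeriod.MONTH

get_time_period('fortnight')        # raises ValueError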
def update_monitor(self):
    result = self._client.get_state(self._monitor_url)
    self._raw_result = result['monitor']

def function(self, new_function):
    self._client.change_state(self._monitor_url,
                              {'Monitor[Function]': new_function.value})

def is_recording(self) -> Optional[bool]:
    status_response = self._client.get_state(
        'api/monitors/alarm/id:{}/command:status.json'.format(
            self._monitor_id))
    if not status_response:
        _LOGGER.warning('Could not get status for monitor {}'.format(
            self._monitor_id))
        return None

    status = status_response.get('status')
    # an empty status string means the monitor is not in alarm
    if status == '':
        return False
    return int(status) == STATE_ALARM

def is_available(self) -> bool:
    status_response = self._client.get_state(
        'api/monitors/daemonStatus/id:{}/daemon:zmc.json'.format(
            self._monitor_id))
    if not status_response:
        _LOGGER.warning('Could not get availability for monitor {}'.format(
            self._monitor_id))
        return False

    monitor_status = self._raw_result.get('Monitor_Status', None)
    capture_fps = monitor_status and monitor_status['CaptureFPS']
    return status_response.get('status', False) and capture_fps != "0.00"

def get_events(self, time_period, include_archived=False) -> Optional[int]:
    # the consoleEvents API takes a URL-encoded relative date range;
    # "100 year" effectively means all events
    date_filter = '1%20{}'.format(time_period.period)
    if time_period == TimePeriod.ALL:
        date_filter = '100%20year'

    archived_filter = '/Archived=:0'
    if include_archived:
        archived_filter = ''

    event = self._client.get_state(
        'api/events/consoleEvents/{}{}.json'.format(date_filter,
                                                    archived_filter))
    try:
        events_by_monitor = event['results']
        if isinstance(events_by_monitor, list):
            return 0
        return events_by_monitor.get(str(self._monitor_id), 0)
    except (TypeError, KeyError, AttributeError):
        return None

def _build_image_url(self, monitor, mode) -> str:
    query = urlencode({
        'mode': mode,
        'buffer': monitor['StreamReplayBuffer'],
        'monitor': monitor['Id'],
    })
    url = '{zms_url}?{query}'.format(zms_url=self._client.get_zms_url(),
                                     query=query)
    _LOGGER.debug('Monitor %s %s URL (without auth): %s',
                  monitor['Id'], mode, url)
    return self._client.get_url_with_auth(url)