idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
243,000
def on_excepthandler(self, node):
    """Handle an ast.ExceptHandler node.

    Evaluates the handler's exception type expression and returns a
    (evaluated_type, name, body) triple for the caller to dispatch on.
    """
    exc_type = self.run(node.type)
    return exc_type, node.name, node.body
Exception handler ...
243,001
def on_call ( self , node ) : func = self . run ( node . func ) if not hasattr ( func , '__call__' ) and not isinstance ( func , type ) : msg = "'%s' is not callable!!" % ( func ) self . raise_exception ( node , exc = TypeError , msg = msg ) args = [ self . run ( targ ) for targ in node . args ] starargs = getattr ( no...
Function execution .
243,002
def on_functiondef ( self , node ) : if node . decorator_list : raise Warning ( "decorated procedures not supported!" ) kwargs = [ ] if not valid_symbol_name ( node . name ) or node . name in self . readonly_symbols : errmsg = "invalid function name (reserved word?) %s" % node . name self . raise_exception ( node , exc...
Define procedures .
243,003
def safe_pow(base, exp):
    """Safe version of pow.

    Refuses exponents above MAX_EXPONENT to avoid runaway computation.
    Raises RuntimeError when the limit is exceeded.
    """
    if exp > MAX_EXPONENT:
        raise RuntimeError(
            "Invalid exponent, max exponent is {}".format(MAX_EXPONENT))
    return base ** exp
safe version of pow
243,004
def safe_mult(a, b):
    """Safe version of multiply.

    Guards against producing huge strings via string repetition: raises
    RuntimeError when the resulting string would exceed MAX_STR_LEN.

    Fix: the original only checked `str * int`; `int * str` produces the
    same result and bypassed the guard, so both operand orders are checked.
    """
    if isinstance(a, str) and isinstance(b, int) and len(a) * b > MAX_STR_LEN:
        raise RuntimeError(
            "String length exceeded, max string length is {}".format(MAX_STR_LEN))
    # Symmetric case: int * str repeats the string just the same.
    if isinstance(b, str) and isinstance(a, int) and len(b) * a > MAX_STR_LEN:
        raise RuntimeError(
            "String length exceeded, max string length is {}".format(MAX_STR_LEN))
    return a * b
safe version of multiply
243,005
def safe_add(a, b):
    """Safe version of add.

    Refuses string concatenation whose result would exceed MAX_STR_LEN;
    raises RuntimeError in that case.
    """
    both_strings = isinstance(a, str) and isinstance(b, str)
    if both_strings and len(a) + len(b) > MAX_STR_LEN:
        raise RuntimeError(
            "String length exceeded, max string length is {}".format(MAX_STR_LEN))
    return a + b
safe version of add
243,006
def safe_lshift(a, b):
    """Safe version of left shift.

    Refuses shift amounts above MAX_SHIFT (raises RuntimeError) to avoid
    producing enormous integers.
    """
    if b > MAX_SHIFT:
        raise RuntimeError(
            "Invalid left shift, max left shift is {}".format(MAX_SHIFT))
    return a << b
safe version of lshift
243,007
def valid_symbol_name ( name ) : if name in RESERVED_WORDS : return False gen = generate_tokens ( io . BytesIO ( name . encode ( 'utf-8' ) ) . readline ) typ , _ , start , end , _ = next ( gen ) if typ == tk_ENCODING : typ , _ , start , end , _ = next ( gen ) return typ == tk_NAME and start == ( 1 , 0 ) and end == ( 1 ...
Determine whether the input symbol name is a valid name .
243,008
def make_symbol_table ( use_numpy = True , ** kws ) : symtable = { } for sym in FROM_PY : if sym in builtins : symtable [ sym ] = builtins [ sym ] for sym in FROM_MATH : if hasattr ( math , sym ) : symtable [ sym ] = getattr ( math , sym ) if HAS_NUMPY and use_numpy : for sym in FROM_NUMPY : if hasattr ( numpy , sym ) ...
Create a default symboltable taking dict of user - defined symbols .
243,009
def get_error ( self ) : col_offset = - 1 if self . node is not None : try : col_offset = self . node . col_offset except AttributeError : pass try : exc_name = self . exc . __name__ except AttributeError : exc_name = str ( self . exc ) if exc_name in ( None , 'None' ) : exc_name = 'UnknownError' out = [ " %s" % self...
Retrieve error data .
243,010
def add_config_path(self, path):
    """Add a path for Vyper to search for the config file in.

    Can be called multiple times to define multiple search paths.
    Duplicate paths (after normalization) are silently ignored.
    """
    abspath = util.abs_pathify(path)
    if abspath in self._config_paths:
        return  # already registered
    log.info("Adding {0} to paths to search".format(abspath))
    self._config_paths.append(abspath)
Add a path for Vyper to search for the config file in . Can be called multiple times to define multiple search paths .
243,011
def sub(self, key):
    """Return a new Vyper instance representing a sub-tree of this instance.

    Returns None when the value under `key` is not a dict.
    """
    subv = Vyper()
    data = self.get(key)
    if not isinstance(data, dict):
        return None
    subv._config = data
    return subv
Returns new Vyper instance representing a sub tree of this instance .
243,012
def unmarshall_key(self, key, cls):
    """Take a single key and unmarshall it into `cls` as an attribute.

    Returns None (the result of setattr), matching the original contract.
    """
    value = self.get(key)
    return setattr(cls, key, value)
Takes a single key and unmarshalls it into a class .
243,013
def unmarshall(self, cls):
    """Unmarshall the config into `cls`, setting one attribute per setting.

    Returns `cls` for convenience.
    """
    for name, value in self.all_settings().items():
        setattr(cls, name, value)
    return cls
Unmarshalls the config into a class . Make sure that the tags on the attributes of the class are properly set .
243,014
def bind_env ( self , * input_ ) : if len ( input_ ) == 0 : return "bind_env missing key to bind to" key = input_ [ 0 ] . lower ( ) if len ( input_ ) == 1 : env_key = self . _merge_with_env_prefix ( key ) else : env_key = input_ [ 1 ] self . _env [ key ] = env_key if self . _key_delimiter in key : parts = input_ [ 0 ] ...
Binds a Vyper key to an ENV variable . ENV variables are case sensitive . If only a key is provided , it will use the env key matching the key , uppercased . env_prefix will be used , when set , when the env name is not provided .
243,015
def is_set ( self , key ) : path = key . split ( self . _key_delimiter ) lower_case_key = key . lower ( ) val = self . _find ( lower_case_key ) if val is None : source = self . _find ( path [ 0 ] . lower ( ) ) if source is not None and isinstance ( source , dict ) : val = self . _search_dict ( source , path [ 1 : : ] )...
Check to see if the key has been set in any of the data locations .
243,016
def register_alias ( self , alias , key ) : alias = alias . lower ( ) key = key . lower ( ) if alias != key and alias != self . _real_key ( key ) : exists = self . _aliases . get ( alias ) if exists is None : val = self . _config . get ( alias ) if val : self . _config . pop ( alias ) self . _config [ key ] = val val =...
Aliases provide another accessor for the same key . This enables one to change a name without breaking the application .
243,017
def set_default(self, key, value):
    """Set the default value for this key.

    Defaults are only used when no value is provided by the user via
    arg, config, or env.
    """
    real_key = self._real_key(key.lower())
    self._defaults[real_key] = value
Set the default value for this key . Default only used when no value is provided by the user via arg config or env .
243,018
def _unmarshall_reader(self, file_, d):
    """Unmarshall a file object into the dict `d`.

    Delegates to the util helper using this instance's configured type.
    """
    config_type = self._get_config_type()
    return util.unmarshall_config_reader(file_, d, config_type)
Unmarshall a file into a dict .
243,019
def _get_key_value_config(self):
    """Retrieve the first found remote configuration.

    Raises errors.RemoteConfigError when no remote providers are configured.
    """
    for rp in self._remote_providers:
        val = self._get_remote_config(rp)
        self._kvstore = val
        # NOTE(review): returns unconditionally after the first provider, so
        # only the first remote provider is ever consulted -- confirm intended.
        return None
    # Only reachable when self._remote_providers is empty.
    raise errors.RemoteConfigError("No Files Found")
Retrieves the first found remote configuration .
243,020
def all_keys ( self , uppercase_keys = False ) : d = { } for k in self . _override . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } for k in self . _args . keys ( ) : d [ k . upper ( ) if uppercase_keys else k . lower ( ) ] = { } for k in self . _env . keys ( ) : d [ k . upper ( ) if uppercas...
Return all keys regardless where they are set .
243,021
def all_settings(self, uppercase_keys=False):
    """Return all settings as a dict, resolving each key via get()."""
    return {key: self.get(key) for key in self.all_keys(uppercase_keys)}
Return all settings as a dict .
243,022
def debug ( self ) : print ( "Aliases:" ) pprint . pprint ( self . _aliases ) print ( "Override:" ) pprint . pprint ( self . _override ) print ( "Args:" ) pprint . pprint ( self . _args ) print ( "Env:" ) pprint . pprint ( self . _env ) print ( "Config:" ) pprint . pprint ( self . _config ) print ( "Key/Value Store:" )...
Prints all configuration registries for debugging purposes .
243,023
def server(**kwargs):
    """Start the Clearly Server (blocking), dropping falsy CLI options."""
    options = {name: value for name, value in kwargs.items() if value}
    start_server(**options, blocking=True)
Starts the Clearly Server .
243,024
def start_server ( broker , backend = None , port = 12223 , max_tasks = 10000 , max_workers = 100 , blocking = False , debug = False ) : _setup_logging ( debug ) queue_listener_dispatcher = Queue ( ) listener = EventListener ( broker , queue_listener_dispatcher , backend = backend , max_tasks_in_memory = max_tasks , ma...
Starts a Clearly Server programmatically .
243,025
def _event_to_pb ( event ) : if isinstance ( event , ( TaskData , Task ) ) : key , klass = 'task' , clearly_pb2 . TaskMessage elif isinstance ( event , ( WorkerData , Worker ) ) : key , klass = 'worker' , clearly_pb2 . WorkerMessage else : raise ValueError ( 'unknown event' ) keys = klass . DESCRIPTOR . fields_by_name ...
Supports converting internal TaskData and WorkerData as well as celery Task and Worker to proto buffers messages .
243,026
def filter_tasks ( self , request , context ) : _log_request ( request , context ) tasks_pattern , tasks_negate = PATTERN_PARAMS_OP ( request . tasks_filter ) state_pattern = request . state_pattern limit , reverse = request . limit , request . reverse pregex = re . compile ( tasks_pattern ) sregex = re . compile ( sta...
Filter tasks by matching patterns to name routing key and state .
243,027
def filter_workers ( self , request , context ) : _log_request ( request , context ) workers_pattern , workers_negate = PATTERN_PARAMS_OP ( request . workers_filter ) hregex = re . compile ( workers_pattern ) def hcondition ( worker ) : return accepts ( hregex , workers_negate , worker . hostname ) found_workers = ( wo...
Filter workers by matching a pattern to hostname .
243,028
def seen_tasks(self, request, context):
    """Return all seen task types as a SeenTasksMessage."""
    _log_request(request, context)
    message = clearly_pb2.SeenTasksMessage()
    message.task_types.extend(self.listener.memory.task_types())
    return message
Returns all seen task types .
243,029
def reset_tasks(self, request, context):
    """Reset all captured tasks and return an empty response."""
    _log_request(request, context)
    self.listener.memory.clear_tasks()
    return clearly_pb2.Empty()
Resets all captured tasks .
243,030
def get_stats(self, request, context):
    """Return the server statistics as a StatsMessage."""
    _log_request(request, context)
    memory = self.listener.memory
    return clearly_pb2.StatsMessage(
        task_count=memory.task_count,
        event_count=memory.event_count,
        len_tasks=len(memory.tasks),
        len_workers=len(memory.workers),
    )
Returns the server statistics .
243,031
def accepts(regex, negate, *values):
    """Return whether any of `values` matches the compiled regex.

    Falsy values are skipped. When `negate` is truthy the result is inverted
    (via `!=`, preserving the original's comparison semantics).
    """
    matched = False
    for value in values:
        if value and regex.search(value):
            matched = True
            break
    return matched != negate
Given a compiled regex and a negate find if any of the values match .
243,032
def copy_update(pb_message, **kwds):
    """Return a copy of the protobuf message with some fields updated."""
    clone = pb_message.__class__()
    clone.CopyFrom(pb_message)
    for field, value in kwds.items():
        setattr(clone, field, value)
    return clone
Returns a copy of the PB object with some fields updated .
243,033
def __start(self):
    """Start the real-time dispatcher engine that captures tasks.

    Must only be called once (asserts no dispatcher thread exists yet).
    The thread is a daemon so it will not block interpreter shutdown.
    """
    assert not self.dispatcher_thread
    self.dispatcher_thread = threading.Thread(
        target=self.__run_dispatcher, name='clearly-dispatcher')
    self.dispatcher_thread.daemon = True
    self.running = True
    self.dispatcher_thread.start()
Starts the real - time engine that captures tasks .
243,034
def streaming_client(self, tasks_regex, tasks_negate, workers_regex, workers_negate):
    """Connect a client to the streaming capture, filtering its events.

    Generator: yields the client's queue while registered as an observer,
    and removes the observer when the generator is resumed/closed.
    """
    client = CapturingClient(Queue(),
                             re.compile(tasks_regex), tasks_negate,
                             re.compile(workers_regex), workers_negate)
    self.observers.append(client)
    yield client.queue
    self.observers.remove(client)
Connects a client to the streaming capture filtering the events that are sent to it .
243,035
def __start(self):
    """Start the real-time engine that captures events.

    Must only be called once. Blocks until the listener thread signals
    readiness, then clears the event for the next start.
    """
    assert not self._listener_thread
    self._listener_thread = threading.Thread(
        target=self.__run_listener, name='clearly-listener')
    self._listener_thread.daemon = True
    self._listener_thread.start()
    # Wait for the listener to come up, then reset the signal.
    self._wait_event.wait()
    self._wait_event.clear()
Starts the real - time engine that captures events .
243,036
def capture ( self , pattern = None , negate = False , workers = None , negate_workers = False , params = None , success = False , error = True , stats = False ) : request = clearly_pb2 . CaptureRequest ( tasks_capture = clearly_pb2 . PatternFilter ( pattern = pattern or '.' , negate = negate ) , workers_capture = clea...
Starts capturing selected events in real - time . You can filter exactly what you want to see as the Clearly Server handles all tasks and workers updates being sent to celery . Several clients can see different sets of events at the same time .
243,037
def tasks ( self , pattern = None , negate = False , state = None , limit = None , reverse = True , params = None , success = False , error = True ) : request = clearly_pb2 . FilterTasksRequest ( tasks_filter = clearly_pb2 . PatternFilter ( pattern = pattern or '.' , negate = negate ) , state_pattern = state or '.' , l...
Filters stored tasks and displays their current statuses .
243,038
def seen_tasks(self):
    """Print the list of seen task types, one per line."""
    task_types = self._stub.seen_tasks(clearly_pb2.Empty()).task_types
    print('\n'.join(task_types))
Shows a list of seen task types .
243,039
def detail_action(**kwargs):
    """Mark a ResourceBinding method for routing as a detail action.

    Returns a decorator that tags the function in place and returns it.
    """
    def decorator(func):
        for attr, value in (('action', True), ('detail', True), ('kwargs', kwargs)):
            setattr(func, attr, value)
        return func
    return decorator
Used to mark a method on a ResourceBinding that should be routed for detail actions .
243,040
def list_action(**kwargs):
    """Mark a ResourceBinding method for routing as a list action.

    Returns a decorator that tags the function in place and returns it.
    """
    def decorator(func):
        for attr, value in (('action', True), ('detail', False), ('kwargs', kwargs)):
            setattr(func, attr, value)
        return func
    return decorator
Used to mark a method on a ResourceBinding that should be routed for list actions .
243,041
def broadcast_to(array, shape, subok=False):
    """Broadcast an array to a new shape.

    Thin public wrapper over _broadcast_to; the result is read-only.
    """
    return _broadcast_to(array, shape, subok=subok, readonly=True)
Broadcast an array to a new shape .
243,042
def _K(m):
    """Build the matrix K_m from Wiktorsson 2001.

    Shape is (m*(m-1)//2, m**2); each block row places an identity that
    selects strictly-lower-triangular entries of a vectorized m x m matrix.
    """
    n_rows = m * (m - 1) // 2
    K = np.zeros((n_rows, m ** 2), dtype=np.int64)
    row = 0
    for j in range(1, m):
        col = (j - 1) * m + j
        size = m - j
        K[row:row + size, col:col + size] = np.eye(size)
        row += size
    return K
matrix K_m from Wiktorsson2001
243,043
def wait_for_compactions(self, timeout=600):
    """Wait for all compactions to finish on all running nodes.

    Returns self so calls can be chained.
    """
    for node in list(self.nodes.values()):
        if node.is_running():
            node.wait_for_compactions(timeout)
    return self
Wait for all compactions to finish on all nodes .
243,044
def watch_log_for_alive(self, nodes, from_mark=None, timeout=720, filename='system.log'):
    """Watch this node's log until the provided other nodes are marked UP.

    Works similarly to watch_log_for_death; delegates to the base class.
    """
    super(DseNode, self).watch_log_for_alive(nodes, from_mark=from_mark, timeout=timeout, filename=filename)
Watch the log of this node until it detects that the provided other nodes are marked UP . This method works similarly to watch_log_for_death .
243,045
def load ( path , name , cluster ) : node_path = os . path . join ( path , name ) filename = os . path . join ( node_path , 'node.conf' ) with open ( filename , 'r' ) as f : data = yaml . safe_load ( f ) try : itf = data [ 'interfaces' ] initial_token = None if 'initial_token' in data : initial_token = data [ 'initial_...
Load a node from the path on disk to the config files , the node name , and the cluster the node is part of .
243,046
def get_install_dir(self):
    """Return the cassandra source directory used by this node.

    Falls back to the cluster-wide install directory when this node has
    none of its own; otherwise validates and returns the node's directory.
    """
    if self.__install_dir is None:
        return self.cluster.get_install_dir()
    common.validate_install_dir(self.__install_dir)
    return self.__install_dir
Returns the path to the cassandra source directory used by this node .
243,047
def set_install_dir ( self , install_dir = None , version = None , verbose = False ) : if version is None : self . __install_dir = install_dir if install_dir is not None : common . validate_install_dir ( install_dir ) else : self . __install_dir = self . node_setup ( version , verbose = verbose ) self . _cassandra_vers...
Sets the path to the cassandra source directory for use by this node .
243,048
def show ( self , only_status = False , show_cluster = True ) : self . __update_status ( ) indent = '' . join ( [ " " for i in xrange ( 0 , len ( self . name ) + 2 ) ] ) print_ ( "{}: {}" . format ( self . name , self . __get_status_string ( ) ) ) if not only_status : if show_cluster : print_ ( "{}{}={}" . format ( ind...
Print infos on this node configuration .
243,049
def is_running(self):
    """Return True if the node is running (UP or DECOMMISSIONED)."""
    self.__update_status()
    return self.status in (Status.UP, Status.DECOMMISSIONED)
Return true if the node is running
243,050
def grep_log ( self , expr , filename = 'system.log' , from_mark = None ) : matchings = [ ] pattern = re . compile ( expr ) with open ( os . path . join ( self . get_path ( ) , 'logs' , filename ) ) as f : if from_mark : f . seek ( from_mark ) for line in f : m = pattern . search ( line ) if m : matchings . append ( ( ...
Returns a list of lines matching the regular expression in parameter in the Cassandra log of this node
243,051
def wait_for_binary_interface ( self , ** kwargs ) : if self . cluster . version ( ) >= '1.2' : self . watch_log_for ( "Starting listening for CQL clients" , ** kwargs ) binary_itf = self . network_interfaces [ 'binary' ] if not common . check_socket_listening ( binary_itf , timeout = 30 ) : warnings . warn ( "Binary i...
Waits for the Binary CQL interface to be listening . If > 1 . 2 will check log for Starting listening for CQL clients before checking for the interface to be listening .
243,052
def wait_for_thrift_interface ( self , ** kwargs ) : if self . cluster . version ( ) >= '4' : return self . watch_log_for ( "Listening for thrift clients..." , ** kwargs ) thrift_itf = self . network_interfaces [ 'thrift' ] if not common . check_socket_listening ( thrift_itf , timeout = 30 ) : warnings . warn ( "Thrift...
Waits for the Thrift interface to be listening .
243,053
def wait_for_compactions ( self , timeout = 120 ) : pattern = re . compile ( "pending tasks: 0" ) start = time . time ( ) while time . time ( ) - start < timeout : output , err , rc = self . nodetool ( "compactionstats" ) if pattern . search ( output ) : return time . sleep ( 1 ) raise TimeoutError ( "{} [{}] Compactio...
Wait for all compactions to finish on this node .
243,054
def update_startup_byteman_script(self, byteman_startup_script):
    """Update the byteman startup script (rule injected before the node starts).

    Raises common.LoadError when byteman is not installed on this node.
    """
    # Port '0' means byteman was never configured for this node.
    if self.byteman_port == '0':
        raise common.LoadError('Byteman is not installed')
    self.byteman_startup_script = byteman_startup_script
    # Re-import config so the new script takes effect on next start.
    self.import_config_files()
Update the byteman startup script i . e . rule injected before the node starts .
243,055
def _find_cmd ( self , cmd ) : cdir = self . get_install_cassandra_root ( ) if self . get_base_cassandra_version ( ) >= 2.1 : fcmd = common . join_bin ( cdir , os . path . join ( 'tools' , 'bin' ) , cmd ) else : fcmd = common . join_bin ( cdir , 'bin' , cmd ) try : if os . path . exists ( fcmd ) : os . chmod ( fcmd , s...
Locates command under cassandra root and fixes permissions if needed
243,056
def data_size(self, live_data=None):
    """Return the size of this node's data in KB, via `nodetool info`.

    The `live_data` argument is deprecated and ignored.
    """
    if live_data is not None:
        warnings.warn("The 'live_data' keyword argument is deprecated.",
                      DeprecationWarning)
    stdout = self.nodetool('info')[0]
    return _get_load_from_info_output(stdout)
Uses nodetool info to get the size of a node s data in KB .
243,057
def get_sstable_data_files(self, ks, table):
    """Read sstable data files via sstableutil (final files only).

    Temporary files are excluded; result is a sorted list of -Data.db paths.
    """
    proc = self.get_sstable_data_files_process(ks=ks, table=table)
    out, _, _ = handle_external_tool_process(
        proc, ["sstableutil", '--type', 'final', ks, table])
    return sorted(name for name in out.splitlines() if name.endswith('-Data.db'))
Read sstable data files by using sstableutil so we ignore temporary files
243,058
def is_modern_windows_install(version):
    """Return True on Windows with Cassandra >= 2.1.

    The 2.1 release line introduced beta Windows support; many features
    are gated on this compatibility check.

    Fix: replaced the `if cond: return True else: return False` anti-idiom
    with a direct boolean return (identical behavior).
    """
    version = LooseVersion(str(version))
    return is_win() and version >= LooseVersion('2.1')
The 2 . 1 release line was when Cassandra received beta windows support . Many features are gated based on that added compatibility .
243,059
def get_jdk_version():
    """Retrieve the Java version reported by `java -version`.

    Exits the process with status 1 when java is not on the PATH.
    """
    try:
        raw = subprocess.check_output(['java', '-version'],
                                      stderr=subprocess.STDOUT)
    except OSError:
        print_("ERROR: Could not find java. Is it in your path?")
        exit(1)
    return _get_jdk_version(raw)
Retrieve the Java version as reported in the quoted string returned by invoking java - version .
243,060
def wait_for_any_log ( nodes , pattern , timeout , filename = 'system.log' , marks = None ) : if marks is None : marks = { } for _ in range ( timeout ) : for node in nodes : found = node . grep_log ( pattern , filename = filename , from_mark = marks . get ( node , None ) ) if found : return node time . sleep ( 1 ) rais...
Look for a pattern in the system . log of any in a given list of nodes .
243,061
def download_version ( version , url = None , verbose = False , binary = False ) : assert_jdk_valid_for_cassandra_version ( version ) archive_url = ARCHIVE if CCM_CONFIG . has_option ( 'repositories' , 'cassandra' ) : archive_url = CCM_CONFIG . get ( 'repositories' , 'cassandra' ) if binary : archive_url = "%s/%s/apach...
Download extract and build Cassandra tarball .
243,062
def get_tagged_version_numbers ( series = 'stable' ) : releases = [ ] if series == 'testing' : tag_regex = re . compile ( '^refs/tags/cassandra-([0-9]+\.[0-9]+\.[0-9]+-.*$)' ) else : tag_regex = re . compile ( '^refs/tags/cassandra-([0-9]+\.[0-9]+\.[0-9]+$)' ) tag_url = urllib . request . urlopen ( GITHUB_TAGS ) for re...
Retrieve git tags and find version numbers for a release series
243,063
def __connect ( host , port , username , password , private_key ) : ssh = paramiko . SSHClient ( ) ssh . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) ) if private_key is not None and password is not None : private_key = paramiko . RSAKey . from_private_key_file ( private_key , password ) elif private_key ...
Establish remote connection
243,064
def execute_ccm_command(self, ccm_args, is_displayed=True):
    """Execute a CCM command on the remote server.

    NOTE: `is_displayed` is currently unused but kept for interface
    compatibility with callers.
    """
    command = ["ccm"] + ccm_args
    return self.execute(command, profile=self.profile)
Execute a CCM command on the remote server
243,065
def execute_python_script ( self , script ) : file_handle , filename = tempfile . mkstemp ( ) temp_file = os . fdopen ( file_handle , "wt" ) temp_file . write ( script ) temp_file . close ( ) self . put ( filename , "python_execute.py" ) command = [ "python" , "python_execute.py" ] output = self . execute ( command , F...
Execute a python script on the remote server
243,066
def __put_dir ( self , ftp , local_path , remote_path = None ) : if remote_path is None : remote_path = os . path . basename ( local_path ) remote_path += self . separator for current_path , directories , files in os . walk ( local_path ) : try : ftp . listdir ( remote_path ) except IOError : ftp . mkdir ( remote_path ...
Helper function to perform copy operation to remote server
243,067
def remove(self, remote_path):
    """Delete a file or directory (recursively) on the remote server."""
    sftp = self.ssh.open_sftp()
    mode = sftp.stat(remote_path).st_mode
    if stat.S_ISDIR(mode):
        self.__remove_dir(sftp, remote_path)
    else:
        sftp.remove(remote_path)
    sftp.close()
Delete a file or directory recursively on the remote server
243,068
def __remove_dir(self, ftp, remote_path):
    """Recursively delete `remote_path` and its contents over SFTP."""
    for entry in ftp.listdir(remote_path):
        child = remote_path + self.separator + entry
        try:
            ftp.remove(child)
        except IOError:
            # Not a regular file -- recurse into the directory.
            self.__remove_dir(ftp, child)
    ftp.rmdir(remote_path)
Helper function to perform delete operation on the remote server
243,069
def usage(self):
    """Get the usage text for the remote execution options.

    Extracts the section after 'optional arguments:' from argparse's help
    output, dropping blank lines.
    """
    help_text = self.parser.format_help()
    options = help_text.split("optional arguments:")[1]
    lines = [line for line in options.splitlines() if line]
    return "Remote Options:" + os.linesep + os.linesep.join(lines)
Get the usage for the remote execution options
243,070
def jwt_required(realm=None):
    """View decorator requiring a valid JWT token in the request.

    :param realm: optional realm; defaults to JWT_DEFAULT_REALM from app config.
    """
    def decorate(fn):
        @wraps(fn)
        def guarded(*args, **kwargs):
            _jwt_required(realm or current_app.config['JWT_DEFAULT_REALM'])
            return fn(*args, **kwargs)
        return guarded
    return decorate
View decorator that requires a valid JWT token to be present in the request
243,071
def auth_request_handler ( self , callback ) : warnings . warn ( "This handler is deprecated. The recommended approach to have control over " "the authentication resource is to disable the built-in resource by " "setting JWT_AUTH_URL_RULE=None and registering your own authentication " "resource directly on your applic...
Specifies the authentication response handler function .
243,072
def _svg_path ( self , pathcodes , data ) : def gen_path_elements ( pathcodes , data ) : counts = { 'M' : 1 , 'L' : 1 , 'C' : 3 , 'Z' : 0 } it = iter ( data ) for code in pathcodes : yield code for _ in range ( counts [ code ] ) : p = next ( it ) yield str ( p [ 0 ] ) yield str ( p [ 1 ] ) return ' ' . join ( gen_path_...
Return the SVG path s d element .
243,073
def fig_to_html ( fig = None , template = 'base.html' , tiles = None , crs = None , epsg = None , embed_links = False , float_precision = 6 ) : if tiles is None : tiles = maptiles . osm elif isinstance ( tiles , six . string_types ) : if tiles not in maptiles . tiles : raise ValueError ( 'Unknown tile source "{}"' . fo...
Convert a Matplotlib Figure to a Leaflet map
243,074
def fig_to_geojson(fig=None, **kwargs):
    """Return a figure's GeoJSON representation as a dictionary.

    Uses the current figure when `fig` is None.
    """
    if fig is None:
        fig = plt.gcf()
    renderer = LeafletRenderer(**kwargs)
    Exporter(renderer).run(fig)
    return renderer.geojson()
Returns a figure s GeoJSON representation as a dictionary
243,075
def display ( fig = None , closefig = True , ** kwargs ) : from IPython . display import HTML if fig is None : fig = plt . gcf ( ) if closefig : plt . close ( fig ) html = fig_to_html ( fig , ** kwargs ) iframe_html = '<iframe src="data:text/html;base64,{html}" width="{width}" height="{height}"></iframe>' . format ( ht...
Convert a Matplotlib Figure to a Leaflet map . Embed in IPython notebook .
243,076
def show(fig=None, path='_map.html', **kwargs):
    """Convert a Matplotlib figure to a Leaflet map and open it in a browser."""
    import webbrowser
    target = os.path.abspath(path)
    with open(target, 'w') as fileobj:
        save_html(fig, fileobj=fileobj, **kwargs)
    webbrowser.open('file://' + target)
Convert a Matplotlib Figure to a Leaflet map . Open in a browser
243,077
def create_incident ( ** kwargs ) : incidents = cachet . Incidents ( endpoint = ENDPOINT , api_token = API_TOKEN ) if 'component_id' in kwargs : return incidents . post ( name = kwargs [ 'name' ] , message = kwargs [ 'message' ] , status = kwargs [ 'status' ] , component_id = kwargs [ 'component_id' ] , component_statu...
Creates an incident
243,078
def incident_exists ( name , message , status ) : incidents = cachet . Incidents ( endpoint = ENDPOINT ) all_incidents = json . loads ( incidents . get ( ) ) for incident in all_incidents [ 'data' ] : if name == incident [ 'name' ] and status == incident [ 'status' ] and message . strip ( ) == incident [ 'message' ] . ...
Check if an incident with these attributes already exists
243,079
def get_component(id):
    """Get a Cachet component by id.

    NOTE: parameter name `id` shadows the builtin but is part of the
    public interface and kept for compatibility.
    """
    raw = cachet.Components(endpoint=ENDPOINT).get(id=id)
    return json.loads(raw)['data']
Gets a Cachet component by id
243,080
def api_token_required(f, *args, **kwargs):
    """Decorator helper ensuring methods aren't called without an api_token.

    Raises AttributeError when the bound object (args[0]) has no api_token
    attribute or its api_token is None; otherwise calls through to `f`.

    Fix: the original wrapped the check in a try/except that re-raised the
    exact same AttributeError message -- `getattr` with a default collapses
    both cases into one check with identical observable behavior.
    """
    if getattr(args[0], 'api_token', None) is None:
        raise AttributeError('Parameter api_token is required.')
    return f(*args, **kwargs)
Decorator helper function to ensure some methods aren t needlessly called without an api_token configured .
243,081
def is_true(self, item=None):
    """Evaluate the filter; pass the object here when filtering on values."""
    values = [item] if item else []
    self._get_item_and_att_names(*values)
    return self._passes_all
If you are filtering on object values you need to pass that object here .
243,082
def new_from_url(cls, url, verify=True):
    """Construct a new WebPage for `url`, fetching the HTML with requests.

    A fixed 2.5 second timeout is applied to the request.
    """
    response = requests.get(url, verify=verify, timeout=2.5)
    return cls.new_from_response(response)
Constructs a new WebPage object for the URL using the requests module to fetch the HTML .
243,083
def new_from_response(cls, response):
    """Construct a new WebPage from a requests Response object."""
    return cls(response.url, html=response.text, headers=response.headers)
Constructs a new WebPage object for the response using the BeautifulSoup module to parse the HTML .
243,084
def _prepare_app ( self , app ) : for key in [ 'url' , 'html' , 'script' , 'implies' ] : try : value = app [ key ] except KeyError : app [ key ] = [ ] else : if not isinstance ( value , list ) : app [ key ] = [ value ] for key in [ 'headers' , 'meta' ] : try : value = app [ key ] except KeyError : app [ key ] = { } obj...
Normalize app data preparing it for the detection phase .
243,085
def _has_app ( self , app , webpage ) : for regex in app [ 'url' ] : if regex . search ( webpage . url ) : return True for name , regex in app [ 'headers' ] . items ( ) : if name in webpage . headers : content = webpage . headers [ name ] if regex . search ( content ) : return True for regex in app [ 'script' ] : for s...
Determine whether the web page matches the app signature .
243,086
def _get_implied_apps ( self , detected_apps ) : def __get_implied_apps ( apps ) : _implied_apps = set ( ) for app in apps : try : _implied_apps . update ( set ( self . apps [ app ] [ 'implies' ] ) ) except KeyError : pass return _implied_apps implied_apps = __get_implied_apps ( detected_apps ) all_implied_apps = set (...
Get the set of apps implied by detected_apps .
243,087
def get_categories(self, app_name):
    """Return the list of category names for an app name.

    Unknown apps yield an empty list; unknown category ids yield ''.
    """
    cat_ids = self.apps.get(app_name, {}).get("cats", [])
    return [self.categories.get("%s" % cid, "") for cid in cat_ids]
Returns a list of the categories for an app name .
243,088
def analyze(self, webpage):
    """Return the set of applications detected on the web page,
    including apps implied by the detected ones."""
    detected = {name for name, app in self.apps.items()
                if self._has_app(app, webpage)}
    detected |= self._get_implied_apps(detected)
    return detected
Return a list of applications that can be detected on the web page .
243,089
def analyze_with_categories(self, webpage):
    """Return detected applications mapped to their category names."""
    return {
        app_name: {"categories": self.get_categories(app_name)}
        for app_name in self.analyze(webpage)
    }
Return a list of applications and categories that can be detected on the web page .
243,090
def clean(self):
    """Remove the directory we operated on (only if it was initialized)."""
    if self._initialized:
        logger.info("brace yourselves, removing %r", self.path)
        shutil.rmtree(self.path)
remove the directory we operated on
243,091
def initialize ( self ) : if not self . _initialized : logger . info ( "initializing %r" , self ) if not os . path . exists ( self . path ) : if self . mode is not None : os . makedirs ( self . path , mode = self . mode ) else : os . makedirs ( self . path ) self . _set_mode ( ) self . _add_facl_rules ( ) self . _set_s...
create the directory if needed and configure it
243,092
def _set_selinux_context ( self ) : chcon_command_exists ( ) if self . selinux_context : logger . debug ( "setting SELinux context of %s to %s" , self . path , self . selinux_context ) run_cmd ( [ "chcon" , self . selinux_context , self . path ] ) if any ( [ self . selinux_user , self . selinux_role , self . selinux_ty...
Set SELinux context or fields using chcon program . Raises CommandDoesNotExistException if the command is not present on the system .
243,093
def _set_mode(self):
    """Set permission bits on self.path via os.chmod, if a mode was given."""
    if self.mode is None:
        return
    logger.debug("changing permission bits of %s to %s",
                 self.path, oct(self.mode))
    os.chmod(self.path, self.mode)
set permission bits if needed using python API os . chmod
243,094
def _add_facl_rules(self):
    """Apply ACL rules on the directory using the setfacl program.

    Raises CommandDoesNotExistException when setfacl is not on the system.
    """
    setfacl_command_exists()
    if not self.facl_rules:
        return
    logger.debug("adding ACLs %s to %s", self.facl_rules, self.path)
    rules = ",".join(self.facl_rules)
    run_cmd(["setfacl", "-m", rules, self.path])
Apply ACL rules on the directory using setfacl program . Raises CommandDoesNotExistException if the command is not present on the system .
243,095
def get_volume_options(volumes):
    """Generate `-v` CLI options for the given volume spec(s).

    Accepts a single spec or a list of specs.
    """
    if not isinstance(volumes, list):
        volumes = [volumes]
    options = []
    for spec in volumes:
        options.extend(["-v", str(Volume.create_from_tuple(spec))])
    return options
Generates volume options to run methods .
243,096
def layers(self, rev=True):
    """Return a PodmanImage for every layer in this image.

    :param rev: when False, the layer order is reversed.
    """
    result = [
        PodmanImage(None, identifier=layer_id,
                    pull_policy=PodmanImagePullPolicy.NEVER)
        for layer_id in self.get_layer_ids()
    ]
    if not rev:
        result.reverse()
    return result
Get list of PodmanImage for every layer in image
243,097
def get_metadata(self):
    """Return metadata about this image, creating and caching it on first use."""
    if self._metadata is None:
        self._metadata = ImageMetadata()
        inspect_to_metadata(self._metadata, self.inspect(refresh=True))
    return self._metadata
Provide metadata about this image .
243,098
def is_running(self):
    """Return True if the container is currently running."""
    try:
        state = self.inspect(refresh=True)
        return graceful_get(state, "State", "Running")
    except subprocess.CalledProcessError:
        # Inspect failed -- treat the container as not running.
        return False
returns True if the container is running
243,099
def is_port_open(self, port, timeout=2):
    """Check if `port` is open and accepting connections on the container's
    first IPv4 address; False when the container has no address."""
    ips = self.get_IPv4s()
    return bool(ips) and check_port(port, host=ips[0], timeout=timeout)
check if given port is open and receiving connections on container ip_address