idx
int64
0
252k
question
stringlengths
48
5.28k
target
stringlengths
5
1.23k
246,400
def install_from_zip(pkgpath, install_path, register_func, delete_after_install=False):
    """Install a plugin from a zip archive.

    The archive is unpacked to a temporary directory and then installed
    as a regular plugin folder; the temp folder is handed to install_dir
    with delete_after_install=True so it gets cleaned up.
    """
    logger.debug("%s is a file, attempting to load zip", pkgpath)
    tmpdir = tempfile.mkdtemp(prefix="honeycomb_")
    try:
        with zipfile.ZipFile(pkgpath) as pkgzip:
            pkgzip.extractall(tmpdir)
    except zipfile.BadZipfile as exc:
        logger.debug(str(exc))
        raise click.ClickException(str(exc))

    if delete_after_install:
        logger.debug("deleting %s", pkgpath)
        os.remove(pkgpath)

    logger.debug("installing from unzipped folder %s", tmpdir)
    return install_dir(tmpdir, install_path, register_func, delete_after_install=True)
Install plugin from zipfile .
246,401
def install_from_repo(pkgname, plugin_type, install_path, register_func):
    """Download a plugin zip from the online repository and install it.

    :param pkgname: Name of the plugin to fetch.
    :param plugin_type: Plugin kind (used to build the repo URL).
    :param install_path: Destination plugins folder.
    :param register_func: Callback used to validate/register the plugin.
    :raises exceptions.PluginNotFoundInOnlineRepo: On HTTP error for the package URL.
    :raises exceptions.PluginRepoConnectionError: When the repo is unreachable.
    """
    rsession = requests.Session()
    rsession.mount("https://", HTTPAdapter(max_retries=3))

    logger.debug("trying to install %s from online repo", pkgname)
    pkgurl = "{}/{}s/{}.zip".format(defs.GITHUB_RAW, plugin_type, pkgname)
    try:
        logger.debug("Requesting HTTP HEAD: %s", pkgurl)
        r = rsession.head(pkgurl)
        r.raise_for_status()
        total_size = int(r.headers.get("content-length", 0))
        pkgsize = _sizeof_fmt(total_size)
        with click.progressbar(length=total_size,
                               label="Downloading {} {} ({})..".format(plugin_type, pkgname, pkgsize)) as bar:
            r = rsession.get(pkgurl, stream=True)
            with tempfile.NamedTemporaryFile(delete=False) as f:
                # chunk_size=1 would issue one write (and one bar update) per
                # byte; 64 KiB keeps the download fast while still animating
                for chunk in r.iter_content(chunk_size=65536):
                    if chunk:
                        f.write(chunk)
                        # ProgressBar.update() takes an *increment*, not an
                        # absolute position, so feed it the chunk size
                        bar.update(len(chunk))
        # the tempfile is closed (and flushed) here, before it is unzipped
        return install_from_zip(f.name, install_path, register_func, delete_after_install=True)
    except requests.exceptions.HTTPError as exc:
        logger.debug(str(exc))
        raise exceptions.PluginNotFoundInOnlineRepo(pkgname)
    except requests.exceptions.ConnectionError as exc:
        logger.debug(str(exc))
        raise exceptions.PluginRepoConnectionError()
Install plugin from online repo .
246,402
def uninstall_plugin(pkgpath, force):
    """Remove an installed plugin directory.

    Prompts for confirmation unless ``force`` is true; prints a friendly
    message when the plugin folder does not exist.
    """
    pkgname = os.path.basename(pkgpath)
    if not os.path.exists(pkgpath):
        click.secho("[-] doh! I cannot seem to find `{}`, are you sure it's installed?".format(pkgname))
        return

    if not force:
        click.confirm("[?] Are you sure you want to delete `{}` from honeycomb?".format(pkgname),
                      abort=True)
    try:
        shutil.rmtree(pkgpath)
        logger.debug("successfully uninstalled {}".format(pkgname))
        click.secho("[*] Uninstalled {}".format(pkgname))
    except OSError as exc:
        logger.exception(str(exc))
Uninstall a plugin .
246,403
def list_remote_plugins(installed_plugins, plugin_type):
    """Print plugins available in the online repository that are not installed yet."""
    click.secho("\n[*] Additional plugins from online repository:")
    try:
        rsession = requests.Session()
        rsession.mount("https://", HTTPAdapter(max_retries=3))
        r = rsession.get("{0}/{1}s/{1}s.txt".format(defs.GITHUB_RAW, plugin_type))
        logger.debug("fetching %ss from remote repo", plugin_type)
        available = [name for name in r.text.splitlines() if name not in installed_plugins]
        click.secho(" ".join(available))
    except requests.exceptions.ConnectionError as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Unable to fetch {} information from online repository".format(plugin_type))
List remote plugins from online repo .
246,404
def list_local_plugins(plugin_type, plugins_path, plugin_details):
    """Print details for every locally installed plugin and return their names.

    ``plugin_details`` is a callable mapping a plugin name to a printable
    description line.
    """
    installed_plugins = []
    for plugin in next(os.walk(plugins_path))[1]:
        details = plugin_details(plugin)
        installed_plugins.append(plugin)
        click.secho(details)

    if not installed_plugins:
        click.secho("[*] You do not have any {0}s installed, "
                    "try installing one with `honeycomb {0} install`".format(plugin_type))
    return installed_plugins
List local plugins with details .
246,405
def parse_plugin_args(command_args, config_args):
    """Parse key=value command-line arguments against the plugin's parameter config.

    Values from the command line are validated against the declared type,
    missing values fall back to declared defaults, and missing required
    fields raise ``exceptions.RequiredFieldMissing``.
    """
    parsed_args = {}
    for raw in command_args:
        parts = raw.split("=")
        if len(parts) != 2:
            raise click.UsageError("Invalid parameter '{}', must be in key=value format".format(raw))
        parsed_args[parts[0]] = config_utils.get_truetype(parts[1])

    for param in config_args:
        name = param[defs.VALUE]
        param_type = param[defs.TYPE]
        if name in parsed_args:
            config_utils.validate_field_matches_type(name, parsed_args[name], param_type,
                                                     param.get(defs.ITEMS),
                                                     param.get(defs.MIN), param.get(defs.MAX))
        elif defs.DEFAULT in param:
            # not provided on the command line; use the declared default
            parsed_args[name] = param[defs.DEFAULT]
        elif param[defs.REQUIRED]:
            raise exceptions.RequiredFieldMissing(name)
    return parsed_args
Parse command-line arguments based on the plugin's parameters config.
246,406
def get_select_items(items):
    """Validate select-field options and return the list of allowed values."""
    option_items = []
    for item in items:
        if not (isinstance(item, dict) and defs.VALUE in item and defs.LABEL in item):
            raise exceptions.ParametersFieldError(item, "a dictionary with {} and {}".format(defs.LABEL, defs.VALUE))
        option_items.append(item[defs.VALUE])
    return option_items
Return list of possible select items .
246,407
def print_plugin_args(plugin_path):
    """Print a formatted table of the plugin's configurable parameters."""
    args = config_utils.get_config_parameters(plugin_path)
    args_format = "{:20} {:10} {:^15} {:^10} {:25}"
    title = args_format.format(defs.NAME.upper(), defs.TYPE.upper(), defs.DEFAULT.upper(),
                               defs.REQUIRED.upper(), defs.DESCRIPTION.upper())
    click.secho(title)
    click.secho("-" * len(title))
    for arg in args:
        help_text = " ({})".format(arg[defs.HELP_TEXT]) if defs.HELP_TEXT in arg else ""
        options = _parse_select_options(arg)
        description = arg[defs.LABEL] + options + help_text
        row = args_format.format(arg[defs.VALUE], arg[defs.TYPE],
                                 str(arg.get(defs.DEFAULT, None)),
                                 str(arg.get(defs.REQUIRED, False)),
                                 description)
        click.secho(row)
Print plugin parameters table .
246,408
def configure_integration(path):
    """Configure and enable an integration from its previously saved args file.

    :raises click.ClickException: When the args file is missing or unreadable.
    """
    integration = register_integration(path)
    integration_args = {}
    try:
        with open(os.path.join(path, ARGS_JSON)) as f:
            integration_args = json.load(f)
    except Exception as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Cannot load {} integration args, please configure it first."
                                   .format(os.path.basename(path)))

    click.secho("[*] Adding integration {}".format(integration.name))
    logger.debug("Adding integration %s", integration.name,
                 extra={"integration": integration.name, "args": integration_args})

    configured_integration = ConfiguredIntegration(name=integration.name, integration=integration, path=path)
    configured_integration.data = integration_args
    # instantiate the integration's actions class with the saved arguments
    configured_integration.integration.module = get_integration_module(path).IntegrationActionsClass(integration_args)
    configured_integrations.append(configured_integration)
Configure and enable an integration .
246,409
def send_alert_to_subscribed_integrations(alert):
    """Dispatch an alert to every matching integration, each on its own thread."""
    for configured_integration in get_valid_configured_integrations(alert):
        worker = threading.Thread(target=create_integration_alert_and_call_send,
                                  args=(alert, configured_integration))
        worker.start()
Send Alert to relevant integrations .
246,410
def get_valid_configured_integrations(alert):
    """Return configured event-output integrations that accept this alert's type."""
    if not configured_integrations:
        return []

    def _accepts(configured):
        integration = configured.integration
        if integration.integration_type != IntegrationTypes.EVENT_OUTPUT.name:
            return False
        # an empty supported_event_types list means "accept everything"
        return (not integration.supported_event_types
                or alert.alert_type in integration.supported_event_types)

    return [configured for configured in configured_integrations if _accepts(configured)]
Return a list of integrations for alert filtered by alert_type .
246,411
def create_integration_alert_and_call_send(alert, configured_integration):
    """Wrap an alert in an IntegrationAlert and hand it to the send pipeline."""
    integration_alert = IntegrationAlert(
        alert=alert,
        configured_integration=configured_integration,
        status=IntegrationAlertStatuses.PENDING.name,
        retries=configured_integration.integration.max_send_retries,
    )
    send_alert_to_configured_integration(integration_alert)
Create an IntegrationAlert object and send it to Integration .
246,412
def send_alert_to_configured_integration(integration_alert):
    """Send an IntegrationAlert to its configured integration.

    Collects the alert's populated fields, calls the integration's
    send_event(), then marks the alert POLLING (if the integration polls
    for results) or DONE. On IntegrationSendEventError the call retries
    itself recursively, decrementing integration_alert.retries each time.
    """
    try:
        alert = integration_alert.alert
        configured_integration = integration_alert.configured_integration
        integration = configured_integration.integration
        integration_actions_instance = configured_integration.integration.module

        alert_fields = dict()
        if integration.required_fields:
            # skip silently when the alert cannot satisfy the integration's contract
            if not all([hasattr(alert, _) for _ in integration.required_fields]):
                logger.debug("Alert does not have all required_fields (%s) for integration %s, skipping",
                             integration.required_fields, integration.name)
                return

        # copy every populated slot except internal bookkeeping fields
        exclude_fields = ["alert_type", "service_type"]
        alert_fields = {}
        for field in alert.__slots__:
            if hasattr(alert, field) and field not in exclude_fields:
                alert_fields[field] = getattr(alert, field)

        logger.debug("Sending alert %s to %s", alert_fields, integration.name)
        output_data, output_file_content = integration_actions_instance.send_event(alert_fields)

        if integration.polling_enabled:
            # result not final yet; a poller picks this up later
            integration_alert.status = IntegrationAlertStatuses.POLLING.name
            polling_integration_alerts.append(integration_alert)
        else:
            integration_alert.status = IntegrationAlertStatuses.DONE.name
        integration_alert.send_time = get_current_datetime_utc()
        integration_alert.output_data = json.dumps(output_data)
    except exceptions.IntegrationMissingRequiredFieldError as exc:
        # NOTE(review): the format string has one %s but two args are passed
        # (integration_alert, exc.message) — the logging call likely misformats;
        # confirm against the exception's definition before changing
        logger.exception("Send response formatting for integration alert %s failed. Missing required fields",
                         integration_alert, exc.message)
        integration_alert.status = IntegrationAlertStatuses.ERROR_MISSING_SEND_FIELDS.name
    except exceptions.IntegrationOutputFormatError:
        logger.exception("Send response formatting for integration alert %s failed", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_SENDING_FORMATTING.name
    except exceptions.IntegrationSendEventError as exc:
        # clamp retries to MAX_SEND_RETRIES, then burn one retry and recurse
        integration_send_retries = integration_alert.retries if integration_alert.retries <= MAX_SEND_RETRIES \
            else MAX_SEND_RETRIES
        send_retries_left = integration_send_retries - 1
        integration_alert.retries = send_retries_left
        logger.error("Sending integration alert %s failed. Message: %s. Retries left: %s",
                     integration_alert, exc.message, send_retries_left)
        if send_retries_left == 0:
            integration_alert.status = IntegrationAlertStatuses.ERROR_SENDING.name
        if send_retries_left > 0:
            sleep(SEND_ALERT_DATA_INTERVAL)
            send_alert_to_configured_integration(integration_alert)
Send IntegrationAlert to configured integration .
246,413
def poll_integration_alert_data(integration_alert):
    """Poll a waiting IntegrationAlert for updates from its integration."""
    logger.info("Polling information for integration alert %s", integration_alert)
    try:
        configured_integration = integration_alert.configured_integration
        actions_instance = configured_integration.integration.module
        output_data, output_file_content = actions_instance.poll_for_updates(
            json.loads(integration_alert.output_data))

        integration_alert.status = IntegrationAlertStatuses.DONE.name
        integration_alert.output_data = json.dumps(output_data)
        polling_integration_alerts.remove(integration_alert)
    except exceptions.IntegrationNoMethodImplementationError:
        logger.error("No poll_for_updates function found for integration alert %s", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING.name
    except exceptions.IntegrationPollEventError:
        # a failed poll is not fatal; the alert stays queued for the next cycle
        logger.debug("Polling for integration alert %s failed", integration_alert)
    except exceptions.IntegrationOutputFormatError:
        logger.error("Integration alert %s formatting error", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING_FORMATTING.name
    except Exception:
        logger.exception("Error polling integration alert %s", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_POLLING.name
Poll for updates on waiting IntegrationAlerts .
246,414
def wait_until(func, check_return_value=True, total_timeout=60, interval=0.5,
               exc_list=None, error_message="", *args, **kwargs):
    """Call ``func`` repeatedly until it succeeds or the timeout expires.

    :param check_return_value: When true, a falsy return value counts as failure.
    :param exc_list: Exception types that are swallowed and retried; any other
        exception propagates immediately.
    :raises TimeoutException: When ``total_timeout`` seconds elapse without success.
    """
    deadline = time.time() + total_timeout
    while time.time() < deadline:
        try:
            logger.debug("executing {} with args {} {}".format(func, args, kwargs))
            return_value = func(*args, **kwargs)
            if not check_return_value or return_value:
                return return_value
        except Exception as exc:
            if not (exc_list and any(isinstance(exc, x) for x in exc_list)):
                raise
        time.sleep(interval)
    raise TimeoutException(error_message)
Run a command in a loop until desired result or timeout occurs .
246,415
def search_json_log(filepath, key, value):
    """Search a json-lines log file for an entry where ``entry[key] == value``.

    :param filepath: Path to a file with one JSON object per line.
    :param key: Key to look up in each parsed entry.
    :param value: Value to match against.
    :returns: The first matching entry as a dict, or ``False`` when the file
        is missing/unreadable or no entry matches.
    """
    try:
        with open(filepath, "r") as fh:
            # iterate lazily instead of readlines() so large logs are not
            # loaded into memory all at once
            for line in fh:
                log = json.loads(line)
                if key in log and log[key] == value:
                    return log
    except IOError:
        # treat an unreadable/absent file the same as "not found"
        pass
    return False
Search json log file for a key = value pair .
246,416
def list_commands(self, ctx):
    """Return a sorted list of command names from ``self.folder``.

    A command is any ``.py`` file that is not private (``_`` prefix);
    the ``.py`` suffix is stripped from the returned names.
    """
    commands = []
    for filename in next(os.walk(self.folder))[2]:
        if filename.startswith("_") or not filename.endswith(".py"):
            continue
        commands.append(filename[:-3])
    commands.sort()
    return commands
List commands from folder .
246,417
def get_command(self, ctx, name):
    """Import and return the named command from this plugin's commands package."""
    plugin = os.path.basename(self.folder)
    module_name = "honeycomb.commands.{}.{}".format(plugin, name)
    try:
        command = importlib.import_module(module_name)
    except ImportError:
        raise click.UsageError("No such command {} {}\n\n{}".format(plugin, name, self.get_help(ctx)))
    return getattr(command, name)
Fetch command from folder .
246,418
def cli(ctx, home, iamroot, config, verbose):
    """Honeycomb is a honeypot framework.

    Sets up the home folder and logging, refuses to run as a privileged
    user unless --iamroot is given, and optionally runs from a config file.
    """
    _mkhome(home)
    setup_logging(home, verbose)
    logger.debug("Honeycomb v%s", __version__, extra={"version": __version__})
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})

    try:
        is_admin = os.getuid() == 0
    except AttributeError:
        # os.getuid does not exist on Windows; fall back to the shell32 check
        is_admin = ctypes.windll.shell32.IsUserAnAdmin()

    if is_admin:
        if not iamroot:
            raise click.ClickException("Honeycomb should not run as a privileged user, if you are just "
                                       "trying to bind to a low port try running `setcap 'cap_net_bind_service=+ep' "
                                       "$(which honeycomb)` instead. If you insist, use --iamroot")
        # Logger.warn is a deprecated alias; use warning()
        logger.warning("running as root!")

    ctx.obj["HOME"] = home
    logger.debug("ctx: {}".format(ctx.obj))
    if config:
        return process_config(ctx, config)
Honeycomb is a honeypot framework .
246,419
def setup_logging(home, verbose):
    """Configure console and JSON-file logging for honeycomb.

    Console output is INFO (or DEBUG when ``verbose``); a WatchedFileHandler
    always writes DEBUG-level JSON records to the debug log in ``home``.
    """
    logging.setLoggerClass(MyLogger)
    log_config = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "console": {
                "format": "%(levelname)-8s [%(asctime)s %(name)s] %(filename)s:%(lineno)s %(funcName)s: %(message)s",
            },
            "json": {
                "()": jsonlogger.JsonFormatter,
                "format": "%(levelname)s %(asctime)s %(name)s %(filename)s %(lineno)s %(funcName)s %(message)s",
            },
        },
        "handlers": {
            "default": {
                "level": "DEBUG" if verbose else "INFO",
                "class": "logging.StreamHandler",
                "formatter": "console",
            },
            "file": {
                "level": "DEBUG",
                "class": "logging.handlers.WatchedFileHandler",
                "filename": os.path.join(home, DEBUG_LOG_FILE),
                "formatter": "json",
            },
        },
        "loggers": {
            "": {
                "handlers": ["default", "file"],
                "level": "DEBUG",
                "propagate": True,
            },
        },
    }
    logging.config.dictConfig(log_config)
Configure logging for honeycomb .
246,420
def makeRecord(self, name, level, fn, lno, msg, args, exc_info, func=None, extra=None, sinfo=None):
    """Build a LogRecord, allowing ``extra`` to override internal attributes.

    Also injects process ownership fields (pid/uid/gid/ppid) into every record.
    """
    if six.PY2:
        record = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func)
    else:
        record = logging.LogRecord(name, level, fn, lno, msg, args, exc_info, func, sinfo)

    if extra is None:
        extra = dict()
    extra.update({"pid": os.getpid(), "uid": os.getuid(), "gid": os.getgid(), "ppid": os.getppid()})
    # unlike the stock implementation this deliberately allows clobbering
    # existing record attributes
    for key in extra:
        record.__dict__[key] = extra[key]
    return record
Override default logger to allow overriding of internal attributes .
246,421
def stop(ctx, service, editable):
    """Stop a running service daemon."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)

    logger.debug("loading {}".format(service))
    service = register_service(service_path)

    # the args file is written when the service starts; its absence means
    # the daemon was never launched
    try:
        with open(os.path.join(service_path, ARGS_JSON)) as f:
            service_args = json.load(f)
    except IOError as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Cannot load service args, are you sure server is running?")

    service_module = get_service_module(service_path)
    service_obj = service_module.service_class(alert_types=service.alert_types, service_args=service_args)
    runner = myRunner(service_obj,
                      pidfile=service_path + ".pid",
                      stdout=open(os.path.join(service_path, "stdout.log"), "ab"),
                      stderr=open(os.path.join(service_path, "stderr.log"), "ab"))

    click.secho("[*] Stopping {}".format(service.name))
    try:
        runner._stop()
    except daemon.runner.DaemonRunnerStopFailureError as exc:
        logger.debug(str(exc), exc_info=True)
        raise click.ClickException("Unable to stop service, are you sure it is running?")
Stop a running service daemon .
246,422
def logs(ctx, services, num, follow):
    """Show logs of daemonized services, optionally following them tail -f style."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    services_path = os.path.join(home, SERVICES)

    tail_threads = []
    for service in services:
        logpath = os.path.join(services_path, service, LOGS_DIR, STDOUTLOG)
        if os.path.exists(logpath):
            logger.debug("tailing %s", logpath)
            # Tailer prints the log lines itself; we just keep the thread alive
            t = threading.Thread(target=Tailer, kwargs={"name": service,
                                                        "nlines": num,
                                                        "filepath": logpath,
                                                        "follow": follow})
            t.daemon = True
            t.start()
            tail_threads.append(t)

    if tail_threads:
        # join in small slices so KeyboardInterrupt is handled promptly;
        # Thread.isAlive() was removed in Python 3.9, use is_alive()
        while tail_threads[0].is_alive():
            tail_threads[0].join(0.1)
Show logs of daemonized service .
246,423
def get_integration_module(integration_path):
    """Add integration lookup paths to ``sys.path`` and import its module."""
    lookup_paths = [
        os.path.join(__file__, "..", ".."),
        os.path.join(integration_path, ".."),
        os.path.join(integration_path, DEPS_DIR),
    ]
    for path in lookup_paths:
        path = os.path.realpath(path)
        logger.debug("adding %s to path", path)
        sys.path.insert(0, path)

    integration_name = os.path.basename(integration_path)
    module_name = ".".join([integration_name, INTEGRATION])
    logger.debug("importing %s", module_name)
    return importlib.import_module(module_name)
Add custom paths to sys and import integration module .
246,424
def register_integration(package_folder):
    """Register a honeycomb integration from its package folder.

    :raises IntegrationNotFound: When the folder does not exist.
    :raises ConfigFileNotFound: When the config JSON is missing.
    """
    logger.debug("registering integration %s", package_folder)
    package_folder = os.path.realpath(package_folder)
    if not os.path.exists(package_folder):
        raise IntegrationNotFound(os.path.basename(package_folder))

    json_config_path = os.path.join(package_folder, CONFIG_FILE_NAME)
    if not os.path.exists(json_config_path):
        raise ConfigFileNotFound(json_config_path)

    with open(json_config_path, "r") as f:
        config_json = json.load(f)

    # validate structure and declared parameters before building the object
    validate_config(config_json, defs.INTEGRATION_VALIDATE_CONFIG_FIELDS)
    validate_config_parameters(config_json,
                               defs.INTEGRATION_PARAMETERS_ALLOWED_KEYS,
                               defs.INTEGRATION_PARAMETERS_ALLOWED_TYPES)
    return _create_integration_object(config_json)
Register a honeycomb integration .
246,425
def list(ctx, remote):
    """List installed integrations, optionally also remote ones."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    click.secho("[*] Installed integrations:")
    home = ctx.obj["HOME"]
    integrations_path = os.path.join(home, INTEGRATIONS)
    plugin_type = "integration"

    def get_integration_details(integration_name):
        # load the integration so its metadata can be displayed
        logger.debug("loading {}".format(integration_name))
        integration = register_integration(os.path.join(integrations_path, integration_name))
        supported_event_types = integration.supported_event_types or "All"
        return "{:s} ({:s}) [Supported event types: {}]".format(
            integration.name, integration.description, supported_event_types)

    installed_integrations = list_local_plugins(plugin_type, integrations_path, get_integration_details)
    if remote:
        list_remote_plugins(installed_integrations, plugin_type)
    else:
        click.secho("\n[*] Try running `honeycomb integrations list -r` "
                    "to see integrations available from our repository")
List integrations .
246,426
def run(ctx, service, args, show_args, daemon, editable, integration):
    """Load and run a specific service, optionally daemonized with integrations."""
    home = ctx.obj["HOME"]
    service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)
    service_log_path = os.path.join(service_path, LOGS_DIR)
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    logger.debug("loading {} ({})".format(service, service_path))
    service = register_service(service_path)

    if show_args:
        # only print the parameter table; nothing is launched
        return plugin_utils.print_plugin_args(service_path)

    service_module = get_service_module(service_path)
    service_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(service_path))
    service_obj = service_module.service_class(alert_types=service.alert_types, service_args=service_args)

    if not os.path.exists(service_log_path):
        os.mkdir(service_log_path)

    if daemon:
        runner = myRunner(service_obj,
                          pidfile=service_path + ".pid",
                          stdout=open(os.path.join(service_log_path, STDOUTLOG), "ab"),
                          stderr=open(os.path.join(service_log_path, STDERRLOG), "ab"))
        # keep log handler file descriptors open across the daemon fork
        files_preserve = []
        for handler in logging.getLogger().handlers:
            if hasattr(handler, "stream"):
                if hasattr(handler.stream, "fileno"):
                    files_preserve.append(handler.stream.fileno())
            if hasattr(handler, "socket"):
                files_preserve.append(handler.socket.fileno())
        runner.daemon_context.files_preserve = files_preserve
        # make sure the service shuts down cleanly on TERM/INT
        runner.daemon_context.signal_map.update({
            signal.SIGTERM: service_obj._on_server_shutdown,
            signal.SIGINT: service_obj._on_server_shutdown,
        })
        logger.debug("daemon_context", extra={"daemon_context": vars(runner.daemon_context)})

    for integration_name in integration:
        integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration_name, editable)
        configure_integration(integration_path)

    click.secho("[+] Launching {} {}".format(service.name, "in daemon mode" if daemon else ""))
    try:
        # persist the parsed args so `stop` (and restarts) can find them
        with open(os.path.join(service_path, ARGS_JSON), "w") as f:
            f.write(json.dumps(service_args))
        runner._start() if daemon else service_obj.run()
    except KeyboardInterrupt:
        service_obj._on_server_shutdown()
    click.secho("[*] {} has stopped".format(service.name))
Load and run a specific service .
246,427
def read_lines(self, file_path, empty_lines=False, signal_ready=True):
    """Fetch lines from ``file_path`` as a tail-follow generator.

    Starts at the end of the file and yields new lines as they are
    appended, for as long as the owning thread server is alive.

    :param empty_lines: When true, also yield empty reads (no new data).
    :param signal_ready: Notify the owner that tailing has started.
    """
    file_handler, file_id = self._get_file(file_path)
    # start tailing from the current end of file
    file_handler.seek(0, os.SEEK_END)

    if signal_ready:
        self.signal_ready()

    while self.thread_server.is_alive():
        line = six.text_type(file_handler.readline(), "utf-8")
        if line:
            yield line
            continue
        elif empty_lines:
            yield line
        time.sleep(0.1)
        # NOTE(review): presumably this detects log rotation — the inode
        # changed but a file still exists at the path — and reopens; confirm
        # _get_file_id semantics before relying on this
        if file_id != self._get_file_id(os.stat(file_path)) and os.path.isfile(file_path):
            file_handler, file_id = self._get_file(file_path)
Fetch lines from file .
246,428
def on_server_start(self):
    """Service run loop: start the docker container and stream alerts from its logs."""
    self._container = self._docker_client.containers.run(self.docker_image_name,
                                                         detach=True,
                                                         **self.docker_params)
    self.signal_ready()

    for log_line in self.get_lines():
        try:
            alert_dict = self.parse_line(log_line)
            if alert_dict:
                self.add_alert_to_queue(alert_dict)
        except Exception:
            # one malformed log line must not kill the server loop
            self.logger.exception(None)
Service run loop function .
246,429
def on_server_shutdown(self):
    """Stop and remove the service container, if one was started."""
    container = self._container
    if not container:
        return
    container.stop()
    # remove the container and its volumes so repeated runs don't accumulate state
    container.remove(v=True, force=True)
Stop the container before shutting down .
246,430
def uninstall(ctx, yes, integrations):
    """Uninstall one or more integrations."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    for integration in integrations:
        integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration)
        plugin_utils.uninstall_plugin(integration_path, yes)
Uninstall an integration.
246,431
def install(ctx, services, delete_after_install=False):
    """Install honeypot services from the online library, a local path or a zipfile."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    services_path = os.path.join(home, SERVICES)

    installed_all_plugins = True
    for service in services:
        try:
            plugin_utils.install_plugin(service, SERVICE, services_path, register_service)
        except exceptions.PluginAlreadyInstalled as exc:
            click.echo(exc)
            installed_all_plugins = False

    if not installed_all_plugins:
        # signal partial failure to the shell
        raise ctx.exit(errno.EEXIST)
Install a honeypot service from the online library, a local path, or a zipfile.
246,432
def uninstall(ctx, yes, services):
    """Uninstall one or more services."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    for service in services:
        service_path = plugin_utils.get_plugin_path(home, SERVICES, service)
        plugin_utils.uninstall_plugin(service_path, yes)
Uninstall a service .
246,433
def get_service_module(service_path):
    """Add service lookup paths to ``sys.path`` and import the service module."""
    lookup_paths = [
        os.path.dirname(__file__),
        os.path.realpath(os.path.join(service_path, "..")),
        os.path.realpath(os.path.join(service_path)),
        os.path.realpath(os.path.join(service_path, DEPS_DIR)),
    ]
    for path in lookup_paths:
        path = os.path.realpath(path)
        logger.debug("adding %s to path", path)
        sys.path.insert(0, path)

    service_name = os.path.basename(service_path)
    # service module convention: <name>/<name>_service.py
    module = ".".join([service_name, service_name + "_service"])
    logger.debug("importing %s", module)
    return importlib.import_module(module)
Add custom paths to sys and import service module .
246,434
def register_service(package_folder):
    """Register a honeycomb service from its package folder.

    :raises ServiceNotFound: When the folder does not exist.
    :raises ConfigFileNotFound: When the config JSON is missing.
    """
    logger.debug("registering service %s", package_folder)
    package_folder = os.path.realpath(package_folder)
    if not os.path.exists(package_folder):
        raise ServiceNotFound(os.path.basename(package_folder))

    json_config_path = os.path.join(package_folder, CONFIG_FILE_NAME)
    if not os.path.exists(json_config_path):
        raise ConfigFileNotFound(json_config_path)

    with open(json_config_path, "r") as f:
        config_json = json.load(f)

    # validate the config's structure before materializing the service
    config_utils.validate_config(config_json, defs.SERVICE_ALERT_VALIDATE_FIELDS)
    config_utils.validate_config(config_json.get(defs.SERVICE_CONFIG_SECTION_KEY, {}),
                                 defs.SERVICE_CONFIG_VALIDATE_FIELDS)
    _validate_supported_platform(config_json)
    _validate_alert_configs(config_json)
    config_utils.validate_config_parameters(config_json,
                                            defs.SERVICE_ALLOWED_PARAMTER_KEYS,
                                            defs.SERVICE_ALLOWED_PARAMTER_TYPES)

    service_type = _create_service_object(config_json)
    service_type.alert_types = _create_alert_types(config_json, service_type)
    return service_type
Register a honeycomb service .
246,435
def install(ctx, integrations, delete_after_install=False):
    """Install honeycomb integrations from the online library, a local path or a zipfile."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    integrations_path = os.path.join(home, INTEGRATIONS)

    installed_all_plugins = True
    for integration in integrations:
        try:
            plugin_utils.install_plugin(integration, INTEGRATION, integrations_path, register_integration)
        except exceptions.PluginAlreadyInstalled as exc:
            click.echo(exc)
            installed_all_plugins = False

    if not installed_all_plugins:
        # signal partial failure to the shell
        raise ctx.exit(errno.EEXIST)
Install a honeycomb integration from the online library, a local path, or a zipfile.
246,436
def configure(ctx, integration, args, show_args, editable):
    """Configure an integration with the supplied parameters.

    Parses and validates the key=value arguments against the integration's
    declared parameters, then persists them to the integration's args file.
    """
    home = ctx.obj["HOME"]
    integration_path = plugin_utils.get_plugin_path(home, defs.INTEGRATIONS, integration, editable)
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    logger.debug("loading {} ({})".format(integration, integration_path))
    integration = register_integration(integration_path)

    if show_args:
        return plugin_utils.print_plugin_args(integration_path)

    integration_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(integration_path))
    args_file = os.path.join(integration_path, defs.ARGS_JSON)
    with open(args_file, "w") as f:
        data = json.dumps(integration_args)
        logger.debug("writing %s to %s", data, args_file)
        # write the already-serialized payload instead of serializing twice
        f.write(data)
    click.secho("[*] {0} has been configured, make sure to test it with `honeycomb integration test {0}`"
                .format(integration.name))
Configure an integration with default parameters .
246,437
def get_match_history(self, account_id=None, **kwargs):
    """Return a dictionary containing a list of the most recent Dota matches."""
    kwargs.setdefault('account_id', account_id)
    url = self.__build_url(urls.GET_MATCH_HISTORY, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing a list of the most recent Dota matches
246,438
def get_match_history_by_seq_num(self, start_at_match_seq_num=None, **kwargs):
    """Return a dictionary of Dota matches in the order they were recorded."""
    kwargs.setdefault('start_at_match_seq_num', start_at_match_seq_num)
    url = self.__build_url(urls.GET_MATCH_HISTORY_BY_SEQ_NUM, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing a list of Dota matches in the order they were recorded
246,439
def get_match_details(self, match_id=None, **kwargs):
    """Return the details for a single Dota 2 match."""
    if 'match_id' not in kwargs:
        kwargs['match_id'] = match_id
    url = self.__build_url(urls.GET_MATCH_DETAILS, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing the details for a Dota 2 match
246,440
def get_league_listing(self):
    """Return the list of all ticketed leagues."""
    url = self.__build_url(urls.GET_LEAGUE_LISTING)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing a list of all ticketed leagues
246,441
def get_live_league_games(self):
    """Return the list of ticketed league games currently in progress."""
    url = self.__build_url(urls.GET_LIVE_LEAGUE_GAMES)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing a list of ticketed games in progress
246,442
def get_team_info_by_team_id(self, start_at_team_id=None, **kwargs):
    """Return in-game team info, starting from a given team id."""
    if 'start_at_team_id' not in kwargs:
        kwargs['start_at_team_id'] = start_at_team_id
    url = self.__build_url(urls.GET_TEAM_INFO_BY_TEAM_ID, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing info for in-game teams
246,443
def get_player_summaries(self, steamids=None, **kwargs):
    """Return player summaries for one or more Steam ids.

    A scalar id is wrapped in a list; None entries are dropped and the rest
    are converted to 64-bit form before the request is built.
    """
    if not isinstance(steamids, collections.Iterable):
        # NOTE(review): collections.Iterable moved to collections.abc in
        # Python 3.3 and was removed in 3.10 — this line dates the module.
        steamids = [steamids]
    # despite the name, these are 64-bit Steam ids, not base64-encoded values
    base64_ids = list(map(convert_to_64_bit, filter(lambda x: x is not None, steamids)))
    if 'steamids' not in kwargs:
        kwargs['steamids'] = base64_ids
    url = self.__build_url(urls.GET_PLAYER_SUMMARIES, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing player summaries
246,444
def get_heroes(self, **kwargs):
    """Return the in-game hero table, used to map hero ids to localised names."""
    url = self.__build_url(urls.GET_HEROES, language=self.language, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary of in - game heroes used to parse ids into localised names
246,445
def get_tournament_prize_pool(self, leagueid=None, **kwargs):
    """Return community-funded tournament prize pool info for a league."""
    if 'leagueid' not in kwargs:
        kwargs['leagueid'] = leagueid
    url = self.__build_url(urls.GET_TOURNAMENT_PRIZE_POOL, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary that includes community funded tournament prize pools
246,446
def get_top_live_games(self, partner='', **kwargs):
    """Return the top-MMR live games."""
    if 'partner' not in kwargs:
        kwargs['partner'] = partner
    url = self.__build_url(urls.GET_TOP_LIVE_GAME, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary that includes top MMR live games
246,447
def __build_url(self, api_call, **kwargs):
    """Assemble the full API request URL, injecting key/language/format defaults."""
    kwargs['key'] = self.api_key
    if 'language' not in kwargs:
        kwargs['language'] = self.language
    if 'format' not in kwargs:
        kwargs['format'] = self.__format
    api_query = urlencode(kwargs)
    return "{0}{1}?{2}".format(urls.BASE_URL, api_call, api_query)
Builds the api query
246,448
def __check_http_err(self, status_code):
    """Raise on auth (403) / service-unavailable (503) HTTP status; otherwise return False."""
    if status_code == 403:
        raise exceptions.APIAuthenticationError(self.api_key)
    elif status_code == 503:
        raise exceptions.APITimeoutError()
    else:
        return False
Raises an exception if we get a http error
246,449
def item_id(response):
    """Annotate each player's item slots with localised item names.

    For every player in the response, adds item_0_name ... item_5_name keys
    alongside the numeric item ids, resolved via the module-level ``items``
    table. Returns the (mutated) response.
    """
    slot_keys = ['item_{0}'.format(i) for i in range(6)]
    name_keys = [key + '_name' for key in slot_keys]
    for player in response['players']:
        for slot_key, name_key in zip(slot_keys, name_keys):
            # no break on purpose: mirrors the original full scan of the table
            for entry in items['items']:
                if entry['id'] == player[slot_key]:
                    player[name_key] = entry['localized_name']
    return response
Parse the item ids; the names will be available as item_0_name, item_1_name, item_2_name, and so on
246,450
def get_reviews(obj):
    """Return the Review queryset attached to *obj* (generic relation lookup)."""
    ctype = ContentType.objects.get_for_model(obj)
    return models.Review.objects.filter(content_type=ctype, object_id=obj.id)
Simply returns the reviews for an object .
246,451
def get_review_average(obj):
    """Return the review average for *obj*.

    Returns False when the object has no reviews or no positive averages.
    """
    total = 0
    reviews = get_reviews(obj)
    if not reviews:
        return False
    for review in reviews:
        average = review.get_average_rating()
        if average:
            # reuse the already-computed value — the original called
            # get_average_rating() a second time here
            total += average
    if total > 0:
        return total / reviews.count()
    return False
Returns the review average for an object .
246,452
def render_category_averages(obj, normalize_to=100):
    """Render all the sub-averages for each rating category of *obj*.

    Returns a template context holding *reviewed_item* and a
    category -> average mapping (normalised to *normalize_to*); when there
    are no reviews, every counting category defaults to 0.0.
    """
    context = {'reviewed_item': obj}
    ctype = ContentType.objects.get_for_model(obj)
    reviews = models.Review.objects.filter(content_type=ctype, object_id=obj.id)
    category_averages = {}
    # Sum each review's per-category averages.
    for review in reviews:
        review_category_averages = review.get_category_averages(normalize_to)
        if review_category_averages:
            for category, average in review_category_averages.items():
                if category not in category_averages:
                    category_averages[category] = review_category_averages[category]
                else:
                    category_averages[category] += review_category_averages[category]
    if reviews and category_averages:
        # Divide each sum by the number of non-empty ratings actually stored
        # for that category on this object.
        for category, average in category_averages.items():
            category_averages[category] = category_averages[category] / models.Rating.objects.filter(category=category, value__isnull=False, review__content_type=ctype, review__object_id=obj.id).exclude(value='').count()
    else:
        # No data at all: emit a zeroed mapping for the counting categories.
        category_averages = {}
        for category in models.RatingCategory.objects.filter(counts_for_average=True):
            category_averages[category] = 0.0
    context.update({'category_averages': category_averages})
    return context
Renders all the sub - averages for each category .
246,453
def total_review_average(obj, normalize_to=100):
    """Return the average over all reviews of *obj*, normalised to *normalize_to*.

    Returns 0 when the object has no reviews.
    """
    ctype = ContentType.objects.get_for_model(obj)
    total_average = 0
    reviews = models.Review.objects.filter(content_type=ctype, object_id=obj.id)
    for review in reviews:
        total_average += review.get_average_rating(normalize_to)
    if reviews:
        total_average /= reviews.count()
    return total_average
Returns the average for all reviews of the given object .
246,454
def user_has_reviewed(obj, user):
    """Return True if *user* has already reviewed *obj*."""
    ctype = ContentType.objects.get_for_model(obj)
    try:
        models.Review.objects.get(user=user, content_type=ctype, object_id=obj.id)
    except models.Review.DoesNotExist:
        return False
    return True
Returns True if the user has already reviewed the object .
246,455
def str_to_bytes(value: str, expected_length: int) -> bytes:
    """Encode *value* as ASCII bytes after validating it is Base32 material.

    Raises ValueError on wrong length, non-ASCII input, or any character
    outside the Base32 alphabet (per the module-level DECODING table).
    """
    length = len(value)
    if length != expected_length:
        raise ValueError('Expects {} characters for decoding; got {}'.format(expected_length, length))
    try:
        encoded = value.encode('ascii')
    except UnicodeEncodeError as ex:
        raise ValueError('Expects value that can be encoded in ASCII charset: {}'.format(ex))
    table = DECODING
    bad = [byte for byte in encoded if table[byte] > 31]
    if bad:
        # report the first offending character, as the original loop did
        raise ValueError('Non-base32 character found: "{}"'.format(chr(bad[0])))
    return encoded
Convert the given string to bytes and validate it is within the Base32 character set .
246,456
def package_version():
    """Get the package version via Git tag and sync it into version.py."""
    path = os.path.join(os.path.dirname(__file__), 'version.py')
    version = read_version(path)
    write_version(path, version)
    return version
Get the package version via Git Tag .
246,457
def synchronized(*args):
    """Prevent two or more callers from interleaving the decorated function.

    Supports both the bare form (@synchronized, module-level lock) and the
    parameterised form (@synchronized(lock)).
    """
    if callable(args[0]):
        # bare decorator: args[0] is the function itself
        return decorate_synchronized(args[0], _synchronized_lock)

    def wrap(function):
        return decorate_synchronized(function, args[0])

    return wrap
A synchronized function prevents two or more callers to interleave its execution preventing race conditions .
246,458
def worker_thread(context):
    """The worker thread routine: run the initializer, then consume tasks."""
    queue = context.task_queue
    parameters = context.worker_parameters
    if parameters.initializer is not None:
        if not run_initializer(parameters.initializer, parameters.initargs):
            # a failing initializer poisons the whole pool
            context.state = ERROR
            return
    for task in get_next_task(context, parameters.max_tasks):
        execute_next_task(task)
        queue.task_done()
The worker thread routines .
246,459
def stop_process(process):
    """Do its best to stop *process*: terminate, then SIGKILL as a last resort.

    Raises RuntimeError if the process survives both attempts.
    """
    process.terminate()
    process.join(3)
    if process.is_alive() and os.name != 'nt':
        try:
            os.kill(process.pid, signal.SIGKILL)
            process.join()
        except OSError:
            return
    if process.is_alive():
        # report the worker's PID — the original used os.getpid(), which is
        # this process's own PID, not the one we failed to kill
        raise RuntimeError("Unable to terminate PID %d" % process.pid)
Does its best to stop the process .
246,460
def send_result(pipe, data):
    """Push *data* through *pipe*, downgrading unpicklable payloads to RemoteException."""
    try:
        pipe.send(data)
    except (pickle.PicklingError, TypeError) as error:
        tb = format_exc()
        error.traceback = tb
        pipe.send(RemoteException(error, tb))
Send result handling pickling and communication errors .
246,461
def process(*args, **kwargs):
    """Run the decorated function in a concurrent process, managing result and errors.

    Supports both the bare form (@process) and the parameterised form
    (@process(timeout=...)); raises TypeError on a non-numeric timeout.
    """
    timeout = kwargs.get('timeout')
    used_bare = len(args) == 1 and len(kwargs) == 0 and callable(args[0])
    if used_bare:
        return _process_wrapper(args[0], timeout)
    if timeout is not None and not isinstance(timeout, (int, float)):
        raise TypeError('Timeout expected to be None or integer or float')

    def decorating_function(function):
        return _process_wrapper(function, timeout)

    return decorating_function
Runs the decorated function in a concurrent process taking care of the result and error management .
246,462
def _worker_handler(future, worker, pipe, timeout):
    """Worker lifecycle manager: await the result, settle the future, stop the worker."""
    result = _get_result(future, pipe, timeout)
    if isinstance(result, BaseException):
        if isinstance(result, ProcessExpired):
            # enrich the expiration error with the worker's actual exit code
            result.exitcode = worker.exitcode
        future.set_exception(result)
    else:
        future.set_result(result)
    if worker.is_alive():
        stop_process(worker)
Worker lifecycle manager .
246,463
def _function_handler(function, args, kwargs, pipe):
    """Run the actual function in the child process and pipe back its result."""
    # the parent coordinates shutdown; the child must not react to Ctrl-C
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    result = process_execute(function, *args, **kwargs)
    send_result(pipe, result)
Runs the actual function in separate process and returns its result .
246,464
def _get_result(future, pipe, timeout):
    """Wait for a result on *pipe*, handling timeout, cancellation and comms errors.

    Returns either the received result or the exception object to report.
    """
    counter = count(step=SLEEP_UNIT)
    try:
        # poll in SLEEP_UNIT slices so timeout/cancellation are noticed promptly
        while not pipe.poll(SLEEP_UNIT):
            if timeout is not None and next(counter) >= timeout:
                return TimeoutError('Task Timeout', timeout)
            elif future.cancelled():
                return CancelledError()
        return pipe.recv()
    except (EOFError, OSError):
        # the worker end of the pipe vanished: the process died abnormally
        return ProcessExpired('Abnormal termination')
    except Exception as error:
        return error
Waits for result and handles communication errors .
246,465
def _trampoline(name, module, *args, **kwargs):
    """Trampoline for decorators: resolve the registered function and invoke it."""
    target = _function_lookup(name, module)
    return target(*args, **kwargs)
Trampoline function for decorators .
246,466
def _function_lookup(name, module):
    """Search the function among the registered ones.

    If not found, import its module: decoration happens at import time, which
    forces registration as a side effect.
    """
    try:
        return _registered_functions[name]
    except KeyError:
        __import__(module)
        mod = sys.modules[module]
        # the attribute value is deliberately discarded — accessing it is only
        # needed to trigger the decorator-driven registration
        getattr(mod, name)
        return _registered_functions[name]
Searches the function between the registered ones . If not found it imports the module forcing its registration .
246,467
def worker_process(params, channel):
    """The worker process routine: run the initializer, then execute tasks.

    Exits with the error's errno (or 1) on environment failure, 1 on a failed
    initializer, and 0 on a clean EOF from the pool.
    """
    # the parent coordinates shutdown; ignore Ctrl-C in the child
    signal(SIGINT, SIG_IGN)
    if params.initializer is not None:
        if not run_initializer(params.initializer, params.initargs):
            os._exit(1)
    try:
        for task in worker_get_next_task(channel, params.max_tasks):
            payload = task.payload
            result = process_execute(payload.function, *payload.args, **payload.kwargs)
            send_result(channel, Result(task.id, result))
    except (EnvironmentError, OSError, RuntimeError) as error:
        os._exit(error.errno if error.errno else 1)
    except EOFError:
        # pool closed the channel: clean shutdown
        os._exit(0)
The worker process routines .
246,468
def task_transaction(channel):
    """Ensure a task is fetched and acknowledged atomically (under the channel lock).

    Raises RuntimeError if another worker drained the channel first.
    """
    with channel.lock:
        if channel.poll(0):
            task = channel.recv()
            channel.send(Acknowledgement(os.getpid(), task.id))
        else:
            raise RuntimeError("Race condition between workers")
    return task
Ensures a task is fetched and acknowledged atomically .
246,469
def schedule(self, task):
    """Schedule a new Task in the PoolManager: register it, then hand it to a worker."""
    self.task_manager.register(task)
    self.worker_manager.dispatch(task)
Schedules a new Task in the PoolManager .
246,470
def process_next_message(self, timeout):
    """Process the next worker message: an Acknowledgement marks a task as
    started, a Result marks it as done.
    """
    message = self.worker_manager.receive(timeout)
    if isinstance(message, Acknowledgement):
        self.task_manager.task_start(message.task, message.worker)
    elif isinstance(message, Result):
        self.task_manager.task_done(message.task, message.result)
Processes the next message coming from the workers .
246,471
def update_tasks(self):
    """Fail timed-out and cancelled tasks, stopping their workers."""
    for task in self.task_manager.timeout_tasks():
        self.task_manager.task_done(task.id, TimeoutError("Task timeout", task.timeout))
        self.worker_manager.stop_worker(task.worker_id)
    for task in self.task_manager.cancelled_tasks():
        self.task_manager.task_done(task.id, CancelledError())
        self.worker_manager.stop_worker(task.worker_id)
Handles timing out Tasks .
246,472
def update_workers(self):
    """Handle unexpected worker termination and replenish the pool."""
    for expiration in self.worker_manager.inspect_workers():
        self.handle_worker_expiration(expiration)
    self.worker_manager.create_workers()
Handles unexpected processes termination .
246,473
def task_done(self, task_id, result):
    """Set the task's result (or exception) and run the done callback.

    Unknown task ids are ignored: the task may already have been settled
    through another path (e.g. a timeout).
    """
    try:
        task = self.tasks.pop(task_id)
    except KeyError:
        return
    else:
        if task.future.cancelled():
            # NOTE(review): the original called task.set_running_or_notify_cancel();
            # that method lives on the Future, so delegate to it explicitly.
            task.future.set_running_or_notify_cancel()
        elif isinstance(result, BaseException):
            task.future.set_exception(result)
        else:
            task.future.set_result(result)
        self.task_done_callback()
Set the tasks result and run the callback .
246,474
def inspect_workers(self):
    """Update worker bookkeeping: drop dead workers, report abnormal exits.

    Returns an iterable of (pid, exitcode) pairs for workers that died with
    a non-zero exit code.
    """
    snapshot = tuple(self.workers.values())
    dead = [worker for worker in snapshot if not worker.is_alive()]
    for worker in dead:
        del self.workers[worker.pid]
    return ((worker.pid, worker.exitcode) for worker in dead if worker.exitcode != 0)
Updates the workers status .
246,475
def iter_chunks(chunksize, *iterables):
    """Yield tuples of up to *chunksize* zipped elements from *iterables*."""
    zipped = iter(zip(*iterables))
    chunk = tuple(islice(zipped, chunksize))
    while chunk:
        yield chunk
        chunk = tuple(islice(zipped, chunksize))
Iterates over zipped iterables in chunks .
246,476
def run_initializer(initializer, initargs):
    """Run the Pool initializer; log and swallow any error.

    Returns True on success, False if the initializer raised.
    """
    try:
        initializer(*initargs)
    except Exception as error:
        logging.exception(error)
        return False
    return True
Runs the Pool initializer dealing with errors .
246,477
def join(self, timeout=None):
    """Join the pool, waiting until all workers have exited.

    Raises RuntimeError if the pool is still RUNNING. A CLOSED pool first
    drains the task queue (honouring *timeout*), stops, then re-enters
    join() to perform the final shutdown; any other non-running state shuts
    the pool down immediately.
    """
    if self._context.state == RUNNING:
        raise RuntimeError('The Pool is still running')
    if self._context.state == CLOSED:
        self._wait_queue_depletion(timeout)
        self.stop()
        self.join()
    else:
        # the None sentinel unblocks workers waiting on the queue
        self._context.task_queue.put(None)
        self._stop_pool()
Joins the pool waiting until all workers exited .
246,478
def thread(function):
    """Run the decorated function in a concurrent thread, returning a Future
    that will hold its result or error.
    """
    @wraps(function)
    def wrapper(*args, **kwargs):
        result_future = Future()
        launch_thread(_function_handler, function, args, kwargs, result_future)
        return result_future

    return wrapper
Runs the decorated function within a concurrent thread taking care of the result and error management .
246,479
def _function_handler(function, args, kwargs, future):
    """Run *function* in the worker thread, storing its outcome in *future*."""
    future.set_running_or_notify_cancel()
    try:
        outcome = function(*args, **kwargs)
    except BaseException as error:
        error.traceback = format_exc()
        future.set_exception(error)
    else:
        future.set_result(outcome)
Runs the actual function in separate thread and returns its result .
246,480
def create_cities_csv(filename="places2k.txt", output="cities.csv"):
    """Create a simple one-city-per-line file from the USPS places2k.txt file.

    Puerto Rico rows (state code "PR") are skipped; for the rest, the
    place-name field (columns 9-72) is written without its trailing type
    token (e.g. "city", "town").
    """
    with open(filename, 'r') as city_file, open(output, 'w') as out:
        for line in city_file:
            if line.startswith("PR"):
                continue
            name_tokens = line[9:72].split()[:-1]
            out.write(" ".join(name_tokens) + '\n')
Takes the places2k . txt from USPS and creates a simple file of all cities .
246,481
def parse_address(self, address, line_number=-1):
    """Return an Address parsed from *address*.

    Passes this parser to the Address constructor so it can use all the
    custom-loaded suffixes, cities, etc.
    """
    return Address(address, self, line_number, self.logger)
Return an Address object from the given address. Passes itself to the Address constructor so it can use all the custom-loaded suffixes, cities, etc.
246,482
def load_cities(self, filename):
    """Load all cities from *filename* (one per line) lowercased into self.cities.

    Not strictly required, but vastly increases parsing accuracy.
    """
    with open(filename, 'r') as city_file:
        self.cities.extend(line.strip().lower() for line in city_file)
Load up all cities in lowercase for easier matching. The file should have one city per line, with no extra characters. This isn't strictly required, but will vastly increase the accuracy.
246,483
def load_streets(self, filename):
    """Load all streets from *filename* (one per line) lowercased into self.streets.

    Not strictly required, but vastly increases parsing accuracy.
    """
    with open(filename, 'r') as street_file:
        self.streets.extend(line.strip().lower() for line in street_file)
Load up all streets in lowercase for easier matching. The file should have one street per line, with no extra characters. This isn't strictly required, but will vastly increase the accuracy.
246,484
def preprocess_address(self, address):
    """Clean up a raw address string before tokenising.

    Extracts reasonably-assured bits (apartment/unit designators) that may
    throw off the rest of the parsing, stores them on self.apartment, and
    returns the cleaned address.
    """
    # Normalise number-sign and ampersand spacing so the regexes match reliably.
    address = address.replace("# ", "#")
    address = address.replace(" & ", "&")
    # Drop "... units" descriptors entirely.
    if re.search(r"-?-?\w+ units", address, re.IGNORECASE):
        address = re.sub(r"-?-?\w+ units", "", address, flags=re.IGNORECASE)
    # Known apartment/unit patterns, tried in order; each match is removed
    # from the address.
    apartment_regexes = [r'#\w+ & \w+', '#\w+ rm \w+', "#\w+-\w", r'apt #{0,1}\w+', r'apartment #{0,1}\w+', r'#\w+', r'# \w+', r'rm \w+', r'unit #?\w+', r'units #?\w+', r'- #{0,1}\w+', r'no\s?\d+\w*', r'style\s\w{1,2}', r'townhouse style\s\w{1,2}']
    for regex in apartment_regexes:
        apartment_match = re.search(regex, address, re.IGNORECASE)
        if apartment_match:
            # NOTE(review): no break — later patterns may strip more text, and
            # self.apartment ends up holding the LAST pattern that matched.
            self.apartment = self._clean(apartment_match.group())
            address = re.sub(regex, "", address, flags=re.IGNORECASE)
    # Collapse double commas left behind by the removals.
    address = re.sub(r"\,\s*\,", ",", address)
    return address
Takes a basic address and attempts to clean it up extract reasonably assured bits that may throw off the rest of the parsing and return the cleaned address .
246,485
def check_state(self, token):
    """Check if *token* is a state (in either the keys or values of the states table).

    Must run before the suffix check. Returns True (and sets self.state) on
    a match, False otherwise.
    """
    # Two-letter token before any state was found: try full-name keys first,
    # then abbreviation values.
    if len(token) == 2 and self.state is None:
        if token.capitalize() in self.parser.states.keys():
            self.state = self._clean(self.parser.states[token.capitalize()])
            return True
        elif token.upper() in self.parser.states.values():
            self.state = self._clean(token.upper())
            return True
    # Longer tokens are only plausible as a state when no suffix was found yet
    # and the address had more than one comma-separated part.
    if self.state is None and self.street_suffix is None and len(self.comma_separated_address) > 1:
        if token.capitalize() in self.parser.states.keys():
            self.state = self._clean(self.parser.states[token.capitalize()])
            return True
        elif token.upper() in self.parser.states.values():
            self.state = self._clean(token.upper())
            return True
    return False
Check if the state is in either the keys or values of our states list. Must run before the suffix check.
246,486
def check_city(self, token):
    """Check whether *token* is (part of) a known city name.

    Must run before the suffix check. Handles single-token cities and
    multi-word cities assembled backwards token by token, including the
    shortened form "saint" -> "st.". Returns True when the token was
    consumed as city material, False otherwise.
    """
    shortened_cities = {'saint': 'st.'}
    # City once a state has already been found.
    if self.city is None and self.state is not None and self.street_suffix is None:
        if token.lower() in self.parser.cities:
            self.city = self._clean(token.capitalize())
            return True
        return False
    # City without a state — only plausible in multi-part addresses.
    if self.city is None and self.apartment is None and self.street_suffix is None and len(self.comma_separated_address) > 1:
        if token.lower() in self.parser.cities:
            self.city = self._clean(token.capitalize())
            return True
        return False
    # Multi-word city: try prepending this token to the city found so far.
    if self.city is not None and self.street_suffix is None and self.street is None:
        # The original used Python 2 `print` statements here (a SyntaxError
        # under Python 3); converted to the print() function, same output.
        print("Checking for multi part city", token.lower(), token.lower() in shortened_cities.keys())
        if token.lower() + ' ' + self.city in self.parser.cities:
            self.city = self._clean((token.lower() + ' ' + self.city).capitalize())
            return True
        if token.lower() in shortened_cities.keys():
            token = shortened_cities[token.lower()]
            print("Checking for shorted multi part city", token.lower() + ' ' + self.city)
            if token.lower() + ' ' + self.city.lower() in self.parser.cities:
                self.city = self._clean(token.capitalize() + ' ' + self.city.capitalize())
                return True
    # Explicit fall-through: the original returned None implicitly here.
    return False
Check if there is a known city from our city list . Must come before the suffix .
246,487
def check_street_suffix(self, token):
    """Attempt to match a street suffix.

    On a match, stores the abbreviation with a capitalised first letter and
    a trailing period (e.g. "St." or "Ave.") in self.street_suffix and
    returns True.
    """
    # A suffix can only appear before the street name has been found
    # (tokens are processed right-to-left).
    if self.street_suffix is None and self.street is None:
        if token.upper() in self.parser.suffixes.keys():
            # full word ("STREET") -> abbreviation ("St.")
            suffix = self.parser.suffixes[token.upper()]
            self.street_suffix = self._clean(suffix.capitalize() + '.')
            return True
        elif token.upper() in self.parser.suffixes.values():
            # already an abbreviation; just normalise the casing
            self.street_suffix = self._clean(token.capitalize() + '.')
            return True
    return False
Attempts to match a street suffix . If found it will return the abbreviation with the first letter capitalized and a period after it . E . g . St . or Ave .
246,488
def check_street(self, token):
    """Check for a street name.

    Assumes the street comes before a prefix and after a suffix; multi-word
    street names are accumulated by prepending. Returns True when the token
    was consumed.
    """
    # First street word, right after the suffix.
    if self.street is None and self.street_suffix is not None and self.street_prefix is None and self.house_number is None:
        self.street = self._clean(token.capitalize())
        return True
    # Additional words of a multi-word street name are prepended.
    elif self.street is not None and self.street_suffix is not None and self.street_prefix is None and self.house_number is None:
        self.street = self._clean(token.capitalize() + ' ' + self.street)
        return True
    # No suffix at all: fall back to the known-streets list.
    if not self.street_suffix and not self.street and token.lower() in self.parser.streets:
        self.street = self._clean(token)
        return True
    return False
Let's assume a street comes before a prefix and after a suffix. This isn't always the case, but we'll deal with that in our guessing game. Two-word street names are also handled here.
246,489
def check_street_prefix(self, token):
    """Find street prefixes (e.g. "N." or "Northwest") before a street name.

    Standardises to one or two letters followed by a period.
    """
    if self.street and not self.street_prefix and token.lower().replace('.', '') in self.parser.prefixes.keys():
        self.street_prefix = self._clean(self.parser.prefixes[token.lower().replace('.', '')])
        return True
    return False
Finds street prefixes such as N . or Northwest before a street name . Standardizes to 1 or two letters followed by a period .
246,490
def check_house_number(self, token):
    """Attempt to find a house number (generally the first thing in an address).

    Fractional ("1/2") and ranged ("12-14") numbers are truncated to their
    first component. Returns True when the token was consumed.
    """
    if self.street and self.house_number is None and re.match(street_num_regex, token.lower()):
        if '/' in token:
            token = token.split('/')[0]
        if '-' in token:
            token = token.split('-')[0]
        self.house_number = self._clean(str(token))
        return True
    return False
Attempts to find a house number generally the first thing in an address . If anything is in front of it we assume it is a building name .
246,491
def check_building(self, token):
    """Building-name check: leftover tokens once street and house number are set.

    Multi-word building names are accumulated by prepending.
    """
    if self.street and self.house_number:
        if not self.building:
            self.building = self._clean(token)
        else:
            self.building = self._clean(token + ' ' + self.building)
        return True
    return False
Building name check . If we have leftover and everything else is set probably building names . Allows for multi word building names .
246,492
def guess_unmatched(self, token):
    """Make an educated guess for a token nothing else matched.

    Apartment keywords and short tokens are dismissed, a stray dash is
    consumed silently, and any alphabetic leftover is assumed to be (part
    of) the street name. Returns True when the token was consumed.
    """
    # Apartment keywords should already have been handled by preprocessing.
    if token.lower() in ['apt', 'apartment']:
        return False
    # A stray dash is consumed without recording anything.
    if token.strip() == '-':
        return True
    if len(token) <= 2:
        return False
    if self.street_suffix is None and self.street is None and self.street_prefix is None and self.house_number is None:
        if re.match(r"[A-Za-z]", token):
            # The original had an empty if/else on self.line_number here
            # (both branches `pass`) — removed as dead code.
            self.street = self._clean(token.capitalize())
            return True
    return False
When we find something that doesn't match, we can make an educated guess and log it as such.
246,493
def full_address(self):
    """Return the address as a single human-readable string."""
    # (separator, value) pairs in display order; falsy values are skipped.
    pieces = [
        ("", self.house_number),
        (" ", self.street_prefix),
        (" ", self.street),
        (" ", self.street_suffix),
        (" ", self.apartment),
        (", ", self.city),
        (", ", self.state),
        (" ", self.zip),
    ]
    addr = ""
    for separator, value in pieces:
        if value:
            addr += separator + value
    return addr
Print the address in a human readable format
246,494
def _get_dstk_intersections(self, address, dstk_address):
    """Count tokens unique to each of the original and DSTK-returned address.

    Returns (number unique to original, number unique to DSTK).
    """
    normalized_address = self._normalize(address)
    normalized_dstk_address = self._normalize(dstk_address)
    address_uniques = set(normalized_address) - set(normalized_dstk_address)
    dstk_address_uniques = set(normalized_dstk_address) - set(normalized_address)
    if self.logger:
        self.logger.debug("Address Uniques {0}".format(address_uniques))
    if self.logger:
        self.logger.debug("DSTK Address Uniques {0}".format(dstk_address_uniques))
    return (len(address_uniques), len(dstk_address_uniques))
Find the unique tokens in the original address and the returned address .
246,495
def _normalize(self, address):
    """Normalize prefixes, suffixes, etc. to ease matching the original
    address against the returned one.

    Returns the address as a list of lowercased, normalised tokens.
    """
    normalized_address = []
    if self.logger:
        self.logger.debug("Normalizing Address: {0}".format(address))
    for token in address.split():
        if token.upper() in self.parser.suffixes.keys():
            # full suffix word -> lowercased abbreviation
            normalized_address.append(self.parser.suffixes[token.upper()].lower())
        elif token.upper() in self.parser.suffixes.values():
            normalized_address.append(token.lower())
        elif token.upper().replace('.', '') in self.parser.suffixes.values():
            # abbreviation written with a period
            normalized_address.append(token.lower().replace('.', ''))
        elif token.lower() in self.parser.prefixes.keys():
            normalized_address.append(self.parser.prefixes[token.lower()].lower())
        elif token.upper() in self.parser.prefixes.values():
            # stored prefix values end with a period; strip it
            normalized_address.append(token.lower()[:-1])
        elif token.upper() + '.' in self.parser.prefixes.values():
            normalized_address.append(token.lower())
        else:
            normalized_address.append(token.lower())
    return normalized_address
Normalize prefixes suffixes and other to make matching original to returned easier .
246,496
def empty():
    """Create (and cache as a singleton) the empty interval set."""
    try:
        return empty._instance
    except AttributeError:
        # first call: memoize the canonical empty interval on the function
        empty._instance = Interval(AtomicInterval(OPEN, inf, -inf, OPEN))
        return empty._instance
Create an empty set .
246,497
def from_data(data, conv=None, pinf=float('inf'), ninf=float('-inf')):
    """Import an interval from a piece of data.

    Each item of *data* is a (left, lower, upper, right) tuple; bounds equal
    to *pinf*/*ninf* map to the module's infinity sentinels, all other
    bounds go through *conv* (identity when None).
    """
    if conv is None:
        conv = lambda value: value

    def _to_bound(raw):
        # translate serialized infinities back into the sentinel objects
        if raw == pinf:
            return inf
        if raw == ninf:
            return -inf
        return conv(raw)

    atomics = [
        AtomicInterval(left, _to_bound(lower), _to_bound(upper), right)
        for left, lower, upper, right in data
    ]
    return Interval(*atomics)
Import an interval from a piece of data .
246,498
def is_empty(self):
    """Test interval emptiness: inverted bounds, or a degenerate point with
    an open endpoint.
    """
    if self._lower > self._upper:
        return True
    if self._lower == self._upper:
        return self._left == OPEN or self._right == OPEN
    return False
Test interval emptiness .
246,499
def to_atomic(self):
    """Return the smallest atomic interval containing this interval."""
    first, last = self._intervals[0], self._intervals[-1]
    return AtomicInterval(first.left, first.lower, last.upper, last.right)
Return the smallest atomic interval containing this interval .